diff --git a/.gitattributes b/.gitattributes index d7cd0904031..058cbdb9c2f 100644 --- a/.gitattributes +++ b/.gitattributes @@ -14,3 +14,5 @@ frontend/website/static/*.bc.js filter=lfs diff=lfs merge=lfs -text src/app/archive/archive_graphql_schema.json linguist-generated=true docs/res/block_production_fsm.dot.png filter=lfs diff=lfs merge=lfs -text rfcs/res/hard-fork-package-generation-buildkite-pipeline.dot.png filter=lfs diff=lfs merge=lfs -text +*.ml linguist-language=OCaml +*.mli linguist-language=OCaml diff --git a/.github/ISSUE_TEMPLATE/1-BUG_REPORT.yml b/.github/ISSUE_TEMPLATE/1-BUG_REPORT.yml index 35a8e2d8e19..dcd046d1fd9 100644 --- a/.github/ISSUE_TEMPLATE/1-BUG_REPORT.yml +++ b/.github/ISSUE_TEMPLATE/1-BUG_REPORT.yml @@ -55,6 +55,13 @@ body: description: Describe what actually happened. validations: required: true + - type: textarea + attributes: + label: Daemon version + description: Use the command "mina version" to get this information. + placeholder: Commit 68200c7b409e105d174f079c5c39f7926819784b on branch master + validations: + required: true - type: dropdown id: frequency validations: diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 65ae1e59ace..a9af11921fc 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -35,7 +35,6 @@ Explain your changes: Explain how you tested your changes: * - Checklist: - [ ] Dependency versions are unchanged diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 439e8a0c2c9..df1923dd33f 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -4,8 +4,14 @@ on: schedule: - cron: '0 0 * * *' +permissions: + contents: read + jobs: stale: + permissions: + issues: write # for actions/stale to close stale issues + pull-requests: write # for actions/stale to close stale PRs runs-on: ubuntu-latest steps: - uses: actions/stale@main diff --git a/.mailmap b/.mailmap new file mode 100644 index 00000000000..7b50e5b14c7 --- /dev/null +++ b/.mailmap @@ -0,0 +1,5 @@ +ember arlynx +ember arlynx +ember arlynx +ember arlynx +ember arlynx diff --git a/.prettierignore b/.prettierignore deleted file mode 100644 index 66279798c55..00000000000 --- a/.prettierignore +++ /dev/null @@ -1,2 +0,0 @@ -src/lib/crypto/**/*.js -src/lib/snarkyjs/src/bindings/kimchi/js/**/*.js diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3187e319392..5857ace122f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -83,7 +83,7 @@ for common troubleshooting steps. There are three main pieces of Mina documentation: -1. The [https://github.com/o1-labs/docs2)](https://github.com/o1-labs/docs2) repository for the [Mina Protocol](https://docs.minaprotocol.com/) docs website. +1. The [https://github.com/o1-labs/docs2](https://github.com/o1-labs/docs2) repository for the [Mina Protocol](https://docs.minaprotocol.com/) docs website. 2. The `README.md` files in various directories explain the contents of that directory at a high level: the purpose of the library, design constraints, anything else specific to that directory. diff --git a/LICENSE b/LICENSE index 16fe87b06e8..fd0e020276f 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ APPENDIX: How to apply the Apache License to your work. same "printed page" as the copyright notice for easier identification within third-party archives. -Copyright [yyyy] [name of copyright owner] +Copyright 2024. 
Mina Foundation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/Makefile b/Makefile index 4cac7167ff9..31096a2c4c4 100644 --- a/Makefile +++ b/Makefile @@ -73,7 +73,7 @@ endif genesis_ledger: ocaml_checks $(info Building runtime_genesis_ledger) - ulimit -s 65532 && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune exec --profile=$(DUNE_PROFILE) src/app/runtime_genesis_ledger/runtime_genesis_ledger.exe -- --genesis-dir $(GENESIS_DIR) + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune exec --profile=$(DUNE_PROFILE) src/app/runtime_genesis_ledger/runtime_genesis_ledger.exe -- --genesis-dir $(GENESIS_DIR) $(info Genesis ledger and genesis proof generated) # Checks that every OCaml packages in the project build without issues @@ -82,32 +82,32 @@ check: ocaml_checks libp2p_helper build: ocaml_checks reformat-diff libp2p_helper $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune build src/app/logproc/logproc.exe src/app/cli/src/mina.exe --profile=$(DUNE_PROFILE) + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune build src/app/logproc/logproc.exe src/app/cli/src/mina.exe --profile=$(DUNE_PROFILE) $(info Build complete) build_all_sigs: ocaml_checks reformat-diff libp2p_helper $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune build src/app/logproc/logproc.exe src/app/cli/src/mina.exe src/app/cli/src/mina_testnet_signatures.exe src/app/cli/src/mina_mainnet_signatures.exe --profile=$(DUNE_PROFILE) + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune build src/app/logproc/logproc.exe src/app/cli/src/mina.exe src/app/cli/src/mina_testnet_signatures.exe src/app/cli/src/mina_mainnet_signatures.exe --profile=$(DUNE_PROFILE) $(info Build complete) build_archive: ocaml_checks reformat-diff $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe --profile=$(DUNE_PROFILE) + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe --profile=$(DUNE_PROFILE) $(info Build complete) build_archive_all_sigs: ocaml_checks reformat-diff $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe src/app/archive/archive_testnet_signatures.exe src/app/archive/archive_mainnet_signatures.exe --profile=$(DUNE_PROFILE) + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe src/app/archive/archive_testnet_signatures.exe src/app/archive/archive_mainnet_signatures.exe --profile=$(DUNE_PROFILE) $(info Build complete) build_rosetta: ocaml_checks $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe src/app/rosetta/rosetta.exe src/app/rosetta/ocaml-signer/signer.exe --profile=$(DUNE_PROFILE) + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe src/app/rosetta/rosetta.exe src/app/rosetta/ocaml-signer/signer.exe --profile=$(DUNE_PROFILE) $(info Build complete) build_rosetta_all_sigs: ocaml_checks $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe src/app/archive/archive_testnet_signatures.exe 
src/app/archive/archive_mainnet_signatures.exe src/app/rosetta/rosetta.exe src/app/rosetta/rosetta_testnet_signatures.exe src/app/rosetta/rosetta_mainnet_signatures.exe src/app/rosetta/ocaml-signer/signer.exe src/app/rosetta/ocaml-signer/signer_testnet_signatures.exe src/app/rosetta/ocaml-signer/signer_mainnet_signatures.exe --profile=$(DUNE_PROFILE) + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe src/app/archive/archive_testnet_signatures.exe src/app/archive/archive_mainnet_signatures.exe src/app/rosetta/rosetta.exe src/app/rosetta/rosetta_testnet_signatures.exe src/app/rosetta/rosetta_mainnet_signatures.exe src/app/rosetta/ocaml-signer/signer.exe src/app/rosetta/ocaml-signer/signer_testnet_signatures.exe src/app/rosetta/ocaml-signer/signer_mainnet_signatures.exe --profile=$(DUNE_PROFILE) $(info Build complete) build_intgtest: ocaml_checks @@ -117,17 +117,12 @@ build_intgtest: ocaml_checks rosetta_lib_encodings: ocaml_checks $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/lib/rosetta_lib/test/test_encodings.exe --profile=mainnet + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/lib/rosetta_lib/test/test_encodings.exe --profile=mainnet $(info Build complete) rosetta_lib_encodings_nonconsensus: ocaml_checks $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/nonconsensus/rosetta_lib/test/test_encodings.exe --profile=nonconsensus_mainnet - $(info Build complete) - -dhall_types: ocaml_checks - $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/dhall_types/dump_dhall_types.exe --profile=dev + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/nonconsensus/rosetta_lib/test/test_encodings.exe --profile=nonconsensus_mainnet $(info Build complete) replayer: ocaml_checks @@ -137,37 +132,37 @@ replayer: ocaml_checks delegation_compliance: ocaml_checks $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/delegation_compliance/delegation_compliance.exe --profile=testnet_postake_medium_curves + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/delegation_compliance/delegation_compliance.exe --profile=testnet_postake_medium_curves $(info Build complete) missing_blocks_auditor: ocaml_checks $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/missing_blocks_auditor/missing_blocks_auditor.exe --profile=testnet_postake_medium_curves + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/missing_blocks_auditor/missing_blocks_auditor.exe --profile=testnet_postake_medium_curves $(info Build complete) extract_blocks: ocaml_checks $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/extract_blocks/extract_blocks.exe --profile=testnet_postake_medium_curves + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/extract_blocks/extract_blocks.exe --profile=testnet_postake_medium_curves $(info Build complete) archive_blocks: ocaml_checks $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/archive_blocks/archive_blocks.exe --profile=testnet_postake_medium_curves + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/archive_blocks/archive_blocks.exe --profile=testnet_postake_medium_curves $(info Build complete) patch_archive_test: ocaml_checks $(info 
Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/patch_archive_test/patch_archive_test.exe --profile=testnet_postake_medium_curves + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/patch_archive_test/patch_archive_test.exe --profile=testnet_postake_medium_curves $(info Build complete) genesis_ledger_from_tsv: ocaml_checks $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/genesis_ledger_from_tsv/genesis_ledger_from_tsv.exe --profile=testnet_postake_medium_curves + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/genesis_ledger_from_tsv/genesis_ledger_from_tsv.exe --profile=testnet_postake_medium_curves $(info Build complete) swap_bad_balances: ocaml_checks $(info Starting Build) - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/swap_bad_balances/swap_bad_balances.exe --profile=testnet_postake_medium_curves + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/swap_bad_balances/swap_bad_balances.exe --profile=testnet_postake_medium_curves $(info Build complete) heap_usage: ocaml_checks @@ -191,7 +186,7 @@ macos-portable: @echo Find coda-daemon-macos.zip inside _build/ update-graphql: - ulimit -s 65532 && (ulimit -n 10240 || true) && dune build --profile=$(DUNE_PROFILE) graphql_schema.json + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build --profile=$(DUNE_PROFILE) graphql_schema.json ######################################## ## Lint @@ -214,7 +209,7 @@ check-proof-systems-submodule: ####################################### ## Environment setup -macos-setup-download: +macos-setup: ./scripts/macos-setup-brew.sh ######################################## @@ -237,12 +232,12 @@ deb_optimized: build_pv_keys: ocaml_checks $(info Building keys) - ulimit -s 65532 && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune exec --profile=$(DUNE_PROFILE) src/lib/snark_keys/gen_keys/gen_keys.exe -- --generate-keys-only + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune exec --profile=$(DUNE_PROFILE) src/lib/snark_keys/gen_keys/gen_keys.exe -- --generate-keys-only $(info Keys built) build_or_download_pv_keys: ocaml_checks $(info Building keys) - ulimit -s 65532 && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune exec --profile=$(DUNE_PROFILE) src/lib/snark_keys/gen_keys/gen_keys.exe -- --generate-keys-only + (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune exec --profile=$(DUNE_PROFILE) src/lib/snark_keys/gen_keys/gen_keys.exe -- --generate-keys-only $(info Keys built) publish_debs: @@ -309,8 +304,10 @@ endif %.conv.tex.png: %.conv.tex cd $(dir $@) && pdflatex -halt-on-error -shell-escape $(notdir $<) +# TODO: this, but smarter so we don't have to add every library doc_diagram_sources=$(addprefix docs/res/,*.dot *.tex *.conv.tex) doc_diagram_sources+=$(addprefix rfcs/res/,*.dot *.tex *.conv.tex) +doc_diagram_sources+=$(addprefix src/lib/transition_frontier/res/,*.dot *.tex *.conv.tex) doc_diagrams: $(addsuffix .png,$(wildcard $(doc_diagram_sources))) ######################################## @@ -325,4 +322,4 @@ ml-docs: ocaml_checks # https://www.gnu.org/software/make/manual/html_node/Phony-Targets.html # HACK: cat Makefile | egrep '^\w.*' | sed 's/:/ /' | awk '{print $1}' | grep -v myprocs | sort | xargs -.PHONY: all build check-format clean deb dev mina-docker reformat 
doc_diagrams ml-docs macos-setup macos-setup-download setup-opam libp2p_helper dhall_types replayer missing_blocks_auditor extract_blocks archive_blocks genesis_ledger_from_tsv ocaml_version ocaml_word_size ocaml_checks +.PHONY: all build check-format clean deb dev mina-docker reformat doc_diagrams ml-docs macos-setup setup-opam libp2p_helper dhall_types replayer missing_blocks_auditor extract_blocks archive_blocks genesis_ledger_from_tsv ocaml_version ocaml_word_size ocaml_checks diff --git a/README-ci-failures.md b/README-ci-failures.md index 7cd528d5a87..cfd8f4bc81d 100644 --- a/README-ci-failures.md +++ b/README-ci-failures.md @@ -5,6 +5,9 @@ label and comments by MinaProtocol organization members containing exactly `!ci-build-me`. If your CI job has not started after adding the `ci-build-me` label, please comment on the pull request with `!ci-build-me` to attempt to re-trigger the script. +If no CI jobs have started, check that your membership in the O(1) Labs/Mina organization +is public. If your membership is private, the jobs will not start and +`!ci-build-me` will have no effect. If CI jobs are not running after applying both the `ci-build-me` label and comment, you may be able to find and fix the error in the script. The script for the changes before attempting to deploy the fixed script. ## Integration test failures If your CI error is related to a timeout logged by one of the integration test -runnners, this is a known issue and re-running the test in the Buildkite +runners, this is a known issue and re-running the test in the Buildkite interface will usually succeed. If an issue arises, please post an update in both `development` on the Mina @@ -29,7 +32,7 @@ The CI runs its jobs in multiple Docker images. The images that it is using are specified in `buildkite/src/Constants/ContainerImages.dhall`: the CI uses all Debian images prefixed by `minaToolchainBuster`. -Theses images are generated by the CI itself, in particular based on the content of the +These images are generated by the CI itself, in particular based on the content of the `dockerfiles` directory and the `opam.export` file (which describes versions of OCaml packages). If your PR modifies how the images are generated (for example by changing a package version in `opam.export`), then the CI will not automatically diff --git a/README-dev.md b/README-dev.md index 5151e17c579..2fec130e83b 100644 --- a/README-dev.md +++ b/README-dev.md @@ -26,7 +26,7 @@ Quick start instructions: git clone git@github.com:MinaProtocol/mina.git ``` -If you have already done that, remember that the MinaProtocol and o1-labs repositories do not accept the password authentication used by the https URLs. You must set GitHub repos to pull and push over ssh: + If you have already done that, remember that the MinaProtocol and o1-labs repositories do not accept the password authentication used by the https URLs. You must set GitHub repos to pull and push over ssh: ```sh git config --global url.ssh://git@github.com/.insteadOf https://github.com/ @@ -52,13 +52,83 @@ You can build Mina using Docker. Using Docker works in any dev environment. See ### Developer Setup (MacOS) -- Invoke `make macos-setup` - - You will be prompted to add a number of `export`s in your shell config file. Do so. - - If this is your first time using OCaml, be sure to run `eval $(opam config env)` -- Install [rustup](https://rustup.rs/).
-- Invoke `make build` -- Jump to [customizing your editor for autocomplete](#customizing-your-dev-environment-for-autocompletemerlin) -- Note: If you are seeing conf-openssl install errors, try running `export PKG_CONFIG_PATH=$(brew --prefix openssl@1.1)/lib/pkgconfig` and try `opam switch import opam.export` again. +1. Upgrade to the latest version of macOS. +2. Install Xcode Command Line Tools: + + ```sh + xcode-select --install + ``` + +3. Invoke `make macos-setup`. + + - When prompted, confirm that you want to add a number of exports in your shell config file. + - Make sure to `source` your shell config file or create a new terminal. + - If this is your first time using OCaml, be sure to run: + + ```sh + eval $(opam config env) + ``` + +4. Install [rustup](https://rustup.rs/). +5. Create your switch and install the dependencies: `opam switch import --switch mina opam.export` + + Macs with M1 and M2 chips experience issues because Homebrew does not link include files automatically. + + If you get an error about failing to find `gmp.h`, update your `~/.zshrc` or `~/.bashrc` with: + + ```sh + export CFLAGS="-I/opt/homebrew/Cellar/gmp/6.2.1_1/include/" + ``` + + or run: + + ```sh + env CFLAGS="-I/opt/homebrew/Cellar/gmp/6.2.1_1/include/" opam install conf-gmp.2 + ``` + + If you get an error about failing to find `lmdb.h`, update your `~/.zshrc` or `~/.bashrc` with: + + ```text + export CPATH="$HOMEBREW_PREFIX/include:$CPATH" + export LIBRARY_PATH="$HOMEBREW_PREFIX/lib:$LIBRARY_PATH" + export PATH="$(brew --prefix lmdb)/bin:$PATH" + export PKG_CONFIG_PATH=$(brew --prefix lmdb)/lib/pkgconfig:$PKG_CONFIG_PATH + ``` + + - Note: If you get conf-openssl install errors, try running `export PKG_CONFIG_PATH=$(brew --prefix openssl@1.1)/lib/pkgconfig` and try `opam switch import opam.export` again. + - If prompted, run `opam user-setup install` to enable opam-user-setup support for Merlin. + +6. Pin dependencies that override opam versions: + + ```sh + scripts/pin-external-packages.sh + ``` + +7. Install the correct version of golang: + + - `goenv init` + - To make sure the right `goenv` is used, update your shell env script with: + + ```text + eval "$(goenv init -)" + export PATH="/Users/$USER/.goenv/shims:$PATH" + ``` + + - `goenv install 1.18.10` + - `goenv global 1.18.10` + - Check that `go version` returns the right version; otherwise, you will see the message `compile:version "go1.18.10" does not match go tool version "go1.20.2"`. If so, run `brew remove go` or install the matching version. + +8. Invoke `make build`. + + If you get errors about `libp2p` and `capnp`, try `brew install capnp`. + +9. For better IDE support, install the OCaml-LSP language server for OCaml: + + ```sh + opam install ocaml-lsp-server + ``` + +10. Set up your IDE. See [Customizing your dev environment for autocomplete/merlin](https://github.com/MinaProtocol/mina/blob/develop/README-dev.md#customizing-your-dev-environment-for-autocompletemerlin). ### Developer Setup (Linux) @@ -72,7 +142,7 @@ To get all of the required opam dependencies, run: opam switch import opam.export ``` -_*NOTE:*_ The `switch` command provides a `dune_wrapper` binary that you can use instead of dune and fails early if your switch becomes out of sync with the `opam.export` file. +**NOTE**: The `switch` command provides a `dune_wrapper` binary that you can use instead of dune and fails early if your switch becomes out of sync with the `opam.export` file. Some dependencies that are not taken from `opam` or integrated with `dune` must be added manually.
Run the `scripts/pin-external-packages.sh` script. @@ -84,9 +154,7 @@ A number of C libraries are expected to be available in the system and are also #### Customizing your dev environment for autocomplete/merlin -[dev-env]: #dev-env - -If you use vim, add this snippet in your `.vimrc` file to use Merlin. (Note: Be sure to change the HOME directory to match yours.) +If you use vim, add this snippet in your `.vimrc` file to use Merlin. (Note: Be sure to change the HOME directory to match yours.) ```bash let s:ocamlmerlin="/Users/USERNAME/.opam/4.14.0/share/merlin" @@ -103,27 +171,27 @@ let g:syntastic_ocaml_checkers=['merlin'] - If you use emacs, install the `opam` packages mentioned above and also install `tuareg`. Add the following to your `.emacs` file: -```lisp -(let ((opam-share (ignore-errors (car (process-lines "opam" "var" "share"))))) - (when (and opam-share (file-directory-p opam-share)) - ;; Register Merlin - (add-to-list 'load-path (expand-file-name "emacs/site-lisp" opam-share)) - (load "tuareg-site-file") - (autoload 'merlin-mode "merlin" nil t nil) - ;; Automatically start it in OCaml buffers - (add-hook 'tuareg-mode-hook 'merlin-mode t) - (add-hook 'caml-mode-hook 'merlin-mode t))) -``` + ```lisp + (let ((opam-share (ignore-errors (car (process-lines "opam" "var" "share"))))) + (when (and opam-share (file-directory-p opam-share)) + ;; Register Merlin + (add-to-list 'load-path (expand-file-name "emacs/site-lisp" opam-share)) + (load "tuareg-site-file") + (autoload 'merlin-mode "merlin" nil t nil) + ;; Automatically start it in OCaml buffers + (add-hook 'tuareg-mode-hook 'merlin-mode t) + (add-hook 'caml-mode-hook 'merlin-mode t))) + ``` -To use the Emacs built-in autocomplete, use `M-x completion-at-point` or `M-tab`. There are other -Emacs autocompletion packages; see [Emacs from scratch](https://github.com/ocaml/merlin/wiki/emacs-from-scratch). + To use the Emacs built-in autocomplete, use `M-x completion-at-point` or `M-tab`. There are other + Emacs autocompletion packages; see [Emacs from scratch](https://github.com/ocaml/merlin/wiki/emacs-from-scratch). - If you use VSCode, set up Merlin to work inside VSCode: - Make sure to be in the right switch (mina) - Add the [OCaml Platform](https://marketplace.visualstudio.com/items?itemName=ocamllabs.ocaml-platform) extension - You might get a prompt to install `ocaml-lsp-server` as well in the Sandbox - You might get a prompt to install `ocamlformat-rpc` as well in the Sandbox - - Type "shell command: install code command in PATH" + - Type "shell command: install code command in PATH" - Close all windows and instances of VSCode - From terminal, in your mina directory, run `code .` - Run `dune build` in the terminal inside VSCode @@ -133,19 +201,19 @@ Emacs autocompletion packages; see [Emacs from scratch](https://github.com/ocaml The source code for the Mina node is located in `src/app/cli/`. After it is compiled, you can run the compiled binary like this: ```shell -$ dune exec src/app/cli/src/mina.exe -- daemon --libp2p-keypair /path/to/key +dune exec src/app/cli/src/mina.exe -- daemon --libp2p-keypair /path/to/key ``` The results of a successful build appear in `_build/default/src/app/cli/src/mina.exe`. The default configuration of the node depends on the build profile that is used during compilation. To connect to some networks, you need to compile the daemon with a specific profile. -Some setup is required: +*Some setup is required*: -1.
Generate a key pair so that the daemon can create an account to issue blocks from using the same `mina.exe` binary: +Generate a key pair so that the daemon can create an account to issue blocks from using the same `mina.exe` binary: ```shell -$ dune exec src/app/cli/src/mina.exe -- libp2p generate-keypair --privkey-path /path/to/key +dune exec src/app/cli/src/mina.exe -- libp2p generate-keypair --privkey-path /path/to/key ``` When prompted, enter a passphrase. During development, you can leave it blank for convenience, but using a passphrase is strongly encouraged when running a real node! an environment variable `MINA_LIBP2P_PASS`, which must be defined even if the pa The `/path/to/key` must belong to the user running the daemon. Set these filesystem permissions: ```shell -$ chmod 0600 /path/to/key -$ chmod 0700 /path/to +chmod 0600 /path/to/key +chmod 0700 /path/to ``` Additionally, you must provide a list of peers to connect to bootstrap the node. -The list of peers depends on the network you want to connect to and is announced when the network is being launched. For Mainnet, the list of peers is avaialable at: +The list of peers depends on the network you want to connect to and is announced when the network is being launched. For Mainnet, the list of peers is available at: https://storage.googleapis.com/mina-seed-lists/mainnet_seeds.txt. The `daemon.json` config file also contains bootstrap data that is specific to the network the node is trying to connect to and must be tailored specifically for a particular network. This file can also override some of the configuration options selected during compilation. The `daemon.json` file can be extracted from the Docker image @@ -171,13 +239,13 @@ The aforementioned bootstrap data includes the genesis ledger, i.e. the initial When all of this setup is complete, you can launch the daemon. The following command assumes the key passphrase is set to `pass`: ```shell -$ MINA_LIBP2P_PASS=pass dune exec src/app/cli/src/mina.exe -- daemon --libp2p-keypair /path/to/key --peer-list-url https://example.peer.list --config-file /custom/path/to/daemon.json +MINA_LIBP2P_PASS=pass dune exec src/app/cli/src/mina.exe -- daemon --libp2p-keypair /path/to/key --peer-list-url https://example.peer.list --config-file /custom/path/to/daemon.json ``` The `--seed` flag tells the daemon to run a fresh network of its own. When this flag is used, specifying a peer list is not required, but is still possible. With the `--seed` option the node does not crash, even if it does not manage to connect to any peers. To learn more, see the command line help: ```shell -$ dune exec src/app/cli/src/mina.exe -- -help +dune exec src/app/cli/src/mina.exe -- -help ``` The command line help is the place to learn about other options to the Mina CLI and how to connect to an existing network, such as Mainnet.
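For example, here is a minimal sketch of launching a fresh single-node network with `--seed`, reusing the keypair generated above (the passphrase value `pass` is assumed, as in the earlier example):

```shell
MINA_LIBP2P_PASS=pass dune exec src/app/cli/src/mina.exe -- daemon --seed --libp2p-keypair /path/to/key
```

Because `--seed` is given, no `--peer-list-url` is required; the node forms its own network and keeps running even with no peers.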
@@ -188,10 +256,10 @@ The Makefile contains placeholder targets for all the common tasks that need to The most important `make` targets are: -- `build`: build everything -- `libp2p_helper`: build the libp2p helper -- `reformat`: automatically use `ocamlformat` to reformat the source files (use - it if the hook fails during a commit) +- `build`: build the Mina binary +- `build_intgtest`: build the [`test_executive`](./src/app/test_executive/README.md#using-lucy) for running integration tests +- `libp2p_helper`: build the [`libp2p_helper`](./src/app/libp2p_helper/README.md) +- `reformat`: automatically use `ocamlformat` to reformat the source files (use it if the hook fails during a commit) We use the [Dune](https://github.com/ocaml/dune/) build system for OCaml code. @@ -202,20 +270,20 @@ OCaml dependencies live in the [`opam.export`](./opam.export) file. This file is To add a new dependency, you most likely will need to create a new fresh switch to avoid pushing in any local dependency (like `ocaml-lsp`). The following commands assume that the version of the OCaml compiler used in the codebase is 4.14.0: ```shell -$ opam switch create mina_fresh 4.14.0 -$ opam switch import opam.export +opam switch create mina_fresh 4.14.0 +opam switch import opam.export ``` -After that, install your dependency. You might have to specify versions of current dependencies to avoid having to upgrade dependencies. For example: +After that, install your dependency. You might have to specify versions of current dependencies to avoid having to upgrade them. For example: -```console -$ opam install alcotest cmdliner=1.0.3 fmt=0.8.6 +```sh +opam install alcotest cmdliner=1.0.3 fmt=0.8.6 ``` Then, run the following command to update the `opam.export` file: -```console -$ opam switch export opam.export +```sh +opam switch export opam.export ``` ## Steps for adding a new OCaml pinned dependency @@ -275,7 +343,7 @@ To override these constants, pass a json file to `runtime_genesis_ledger.exe` wi The exe then packages the overridden constants along with the genesis ledger and the genesis proof for the daemon to consume. - 2. Constants that can be overriden at runtime are: + 2. Constants that can be overridden at runtime are: - genesis_state_timestamp - transaction pool max size diff --git a/README-spacetime.md b/README-spacetime.md index b248970778c..4185b0f4687 100644 --- a/README-spacetime.md +++ b/README-spacetime.md @@ -8,7 +8,7 @@ for revealing the location of memory leaks. To use Spacetime, you need an OCaml compiler built with that feature enabled. There are Spacetime-enabled compiler switches within OPAM. For example, there's the switch 4.07.1+spacetime, which works with the same OPAM packages as vanilla 4.07.1.
To build -with it in your enviroment, install the switch: +with it in your environment, install the switch: ``` opam switch create 4.07.1+spacetime # or the current OCaml version for building Coda diff --git a/README.md b/README.md index 49c0cf54099..b848104e1d8 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,9 @@ +### Build status + +| Develop | Berkeley | Compatible | +| ------- | -------- | ---------- | +| [![Build status - develop](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=develop)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - berkeley](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=berkeley)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - compatible](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=compatible)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) + Mina logo diff --git a/automation/package.json b/automation/package.json index fb6798953dc..81190979179 100644 --- a/automation/package.json +++ b/automation/package.json @@ -16,7 +16,7 @@ ], "author": "o1labs", "license": "MIT", - "homepage": "https://codaprotocol.com", + "homepage": "https://minaprotocol.com", "repository": "https://github.com/MinaProtocol/mina", "bugs": "https://github.com/MinaProtocol/mina/issues", "devDependencies": { diff --git a/automation/scripts/github_branch_autosync/.gitignore b/automation/scripts/github_branch_autosync/.gitignore new file mode 100644 index 00000000000..97af16c72d8 --- /dev/null +++ b/automation/scripts/github_branch_autosync/.gitignore @@ -0,0 +1,127 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/automation/scripts/github_branch_autosync/Makefile b/automation/scripts/github_branch_autosync/Makefile new file mode 100644 index 00000000000..d70046813ce --- /dev/null +++ b/automation/scripts/github_branch_autosync/Makefile @@ -0,0 +1,47 @@ +clean: + find . -type f -name '*.py[co]' -delete -o -type d -name __pycache__ -delete + find . -type f -name '*.zip' -delete + +# This hack addresses an issue in the gcloud functions framework when importing a local module +# in Python. We need to remove the leading '.' from the import. +# https://github.com/GoogleCloudPlatform/functions-framework-python/pull/169 +fix_comp_on_gcloud: + sed -i 's/from .lib import /from lib import /' github_autosync/gcloud_entrypoint/main.py + +zip_package: clean fix_comp_on_gcloud + zip -r github_autosync.zip github_autosync/gcloud_entrypoint + +run-tests: + python3 -X tracemalloc=25 -m unittest discover -t tests -s tests + +deploy: zip_package check-env + @gcloud functions deploy AutoSyncBranches \ --project=o1labs-192920 \ --region=us-central1 \ --runtime=python311 \ --source=github_autosync/gcloud_entrypoint \ --memory=1024MB \ --timeout=300 \ --trigger-http \ --allow-unauthenticated \ --entry-point=handle_incoming_commit_push \ --set-env-vars=WEBHOOK_APP_USER=$(WEBHOOK_APP_USER),WEBHOOK_APP_REPO=$(WEBHOOK_APP_REPO),WEBHOOK_APP_TOKEN=$(WEBHOOK_APP_TOKEN),WEBHOOK_APP_GITHUB_SECRET=$(WEBHOOK_APP_GITHUB_SECRET) + + @echo --- reverts import fixing --- + sed -i 's/from lib import /from .lib import /' github_autosync/gcloud_entrypoint/main.py + +check-env: +# Lack of indentation is required: +# https://stackoverflow.com/questions/4728810/how-to-ensure-makefile-variable-is-set-as-a-prerequisite +ifndef WEBHOOK_APP_USER + $(error WEBHOOK_APP_USER is undefined) +endif +ifndef WEBHOOK_APP_REPO + $(error WEBHOOK_APP_REPO is undefined) +endif +ifndef WEBHOOK_APP_TOKEN + $(error WEBHOOK_APP_TOKEN is undefined) +endif +ifndef WEBHOOK_APP_GITHUB_SECRET + $(error WEBHOOK_APP_GITHUB_SECRET is undefined) +endif \ No newline at end of file diff --git a/automation/scripts/github_branch_autosync/README.md b/automation/scripts/github_branch_autosync/README.md new file mode 100644 index 00000000000..3876c1fdec8 --- /dev/null +++ b/automation/scripts/github_branch_autosync/README.md @@ -0,0 +1,235 @@ +# GitHub auto-sync tool + +The aim of this project is to automate the synchronization of important branches on GitHub. So far this has been a manual process: detecting changes between branches, creating pull requests, checking that there are no merge conflicts, and pushing changes to the target branch. This tool automates that process. The project can be deployed as a Google Cloud Function. + +## Business logic + +### Requirements: + +For the MVP we only want merge-conflict detection. + +- [x] There should NOT be a PR created if there are no merge conflicts. - [x] There should be a PR created, with the assignees who should fix it tagged. - [x] The program should detect changes immediately and attempt a merge.
+- [ ] There should be a solution for updating an already existing PR with new conflicting changes +- [ ] In the future we may want to attach a small Buildkite pipeline for testing purposes (TBD) + +### Design + +The program operates mainly on the GitHub REST API. It creates a thin layer of configuration and logic on top of a Python library (PyGithub). + +It is designed to receive the GitHub webhook JSON payload for new commits to specified branches and to be deployed as a Google Cloud Function. + +#### Basic flow: +- Perform a diff between the incoming source and target branches +- Create a branch containing the commits + + a) If the branch already exists, push this commit to it and tag the assignees that there was yet another commit; check if there are merge conflicts + + b) If there are conflicts: create a PR from the temp branch to the target branch. Add a proper description. Add the assignees who should fix the PR + + c) If there are no conflicts: start a Buildkite pipeline (TBD) to verify the changes. If it passes, merge the PR and exit + +##### Examples: + +###### No conflict + +![No conflict](./docs/res/CASE1.jpg) + +###### Conflict + +![Conflict](./docs/res/CASE2.jpg) + +###### Update sync branch while on conflict + +![Update branch while conflict](./docs/res/CASE3.jpg) + +# Configuration + +Configuration is defined as a module in `./github_autosync/gcloud_entrypoint/lib/config.py` + +Below is a more detailed description of each section. + +## Branches + +Controls the relation between branches. Each dictionary key is a branch name whose changes we will try to merge into the branch named by the value. + +For example, for the mapping master -> develop: + +If there is a new commit on the master branch, the program will attempt to merge the new changes into the develop branch. We can have more than one branch mapping: +``` +branches = dict( + master = 'develop', + develop = 'featureA' +) +``` + +## Github + +GitHub access settings. Points to the user (or organization), repository, and access token. The access token can be classic or fine-grained. However, if the latter is used, an issue can be encountered during the e2e test run, since that run uses the GraphQL API. Fine-grained support is still TBD: (https://github.blog/2022-10-18-introducing-fine-grained-personal-access-tokens-for-github/) + +The token needs permission to: +- list prs, +- list branches, +- create new branch, +- create new pr, +- delete branch, +- merge branch. + +Example: + +``` +github = { + "token": "....", + "username": "dkijania", + "repo": "myproject", + "new_branch_name_format": "sync-{source_branch}-with-{target_branch}" +} +``` + +## Pull Request Configuration + +Specific settings for PR creation (when one is needed because of a branch merge conflict). + +Example: + +``` +pr = { + "title_prefix": "[Branches auto sync failure] ", + "assignees": ["dkijania"], + "body_prefix": "This is an auto-generated PR created to solve merge conflicts between two branches.", + "draft": 'false', + "labels": ["auto-sync"] +} ``` + +## Buildkite (TBD) + + +# CLI + +For debugging purposes, the CLI entry point can be used. All it needs is a properly configured program and a payload.json file. + +Example: + +``` +python3 github_autosync payload.json +``` + +where `payload.json` is a webhook event JSON payload. + +**WARNING:** + +**Changes made in such a run are persistent (the same as when running the tool on gcloud).** + + +# Tests + +## Setup + +Running the tests requires the following setup: + +- A classic GitHub token must be used, +- A sample GitHub project must be created. Alternatively, the existing project (https://github.com/dkijania/webhook_test) can be used.
Please contact dariusz@o1labs.org in order to gain access. - Set environment variables: - WEBHOOK_APP_USER - owner of the repo - WEBHOOK_APP_REPO - repository name - WEBHOOK_APP_TOKEN - classic token with access to the above repo ## Run ``` make run-tests ``` ### Warnings during test execution Test execution may produce warnings which are related to a known issue: https://github.com/PyGithub/PyGithub/issues/1372 They manifest as warnings in console or log output similar to: ``` sys:1: ResourceWarning: unclosed sys:1: ResourceWarning: unclosed ``` # GCloud Deployment ## Setup Your gcloud account needs to be configured. Please run: ``` $ gcloud auth login ``` and follow the instructions if you are not logged in to the gcloud CLI. ### Set env variables ``` $ export WEBHOOK_APP_USER=owner of repo $ export WEBHOOK_APP_REPO=repository name $ export WEBHOOK_APP_TOKEN=classic token or fine grained token $ export WEBHOOK_APP_GITHUB_SECRET=webhook github secret ``` #### Notes on WEBHOOK_APP_GITHUB_SECRET The GitHub secret can be acquired from the existing gcloud secret manager: `https://console.cloud.google.com/security/secret-manager/secret/WEBHOOK_APP_GITHUB_SECRET/versions?project=o1labs-192920` Usually we don't want to update it, as that also requires updating the webhook secret in GitHub. However, if it is necessary, the steps below describe the operation: 1. Generate a token locally ``` $ openssl rand -hex 20 ``` 2. Copy the token to the github webhook event settings: Follow the instructions at: https://docs.github.com/en/webhooks-and-events/webhooks/securing-your-webhooks#setting-your-secret-token 3. Set the environment variable ``` $ export WEBHOOK_APP_GITHUB_SECRET={output from command 1.} ``` #### Notes on WEBHOOK_APP_TOKEN A valid GitHub token (classic or fine-grained) should have the following permissions: - list prs, - list branches, - create new branch, - create new pr, - delete branch, - merge branch. Both classic and fine-grained tokens are acceptable. However, when running tests, please ensure that a classic token is used, as we use the GitHub GraphQL API (for creating commits), which does not support fine-grained tokens yet. ### Run To deploy the application to gcloud, run: ``` make deploy ``` This deploys to https://console.cloud.google.com/functions/details/us-central1/AutoSyncBranches ### Post deploy checks Please ensure that the proper permissions are set for the cloud function. The GitHub webhook needs the permission below: | Role | Group | |-------|-------| | Cloud Functions Invoker | allUsers | **Note:** While it is generally unsafe to allow all users to invoke a cloud function, we have a safeguard in the form of validating each request against the GitHub webhook secret.
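For reference, the request validation works along these lines. This is a minimal sketch of GitHub's documented HMAC-SHA256 webhook signature check, not the exact code of the repository's `lib/request_validator.py` (the header handling and error type here are illustrative):

```python
import hashlib
import hmac

def verify_signature(payload_body: str, secret: str, incoming_signature: str) -> None:
    """Raise if the payload was not signed with our shared webhook secret."""
    # GitHub sends the HMAC-SHA256 of the raw request body in the
    # X-Hub-Signature-256 header, prefixed with "sha256=".
    expected = "sha256=" + hmac.new(
        secret.encode("utf-8"), payload_body.encode("utf-8"), hashlib.sha256
    ).hexdigest()
    # Compare in constant time so an attacker cannot recover the signature
    # byte by byte through response timing.
    if not hmac.compare_digest(expected, incoming_signature):
        raise ValueError("signatures didn't match")
```

A request that fails this check is rejected before any branch or PR operation is attempted, so the `allUsers` invoker role does not let arbitrary callers drive the tool.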
diff --git a/automation/scripts/github_branch_autosync/docs/res/CASE1.jpg b/automation/scripts/github_branch_autosync/docs/res/CASE1.jpg new file mode 100644 index 00000000000..825fa5bb7e7 --- /dev/null +++ b/automation/scripts/github_branch_autosync/docs/res/CASE1.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:87bb8575ed49da4b98d0494f2117d85b2ef922f2c74c821b3c62110b3e879774 +size 33159 diff --git a/automation/scripts/github_branch_autosync/docs/res/CASE2.jpg b/automation/scripts/github_branch_autosync/docs/res/CASE2.jpg new file mode 100644 index 00000000000..8c0f0406ab8 --- /dev/null +++ b/automation/scripts/github_branch_autosync/docs/res/CASE2.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bcd36ff4f1257411a7c08b4b861964aaa3e61e77568f986cb9dcf94a76cca1bf +size 41727 diff --git a/automation/scripts/github_branch_autosync/docs/res/CASE3.jpg b/automation/scripts/github_branch_autosync/docs/res/CASE3.jpg new file mode 100644 index 00000000000..de005df392a --- /dev/null +++ b/automation/scripts/github_branch_autosync/docs/res/CASE3.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3b52e0ad98bf3c366495ef08172b82a3a40cecd25e5901b33311f07435cc7c07 +size 38677 diff --git a/automation/scripts/github_branch_autosync/github_autosync/__main__.py b/automation/scripts/github_branch_autosync/github_autosync/__main__.py new file mode 100644 index 00000000000..e9125ab989a --- /dev/null +++ b/automation/scripts/github_branch_autosync/github_autosync/__main__.py @@ -0,0 +1,32 @@ +""" Cli & Debug entrypoint """ + +import json +import argparse +import os +import sys +from gcloud_entrypoint import handle_incoming_commit_push_json,config,verify_signature + +parser = argparse.ArgumentParser() +parser.add_argument('--operation', "-o",type=str, help='debug operation to perform',required=True) +parser.add_argument('--payload', "-p",type=str, help='test file from github webhook push event',required=False) +parser.add_argument('--secret', "-s", type=str, help='secret for calculating signature',required=False) +parser.add_argument('--incoming_signature', "-i",type=str, help='payload signature',required=False) + +args = parser.parse_args() + +if "verify" in args.operation: + if not os.path.isfile(args.payload): + sys.exit(f'cannot find test file: {args.payload}') + + with open(args.payload,encoding="utf-8") as file: + data = json.load(file) + json_payload = json.dumps(data) + verify_signature(json_payload, args.secret, "sha=" + args.incoming_signature) + +elif "handle_payload" in args.operation: + with open(args.payload,encoding="utf-8") as file: + data = json.load(file) + json_payload = json.dumps(data) + handle_incoming_commit_push_json(data,config=config) +else: + print("operation not supported", file=sys.stderr) \ No newline at end of file diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/__init__.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/__init__.py new file mode 100644 index 00000000000..ba37eac5fe2 --- /dev/null +++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/__init__.py @@ -0,0 +1,2 @@ +"""Entrypoint init""" +from .main import handle_incoming_commit_push_json,config,verify_signature diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/__init__.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/__init__.py new file mode 100644 index
00000000000..e966ff1a8ba --- /dev/null +++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/__init__.py @@ -0,0 +1,5 @@ +from .buildkite import BuildkiteApi +from .config import * +from .github import GithubApi, GithubException +from .request_parser import CommitInfo,GithubPayloadInfo +from .request_validator import verify_signature,is_push_event \ No newline at end of file diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/buildkite.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/buildkite.py new file mode 100644 index 00000000000..3674c636be1 --- /dev/null +++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/buildkite.py @@ -0,0 +1,28 @@ +''' + Module for Buildkite operations +''' +from pybuildkite.buildkite import Buildkite + +class BuildkiteApi: + """Api for running a buildkite pipeline. Currently not used""" + + def __init__(self, config): + self.buildkite = Buildkite() + self.buildkite.set_access_token(config["token"]) + self.org = config["org"] + self.pipeline = config["pipeline"] + + + def run_pipeline(self, sha, branch, message): + ''' + Runs the pipeline for a given branch. + + Parameters: + sha (str): Commit sha. + branch (str): Branch name. + message (str): Message seen on the buildkite job. + Returns: + Buildkite pipeline handle. + ''' + return self.buildkite.builds().create_build(self.org, self.pipeline, sha, branch, + clean_checkout=True, message=message) diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/config.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/config.py new file mode 100644 index 00000000000..6a27ade5f98 --- /dev/null +++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/config.py @@ -0,0 +1,73 @@ +''' +Main configuration file for the auto-sync logic. +One can define the relation between branches in the 'branches' section, as well as access settings +for Buildkite and Github +''' +import os + +''' +Controls the relation between branches. Each dictionary key is a branch name +whose new commits we will try to merge into the branch named by the value. +For example, for the mapping develop -> compatible: +if there is a new commit on the develop branch, +the program will attempt to merge the new changes into the compatible branch +''' +branches = dict( + compatible = 'berkeley', + berkeley = 'develop' +) + +''' + Settings for the github repository. + dryrun: if set to true, the program will not perform any operations but will print them out + token: valid github token (classic or fine-grained) + username: owner of the repo + repo: repo name + secret: github webhook secret (for validation of requests) + WARNING: + + The token needs permission to: + - list prs + - list branches + - create new branch + - create new pr + - delete branch + - merge branch +''' +github = { + "dryrun": False, + "token": os.environ["WEBHOOK_APP_TOKEN"], + "username": os.environ["WEBHOOK_APP_USER"], + "repo": os.environ["WEBHOOK_APP_REPO"], + "secret": os.environ["WEBHOOK_APP_GITHUB_SECRET"] +} + +def tmp_branch_name(source_branch,target_branch): + ''' + Method used for naming the temp branch (needed for checking mergeability) + ''' + return f"fix-conflict-of-{source_branch}-and-{target_branch}" + +''' +Specific settings for PR creation (when one is needed based on the current repo situation). +'''
+pr = { + "title_prefix": "[Fix me] Merge conflict between ", + "assignees": ["dkijania"], + "body_prefix": "This is an auto-generated PR created to solve merge conflicts between two branches.", + "draft": 'false', + "labels": ["auto-sync"], + "alert_header": """ +# :exclamation: New Conflict detected :exclamation: +This PR conflicts with one of our main branches. As a result, the pull requests below were created to aid you in resolving the merge conflicts. Each temporary branch contains *cherry-picked* changes from this PR. +""" +} + +''' + Buildkite specific settings +''' +buildkite = { + "token": "...", + "org": "mina-foundation", + "pipeline": "test-buildkite" +} diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/github.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/github.py new file mode 100644 index 00000000000..84ce5849fca --- /dev/null +++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/github.py @@ -0,0 +1,374 @@ +''' Github api tailored for auto-sync needs''' + +import json +from github import Github,PullRequest,InputGitTreeElement +import requests + +class GithubException(Exception): + """Exception raised for errors when interacting with the Github REST api. + + Attributes: + message -- explanation of the error + """ + def __init__(self, message): + super().__init__(message) + +class GithubApi: + ''' + Responsible for various operations on the github rest api, + like creating new branches or merging changes. + It is tightly coupled with the config module + ''' + + def __init__(self, config): + self.token = config["token"] + self.username = config["username"] + self.repo = config["repo"] + self.github = Github(self.token) + self.default_timeout = 60 + self.dryrun = bool(config["dryrun"]) + + def repository(self): + ''' + Retrieves the github repository based on the configuration + ''' + return Repository(self.github, + self.username, + self.repo, + self.dryrun, + self.get_authorization_header, + self.default_timeout) + + def branch(self, name): + ''' + Retrieves the github branch with the given name from the configured repository + + Parameters: + name (string): Branch name + + Returns: + branch object + ''' + return self.repository().get_branch(branch=name) + + def get_diff_commits(self, left_branch, right_branch): + ''' + Retrieves the differences between two branches + + Parameters: + left_branch (string): Left branch name + right_branch (string): Right branch name + + Returns: + commit compare object + ''' + + left_branch_ref = self.branch(left_branch).commit.sha + right_branch_ref = self.branch(right_branch).commit.sha + return self.repository().compare(left_branch_ref, right_branch_ref) + + def has_merge_conflict(self,base_branch,head_branch): + ''' + Detects if two branches have a merge conflict. + It doesn't use the github rest api for this purpose, but a little 'hack': + it sends a request to the part of the github web UI which + only indicates mergeability, then scrapes the text visible on the page to detect + whether the branches are mergeable or not. It uses a 60 s timeout for the response; however, usually + the response is immediate. + + Parameters: + base_branch (string): Branch name to which we want to merge + head_branch (string): Branch name from which we want to merge + + Returns: + True if the branches have a merge conflict,
False otherwise + ''' + res = requests.get(f'https://github.com/{self.username}/{self.repo}/branches/pre_mergeable/{base_branch}...{head_branch}', + timeout=60) + return "Able to merge" not in res.text + + def create_new_branch(self, branch_name, from_branch): + ''' + Creates a new branch + + Parameters: + branch_name (string): New branch name + from_branch (string): Branch name from which we create the new branch + + Returns: + new branch object + ''' + from_branch_sha = self.branch(from_branch).commit.sha + branch_ref_name = f"refs/heads/{branch_name}" + + return self.repository().create_git_ref(branch_ref_name,from_branch_sha) + + def fast_forward(self, source, target): + ''' + Fast-forwards the source branch to the target branch's head commit. The method extracts the head commit sha + from the target branch and updates the reference sha of the source branch. + + Unfortunately this method is not available in the pygithub library, + so we access the REST api directly. + + Parameters: + source (string): Branch name to update + target (string): Branch name to whose head commit we want to update + + Returns: + fast forward response json + + Raises: + GithubException: On request failure. + ''' + + target_sha = self.branch(name=target).commit.sha + return self.repository().fast_forward(source,target_sha) + + @property + def get_authorization_header(self): + """ + Gets the authorization header for situations where we need to bypass the pygithub library + """ + return {'Authorization': "Bearer " + self.token } + + def delete_branch(self, branch_name): + ''' + Deletes a branch. According to the github documentation this operation will also remove + all PRs that relate to the given branch + + Parameters: + branch_name (string): Branch name to delete + + Raises: + GithubException: On request failure. + ''' + self.repository().delete_branch(branch_name) + + def cherry_pick_commits(self,new_branch,commits,skip_merges): + ''' + Cherry-picks commits onto a new branch.
It doesn't perform a true git cherry-pick + but rather manually copies the git tree along with the commit messages and applies + it to the new base tree + + Parameters: + new_branch (string): Branch name to insert commits into + commits (List of GitCommit): List of commits to apply + skip_merges (Bool): Flag which controls if we should apply merge commits + ''' + if skip_merges: + commits = list(filter(lambda commit: len(commit.parents) < 2, commits)) + + for commit in commits: + template_tree = self.repository().inner.get_git_tree(commit.sha) + + branch_obj = self.repository().inner.get_branch(new_branch) + base_tree = self.repository().inner.get_git_tree(branch_obj.commit.sha) + + + inputs = [] + for element in template_tree.tree: + inputs.append(InputGitTreeElement( + path=element.path, mode=element.mode, type=element.type, sha=element.sha + )) + + tree = self.repository().inner.create_git_tree(inputs, base_tree) + commit = self.repository().inner.create_git_commit( + message=commit.commit.message, + tree=tree, + parents=[branch_obj.commit.commit] + ) + + self.repository().update_ref(new_branch,commit.sha) + + def create_pull_request(self,config,source_branch,target_branch,new_branch): + """ + Creates a new pull request + + Parameters: + config (config): Config module + source_branch (string): Branch name from which the new branch was created + target_branch (string): Branch name to which we want to merge changes + new_branch (string): temporary branch which will be used to check mergeability and perform the merge + + Returns: + the title of the created pull request + """ + title = config.pr["title_prefix"] + f" {source_branch} and {target_branch}" + assignee_tags = list(map(lambda x: "@" + x, config.pr["assignees"])) + separator = ", " + body = config.pr["body_prefix"] + "\n" + separator.join(assignee_tags) + base = target_branch + head = new_branch + draft = bool(config.pr["draft"]) + self.repository().create_pull(title=title,body=body,base=base,head=head,draft=draft,assignees=assignee_tags,labels=config.pr["labels"]) + return title + + def create_pull_request_for_tmp_branch(self,config,source_branch,temp_branch): + """ + Creates a new pull request + + Parameters: + config (config): Config module + source_branch (string): Branch name with the conflicting changes + temp_branch (string): temporary branch which will be used to check mergeability and perform the merge + + Returns: + the title of the created pull request + """ + title = config.pr["title_prefix"] + f"{source_branch} from {temp_branch} for commit {self.branch(source_branch).commit.sha[0:6]}" + assignee_tags = list(map(lambda x: "@" + x, config.pr["assignees"])) + separator = ", " + body = config.pr["body_prefix"] + "\n" + separator.join(assignee_tags) + base = temp_branch + head = source_branch + draft = bool(config.pr["draft"]) + self.repository().create_pull(title,body,base,head,draft,assignees=config.pr["assignees"],labels=config.pr["labels"]) + return title + + def branch_exists(self, branch): + """ + Returns True if a branch with the given name exists.
+
+    def branch_exists(self, branch):
+        """
+        Returns True if a branch with the given name exists, False otherwise
+
+        Parameters:
+            branch (string): branch name
+        """
+        return any(x.name == branch for x in self.repository().get_branches())
+
+    def merge(self,base,head,message):
+        """
+        Merges the head branch into the base branch
+
+        Parameters:
+            base (string): base branch name
+            head (string): head branch name
+            message (string): commit message
+
+        """
+        self.repository().merge(base,head,message)
+
+    def get_opened_not_draft_prs_from(self,head):
+        """
+        Get PRs with the given head branch which are open and not drafts
+
+        Parameters:
+            head (string): head branch name
+        """
+        return list(self.repository().get_pulls_from(head,draft=False,open=True))
+
+class Repository:
+    """
+    Class responsible for low-level operations on github.
+    For testing purposes it can be configured to just print out
+    the operations it would perform (dryrun)
+    """
+
+    def __init__(self,github,username,repo,dryrun,authorization_header,timeout):
+        self.inner = github.get_repo(username + "/" + repo)
+        self.username = username
+        self.repo = repo
+        self.dryrun = dryrun
+        self.dryrun_suffix = "[DRYRUN]"
+        self.authorization_header = authorization_header
+        self.timeout = timeout
+
+    def get_branches(self):
+        return self.inner.get_branches()
+
+    def merge(self,base,head,message):
+        if self.dryrun:
+            print(f'{self.dryrun_suffix} Merge {head} to {base} with message {message}')
+        else:
+            self.inner.merge(base,head,message)
+
+    def get_pulls_from(self,head,open,draft):
+        """
+        Get PRs with the given head branch and state
+
+        Parameters:
+            head (string): head branch name
+            open (Bool): is opened
+            draft (Bool): is draft PR
+        """
+        state = "open" if open else "closed"
+        # the API's `head` filter expects "owner:branch", so filter on the branch name locally
+        return filter(lambda x: x.draft == draft and x.head.ref == head, self.inner.get_pulls(state=state))
+
+    def create_pull(self,title,body,base,head,draft,assignees,labels):
+        if self.dryrun:
+            print(f'{self.dryrun_suffix} Pull request created:')
+            print(f"{self.dryrun_suffix}   title: '{title}'")
+            print(f"{self.dryrun_suffix}   body: '{body}'")
+            print(f"{self.dryrun_suffix}   base: '{base}'")
+            print(f"{self.dryrun_suffix}   head: '{head}'")
+            print(f"{self.dryrun_suffix}   is draft: '{draft}'")
+            print(f"{self.dryrun_suffix}   assignees: '{assignees}'")
+            print(f"{self.dryrun_suffix}   labels: '{labels}'")
+        else:
+            pull = self.inner.create_pull(title,body,base,head,draft)
+            for assignee in assignees:
+                pull.add_to_assignees(assignee)
+
+            for label in labels:
+                pull.add_to_labels(label)
+
+    def create_git_ref(self,branch_ref_name,from_branch_sha):
+        if self.dryrun:
+            print(f'{self.dryrun_suffix} New branch created:')
+            print(f"{self.dryrun_suffix}   name: '{branch_ref_name}'")
+            print(f"{self.dryrun_suffix}   head: '{from_branch_sha}'")
+        else:
+            self.inner.create_git_ref(branch_ref_name,from_branch_sha)
+
+    def compare(self,left_branch_ref, right_branch_ref):
+        return self.inner.compare(left_branch_ref,right_branch_ref)
+
+    def get_branch(self,branch):
+        try:
+            return self.inner.get_branch(branch)
+        except Exception as ex:
+            raise GithubException(f'unable to find branch "{branch}" due to {ex}') from ex
+
+    def fast_forward(self,source,target_sha):
+        if self.dryrun:
+            print(f"{self.dryrun_suffix} Fast forward '{source}' to '{target_sha}'")
+            return target_sha
+        res = requests.patch(f"https://api.github.com/repos/{self.username}/{self.repo}/git/refs/heads/{source}",
+            json={"sha": target_sha},
+            headers=self.authorization_header,
+            timeout=self.timeout
+        )
+        if res.status_code == 200:
+            output = json.loads(res.text)
+            return output["object"]["sha"]
+        raise GithubException(f'unable to fast forward branch {source}: {res.text}')
+
+    def update_ref(self,source,target_sha):
+        """
+        Force-update a ref to a new sha
+
+        Parameters:
+            source (string): source branch name
+            target_sha (string): target commit sha
+        """
+        if self.dryrun:
+            print(f"{self.dryrun_suffix} Updating ref '{source}' to '{target_sha}'")
+            return target_sha
+        res = requests.patch(f"https://api.github.com/repos/{self.username}/{self.repo}/git/refs/heads/{source}",
+            json={"sha": target_sha, "force": True},
+            headers=self.authorization_header,
+            timeout=self.timeout
+        )
+        if res.status_code == 200:
+            output = json.loads(res.text)
+            return output["object"]["sha"]
+        raise GithubException(f'unable to update ref of branch {source}: {res.text}')
+
+    def delete_branch(self,branch_name):
+        if self.dryrun:
+            print(f"{self.dryrun_suffix} Delete branch '{branch_name}'")
+        else:
+            res = requests.delete(f"https://api.github.com/repos/{self.username}/{self.repo}/git/refs/heads/{branch_name}",
+                headers=self.authorization_header,timeout=self.timeout)
+            if res.status_code != 204:
+                raise GithubException(f"unable to delete branch '{branch_name}': '{res.text}'. Status code: '{res.status_code}'")
diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/request_parser.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/request_parser.py
new file mode 100644
index 00000000000..35af76ed702
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/request_parser.py
@@ -0,0 +1,49 @@
+"""
+    Module responsible for extracting information from a github webhook event payload json
+"""
+
+class GithubPayloadInfo(object):
+    """
+    Class responsible for parsing the webhook event payload json
+    """
+    def __init__(self, json):
+        self.data = json
+
+    @property
+    def incoming_branch(self):
+        """
+        Gets the branch name from the full ref id (refs/heads/{name})
+        """
+        branch_id = self.data["ref"]
+        # split at most twice so branch names containing "/" stay intact
+        return branch_id.split("/", 2)[2]
+
+    @property
+    def commits(self):
+        """
+        Gets commits info
+        """
+        return list(map(CommitInfo, self.data["commits"]))
+
+class CommitInfo(object):
+    """
+    Responsible for providing information about a commit
+    """
+    def __init__(self, json):
+        self.data = json
+
+    @property
+    def files(self):
+        """
+        Returns all files touched by this commit
+        """
+        added = self.data["added"]
+        removed = self.data["removed"]
+        modified = self.data["modified"]
+        return added + removed + modified
+
+    @property
+    def message(self):
+        """
+        Gets the commit message
+        """
+        return self.data["message"]
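As a quick illustration of what this parser extracts, here is a minimal usage sketch; the payload below is a trimmed, hypothetical push event containing only the fields the parser reads:

```python
# Sketch under assumptions: the payload dict is illustrative, not a real event.
from github_autosync.gcloud_entrypoint.lib.request_parser import GithubPayloadInfo

payload = {
    "ref": "refs/heads/compatible",
    "commits": [
        {"added": ["a.ml"], "removed": [], "modified": ["README.md"], "message": "tweak docs"},
    ],
}

info = GithubPayloadInfo(payload)
print(info.incoming_branch)     # -> "compatible"
print(info.commits[0].files)    # -> ["a.ml", "README.md"] (added + removed + modified)
print(info.commits[0].message)  # -> "tweak docs"
```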
diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/request_validator.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/request_validator.py
new file mode 100644
index 00000000000..f612304e320
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/request_validator.py
@@ -0,0 +1,26 @@
+import hashlib
+import hmac
+from http.client import HTTPException
+
+def verify_signature(payload_body, secret_token, signature_header):
+    """Verify that the payload was sent from GitHub by validating the SHA256 signature.
+
+    Raises a 403-style HTTPException if not authorized.
+
+    Taken from: https://docs.github.com/en/webhooks-and-events/webhooks/securing-your-webhooks
+
+    Args:
+        payload_body: original request body to verify (request.body())
+        secret_token: GitHub app webhook token (WEBHOOK_SECRET)
+        signature_header: header received from GitHub (x-hub-signature-256)
+    """
+    if not signature_header:
+        raise HTTPException("403 Forbidden: x-hub-signature-256 header is missing!")
+    hash_object = hmac.new(secret_token.encode('utf-8'), msg=payload_body, digestmod=hashlib.sha256)
+    expected_signature = "sha256=" + hash_object.hexdigest()
+    if not hmac.compare_digest(expected_signature, signature_header):
+        raise HTTPException("403 Forbidden: request signatures didn't match!")
+
+def is_push_event(request):
+    """ Checks whether the request is a GitHub push event """
+    return "push" == request.headers.get("X-GitHub-Event","")
\ No newline at end of file
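For reference, a minimal sketch of producing the signature this check expects, e.g. in a test; the secret and body values are illustrative only:

```python
import hashlib
import hmac

secret = "WEBHOOK_SECRET"                   # illustrative secret, not a real token
body = b'{"ref": "refs/heads/compatible"}'  # raw request body bytes

digest = hmac.new(secret.encode("utf-8"), msg=body, digestmod=hashlib.sha256).hexdigest()
signature = "sha256=" + digest

# verify_signature(body, secret, signature) now passes; altering a single byte
# of `body` changes the digest, and compare_digest rejects the request.
```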
diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/main.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/main.py
new file mode 100644
index 00000000000..ee65dacdb31
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/main.py
@@ -0,0 +1,191 @@
+''' Main module for handling incoming github webhook events '''
+
+from .lib import GithubPayloadInfo, config, GithubApi, GithubException, verify_signature,is_push_event
+
+def handle_incoming_commit_push(request):
+    """Responds to any HTTP request.
+    Args:
+        request (flask.Request): HTTP request object.
+    Returns:
+        The response text or any set of values that can be turned into a
+        Response object using
+        `make_response <http://flask.pocoo.org/docs/1.0/api/#flask.Flask.make_response>`.
+    """
+    verify_signature(request.data, config.github["secret"], request.headers['x-hub-signature-256'])
+    if not is_push_event(request):
+        print("not a push event. skipping...")
+        return
+
+    handle_incoming_commit_push_json(request.json,config=config)
+    print("done")
+    return
+
+def handle_incoming_commit_push_in_stable_branches(source_branch):
+    """Handle an incoming commit on a stable branch.
+    Args:
+        source_branch (String): Name of the branch the commit was pushed to.
+    """
+
+    target_branch = config.branches[source_branch]
+    github = GithubApi(config.github)
+    print(f"generating diff between {source_branch} and '{target_branch}'...")
+    cmp = github.get_diff_commits(target_branch,source_branch)
+
+    if cmp.status == "identical":
+        print(f"'{source_branch}' and '{target_branch}' branches are identical. skipping merge...")
+        return
+    if cmp.status == "behind":
+        print(f"'{source_branch}' is behind '{target_branch}'. skipping merge...")
+        return
+
+    if cmp.status == "ahead":
+        print(f"'{source_branch}' is ahead of '{target_branch}'. It is enough to fast-forward...")
+        new_sha = github.fast_forward(target_branch,source_branch)
+        print(f'branch {target_branch} successfully fast-forwarded. It is now on commit: {new_sha}')
+        return
+
+    print(f"'{source_branch}' and '{target_branch}' branches are not identical and both contain different commits (they have 'diverged'). attempting merge...")
+    new_branch = config.tmp_branch_name(source_branch,target_branch)
+
+    if github.branch_exists(new_branch):
+        print(f'temporary sync branch {new_branch} already exists. fast-forwarding it or creating yet another PR for the new changes')
+
+        try:
+            new_sha = github.fast_forward(new_branch,source_branch)
+            print(f'branch {new_branch} successfully fast-forwarded. It is now on commit: {new_sha}')
+        except GithubException:
+            title = github.create_pull_request_for_tmp_branch(config,source_branch,new_branch)
+            print(f"new PR: '{title}' created. Please resolve it before merge...")
+
+    else:
+        print(f'creating new sync branch {new_branch} to incorporate changes from {source_branch} to {target_branch}')
+        github.create_new_branch(new_branch,source_branch)
+
+    print("checking mergeability...")
+
+    if github.has_merge_conflict(new_branch,target_branch):
+        print("branches have a merge conflict! creating PR to address those changes...")
+        title = github.create_pull_request(config,source_branch,target_branch,new_branch)
+        print(f"new PR: '{title}' created. Please resolve it before merge...")
+
+    else:
+        print(f"there is no merge conflict. merging {new_branch} into {target_branch}...")
+        github.merge(target_branch,new_branch, f"Github Autosync: {source_branch} -> {target_branch}")
+        github.delete_branch(new_branch)
+
+def get_branches_earlier_in_chain(branches,branch):
+    """ Retrieves the names of branches earlier in the chain than the incoming one
+    Args:
+        branches (Dictionary): Configuration element which defines the branch relations.
+        branch (String): Incoming branch
+    Returns:
+        List of branches earlier in the chain
+    """
+    inv_branches = {v: k for k, v in branches.items()}
+    return get_branches_later_in_chain(inv_branches,branch)
+
+def get_branches_later_in_chain(branches,branch):
+    """ Retrieves the names of branches later in the chain than the incoming one
+    Args:
+        branches (Dictionary): Configuration element which defines the branch relations.
+        branch (String): Incoming branch
+    Returns:
+        List of branches later in the chain
+    """
+    output = []
+    next_branch = branches.get(branch)
+    while next_branch is not None:
+        output.append(next_branch)
+        next_branch = branches.get(next_branch)
+    return output
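To make the chain traversal concrete, here is a small sketch with an assumed three-branch chain; the branch names are illustrative, not taken from the real config:

```python
# Assumed chain: compatible -> berkeley -> develop
branches = {"compatible": "berkeley", "berkeley": "develop"}

# get_branches_later_in_chain(branches, "compatible") follows the dict forward:
#   compatible -> berkeley -> develop          =>  ["berkeley", "develop"]
#
# get_branches_earlier_in_chain inverts the mapping and walks from the far end:
#   {"berkeley": "compatible", "develop": "berkeley"}, starting at "develop"
#                                              =>  ["berkeley", "compatible"]
```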
+
+def handle_pr(pr,github,source_branch):
+    """ Handle a push to a personal PR branch
+    Args:
+        pr (PullRequest): Pull request whose head branch received the push
+        github (GithubApi): Github wrapper
+        source_branch (String): Incoming branch
+    """
+    branches = get_branches_earlier_in_chain(config.branches,pr.base.ref)
+    later_branches = get_branches_later_in_chain(config.branches,pr.base.ref)
+    branches.extend(later_branches)
+
+    data = []
+
+    for branch in branches:
+        if github.has_merge_conflict(branch,source_branch):
+            print(f"{branch} and {source_branch} branches have a merge conflict! creating PR to address those changes...")
+
+            new_branch = config.tmp_branch_name(source_branch,branch)
+
+            if github.branch_exists(new_branch):
+                print(f"{new_branch} already exists, therefore we will recreate it")
+                github.delete_branch(new_branch)
+
+            github.create_new_branch(new_branch,branch)
+
+            commits = pr.get_commits()
+            github.cherry_pick_commits(new_branch,commits,skip_merges=True)
+
+            title = github.create_pull_request(config,source_branch,branch,new_branch)
+            print(f"new PR: '{title}' created. Please resolve it before merge...")
+
+            # do not shadow `pr` here: the issue comment below must target the original PR
+            for opened_pr in github.repository().inner.get_pulls(head=new_branch):
+                if opened_pr.title == title:
+                    data.append((opened_pr.html_url,new_branch,branch))
+    if any(data):
+        pr.create_issue_comment(comment_conflict(data))
+
+def handle_incoming_commit_push_in_personal_branches(source_branch):
+    """
+    Main handler for a change on a personal branch
+    """
+    github = GithubApi(config.github)
+
+    pull_requests = github.get_opened_not_draft_prs_from(source_branch)
+
+    if not any(pull_requests):
+        print(f"skipping merge check, as branch {source_branch} does not have any non-draft PR opened")
+
+    for pr in pull_requests:
+        handle_pr(pr,github,source_branch)
+
+def comment_conflict(data):
+    """
+        Template for the issue comment posted after a conflict in a PR is detected
+    """
+    content = config.pr["alert_header"] + """
+<table>
+  <tr>
+    <th>Pull request name</th>
+    <th>Temporary branch name</th>
+    <th>Conflicting branch</th>
+  </tr>
+"""
+    for (url,base,branch) in data:
+        content = content + f"""
+  <tr>
+    <td>{url}</td>
+    <td>{base}</td>
+    <td>{branch}</td>
+  </tr>
+"""
+    content = content + """
+</table>
+""" + return content + +def handle_incoming_commit_push_json(json,config): + """ + Main logic for handling incoming github webhook event + """ + payload_info= GithubPayloadInfo(json) + + source_branch = payload_info.incoming_branch + + if not source_branch in config.branches: + handle_incoming_commit_push_in_personal_branches(source_branch) + else: + handle_incoming_commit_push_in_stable_branches(source_branch) + \ No newline at end of file diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/requirements.txt b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/requirements.txt new file mode 100644 index 00000000000..5cfa315a3ab --- /dev/null +++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/requirements.txt @@ -0,0 +1,4 @@ +pybuildkite==1.2.2 +PyGithub==1.58.1 +requests==2.22.0 + diff --git a/automation/scripts/github_branch_autosync/tests/config.py b/automation/scripts/github_branch_autosync/tests/config.py new file mode 100644 index 00000000000..1b48375789d --- /dev/null +++ b/automation/scripts/github_branch_autosync/tests/config.py @@ -0,0 +1,22 @@ +""" Test config """ +import os + +branches = {} + +github = { + "token": os.environ["WEBHOOK_APP_TOKEN"], + "username": os.environ["WEBHOOK_APP_USER"], + "repo": os.environ["WEBHOOK_APP_REPO"], +} + +def tmp_branch_name(source_branch,target_branch): + return f"sync-{source_branch}-with-{target_branch}" + +pr = { + "title_prefix": "[Branches auto sync failure] ", + "assignees": [os.environ["WEBHOOK_APP_USER"]], + "body_prefix": "This is auto-generated PR in order to solve merge conflicts between two branches.", + "draft": 'false', + "maintainer_can_modify": 'false', + "labels": ["auto-sync"] +} \ No newline at end of file diff --git a/automation/scripts/github_branch_autosync/tests/payload.json b/automation/scripts/github_branch_autosync/tests/payload.json new file mode 100644 index 00000000000..75dabc729da --- /dev/null +++ b/automation/scripts/github_branch_autosync/tests/payload.json @@ -0,0 +1,192 @@ +{ + "ref": "refs/heads/rampup", + "before": "b1c422e8f098a7312fd68560bddc283746c24bda", + "after": "d7b348d83c39bf94bdeaac6cb644b2c65088164d", + "repository": { + "id": 353642475, + "node_id": "MDEwOlJlcG9zaXRvcnkzNTM2NDI0NzU=", + "name": "rust-bdd", + "full_name": "dkijania/rust-bdd", + "private": false, + "owner": { + "name": "dkijania", + "email": "dariusz.kijania@gmail.com", + "login": "dkijania", + "id": 20424186, + "node_id": "MDQ6VXNlcjIwNDI0MTg2", + "avatar_url": "https://avatars.githubusercontent.com/u/20424186?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/dkijania", + "html_url": "https://github.com/dkijania", + "followers_url": "https://api.github.com/users/dkijania/followers", + "following_url": "https://api.github.com/users/dkijania/following{/other_user}", + "gists_url": "https://api.github.com/users/dkijania/gists{/gist_id}", + "starred_url": "https://api.github.com/users/dkijania/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/dkijania/subscriptions", + "organizations_url": "https://api.github.com/users/dkijania/orgs", + "repos_url": "https://api.github.com/users/dkijania/repos", + "events_url": "https://api.github.com/users/dkijania/events{/privacy}", + "received_events_url": "https://api.github.com/users/dkijania/received_events", + "type": "User", + "site_admin": false + }, + "html_url": "https://github.com/dkijania/rust-bdd", + "description": "Example of bdd in rust", + "fork": 
false, + "url": "https://github.com/dkijania/rust-bdd", + "forks_url": "https://api.github.com/repos/dkijania/rust-bdd/forks", + "keys_url": "https://api.github.com/repos/dkijania/rust-bdd/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/dkijania/rust-bdd/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/dkijania/rust-bdd/teams", + "hooks_url": "https://api.github.com/repos/dkijania/rust-bdd/hooks", + "issue_events_url": "https://api.github.com/repos/dkijania/rust-bdd/issues/events{/number}", + "events_url": "https://api.github.com/repos/dkijania/rust-bdd/events", + "assignees_url": "https://api.github.com/repos/dkijania/rust-bdd/assignees{/user}", + "branches_url": "https://api.github.com/repos/dkijania/rust-bdd/branches{/branch}", + "tags_url": "https://api.github.com/repos/dkijania/rust-bdd/tags", + "blobs_url": "https://api.github.com/repos/dkijania/rust-bdd/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/dkijania/rust-bdd/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/dkijania/rust-bdd/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/dkijania/rust-bdd/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/dkijania/rust-bdd/statuses/{sha}", + "languages_url": "https://api.github.com/repos/dkijania/rust-bdd/languages", + "stargazers_url": "https://api.github.com/repos/dkijania/rust-bdd/stargazers", + "contributors_url": "https://api.github.com/repos/dkijania/rust-bdd/contributors", + "subscribers_url": "https://api.github.com/repos/dkijania/rust-bdd/subscribers", + "subscription_url": "https://api.github.com/repos/dkijania/rust-bdd/subscription", + "commits_url": "https://api.github.com/repos/dkijania/rust-bdd/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/dkijania/rust-bdd/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/dkijania/rust-bdd/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/dkijania/rust-bdd/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/dkijania/rust-bdd/contents/{+path}", + "compare_url": "https://api.github.com/repos/dkijania/rust-bdd/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/dkijania/rust-bdd/merges", + "archive_url": "https://api.github.com/repos/dkijania/rust-bdd/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/dkijania/rust-bdd/downloads", + "issues_url": "https://api.github.com/repos/dkijania/rust-bdd/issues{/number}", + "pulls_url": "https://api.github.com/repos/dkijania/rust-bdd/pulls{/number}", + "milestones_url": "https://api.github.com/repos/dkijania/rust-bdd/milestones{/number}", + "notifications_url": "https://api.github.com/repos/dkijania/rust-bdd/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/dkijania/rust-bdd/labels{/name}", + "releases_url": "https://api.github.com/repos/dkijania/rust-bdd/releases{/id}", + "deployments_url": "https://api.github.com/repos/dkijania/rust-bdd/deployments", + "created_at": 1617268643, + "updated_at": "2023-03-28T04:59:17Z", + "pushed_at": 1680164399, + "git_url": "git://github.com/dkijania/rust-bdd.git", + "ssh_url": "git@github.com:dkijania/rust-bdd.git", + "clone_url": "https://github.com/dkijania/rust-bdd.git", + "svn_url": "https://github.com/dkijania/rust-bdd", + "homepage": null, + "size": 66, + "stargazers_count": 0, + "watchers_count": 0, + "language": "Rust", + "has_issues": true, + "has_projects": true, + "has_downloads": true, 
+ "has_wiki": true, + "has_pages": false, + "has_discussions": false, + "forks_count": 0, + "mirror_url": null, + "archived": false, + "disabled": false, + "open_issues_count": 1, + "license": null, + "allow_forking": true, + "is_template": false, + "web_commit_signoff_required": false, + "topics": [ + + ], + "visibility": "public", + "forks": 0, + "open_issues": 1, + "watchers": 0, + "default_branch": "main", + "stargazers": 0, + "master_branch": "main" + }, + "pusher": { + "name": "dkijania", + "email": "dariusz.kijania@gmail.com" + }, + "sender": { + "login": "dkijania", + "id": 20424186, + "node_id": "MDQ6VXNlcjIwNDI0MTg2", + "avatar_url": "https://avatars.githubusercontent.com/u/20424186?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/dkijania", + "html_url": "https://github.com/dkijania", + "followers_url": "https://api.github.com/users/dkijania/followers", + "following_url": "https://api.github.com/users/dkijania/following{/other_user}", + "gists_url": "https://api.github.com/users/dkijania/gists{/gist_id}", + "starred_url": "https://api.github.com/users/dkijania/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/dkijania/subscriptions", + "organizations_url": "https://api.github.com/users/dkijania/orgs", + "repos_url": "https://api.github.com/users/dkijania/repos", + "events_url": "https://api.github.com/users/dkijania/events{/privacy}", + "received_events_url": "https://api.github.com/users/dkijania/received_events", + "type": "User", + "site_admin": false + }, + "created": false, + "deleted": false, + "forced": true, + "base_ref": null, + "compare": "https://github.com/dkijania/rust-bdd/compare/b1c422e8f098...d7b348d83c39", + "commits": [ + { + "id": "d7b348d83c39bf94bdeaac6cb644b2c65088164d", + "tree_id": "ec62c6eedbfff5457f2b683205db4dc0333d827c", + "distinct": true, + "message": "change in berkeley", + "timestamp": "2023-03-30T10:19:43+02:00", + "url": "https://github.com/dkijania/rust-bdd/commit/d7b348d83c39bf94bdeaac6cb644b2c65088164d", + "author": { + "name": "dkijania", + "email": "dariusz@o1labs.org" + }, + "committer": { + "name": "dkijania", + "email": "dariusz@o1labs.org" + }, + "added": [ + + ], + "removed": [ + + ], + "modified": [ + "src/cucumber/debug.rs" + ] + } + ], + "head_commit": { + "id": "d7b348d83c39bf94bdeaac6cb644b2c65088164d", + "tree_id": "ec62c6eedbfff5457f2b683205db4dc0333d827c", + "distinct": true, + "message": "change in berkeley", + "timestamp": "2023-03-30T10:19:43+02:00", + "url": "https://github.com/dkijania/rust-bdd/commit/d7b348d83c39bf94bdeaac6cb644b2c65088164d", + "author": { + "name": "dkijania", + "email": "dariusz@o1labs.org" + }, + "committer": { + "name": "dkijania", + "email": "dariusz@o1labs.org" + }, + "added": [ + + ], + "removed": [ + + ], + "modified": [ + "src/cucumber/debug.rs" + ] + } + } \ No newline at end of file diff --git a/automation/scripts/github_branch_autosync/tests/test_e2e.py b/automation/scripts/github_branch_autosync/tests/test_e2e.py new file mode 100644 index 00000000000..d754f64e600 --- /dev/null +++ b/automation/scripts/github_branch_autosync/tests/test_e2e.py @@ -0,0 +1,129 @@ +""" E2E tests for auto-sync merges""" + +import random +import unittest +from github_autosync.gcloud_entrypoint.main import handle_incoming_commit_push_json +from tests import config,utils +from github_autosync.gcloud_entrypoint.lib.github import GithubApi + +class TestEndToEndFlow(unittest.TestCase): + + generator = None + github = None + + @classmethod + def setUpClass(cls): + 
cls.generator = utils.BranchNamesGenerator() + cls.github = GithubApi(config.github) + + def push_commit_to(self, branch,some_source_file): + change = "change" + str(random.randint(0, 100_000)) + utils.create_simple_commit(self.github, config.github,branch,"commit", some_source_file, change) + + def assert_on_the_same_commit(self, left, right): + left_sha = self.github.branch(left).commit.sha + right_sha = self.github.branch(right).commit.sha + + self.assertEqual(left_sha,right_sha) + + def assert_temp_sync_branch_created(self, new_branch): + self.assertTrue(self.github.branch_exists(new_branch)) + + def assert_temp_sync_branch_was_cleaned(self,base,head): + self.assertFalse(self.github.branch_exists(config.tmp_branch_name(base,head))) + + def assert_pr_created(self,base,head): + prs = self.github.repository().get_pulls(base,head).get_page(0) + + self.assertEqual(len(prs),1) + + pr = prs[0] + self.assertEqual(config.pr["assignees"],list(map(lambda x: x.login, pr.assignees))) + self.assertTrue(config.pr["title_prefix"] in pr.title) + self.assertEqual(config.pr["labels"],list(map(lambda x: x.name, pr.labels))) + self.assertTrue(config.pr["body_prefix"] in pr.body) + self.assertEqual(bool(config.pr["draft"]),pr.draft) + + def handle_commit_event(self,branch): + handle_incoming_commit_push_json(json={ "ref": "refs/heads/" + branch},config=config) + + def test_no_conflict(self): + compatible,develop,some_source_file = self.generator.generate_unique_names() + + + self.push_commit_to(compatible,some_source_file) + self.handle_commit_event(compatible) + + self.assert_on_the_same_commit(compatible,develop) + self.assert_temp_sync_branch_was_cleaned(compatible,develop) + + def test_conflict(self): + compatible,develop,some_source_file = self.generator.generate_unique_names() + + # Creating conflict + self.push_commit_to(develop,some_source_file) + self.push_commit_to(compatible,some_source_file) + + self.handle_commit_event(compatible) + + temp_sync_branch = config.tmp_branch_name(compatible,develop) + self.assert_temp_sync_branch_created(temp_sync_branch) + self.assert_pr_created(base=develop,head=temp_sync_branch) + + def test_update_stable_branch_while_conflict(self): + compatible,develop,some_source_file = self.generator.generate_unique_names() + + # Creating conflict + self.push_commit_to(develop,some_source_file) + self.push_commit_to(compatible,some_source_file) + + self.handle_commit_event(compatible) + + temp_sync_branch = config.tmp_branch_name(compatible,develop) + self.assert_pr_created(base=develop,head=temp_sync_branch) + + self.push_commit_to(compatible,some_source_file) + self.handle_commit_event(compatible) + + # sync branch should fast forward to compatible head + temp_branch_head = self.github.branch(temp_sync_branch).commit.sha + compatible_head = self.github.branch(compatible).commit.sha + develop_head = self.github.branch(develop).commit.sha + + self.assertEqual(temp_branch_head,compatible_head) + self.assertNotEqual(compatible_head,develop_head) + + def test_update_stable_branch_while_conflict_causes_conflict_with_temp_branch(self): + compatible,develop,some_source_file = self.generator.generate_unique_names() + temp_branch = config.tmp_branch_name(compatible,develop) + + # Creating conflict + self.push_commit_to(develop,some_source_file) + self.push_commit_to(compatible,some_source_file) + + self.handle_commit_event(compatible) + + # attempt to fix merge conflict + self.push_commit_to(temp_branch,some_source_file) + + # but then compatible got yet another commit which now 
creates conflict not only with develop + # but also with sync branch + self.push_commit_to(compatible,some_source_file) + + self.handle_commit_event(compatible) + + # as a result we should have two prs original one and new for fixing intermittent conflict + self.assert_pr_exist(base=temp_branch,head=compatible) + self.assert_pr_exist(base=temp_branch,head=develop) + + def assert_pr_exist(self,base,head): + prs = self.github.repository().get_pulls(base,head).get_page(0) + self.assertEqual(1,len(prs)) + + @classmethod + def tearDownClass(cls): + cls.generator.tear_down() + + +if __name__ == '__main__': + unittest.main() diff --git a/automation/scripts/github_branch_autosync/tests/test_payload_parser.py b/automation/scripts/github_branch_autosync/tests/test_payload_parser.py new file mode 100644 index 00000000000..9c499679c0e --- /dev/null +++ b/automation/scripts/github_branch_autosync/tests/test_payload_parser.py @@ -0,0 +1,30 @@ +import unittest +import json + +from github_autosync.gcloud_entrypoint.lib.request_parser import GithubPayloadInfo + +class TestPayloadParser(unittest.TestCase): + + data = None + + @classmethod + def setUpClass(cls): + with open("tests/payload.json",encoding="utf-8") as file: + data = json.load(file) + cls.data = data + + def test_incoming_branch(self): + info = GithubPayloadInfo(self.data) + self.assertEqual("rampup",info.incoming_branch) + + def test_commits(self): + info = GithubPayloadInfo(self.data) + commits = info.commits + self.assertEqual(1,len(commits)) + commit = commits[0] + self.assertEqual(["src/cucumber/debug.rs"],commit.files) + self.assertEqual("change in berkeley",commit.message) + +if __name__ == '__main__': + unittest.main() + diff --git a/automation/scripts/github_branch_autosync/tests/utils.py b/automation/scripts/github_branch_autosync/tests/utils.py new file mode 100644 index 00000000000..10ac1f17453 --- /dev/null +++ b/automation/scripts/github_branch_autosync/tests/utils.py @@ -0,0 +1,97 @@ +""" Test utility module """ + +import random +import base64 +from gql import gql, Client +from gql.transport.requests import RequestsHTTPTransport +from github_autosync.gcloud_entrypoint.lib.github import GithubApi +from tests import config + + +def create_simple_commit(github_api,config,branch, message, path, content ): + """ + Creates simple commit. + + Parameters: + github_api (GithubApi): Github api + config (config): Test config module + branch (string): Branch name to which we commit + message (string): commit message + path (string): path to file which will receive new content + content (string): new content for file in 'path' argument + + Returns: + Graphql response + """ + head = github_api.branch(name=branch).commit.sha + sample_string_bytes = content.encode("ascii") + base64_bytes = base64.b64encode(sample_string_bytes) + base64_string = base64_bytes.decode("ascii") + + transport = RequestsHTTPTransport(url="https://api.github.com/graphql",headers=github_api.get_authorization_header) + + client = Client(transport=transport) + mutation = gql( + """ + mutation ($input: CreateCommitOnBranchInput!) 
{
+        createCommitOnBranch(input: $input) {
+            commit { url }
+        }
+    }
+    """
+    )
+
+    variables = {
+        "input": {
+            "branch": {
+                "repositoryNameWithOwner": config["username"] + "/" + config["repo"],
+                "branchName": branch
+            },
+            "message": {"headline": message },
+            "fileChanges": {
+                "additions": [{
+                    "path": path,
+                    "contents": base64_string
+                }]
+            },
+            "expectedHeadOid": head
+        }}
+
+    res = client.execute(mutation, variable_values=variables)
+    transport.close()
+    client.close_sync()
+    return res
+
+class BranchNamesGenerator(object):
+    """
+        Utility class to generate reasonably unique names for branches
+        and for the files which are about to be edited
+    """
+    def __init__(self):
+        self.store = []
+        self.github = GithubApi(config.github)
+
+    def generate_unique_names(self):
+        """
+            Generates a unique tuple of two branch names and a file to edit,
+            then stores the branches in an inner list for later clean-up
+        """
+        rand = str(random.randint(0, 100_000))
+        compatible_branch = "compatible" + rand
+        develop_branch = "develop_" + rand
+        file_to_edit = f"README_{rand}.md"
+        self.github.create_new_branch(compatible_branch,"main")
+        self.github.create_new_branch(develop_branch,"main")
+
+        config.branches[compatible_branch] = develop_branch
+        self.store.extend([compatible_branch,develop_branch,config.tmp_branch_name(compatible_branch,develop_branch)])
+        return (compatible_branch,develop_branch,file_to_edit)
+
+    def tear_down(self):
+        """
+            Deletes all branches that the class is aware of
+        """
+        all_branches = self.github.repository().get_branches().get_page(0)
+        for branch in self.store:
+            if any(x.name == branch for x in all_branches):
+                self.github.delete_branch(branch)
diff --git a/automation/terraform/modules/google-cloud/cloud-postgres/README.md b/automation/terraform/modules/google-cloud/cloud-postgres/README.md
new file mode 100644
index 00000000000..7ac9d424959
--- /dev/null
+++ b/automation/terraform/modules/google-cloud/cloud-postgres/README.md
@@ -0,0 +1,26 @@
+# Google Cloud Postgres Deployment
+
+This terraform configuration is used to deploy an instance of Google Cloud Postgres. Although the default configuration works without creating a naming conflict, it is recommended to deploy the postgres instance as a module within a larger terraform deployment (which passes it unique var values).
+
+The default configuration uses Google Secret Manager to pull in a password for the default `postgres` user. After deployment, the assigned IP addresses, username, and password will be printed to the terminal as shown below:
+
+```
+Outputs:
+
+cloud_postgres_ip = tolist([
+  {
+    "ip_address" = "35.35.35.35" <---- example IP
+    "time_to_retire" = ""
+    "type" = "PRIMARY"
+  },
+  {
+    "ip_address" = "34.34.34.34" <---- example IP
+    "time_to_retire" = ""
+    "type" = "OUTGOING"
+  },
+])
+db_password = "PASSWORD_HERE"
+db_user = "postgres"
+```
+
+The `PRIMARY` IP should be used when connecting to the new instance. By default, no database or schema is defined on the newly deployed instance.
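Since the README recommends consuming this configuration as a module, here is a minimal sketch of what a caller could look like; the module path, project id, and secret name below are illustrative assumptions, not part of this patch:

```hcl
# Hypothetical usage sketch -- the values are placeholders.
# `db_pass` must name an existing Secret Manager secret holding the password.
module "cloud_postgres" {
  source = "../../modules/google-cloud/cloud-postgres"

  gcp_project         = "my-project"       # assumption: your own GCP project
  gcp_region          = "us-east4"
  db_name             = "archive-db"
  db_user             = "postgres"
  db_pass             = "archive-db-pass"  # Secret Manager secret name
  service_label       = "archive"
  deletion_protection = true
}

output "postgres_ips" {
  # the PRIMARY entry of this list is the one to connect to, per the example above
  value = module.cloud_postgres.cloud_postgres_ip
}
```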
diff --git a/automation/terraform/modules/google-cloud/cloud-postgres/main.tf b/automation/terraform/modules/google-cloud/cloud-postgres/main.tf new file mode 100644 index 00000000000..bcf21243df1 --- /dev/null +++ b/automation/terraform/modules/google-cloud/cloud-postgres/main.tf @@ -0,0 +1,36 @@ +# Configure the Google Cloud provider +provider "google" { + project = var.gcp_project + region = var.gcp_region +} + +resource "random_id" "instance_id" { + byte_length = 4 +} + +data "google_secret_manager_secret_version" "db_password" { + provider = google + secret = var.db_pass +} + +# Create a Google Cloud SQL PostgreSQL instance +resource "google_sql_database_instance" "postgres_instance" { + name = "${var.db_name}-${random_id.instance_id.hex}" + database_version = var.postgres_version + project = var.gcp_project + region = var.gcp_region + settings { + tier = var.db_spec + user_labels = { + service = var.service_label + } + } + deletion_protection = var.deletion_protection +} + +# Define the database user +resource "google_sql_user" "database_user" { + name = var.db_user + instance = google_sql_database_instance.postgres_instance.name + password = data.google_secret_manager_secret_version.db_password.secret_data +} diff --git a/automation/terraform/modules/google-cloud/cloud-postgres/output.tf b/automation/terraform/modules/google-cloud/cloud-postgres/output.tf new file mode 100644 index 00000000000..b6f2e78cd34 --- /dev/null +++ b/automation/terraform/modules/google-cloud/cloud-postgres/output.tf @@ -0,0 +1,13 @@ +output "cloud_postgres_ip" { + value = google_sql_database_instance.postgres_instance.ip_address +} + +output "db_user" { + value = google_sql_user.database_user.name +} + +output "db_password" { + value = data.google_secret_manager_secret_version.db_password.secret_data +} + + diff --git a/automation/terraform/modules/google-cloud/cloud-postgres/vars.tf b/automation/terraform/modules/google-cloud/cloud-postgres/vars.tf new file mode 100644 index 00000000000..fe6c59fbdd6 --- /dev/null +++ b/automation/terraform/modules/google-cloud/cloud-postgres/vars.tf @@ -0,0 +1,39 @@ +variable "gcp_project" { + default = "o1labs-192920" +} + +variable "gcp_region" { + default = "us-east4" +} + +variable "gcp_zone" { + default = "us-east4-b" +} + +variable "db_name" { + default = "o1db" +} + +variable "db_user" { + default = "postgres" +} + +variable "db_pass" { + default = "o1db-pass" +} + +variable "deletion_protection" { + default = false +} + +variable "postgres_version" { + default = "POSTGRES_14" +} + +variable "db_spec" { + default = "db-g1-small" +} + +variable "service_label" { + default = "none" +} diff --git a/automation/terraform/modules/kubernetes/testnet/locals.tf b/automation/terraform/modules/kubernetes/testnet/locals.tf index 4b95460573f..686d01aab29 100644 --- a/automation/terraform/modules/kubernetes/testnet/locals.tf +++ b/automation/terraform/modules/kubernetes/testnet/locals.tf @@ -27,6 +27,7 @@ locals { // TODO: Change this to a better name seedPeers = var.additional_peers logLevel = var.log_level + startFilteredLogs = var.start_filtered_logs logSnarkWorkGossip = var.log_snark_work_gossip logTxnPoolGossip = var.log_txn_pool_gossip ports = { @@ -75,6 +76,7 @@ locals { logLevel = var.log_level logSnarkWorkGossip = var.log_snark_work_gossip logPrecomputedBlocks = var.log_precomputed_blocks + startFilteredLogs = var.start_filtered_logs logTxnPoolGossip = var.log_txn_pool_gossip uploadBlocksToGCloud = var.upload_blocks_to_gcloud # seedPeersURL = var.seed_peers_url diff 
--git a/automation/terraform/modules/kubernetes/testnet/variables.tf b/automation/terraform/modules/kubernetes/testnet/variables.tf index 4903f91e398..064337a8a3a 100644 --- a/automation/terraform/modules/kubernetes/testnet/variables.tf +++ b/automation/terraform/modules/kubernetes/testnet/variables.tf @@ -160,6 +160,11 @@ variable "log_precomputed_blocks" { default = false } +variable "start_filtered_logs" { + type = list(string) + default = [] +} + variable "log_txn_pool_gossip" { type = bool default = false diff --git a/automation/terraform/modules/o1-integration/inputs.tf b/automation/terraform/modules/o1-integration/inputs.tf index a87df4471b8..ccf21707a73 100644 --- a/automation/terraform/modules/o1-integration/inputs.tf +++ b/automation/terraform/modules/o1-integration/inputs.tf @@ -93,6 +93,11 @@ variable "log_precomputed_blocks" { type = bool } +variable "start_filtered_logs" { + type = list(string) + default = [] +} + variable "worker_cpu_request" { type = number default = 0 diff --git a/automation/terraform/modules/o1-integration/testnet.tf b/automation/terraform/modules/o1-integration/testnet.tf index 062188987c2..1b83c07ffe2 100644 --- a/automation/terraform/modules/o1-integration/testnet.tf +++ b/automation/terraform/modules/o1-integration/testnet.tf @@ -39,6 +39,7 @@ module "kubernetes_testnet" { archive_configs = local.archive_node_configs log_precomputed_blocks = var.log_precomputed_blocks + start_filtered_logs = var.start_filtered_logs log_txn_pool_gossip = true archive_node_count = var.archive_node_count diff --git a/automation/terraform/modules/o1-testnet/inputs.tf b/automation/terraform/modules/o1-testnet/inputs.tf index 9841807a9eb..809040cd6d2 100644 --- a/automation/terraform/modules/o1-testnet/inputs.tf +++ b/automation/terraform/modules/o1-testnet/inputs.tf @@ -275,6 +275,11 @@ variable "log_precomputed_blocks" { default = false } +variable "start_filtered_logs" { + type = list(string) + default = [] +} + variable "worker_cpu_request" { type = number default = 0 diff --git a/automation/terraform/modules/o1-testnet/testnet.tf b/automation/terraform/modules/o1-testnet/testnet.tf index c40ea76910d..733a8b4b1cd 100644 --- a/automation/terraform/modules/o1-testnet/testnet.tf +++ b/automation/terraform/modules/o1-testnet/testnet.tf @@ -43,6 +43,7 @@ module "kubernetes_testnet" { log_level = var.log_level log_txn_pool_gossip = var.log_txn_pool_gossip log_precomputed_blocks = var.log_precomputed_blocks + start_filtered_logs = var.start_filtered_logs agent_min_fee = var.agent_min_fee agent_max_fee = var.agent_max_fee diff --git a/automation/terraform/modules/testnet-alerts/templates/testnet-alert-rules.yml.tpl b/automation/terraform/modules/testnet-alerts/templates/testnet-alert-rules.yml.tpl index 9812696c78a..58d458b5b66 100644 --- a/automation/terraform/modules/testnet-alerts/templates/testnet-alert-rules.yml.tpl +++ b/automation/terraform/modules/testnet-alerts/templates/testnet-alert-rules.yml.tpl @@ -255,25 +255,6 @@ groups: description: "{{ $value }} blocks have been validated on network {{ $labels.testnet }} in the last hour (according to some node)." 
runbook: "https://www.notion.so/minaprotocol/FewBlocksPerHour-47a6356f093242d988b0d9527ce23478" - - alert: StuckInBootstrap - expr: count by (testnet) (increase(Coda_Runtime_process_uptime_ms_total{syncStatus = "BOOTSTRAP"}[2h]) >= 7200000) > 0 - for: ${alert_evaluation_duration} - labels: - testnet: "{{ $labels.testnet }}" - severity: critical - annotations: - summary: "One or more {{ $labels.testnet }} nodes are stuck at bootstrap for more than 2 hours" - - - alert: StuckInCatchup - expr: count by (testnet) (increase(Coda_Runtime_process_uptime_ms_total{syncStatus = "CATCHUP"}[2h]) >= 7200000) > 0 - for: ${alert_evaluation_duration} - labels: - testnet: "{{ $labels.testnet }}" - severity: critical - annotations: - summary: "One or more {{ $labels.testnet }} nodes are stuck at catchup for more than 2 hours" - - - name: Warnings rules: - alert: HighBlockGossipLatency @@ -638,7 +619,25 @@ groups: summary: "One or more {{ $labels.testnet }} nodes are stuck at an old block height (Observed block height did not increase in the last 30m)" description: "{{ $value }} blocks have been validated on network {{ $labels.testnet }} in the last hour (according to some node)." runbook: "https://www.notion.so/minaprotocol/FewBlocksPerHour-47a6356f093242d988b0d9527ce23478" - + + - alert: StuckInBootstrap + expr: max by (testnet) (increase(Coda_Runtime_process_uptime_ms_total{${berkeley_testnet},syncStatus = "BOOTSTRAP"}[2h])) >= 6000000 + for: ${alert_evaluation_duration} + labels: + testnet: "{{ $labels.testnet }}" + severity: critical + annotations: + summary: "One or more {{ $labels.testnet }} nodes are stuck at bootstrap for more than 100 mins within the recent 2 hours" + + - alert: StuckInCatchup + expr: max by (testnet) (increase(Coda_Runtime_process_uptime_ms_total{${berkeley_testnet},syncStatus = "CATCHUP"}[2h])) >= 6000000 + for: ${alert_evaluation_duration} + labels: + testnet: "{{ $labels.testnet }}" + severity: critical + annotations: + summary: "One or more {{ $labels.testnet }} nodes are stuck at catchup for more than 100 mins within the recent 2 hours" + - alert: HighBlockGossipLatency expr: max by (testnet) (max_over_time(Coda_Block_latency_gossip_time {${berkeley_testnet},${synced_status_filter}} [${alert_timeframe}])) > 200 for: ${alert_evaluation_duration} diff --git a/buildkite/scripts/build-release.sh b/buildkite/scripts/build-release.sh index 91ff6659fb2..4baee264719 100755 --- a/buildkite/scripts/build-release.sh +++ b/buildkite/scripts/build-release.sh @@ -20,4 +20,4 @@ echo "--- Upload debs to amazon s3 repo" ./buildkite/scripts/publish-deb.sh echo "--- Git diff after build is complete:" -git diff --exit-code -- . +#git diff --exit-code -- . diff --git a/buildkite/scripts/check-compatibility.sh b/buildkite/scripts/check-compatibility.sh new file mode 100755 index 00000000000..f1e571fdb9b --- /dev/null +++ b/buildkite/scripts/check-compatibility.sh @@ -0,0 +1,213 @@ +#!/bin/bash + +# start mainline branch daemon as seed, see if PR branch daemon can sync to it + +# don't exit if docker download fails +set +e + +function get_shas { + SHAS=$(git log -n 10 --format="%h" --abbrev=7 --no-merges) +} + +function image_tag { + SHA=$1 + IMAGE_TAG="$SHA-bullseye-berkeley" +} + +function download-docker { + SHA=$1 + image_tag $SHA + docker pull gcr.io/o1labs-192920/mina-daemon:$IMAGE_TAG +} + +function try_docker_shas { + DOCKER_SHAS=$1 + GOT_DOCKER=0 + + for sha in $DOCKER_SHAS; do + download-docker $sha + if [ $? 
-eq 0 ] ; then + GOT_DOCKER=1 + image_tag $sha + break + else + echo "No docker available for SHA=$sha" + fi + done +} + +function image_id { + TAG=$1 + IMAGE_ID=$(docker images | grep $TAG | head -n 1 | awk '{print $3}') +} + +function gen_libp2p_keypair { + IMAGE_ID=$1 + DOCKER_TAG=$2 + + CONTAINER=$(docker run -d -e MINA_LIBP2P_PASS='' --entrypoint mina $IMAGE_ID libp2p generate-keypair --privkey-path libp2p) + + # allow time for key to be written + sleep 10 + + docker commit $CONTAINER "mina_ci":$DOCKER_TAG + + image_id $DOCKER_TAG + + COMMITTED_IMAGE_ID=$IMAGE_ID + + echo "Committed image:" $DOCKER_TAG:$COMMITTED_IMAGE_ID +} + +function boot_and_sync { + IMAGE_ID=$1 + EXTERNAL_PORT=$2 + REST_PORT=$3 + PEER_ID=$4 + PEER_PORT=$5 + + if [ ! -z $PEER_ID ] && [ ! -z $PEER_PORT ]; then + echo "Running with peer" $PEER_ID "on port" $PEER_PORT + PEER_FLAG="--peer /ip4/127.0.0.1/tcp/"$PEER_PORT"/p2p/"$PEER_ID + SEED_FLAG="" + else + echo "Running as seed" + PEER_FLAG="" + SEED_FLAG="--seed" + fi + + DAEMON_CONTAINER=$(docker run --entrypoint mina -d -e MINA_LIBP2P_PASS='' $IMAGE_ID daemon \ + --libp2p-keypair ./libp2p --external-port $EXTERNAL_PORT --rest-port $REST_PORT $PEER_FLAG $SEED_FLAG) + + # allow time to boot + sleep 20 + + SYNCED=0 + REST_SERVER="http://127.0.0.1:$REST_PORT/graphql" + + # print logs + docker container logs $DAEMON_CONTAINER --follow & + + while [ $SYNCED -eq 0 ]; do + SYNC_STATUS=$(docker container exec -it $DAEMON_CONTAINER \ + curl -g -X POST -H "Content-Type: application/json" -d '{"query":"query { syncStatus }"}' ${REST_SERVER}) + + # "connection refused" until GraphQL server up + GOT_SYNC_STATUS=$(echo ${SYNC_STATUS} | grep "syncStatus") + if [ ! -z $GOT_SYNC_STATUS ]; then + echo $(date +'%Y-%m-%d %H:%M:%S') ". Sync status:" $GOT_SYNC_STATUS + fi + + SYNCED=$(echo ${SYNC_STATUS} | grep -c "SYNCED") + sleep 5 + done +} + +function rm_docker_container { + IMAGE_ID=$1 + + DOCKER_CONTAINER=$(docker ps -a | grep $IMAGE_ID | awk '{print $1}') + + docker kill $DOCKER_CONTAINER + docker rm $DOCKER_CONTAINER +} + +### start of code + +if [[ $# -ne 1 ]]; then + echo "Usage: $0 " + exit 1 +fi + +MAINLINE_BRANCH=$1 + +case "$BUILDKITE_PULL_REQUEST_BASE_BRANCH" in + develop) ;; + *) + echo "PR is not against develop, not running the $MAINLINE_BRANCH compatibility test" + exit 0 +esac + +### Download docker images + +echo "Current branch is $BUILDKITE_BRANCH" + +echo "Checking out $MAINLINE_BRANCH branch" +git checkout $MAINLINE_BRANCH +git pull + +echo "Getting $MAINLINE_BRANCH docker" +get_shas +try_docker_shas "$SHAS" + +if [ $GOT_DOCKER -eq 1 ] ; then + echo "Got $MAINLINE_BRANCH docker" +else + echo "Could not find $MAINLINE_BRANCH docker" + exit 1 +fi + +MAIN_BRANCH_IMAGE_TAG=$IMAGE_TAG + +CURR_BRANCH=$(git rev-parse --symbolic-full-name --abbrev-ref HEAD) + +echo "Checking out PR branch" +git checkout $CURR_BRANCH + +echo "Getting PR docker" +get_shas +try_docker_shas "$SHAS" + +if [ $GOT_DOCKER -eq 1 ] ; then + echo "Got docker for PR branch" +else + echo "Could not find a docker for PR branch" + exit 1 +fi + +PR_IMAGE_TAG=$IMAGE_TAG + +echo "${MAINLINE_BRANCH} image tag:" $MAIN_BRANCH_IMAGE_TAG +echo "PR image tag:" $PR_IMAGE_TAG + +image_id $MAIN_BRANCH_IMAGE_TAG +MAIN_BRANCH_IMAGE_ID=$IMAGE_ID + +echo "${MAINLINE_BRANCH} image id:" $MAIN_BRANCH_IMAGE_ID + +image_id $PR_IMAGE_TAG +PR_IMAGE_ID=$IMAGE_ID + +echo "PR image id:" $PR_IMAGE_ID + +### Run docker images + +# generate libp2p keypair for mainline branch +gen_libp2p_keypair $MAIN_BRANCH_IMAGE_ID 
"${MAINLINE_BRANCH}_docker" + +MAIN_BRANCH_COMMITTED_IMAGE_ID=$COMMITTED_IMAGE_ID +MAIN_BRANCH_LIBP2P_PEER_ID=$(docker run -e MINA_LIBP2P_PASS='' --entrypoint mina $MAIN_BRANCH_COMMITTED_IMAGE_ID \ + libp2p dump-keypair --privkey-path libp2p | awk -F , '(NR==2){print $3}') + +echo "${MAINLINE_BRANCH} libp2p peer id:" $MAIN_BRANCH_LIBP2P_PEER_ID + +echo "Booting ${MAINLINE_BRANCH} daemon" +boot_and_sync $MAIN_BRANCH_COMMITTED_IMAGE_ID 8302 3085 + +echo "${MAINLINE_BRANCH} seed done bootstrapping" + +# generate PR libp2p keypair +gen_libp2p_keypair $PR_IMAGE_ID "pr_docker" + +PR_COMMITTED_IMAGE_ID=$COMMITTED_IMAGE_ID + +echo "Booting PR daemon" + +boot_and_sync $PR_COMMITTED_IMAGE_ID 8305 3086 $MAIN_BRANCH_LIBP2P_PEER_ID 8302 + +echo "PR daemon synced to ${MAINLINE_BRANCH} daemon!" + +echo "Removing docker containers" + +rm_docker_container $MAIN_BRANCH_COMMITTED_IMAGE_ID +rm_docker_container $PR_COMMITTED_IMAGE_ID diff --git a/buildkite/scripts/export-git-env-vars.sh b/buildkite/scripts/export-git-env-vars.sh index 8dacf3fda94..1f88f4128c3 100755 --- a/buildkite/scripts/export-git-env-vars.sh +++ b/buildkite/scripts/export-git-env-vars.sh @@ -6,7 +6,7 @@ echo "Exporting Variables: " export MINA_REPO="https://github.com/MinaProtocol/mina.git" function find_most_recent_numeric_tag() { - TAG=$(git describe --always --abbrev=0 $1 | sed 's!/!-!g; s!_!-!g') + TAG=$(git describe --always --abbrev=0 $1 | sed 's!/!-!g; s!_!-!g; s!#!-!g') if [[ $TAG != [0-9]* ]]; then TAG=$(find_most_recent_numeric_tag $TAG~) fi @@ -14,7 +14,7 @@ function find_most_recent_numeric_tag() { } export GITHASH=$(git rev-parse --short=7 HEAD) -export GITBRANCH=$(git rev-parse --symbolic-full-name --abbrev-ref HEAD | sed 's!/!-!g; s!_!-!g' ) +export GITBRANCH=$(git rev-parse --symbolic-full-name --abbrev-ref HEAD | sed 's!/!-!g; s!_!-!g; s!#!-!g' ) export THIS_COMMIT_TAG=$(git tag --points-at HEAD) export PROJECT="mina" @@ -25,7 +25,7 @@ export BUILD_URL=${BUILDKITE_BUILD_URL} set -u export MINA_DEB_CODENAME=${MINA_DEB_CODENAME:=bullseye} -[[ -n "$BUILDKITE_BRANCH" ]] && export GITBRANCH=$(echo "$BUILDKITE_BRANCH" | sed 's!/!-!g; s!_!-!g') +[[ -n "$BUILDKITE_BRANCH" ]] && export GITBRANCH=$(echo "$BUILDKITE_BRANCH" | sed 's!/!-!g; s!_!-!g; s!#!-!g') if [ "${BUILDKITE_REPO}" != "${MINA_REPO}" ]; then @@ -75,12 +75,12 @@ fi if [[ -n "${THIS_COMMIT_TAG}" ]]; then # If the commit is tagged export MINA_DEB_VERSION="${GITTAG}-${GITHASH}" - export MINA_DOCKER_TAG="$(echo "${MINA_DEB_VERSION}-${MINA_DEB_CODENAME}" | sed 's!/!-!g; s!_!-!g')" else export MINA_DEB_VERSION="${GITTAG}-${GITBRANCH}-${GITHASH}" - export MINA_DOCKER_TAG="$(echo "${MINA_DEB_VERSION}-${MINA_DEB_CODENAME}" | sed 's!/!-!g; s!_!-!g')" fi +export MINA_DOCKER_TAG="$(echo "${MINA_DEB_VERSION}-${MINA_DEB_CODENAME}" | sed 's!/!-!g; s!_!-!g; s!#!-!g')" + # Determine the packages to build (mainnet y/N) case $GITBRANCH in compatible|master|release/1*) # whitelist of branches that are "mainnet-like" diff --git a/buildkite/scripts/merges-cleanly.sh b/buildkite/scripts/merges-cleanly.sh index 8743b4e3af9..04bdfddad61 100755 --- a/buildkite/scripts/merges-cleanly.sh +++ b/buildkite/scripts/merges-cleanly.sh @@ -2,15 +2,20 @@ BRANCH=$1 CURRENT=$(git branch --show-current) -echo 'Testing for conflicts between the current branch `'"${CURRENT}"'` and `'"${BRANCH}"'`...' +echo 'Testing for conflicts between the current branch `'"${BUILDKITE_PULL_REQUEST_BASE_BRANCH}"'` and `'"${BRANCH}"'`...' 
# Adapted from this stackoverflow answer: https://stackoverflow.com/a/10856937
# The git merge-tree command shows the content of a 3-way merge without
# touching the index, which we can then search for conflict markers.

-# Tell git where to find ssl certs
-git config --global http.sslCAInfo /etc/ssl/certs/ca-bundle.crt
+# Only execute in the CI. If the script is run locally, it messes up the user's
+# config
+if [ "${BUILDKITE:-false}" == true ]
+then
+  # Tell git where to find ssl certs
+  git config --global http.sslCAInfo /etc/ssl/certs/ca-bundle.crt
+fi

# Fetch a fresh copy of the repo
git fetch origin
@@ -31,6 +36,12 @@ if [ $RET -eq 0 ]; then
     echo "No conflicts found against upstream branch ${BRANCH}"
     exit 0
 else
+    # exclude branches for which merging cleanly is not a hard requirement
+    if [ "${BUILDKITE_PULL_REQUEST_BASE_BRANCH}" == "o1js-main" ]; then
+      echo "Conflicts were found, but the current branch does not have to merge cleanly. Exiting with code 0."
+      exit 0
+    fi
+
     # Found a conflict
     echo "[ERROR] This pull request conflicts with $BRANCH, please open a new pull request against $BRANCH at this link:"
     echo "https://github.com/MinaProtocol/mina/compare/${BRANCH}...${BUILDKITE_BRANCH}"
diff --git a/buildkite/scripts/run-snark-transaction-profiler.sh b/buildkite/scripts/run-snark-transaction-profiler.sh
index f7298d9c62d..baba9a6561b 100755
--- a/buildkite/scripts/run-snark-transaction-profiler.sh
+++ b/buildkite/scripts/run-snark-transaction-profiler.sh
@@ -11,7 +11,6 @@ apt-get install -y git apt-transport-https ca-certificates tzdata curl python3
 TESTNET_NAME="berkeley"
 git config --global --add safe.directory /workdir
-
 source buildkite/scripts/export-git-env-vars.sh
 echo "Installing mina daemon package: mina-${TESTNET_NAME}=${MINA_DEB_VERSION}"
diff --git a/buildkite/scripts/test-nix.sh b/buildkite/scripts/test-nix.sh
index 4df85e344c3..d7ca529bc9a 100755
--- a/buildkite/scripts/test-nix.sh
+++ b/buildkite/scripts/test-nix.sh
@@ -43,6 +43,8 @@ fi
 # run chown to the current user to fix it
 chown -R "${USER}" /workdir
+nix-env -i git-lfs
+
 git config --global --add safe.directory /workdir
 git fetch
diff --git a/buildkite/scripts/unit-test.sh b/buildkite/scripts/unit-test.sh
index 4e66304320e..b5bc641b66d 100755
--- a/buildkite/scripts/unit-test.sh
+++ b/buildkite/scripts/unit-test.sh
@@ -22,16 +22,12 @@ time make build
 echo "--- Build all targets"
 dune build "${path}" --profile="${profile}" -j16
+echo "--- Check for changes to verification keys"
+time dune runtest "src/app/print_blockchain_snark_vk" --profile="${profile}" -j16
+
 # Turn on the proof-cache assertion, so that CI will fail if the proofs need to
 # be updated.
 export ERROR_ON_PROOF=true
-# Note: By attempting a re-run on failure here, we can avoid rebuilding and
-# skip running all of the tests that have already succeeded, since dune will
-# only retry those tests that failed.
echo "--- Run unit tests" -time dune runtest "${path}" --profile="${profile}" -j16 || \ -(./scripts/link-coredumps.sh && \ - echo "--- Retrying failed unit tests" && \ - time dune runtest "${path}" --profile="${profile}" -j16 || \ - (./scripts/link-coredumps.sh && false)) +time dune runtest "${path}" --profile="${profile}" -j16 || (./scripts/link-coredumps.sh) diff --git a/buildkite/scripts/version-linter.sh b/buildkite/scripts/version-linter.sh index bde39be6f40..0344021efb4 100755 --- a/buildkite/scripts/version-linter.sh +++ b/buildkite/scripts/version-linter.sh @@ -7,6 +7,8 @@ if [[ $# -ne 1 ]]; then exit 1 fi +TESTNET_NAME="${TESTNET_NAME:-berkeley}" + # Don't prompt for answers during apt-get install export DEBIAN_FRONTEND=noninteractive diff --git a/buildkite/src/Command/Base.dhall b/buildkite/src/Command/Base.dhall index 178ab0570d0..e7ad61f81d8 100644 --- a/buildkite/src/Command/Base.dhall +++ b/buildkite/src/Command/Base.dhall @@ -101,10 +101,11 @@ let Config = , docker_login : Optional DockerLogin.Type , summon : Optional Summon.Type , retries : List Retry.Type + , flake_retry_limit: Optional Natural , soft_fail : Optional B/SoftFail , skip: Optional B/Skip , `if` : Optional B/If - , timeout_in_minutes : Optional Natural + , timeout_in_minutes : Optional Integer } , default = { depends_on = [] : List TaggedKey.Type @@ -114,10 +115,11 @@ let Config = , artifact_paths = [] : List SelectFiles.Type , env = [] : List TaggedKey.Type , retries = [] : List Retry.Type + , flake_retry_limit = Some 0 , soft_fail = None B/SoftFail , skip = None B/Skip , `if` = None B/If - , timeout_in_minutes = None Natural + , timeout_in_minutes = None Integer } } @@ -155,6 +157,7 @@ let build : Config.Type -> B/Command.Type = \(c : Config.Type) -> else Some (B/ArtifactPaths.String (SelectFiles.compile c.artifact_paths)), key = Some c.key, label = Some c.label, + timeout_in_minutes = c.timeout_in_minutes, retry = Some { -- we only consider automatic retries @@ -181,11 +184,14 @@ let build : Config.Type -> B/Command.Type = \(c : Config.Type) -> retry.limit }) -- per https://buildkite.com/docs/agent/v3#exit-codes: - ([ + ( + [ -- infra error Retry::{ exit_status = ExitStatus.Code -1, limit = Some 4 }, -- infra error Retry::{ exit_status = ExitStatus.Code +255, limit = Some 4 }, + -- common/flake error + Retry::{ exit_status = ExitStatus.Code +1, limit = c.flake_retry_limit }, -- apt-get update race condition error Retry::{ exit_status = ExitStatus.Code +100, limit = Some 4 }, -- Git checkout error diff --git a/buildkite/src/Command/TestExecutive.dhall b/buildkite/src/Command/TestExecutive.dhall index e7a2a9f367a..524ea2c628f 100644 --- a/buildkite/src/Command/TestExecutive.dhall +++ b/buildkite/src/Command/TestExecutive.dhall @@ -27,12 +27,6 @@ in label = "${testName} integration test", key = "integration-test-${testName}", target = Size.Integration, - depends_on = dependsOn, - retries = [ - -- common/flake error - Command.Retry::{ exit_status = Command.ExitStatus.Code +1, limit = Some 4 }, - -- Blindly retry 4 more times anyway. Why not. 
- Command.Retry::{ exit_status = Command.ExitStatus.Any, limit = Some 4 } - ] + depends_on = dependsOn } } diff --git a/buildkite/src/Constants/ContainerImages.dhall b/buildkite/src/Constants/ContainerImages.dhall index ef68185e868..470e4663885 100644 --- a/buildkite/src/Constants/ContainerImages.dhall +++ b/buildkite/src/Constants/ContainerImages.dhall @@ -4,10 +4,10 @@ -- NOTE: minaToolchainBookworm is also used for building Ubuntu Jammy packages in CI { toolchainBase = "codaprotocol/ci-toolchain-base:v3", - minaToolchainBuster = "gcr.io/o1labs-192920/mina-toolchain@sha256:563fd7adda282fb3b6765c1811a3566e0fa0560f5d1c5270003483030d82d394", - minaToolchainBullseye = "gcr.io/o1labs-192920/mina-toolchain@sha256:49891eb46089f937f054afa464ce9868529981b92b30740cce32ef60957a1098", - minaToolchainBookworm = "gcr.io/o1labs-192920/mina-toolchain@sha256:49891eb46089f937f054afa464ce9868529981b92b30740cce32ef60957a1098", - minaToolchain = "gcr.io/o1labs-192920/mina-toolchain@sha256:49891eb46089f937f054afa464ce9868529981b92b30740cce32ef60957a1098", + minaToolchainBuster = "gcr.io/o1labs-192920/mina-toolchain@sha256:71173ebccf6af3e24d27262a5071f3dd0bd2c40b9de1c258422fdb9419507d3c", + minaToolchainBullseye = "gcr.io/o1labs-192920/mina-toolchain@sha256:9c4062e76fcd910ad60d3f1f58e2395f6a5e70f16fbef422442aedb70112ac73", + minaToolchainBookworm = "gcr.io/o1labs-192920/mina-toolchain@sha256:9c4062e76fcd910ad60d3f1f58e2395f6a5e70f16fbef422442aedb70112ac73", + minaToolchain = "gcr.io/o1labs-192920/mina-toolchain@sha256:9c4062e76fcd910ad60d3f1f58e2395f6a5e70f16fbef422442aedb70112ac73", elixirToolchain = "elixir:1.10-alpine", nodeToolchain = "node:14.13.1-stretch-slim", ubuntu2004 = "ubuntu:20.04", diff --git a/buildkite/src/Constants/DebianVersions.dhall b/buildkite/src/Constants/DebianVersions.dhall index fed818e5e51..98afdc77227 100644 --- a/buildkite/src/Constants/DebianVersions.dhall +++ b/buildkite/src/Constants/DebianVersions.dhall @@ -70,6 +70,7 @@ let minimalDirtyWhen = [ S.exactly "scripts/rebuild-deb" "sh", S.exactly "scripts/release-docker" "sh", S.exactly "buildkite/scripts/build-artifact" "sh", + S.exactly "buildkite/scripts/check-compatibility" "sh", -- Snark profiler dirtyWhen S.exactly "buildkite/src/Jobs/Test/RunSnarkProfiler" "dhall", S.exactly "buildkite/scripts/run-snark-transaction-profiler" "sh", diff --git a/buildkite/src/Jobs/Lint/Merge.dhall b/buildkite/src/Jobs/Lint/Merge.dhall index 59ba5ee4e91..44be58216ed 100644 --- a/buildkite/src/Jobs/Lint/Merge.dhall +++ b/buildkite/src/Jobs/Lint/Merge.dhall @@ -51,6 +51,7 @@ Pipeline.build commands = [ Cmd.run "buildkite/scripts/merges-cleanly.sh berkeley"] , label = "Check merges cleanly into berkeley" , key = "clean-merge-berkeley" + , soft_fail = Some (B/SoftFail.Boolean True) , target = Size.Small , docker = Some Docker::{ image = (../../Constants/ContainerImages.dhall).toolchainBase diff --git a/buildkite/src/Jobs/Test/BerkeleyCompatibility.dhall b/buildkite/src/Jobs/Test/BerkeleyCompatibility.dhall new file mode 100644 index 00000000000..fbe72a1751c --- /dev/null +++ b/buildkite/src/Jobs/Test/BerkeleyCompatibility.dhall @@ -0,0 +1,47 @@ +let JobSpec = ../../Pipeline/JobSpec.dhall +let Pipeline = ../../Pipeline/Dsl.dhall +let PipelineMode = ../../Pipeline/Mode.dhall +let PipelineTag = ../../Pipeline/Tag.dhall +let Prelude = ../../External/Prelude.dhall + +let Cmd = ../../Lib/Cmds.dhall +let S = ../../Lib/SelectFiles.dhall +let D = S.PathPattern + +let Command = ../../Command/Base.dhall +let RunInToolchain = ../../Command/RunInToolchain.dhall 
+let Docker = ../../Command/Docker/Type.dhall +let Size = ../../Command/Size.dhall + +let dependsOn = [ + { name = "MinaArtifactBullseye", key = "daemon-berkeley-bullseye-docker-image" } +] + +in Pipeline.build Pipeline.Config::{ + spec = + JobSpec::{ + dirtyWhen = [ + S.strictlyStart (S.contains "src"), + S.exactly "buildkite/scripts/check-compatibility" "sh", + S.exactly "buildkite/src/Jobs/Test/BerkeleyCompatibility" "dhall" + ], + path = "Test", + tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ], + name = "BerkeleyCompatibility" + }, + steps = [ + Command.build Command.Config::{ + commands = [ + Cmd.run "buildkite/scripts/check-compatibility.sh berkeley" + ], + label = "Test: berkeley compatibility test", + key = "berkeley-compatibility-test", + target = Size.XLarge, + docker = None Docker.Type, + depends_on = dependsOn, + timeout_in_minutes = Some +60 + } + ] +} + + diff --git a/buildkite/src/Jobs/Test/DevelopCompatibility.dhall b/buildkite/src/Jobs/Test/DevelopCompatibility.dhall new file mode 100644 index 00000000000..ba907ed0d5e --- /dev/null +++ b/buildkite/src/Jobs/Test/DevelopCompatibility.dhall @@ -0,0 +1,47 @@ +let JobSpec = ../../Pipeline/JobSpec.dhall +let Pipeline = ../../Pipeline/Dsl.dhall +let PipelineMode = ../../Pipeline/Mode.dhall +let PipelineTag = ../../Pipeline/Tag.dhall +let Prelude = ../../External/Prelude.dhall + +let Cmd = ../../Lib/Cmds.dhall +let S = ../../Lib/SelectFiles.dhall +let D = S.PathPattern + +let Command = ../../Command/Base.dhall +let RunInToolchain = ../../Command/RunInToolchain.dhall +let Docker = ../../Command/Docker/Type.dhall +let Size = ../../Command/Size.dhall + +let dependsOn = [ + { name = "MinaArtifactBullseye", key = "daemon-berkeley-bullseye-docker-image" } +] + +in Pipeline.build Pipeline.Config::{ + spec = + JobSpec::{ + dirtyWhen = [ + S.strictlyStart (S.contains "src"), + S.exactly "buildkite/scripts/check-compatibility" "sh", + S.exactly "buildkite/src/Jobs/Test/DevelopCompatibility" "dhall" + ], + path = "Test", + tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ], + name = "DevelopCompatibility" + }, + steps = [ + Command.build Command.Config::{ + commands = [ + Cmd.run "buildkite/scripts/check-compatibility.sh develop" + ], + label = "Test: develop compatibility test", + key = "develop-compatibility-test", + target = Size.XLarge, + docker = None Docker.Type, + depends_on = dependsOn, + timeout_in_minutes = Some +60 + } + ] +} + + diff --git a/buildkite/src/Jobs/Test/FuzzyZkappTest.dhall b/buildkite/src/Jobs/Test/FuzzyZkappTest.dhall index 5dd541ab0e0..826f38be20d 100644 --- a/buildkite/src/Jobs/Test/FuzzyZkappTest.dhall +++ b/buildkite/src/Jobs/Test/FuzzyZkappTest.dhall @@ -26,9 +26,9 @@ let buildTestCmd : Text -> Text -> Natural -> Natural -> Size -> Command.Type = key = key, target = cmd_target, docker = None Docker.Type, - artifact_paths = [ S.contains "core_dumps/*" ] + artifact_paths = [ S.contains "core_dumps/*" ], + flake_retry_limit = Some 0 } - in Pipeline.build diff --git a/buildkite/src/Jobs/Test/Libp2pUnitTest.dhall b/buildkite/src/Jobs/Test/Libp2pUnitTest.dhall index 76098a7f444..ce5a974ee22 100644 --- a/buildkite/src/Jobs/Test/Libp2pUnitTest.dhall +++ b/buildkite/src/Jobs/Test/Libp2pUnitTest.dhall @@ -57,7 +57,7 @@ Pipeline.build key = "libp2p-bs-qc", target = Size.Large, docker = None Docker.Type, - timeout_in_minutes = Some 45 + timeout_in_minutes = Some +45 } ] diff --git a/buildkite/src/Jobs/Test/ValidationService.dhall b/buildkite/src/Jobs/Test/ValidationService.dhall deleted file mode 100644 index 
cce41331433..00000000000 --- a/buildkite/src/Jobs/Test/ValidationService.dhall +++ /dev/null @@ -1,33 +0,0 @@ -let S = ../../Lib/SelectFiles.dhall -let JobSpec = ../../Pipeline/JobSpec.dhall -let Pipeline = ../../Pipeline/Dsl.dhall -let PipelineTag = ../../Pipeline/Tag.dhall -let Command = ../../Command/Base.dhall -let Docker = ../../Command/Docker/Type.dhall -let Size = ../../Command/Size.dhall -let ValidationService = ../../Projects/ValidationService.dhall - -in Pipeline.build Pipeline.Config::{ - spec = - let dirtyDhallDir = S.strictlyStart (S.contains "buildkite/src/Jobs/Test/ValidationService") - in JobSpec::{ - dirtyWhen = [ - dirtyDhallDir, - S.strictlyStart (S.contains ValidationService.rootPath) - ], - path = "Test", - name = "ValidationService", - tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Test ] - }, - steps = [ - Command.build Command.Config::{ - commands = ValidationService.initCommands # [ - ValidationService.runMix "test" - ], - label = "Validation service tests; executes the ExUnit test suite", - key = "validation-service-test", - target = Size.Small, - docker = Some Docker::{ image = ValidationService.containerImage } - } - ] -} diff --git a/dockerfiles/stages/1-build-deps b/dockerfiles/stages/1-build-deps index ea7bc50d364..65e0f9f748f 100644 --- a/dockerfiles/stages/1-build-deps +++ b/dockerfiles/stages/1-build-deps @@ -20,12 +20,15 @@ ARG GO_VERSION=1.19.11 ARG GO_CAPNP_VERSION=v3.0.0-alpha.5 # Rust Version passed into rustup-init, can also be "stable", "nightly" or similar -ARG RUST_VERSION=1.63.0 +# This should stay in line with: +# - src/lib/crypto/kimchi_bindings/stubs/rust-toolchain.toml +# - src/lib/crypto/proof-systems/rust-toolchain.toml +ARG RUST_VERSION=1.72 # Nightly Rust Version used for WebAssembly builds # - src/lib/crypto/kimchi_bindings/wasm/rust-toolchain.toml -ARG RUST_NIGHTLY=2022-09-12 +ARG RUST_NIGHTLY=2023-09-01 # wasm-pack version -ARG WASM_PACK_VERSION=v0.10.3 +ARG WASM_PACK_VERSION=v0.12.1 # Rocksdb commit tag/branch to clone ARG ROCKSDB_VERSION=v5.18.4 diff --git a/docs/README.md b/docs/README.md index e12901ca674..00a529f7026 100644 --- a/docs/README.md +++ b/docs/README.md @@ -2,4 +2,4 @@ The docs for the Mina Protocol website are published on [docs.minaprotocol.com](https://docs.minaprotocol.com/). -The docs repository is [https://github.com/o1-labs/docs2/)https://github.com/o1-labs/docs2/](https://github.com/o1-labs/docs2/)https://github.com/o1-labs/docs2/). +The docs repository is [https://github.com/o1-labs/docs2/](https://github.com/o1-labs/docs2/). diff --git a/docs/docker.md b/docs/docker.md index 0d01d41f4eb..08f72511583 100644 --- a/docs/docker.md +++ b/docs/docker.md @@ -28,7 +28,7 @@ docker run codaprotocol/coda-daemon: daemon -help ### Running Coda with a Container Orchestrator -Currently, the implementation of the Kademlia DHT in use by the Coda Daemon is a tad tempermental, and requires consistent ports to be set on both the host and container. +Currently, the implementation of the Kademlia DHT in use by the Coda Daemon is a tad temperamental, and requires consistent ports to be set on both the host and container. There is a bug issue [here](https://github.com/CodaProtocol/coda/issues/2947) that details the problem. In the meantime, it is easiest to avoid any sort of bridge networking and run the Daemon container on the host network, especially if you'd like to use non-default ports. 
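+
+For example, a minimal sketch of such an invocation (the image tag and the
+port here are placeholders, not tested values):
+
+```
+docker run --network host codaprotocol/coda-daemon:<tag> daemon -external-port 10101
+```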
diff --git a/docs/res/transition_frontier_diagram.tex.png b/docs/res/transition_frontier_diagram.tex.png deleted file mode 100644 index e8a3a31f605..00000000000 --- a/docs/res/transition_frontier_diagram.tex.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1925d3b821fbac581a6c7d5481c6f48817ad0f409b7a676e958ee3e37c6633e1 -size 21139 diff --git a/docs/specs/consensus/README.md b/docs/specs/consensus/README.md index 8addb58709b..0444fe5ad73 100644 --- a/docs/specs/consensus/README.md +++ b/docs/specs/consensus/README.md @@ -99,7 +99,7 @@ These are the `mainnet` parameters Mina uses for Samasika | Field | Value | Description | | - | - | - | -| `delta` | `0` | Maximum permissable delay of packets (in slots after the current) | +| `delta` | `0` | Maximum permissible delay of packets (in slots after the current) | | `k` | `290` | Depth of finality (number of confirmations) | | `slots_per_epoch` | `7140` | Number of slots per epoch | | `slots_duration` | `180000` (= 3m) | Slot duration in ms | diff --git a/docs/specs/types_and_structures/serialized_srs.md b/docs/specs/types_and_structures/serialized_srs.md index 93ec2de2d0b..daac332490b 100644 --- a/docs/specs/types_and_structures/serialized_srs.md +++ b/docs/specs/types_and_structures/serialized_srs.md @@ -4,5 +4,5 @@ This describes the SRS binary file format | Field | Type | Description | | - | - | - | -| `g` | `Vector` | For comitting polynomials | +| `g` | `Vector` | For committing polynomials | | `h ` | `GAffine` | Blinding factor | diff --git a/docs/tracing.md b/docs/tracing.md index f75585d021f..3afa23026d3 100644 --- a/docs/tracing.md +++ b/docs/tracing.md @@ -15,7 +15,7 @@ Chrome trace-viewer to view. You can do this with `src/app/trace-tool`. Using it is simple if you have Rust installed: `cd src/app/trace-tool; cargo run --release /path/to/1234.trace > trace.json`. Then you can load the file from `chrome://tracing`. -Each row correponds to a "task" as created by either `O1trace.trace_task` or +Each row corresponds to a "task" as created by either `O1trace.trace_task` or `O1trace.trace_recurring_task`. A recurring task starts with `R&`. Internally it works by doing one `trace_task` per call, and the `trace-tool` knows how to collapse them all into one row. diff --git a/flake.nix b/flake.nix index 64d9a952855..03a5275bf43 100644 --- a/flake.nix +++ b/flake.nix @@ -288,12 +288,12 @@ # Main user-facing binaries. packages = rec { inherit (ocamlPackages) - mina devnet mainnet mina_tests mina-ocaml-format test_executive; + mina devnet mainnet mina_tests mina-ocaml-format mina_client_sdk test_executive with-instrumentation; inherit (pkgs) libp2p_helper kimchi_bindings_stubs snarky_js leaderboard validation trace-tool zkapp-cli; inherit (dockerImages) - mina-image-slim mina-image-full mina-archive-image-full; + mina-image-slim mina-image-full mina-archive-image-full mina-image-instr-full; mina-deb = debianPackages.mina; default = mina; }; diff --git a/frontend/ci-build-me/README.md b/frontend/ci-build-me/README.md index 5b3de329115..bf33574f65b 100644 --- a/frontend/ci-build-me/README.md +++ b/frontend/ci-build-me/README.md @@ -27,3 +27,14 @@ gcloud functions deploy githubWebhookHandler \ ``` This deploys to https://us-central1-o1labs-192920.cloudfunctions.net/githubWebhookHandler + +## Update Branch Protection Rules + +In order to gate a new branch with this mechanism, github needs to see this job run (but generally we don't actually run a job here, + just block on its existence). 
This means that if months pass between changes, github will stop showing the buildkite/mina-pr-gating job + in their UI and therefore you cannot block new branches on it. + +To fix this, run the PR gating job manually in the buildkite UI here: https://buildkite.com/o-1-labs-2/mina-pr-gating + +Just running the job once will re-populate it in github's dropdown menus so that you can add the gate to a new branch. +This does not require a redeploy unless you're also intending to change the mechanism of activation or the list of users with this power. diff --git a/frontend/ci-build-me/src/index.js b/frontend/ci-build-me/src/index.js index fcfa0dd796f..fe67d7e56c4 100644 --- a/frontend/ci-build-me/src/index.js +++ b/frontend/ci-build-me/src/index.js @@ -64,9 +64,8 @@ const getRequest = async (url) => { const handler = async (event, req) => { const buildkiteTrigger = {}; - if (event == "issue_comment") { - // PR Gating Lifting section - if ( + // PR Gating Lifting section + if ( // we are creating the comment req.body.action == "created" && // and this is actually a pull request @@ -92,11 +91,11 @@ const handler = async (event, req) => { "mina-pr-gating", { PR_GATE: "lifted" } ); - return [buildkite, null]; + return buildkite; } else { return [ - "comment author is not (publically) a member of the core team", - "comment author is not (publically) a member of the core team", + "comment author is not authorized to approve for mainnet", + "comment author is not authorized to approve for mainnet", ]; } } @@ -171,9 +170,42 @@ const handler = async (event, req) => { ]; } } - } - return [null, null]; -}; + + else if ( + // we are creating the comment + req.body.action == "created" && + // and this is actually a pull request + req.body.issue.pull_request && + req.body.issue.pull_request.url && + // and the comment contents is exactly the slug we are looking for + req.body.comment.body == "!ci-toolchain-me" + ) { + const orgData = await getRequest(req.body.sender.organizations_url); + // and the comment author is part of the core team + if ( + orgData.data.filter((org) => org.login == "MinaProtocol").length > 0 + ) { + const prData = await getRequest(req.body.issue.pull_request.url); + const buildkite = await runBuild( + { + sender: req.body.sender, + pull_request: prData.data, + }, + "mina-toolchains-build", + {} + ); + return buildkite; + } else { + // NB: Users that are 'privately' a member of the org will not be able to trigger CI jobs + return [ + "comment author is not (publicly) a member of the core team", + "comment author is not (publicly) a member of the core team", + ]; + } + } + + return null; + }; /** * HTTP Cloud Function for GitHub Webhook events.
@@ -201,24 +233,16 @@ exports.githubWebhookHandler = async (req, res) => { github.validateWebhook(req); const githubEvent = req.headers["x-github-event"]; - const [buildkite, circle] = await handler(githubEvent, req); + const buildkite = await handler(githubEvent, req); if (buildkite && buildkite.web_url) { console.info(`Triggered buildkite build at ${buildkite.web_url}`); } else { console.error(`Failed to trigger buildkite build for some reason:`); console.error(buildkite); } - - if (circle && circle.number) { - console.info(`Triggered circle build #${circle.number}`); - } else { - console.error(`Failed to trigger circle build for some reason:`); - console.error(circle); - } - res.status(200); console.info(`HTTP 200: ${githubEvent} event`); - res.send({ buildkite, circle } || {}); + res.send({ buildkite } || {}); } catch (e) { if (e instanceof HTTPError) { res.status(e.statusCode).send(e.message); diff --git a/graphql_schema.json b/graphql_schema.json index 5c3c65184b5..8b2b2b4ddcd 100644 --- a/graphql_schema.json +++ b/graphql_schema.json @@ -6447,68 +6447,6 @@ "enumValues": null, "possibleTypes": null }, - { - "kind": "ENUM", - "name": "sign", - "description": null, - "fields": null, - "inputFields": null, - "interfaces": null, - "enumValues": [ - { - "name": "PLUS", - "description": null, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "MINUS", - "description": null, - "isDeprecated": false, - "deprecationReason": null - } - ], - "possibleTypes": null - }, - { - "kind": "OBJECT", - "name": "SignedFee", - "description": "Signed fee", - "fields": [ - { - "name": "sign", - "description": "+/-", - "args": [], - "type": { - "kind": "NON_NULL", - "name": null, - "ofType": { "kind": "ENUM", "name": "sign", "ofType": null } - }, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "feeMagnitude", - "description": "Fee", - "args": [], - "type": { - "kind": "NON_NULL", - "name": null, - "ofType": { - "kind": "SCALAR", - "name": "Amount", - "ofType": null - } - }, - "isDeprecated": false, - "deprecationReason": null - } - ], - "inputFields": null, - "interfaces": [], - "enumValues": null, - "possibleTypes": null - }, { "kind": "OBJECT", "name": "WorkDescription", @@ -6593,7 +6531,7 @@ "name": null, "ofType": { "kind": "OBJECT", - "name": "SignedFee", + "name": "FeeExcess", "ofType": null } }, @@ -6625,7 +6563,7 @@ "name": null, "ofType": { "kind": "OBJECT", - "name": "SignedFee", + "name": "SignedAmount", "ofType": null } }, @@ -10539,81 +10477,192 @@ "possibleTypes": null }, { - "kind": "SCALAR", - "name": "PendingCoinbaseHash", - "description": - "Base58Check-encoded hash of a pending coinbase hash", - "fields": null, - "inputFields": null, - "interfaces": null, - "enumValues": null, - "possibleTypes": null - }, - { - "kind": "SCALAR", - "name": "PendingCoinbaseAuxHash", - "description": - "Base58Check-encoded hash of a pending coinbase auxiliary hash", - "fields": null, + "kind": "OBJECT", + "name": "SignedFee", + "description": "Signed fee", + "fields": [ + { + "name": "sign", + "description": "+/-", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { "kind": "ENUM", "name": "sign", "ofType": null } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "feeMagnitude", + "description": "Fee", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { "kind": "SCALAR", "name": "Fee", "ofType": null } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], "inputFields": 
null, - "interfaces": null, + "interfaces": [], "enumValues": null, "possibleTypes": null }, { - "kind": "SCALAR", - "name": "StagedLedgerAuxHash", - "description": - "Base58Check-encoded hash of the staged ledger hash's aux_hash", - "fields": null, + "kind": "OBJECT", + "name": "FeeExcess", + "description": "Fee excess divided into left, right components", + "fields": [ + { + "name": "feeTokenLeft", + "description": "Token id for left component of fee excess", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "TokenId", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "feeExcessLeft", + "description": "Fee for left component of fee excess", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SignedFee", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "feeTokenRight", + "description": "Token id for right component of fee excess", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "TokenId", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "feeExcessRight", + "description": "Fee for right component of fee excess", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SignedFee", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], "inputFields": null, - "interfaces": null, + "interfaces": [], "enumValues": null, "possibleTypes": null }, { - "kind": "SCALAR", - "name": "LedgerHash", - "description": "Base58Check-encoded ledger hash", + "kind": "ENUM", + "name": "sign", + "description": null, "fields": null, "inputFields": null, "interfaces": null, - "enumValues": null, + "enumValues": [ + { + "name": "PLUS", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "MINUS", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], "possibleTypes": null }, { "kind": "OBJECT", - "name": "BlockchainState", - "description": null, + "name": "SignedAmount", + "description": "Signed amount", "fields": [ { - "name": "date", - "description": - "date (stringified Unix time - number of milliseconds since January 1, 1970)", + "name": "sign", + "description": "+/-", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { "kind": "ENUM", "name": "sign", "ofType": null } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "amountMagnitude", + "description": "Amount", "args": [], "type": { "kind": "NON_NULL", "name": null, "ofType": { "kind": "SCALAR", - "name": "BlockTime", + "name": "Amount", "ofType": null } }, "isDeprecated": false, "deprecationReason": null - }, + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "LocalState", + "description": null, + "fields": [ { - "name": "utcDate", - "description": - "utcDate (stringified Unix time - number of milliseconds since January 1, 1970). 
Time offsets are adjusted to reflect true wall-clock time instead of genesis time.", + "name": "stackFrame", + "description": "Stack frame component of local state", "args": [], "type": { "kind": "NON_NULL", "name": null, "ofType": { "kind": "SCALAR", - "name": "BlockTime", + "name": "FieldElem", "ofType": null } }, @@ -10621,15 +10670,15 @@ "deprecationReason": null }, { - "name": "snarkedLedgerHash", - "description": "Base58Check-encoded hash of the snarked ledger", + "name": "callStack", + "description": "Call stack component of local state", "args": [], "type": { "kind": "NON_NULL", "name": null, "ofType": { "kind": "SCALAR", - "name": "LedgerHash", + "name": "FieldElem", "ofType": null } }, @@ -10637,16 +10686,16 @@ "deprecationReason": null }, { - "name": "stagedLedgerHash", + "name": "transactionCommitment", "description": - "Base58Check-encoded hash of the staged ledger hash's main ledger hash", + "Transaction commitment component of local state", "args": [], "type": { "kind": "NON_NULL", "name": null, "ofType": { "kind": "SCALAR", - "name": "LedgerHash", + "name": "FieldElem", "ofType": null } }, @@ -10654,16 +10703,16 @@ "deprecationReason": null }, { - "name": "stagedLedgerAuxHash", + "name": "fullTransactionCommitment", "description": - "Base58Check-encoded hash of the staged ledger hash's aux_hash", + "Full transaction commitment component of local state", "args": [], "type": { "kind": "NON_NULL", "name": null, "ofType": { "kind": "SCALAR", - "name": "StagedLedgerAuxHash", + "name": "FieldElem", "ofType": null } }, @@ -10671,16 +10720,15 @@ "deprecationReason": null }, { - "name": "stagedLedgerPendingCoinbaseAux", - "description": - "Base58Check-encoded staged ledger hash's pending_coinbase_aux", + "name": "excess", + "description": "Excess component of local state", "args": [], "type": { "kind": "NON_NULL", "name": null, "ofType": { - "kind": "SCALAR", - "name": "PendingCoinbaseAuxHash", + "kind": "OBJECT", + "name": "SignedAmount", "ofType": null } }, @@ -10688,16 +10736,15 @@ "deprecationReason": null }, { - "name": "stagedLedgerPendingCoinbaseHash", - "description": - "Base58Check-encoded hash of the staged ledger hash's pending_coinbase_hash", + "name": "supplyIncrease", + "description": "Supply increase component of local state", "args": [], "type": { "kind": "NON_NULL", "name": null, "ofType": { - "kind": "SCALAR", - "name": "PendingCoinbaseHash", + "kind": "OBJECT", + "name": "SignedAmount", "ofType": null } }, @@ -10705,7 +10752,551 @@ "deprecationReason": null }, { - "name": "stagedLedgerProofEmitted", + "name": "ledger", + "description": "Ledger component of local state", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "LedgerHash", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "success", + "description": "Success component of local state", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "accountUpdateIndex", + "description": "Account update index component of local state", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "UInt32", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "failureStatusTable", + "description": "Failure status table component of local state", + "args": [], + 
"type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + } + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "willSucceed", + "description": "Will-succeed component of local state", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "StateStack", + "description": null, + "fields": [ + { + "name": "initial", + "description": "Initial hash", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "FieldElem", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "current", + "description": "Current hash", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "FieldElem", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "PendingCoinbaseStack", + "description": null, + "fields": [ + { + "name": "dataStack", + "description": "Data component of pending coinbase stack", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "FieldElem", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "stateStack", + "description": "State component of pending coinbase stack", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "StateStack", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "Registers", + "description": null, + "fields": [ + { + "name": "firstPassLedger", + "description": "First pass ledger hash", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "LedgerHash", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "secondPassLedger", + "description": "Second pass ledger hash", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "LedgerHash", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "pendingCoinbaseStack", + "description": "Pending coinbase stack", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PendingCoinbaseStack", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "localState", + "description": "Local state", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "LocalState", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + 
"enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SnarkedLedgerState", + "description": null, + "fields": [ + { + "name": "sourceRegisters", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Registers", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "targetRegisters", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Registers", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "connectingLedgerLeft", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "LedgerHash", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "connectingLedgerRight", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "LedgerHash", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "supplyIncrease", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SignedAmount", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "feeExcess", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "FeeExcess", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "sokDigest", + "description": "Placeholder for SOK digest", + "args": [], + "type": { "kind": "SCALAR", "name": "String", "ofType": null }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "PendingCoinbaseHash", + "description": + "Base58Check-encoded hash of a pending coinbase hash", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "PendingCoinbaseAuxHash", + "description": + "Base58Check-encoded hash of a pending coinbase auxiliary hash", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "StagedLedgerAuxHash", + "description": + "Base58Check-encoded hash of the staged ledger hash's aux_hash", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "LedgerHash", + "description": "Base58Check-encoded ledger hash", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "BlockchainState", + "description": null, + "fields": [ + { + "name": "date", + "description": + "date (stringified Unix time - number of milliseconds since January 1, 1970)", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "BlockTime", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "utcDate", + "description": + "utcDate (stringified Unix time - number of milliseconds since January 1, 1970). 
Time offsets are adjusted to reflect true wall-clock time instead of genesis time.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "BlockTime", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "snarkedLedgerHash", + "description": "Base58Check-encoded hash of the snarked ledger", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "LedgerHash", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "stagedLedgerHash", + "description": + "Base58Check-encoded hash of the staged ledger hash's main ledger hash", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "LedgerHash", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "stagedLedgerAuxHash", + "description": + "Base58Check-encoded hash of the staged ledger hash's aux_hash", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "StagedLedgerAuxHash", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "stagedLedgerPendingCoinbaseAux", + "description": + "Base58Check-encoded staged ledger hash's pending_coinbase_aux", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "PendingCoinbaseAuxHash", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "stagedLedgerPendingCoinbaseHash", + "description": + "Base58Check-encoded hash of the staged ledger hash's pending_coinbase_hash", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "PendingCoinbaseHash", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "stagedLedgerProofEmitted", "description": "Block finished a staged ledger, and a proof was emitted from it and included into this block's proof. 
If there is no transition frontier available or no block found, this will return null.", "args": [], @@ -10713,6 +11304,22 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "ledgerProofStatement", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SnarkedLedgerState", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "bodyReference", "description": @@ -12043,7 +12650,7 @@ { "name": "lastEpochDelegators", "description": - "The list of accounts which are delegating to you in the last epoch (note that the info is recorded in the one before last epoch epoch so it might not be up to date with the current account status)", + "The list of accounts which are delegating to you in the last epoch (note that the info is recorded in the one before last epoch so it might not be up to date with the current account status)", "args": [], "type": { "kind": "LIST", diff --git a/helm/block-producer/templates/block-producer.yaml b/helm/block-producer/templates/block-producer.yaml index 0d45a38ce23..b113508395c 100644 --- a/helm/block-producer/templates/block-producer.yaml +++ b/helm/block-producer/templates/block-producer.yaml @@ -203,6 +203,9 @@ spec: {{- if $.Values.mina.logPrecomputedBlocks }} "-log-precomputed-blocks", "true", {{- end -}} + {{- range $.Values.mina.startFilteredLogs }} + "--start-filtered-logs", {{ . | quote }}, + {{- end -}} {{- if $.Values.mina.logTxnPoolGossip }} "-log-txn-pool-gossip", "true", {{- end -}} @@ -427,4 +430,4 @@ spec: {{ end }} --- {{ end }} ---- \ No newline at end of file +--- diff --git a/helm/block-producer/values.yaml b/helm/block-producer/values.yaml index 1f9b60d429d..b8588f5667c 100644 --- a/helm/block-producer/values.yaml +++ b/helm/block-producer/values.yaml @@ -6,6 +6,7 @@ mina: logLevel: "Debug" logSnarkWorkGossip: false logPrecomputedBlocks: false + startFilteredLogs: [] logTxnPoolGossip: false image: gcr.io/o1labs-192920/mina-daemon:1.2.0beta8-5b35b27-devnet useCustomEntrypoint: false diff --git a/helm/cron_jobs/README.md b/helm/cron_jobs/README.md new file mode 100644 index 00000000000..edd63378621 --- /dev/null +++ b/helm/cron_jobs/README.md @@ -0,0 +1,18 @@ +Replayer cron jobs +================== + +There are replayer cron jobs for Mainnet, Devnet, and Berkeley. These +jobs are run daily, to replay a day's worth of transactions. + +Each cron job downloads the most recent archive dump corresponding to +a network, and loads the data into PostgreSQL. That results in an +archive database. The most recent replayer checkpoint file is +downloaded, which provides the starting point for the replayer. When +the replayer runs, it creates new checkpoint files every 50 +blocks. When the replayer finishes, it uploads the most recent +checkpoint file, so it can be used in the following day's run. If +there are any errors, the replayer logs are also uploaded. + +There is a separate checkpoint file bucket for each network. Both the +checkpoint files and error files for a given network are uploaded to +the same bucket. 
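+
+As a rough sketch, one daily run has the following shape (bucket names,
+dump format, binary flags, and checkpoint file naming are illustrative
+placeholders here, not the exact values used by the jobs):
+
+```bash
+#!/usr/bin/env bash
+set -euo pipefail
+
+NETWORK=devnet  # one of: mainnet, devnet, berkeley
+BUCKET="gs://${NETWORK}-replayer-checkpoints"   # hypothetical bucket name
+
+# 1. Fetch the most recent archive dump (assumed plain SQL here) and load
+#    it into PostgreSQL, producing the archive database.
+gsutil cp "$(gsutil ls "gs://${NETWORK}-archive-dumps" | sort | tail -n 1)" dump.sql
+psql -U postgres -c 'CREATE DATABASE archive'
+psql -U postgres -d archive -f dump.sql
+
+# 2. Fetch the most recent checkpoint; it is the replayer's starting point.
+gsutil cp "$(gsutil ls "${BUCKET}" | sort | tail -n 1)" checkpoint.json
+
+# 3. Replay the day's transactions; the replayer writes a new checkpoint
+#    file every 50 blocks.
+if ! mina-replayer --archive-uri postgres://postgres@localhost:5432/archive \
+       --input-file checkpoint.json --output-file /dev/null > replayer.log 2>&1
+then
+  # If there are errors, upload the logs to the same bucket as the checkpoints.
+  gsutil cp replayer.log "${BUCKET}/"
+fi
+
+# 4. Upload the newest checkpoint so the following day's run starts from it.
+gsutil cp "$(ls replayer-checkpoint-*.json | sort | tail -n 1)" "${BUCKET}/"
+```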
diff --git a/helm/plain-node/templates/plain-node.yaml b/helm/plain-node/templates/plain-node.yaml index a1d46e28e44..250640b3e8f 100644 --- a/helm/plain-node/templates/plain-node.yaml +++ b/helm/plain-node/templates/plain-node.yaml @@ -54,6 +54,9 @@ spec: {{- if $.Values.mina.logPrecomputedBlocks }} "-log-precomputed-blocks", "true", {{- end -}} + {{- range $.Values.mina.startFilteredLogs }} + "--start-filtered-logs", {{ . | quote }}, + {{- end -}} {{- if $.Values.mina.logTxnPoolGossip }} "-log-txn-pool-gossip", "true", {{- end -}} diff --git a/helm/plain-node/values.yaml b/helm/plain-node/values.yaml index bede27d750a..3fb8bc42cbd 100644 --- a/helm/plain-node/values.yaml +++ b/helm/plain-node/values.yaml @@ -3,6 +3,7 @@ mina: runtimeConfig: generateGenesisProof: true logPrecomputedBlocks: true + startFilteredLogs: [] logTxnPoolGossip: false maxConnections: 200 image: gcr.io/o1labs-192920/mina-daemon:1.2.0beta8-5b35b27-devnet diff --git a/helm/seed-node/templates/seed-node.yaml b/helm/seed-node/templates/seed-node.yaml index 9691f36109c..02e715f7051 100644 --- a/helm/seed-node/templates/seed-node.yaml +++ b/helm/seed-node/templates/seed-node.yaml @@ -88,6 +88,9 @@ spec: {{- if $.Values.mina.logPrecomputedBlocks }} "-log-precomputed-blocks", "true", {{- end -}} + {{- range $.Values.mina.startFilteredLogs }} + "--start-filtered-logs", {{ . | quote }}, + {{- end -}} {{- if $.Values.mina.logTxnPoolGossip }} "-log-txn-pool-gossip", "true", {{- end -}} diff --git a/helm/seed-node/values.yaml b/helm/seed-node/values.yaml index da4eaf4b446..e5b3f0db668 100644 --- a/helm/seed-node/values.yaml +++ b/helm/seed-node/values.yaml @@ -3,6 +3,7 @@ mina: runtimeConfig: generateGenesisProof: true logPrecomputedBlocks: true + startFilteredLogs: [] logTxnPoolGossip: false maxConnections: 200 image: gcr.io/o1labs-192920/mina-daemon:1.2.0beta8-5b35b27-devnet diff --git a/nix/README.md b/nix/README.md index 725f98976ac..5e07ffee572 100644 --- a/nix/README.md +++ b/nix/README.md @@ -169,7 +169,11 @@ Now, whenever you start vim from `nix develop mina#with-lsp`, it should just wor ##### Emacs You need to install [tuareg](https://github.com/ocaml/tuareg) and a LSP client, like [lsp-mode](https://github.com/emacs-lsp/lsp-mode) or [eglot](https://github.com/joaotavora/eglot). +You do not need to use [merlin](https://github.com/ocaml/merlin) directly (through `merlin-mode`), as the `ocaml-lsp-server` that your LSP client talks to uses `merlin` as its backend. +Note that LSP with flycheck and similar tools will not provide global project compilation; they focus on individual buffers instead. +To compile the whole project you can still use `M-x compile` or anything else; the compilation results will then be picked up by LSP/flycheck. This should just work without any configuration, as long as you start it from `nix develop mina#with-lsp`. +If you prefer to have just one instance of `emacs` running, consider installing `direnv` as explained in the sections below: the emacs packages [envrc](https://github.com/purcell/envrc) and [emacs-direnv](https://github.com/wbolster/emacs-direnv) (just `direnv` in MELPA) integrate with the tool, allowing emacs to use the nix-defined sandbox variables when the open buffer is a repository file. ### "Pure" build @@ -210,7 +214,7 @@ branches, or otherwise changing the dependency tree of Mina.
TL;DR: ``` $(nix build mina#mina-image-full) | docker load # Also available: mina-image-slim, mina-image-instr-full, mina-archive-image-full ``` Since a "pure" build can happen entirely inside the Nix sandbox, we can use its @@ -228,6 +232,8 @@ us-west2-docker.pkg.dev/o1labs-192920/nix-containers/mina-image-full:develop` . The `slim` image only has the Mina daemon itself, whereas `full` images also contain many useful tools, such as coreutils, fake init, jq, etc. +The `instr` image is a replica of the `full` image with additional instrumentation data. + ### Debian package TL;DR: ``` @@ -376,6 +382,14 @@ networking inside the Nix sandbox (in order to vendor all the dependencies using specified explicitly. This is the hash you're updating by running `./nix/update-libp2p-hashes.sh`. +### Notes on the instrumentation package + +`nix build mina#with-instrumentation` builds a special version of mina +with instrumentation enabled. This can be helpful to verify the code +coverage of end-to-end/manual tests performed against a mina build under development. +Additionally, there is a docker image available which wraps the above mina build into a full mina image. +One can prepare it using the command: `$(nix build mina#mina-image-instr-full --print-out-paths) | docker load` + ### Discovering all the packages this Flake provides `nix flake show` doesn't work due to @@ -601,4 +615,4 @@ Before running any `dune` commands. Alternatively, you can just run your commands inside `nix develop --ignore-environment mina`, which unsets all the outside environment variables, -resulting in a more reproducible but less convenient environment. \ No newline at end of file +resulting in a more reproducible but less convenient environment.
diff --git a/nix/docker.nix b/nix/docker.nix index 748c2336952..6904a3f9681 100644 --- a/nix/docker.nix +++ b/nix/docker.nix @@ -1,5 +1,5 @@ { lib, dockerTools, buildEnv, ocamlPackages_mina, runCommand, dumb-init -, coreutils, bashInteractive, python3, libp2p_helper, procps, postgresql, curl +, coreutils, findutils, bashInteractive, python3, libp2p_helper, procps, postgresql, curl , jq, stdenv, rsync, bash, gnutar, gzip, currentTime, flockenzeit }: let created = flockenzeit.lib.ISO-8601 currentTime; @@ -47,12 +47,13 @@ let ''; }; - mkFullImage = name: packages: dockerTools.streamLayeredImage { + mkFullImage = name: packages: additional_envs: dockerTools.streamLayeredImage { name = "${name}-full"; inherit created; contents = [ dumb-init coreutils + findutils bashInteractive python3 libp2p_helper @@ -65,7 +66,7 @@ let chmod 777 tmp ''; config = { - env = [ "MINA_TIME_OFFSET=0" ]; + env = [ "MINA_TIME_OFFSET=0" ] ++ additional_envs; WorkingDir = "/root"; cmd = [ "/bin/dumb-init" "/entrypoint.sh" ]; }; @@ -77,6 +78,7 @@ in { inherit created; contents = [ ocamlPackages_mina.mina.out ]; }; + mina-image-full = mkFullImage "mina" (with ocamlPackages_mina; [ mina-build-config mina-daemon-scripts @@ -85,6 +87,18 @@ in { mina.mainnet mina.genesis ]); + + # Image with enhanced binary capable of generating coverage report on mina exit + # For more details please visit: https://github.com/aantron/bisect_ppx/blob/master/doc/advanced.md#sigterm-handling + mina-image-instr-full = mkFullImage "mina-instr" (with ocamlPackages_mina; [ + mina-build-config + mina-daemon-scripts + + with_instrumentation.out + mina.mainnet + mina.genesis + ]) ["BISECT_SIGTERM=yes"]; + mina-archive-image-full = mkFullImage "mina-archive" (with ocamlPackages_mina; [ mina-archive-scripts gnutar @@ -92,4 +106,4 @@ in { mina.archive ]); -} +} \ No newline at end of file diff --git a/nix/go.nix b/nix/go.nix index d7a20ac8528..c833d8c52a9 100644 --- a/nix/go.nix +++ b/nix/go.nix @@ -37,22 +37,22 @@ final: prev: { src = ../src/app/libp2p_helper/src; doCheck = false; # TODO: tests hang vendorSha256 = let hashes = final.lib.importJSON ./libp2p_helper.json; in - # sanity check, to make sure the fixed output drv doesn't keep working + # sanity check, to make sure the fixed output drv doesn't keep working # when the inputs change if builtins.hashFile "sha256" ../src/app/libp2p_helper/src/go.mod - == hashes."go.mod" - && builtins.hashFile "sha256" ../src/app/libp2p_helper/src/go.sum - == hashes."go.sum" then + == hashes."go.mod" + && builtins.hashFile "sha256" ../src/app/libp2p_helper/src/go.sum + == hashes."go.sum" then hashes.vendorSha256 else final.lib.warn - '' - Below, you will find an error about a hash mismatch. - This is likely because you have updated go.mod and/or go.sum in libp2p_helper. - Please, locate the "got: " hash in the aforementioned error. If it's in SRI format (sha256-<...>), copy the entire hash, including the `sha256-'. Otherwise (if it's in the base32 format, like `sha256:<...>'), copy only the base32 part, without `sha256:'. - Then, run ./nix/update-libp2p-hashes.sh "" - '' - final.lib.fakeHash; + '' + Below, you will find an error about a hash mismatch. + This is likely because you have updated go.mod and/or go.sum in libp2p_helper. + Please, locate the "got: " hash in the aforementioned error. If it's in SRI format (sha256-<...>), copy the entire hash, including the `sha256-'. Otherwise (if it's in the base32 format, like `sha256:<...>'), copy only the base32 part, without `sha256:'. 
+ Then, run ./nix/update-libp2p-hashes.sh "" + '' + final.lib.fakeHash; NO_MDNS_TEST = 1; # no multicast support inside the nix sandbox overrideModAttrs = n: { # Yo dawg diff --git a/nix/ocaml.nix b/nix/ocaml.nix index 25cd212c553..958ff1e54ec 100644 --- a/nix/ocaml.nix +++ b/nix/ocaml.nix @@ -147,7 +147,7 @@ let MINA_COMMIT_DATE = ""; MINA_BRANCH = ""; - DUNE_PROFILE = "devnet"; + DUNE_PROFILE = "dev"; NIX_LDFLAGS = optionalString (pkgs.stdenv.isDarwin && pkgs.stdenv.isAarch64) @@ -259,6 +259,22 @@ let # Same as above, but wrapped with version info. mina = wrapMina self.mina-dev { }; + # Mina with additional instrumentation info. + with-instrumentation-dev = self.mina-dev.overrideAttrs (oa: { + pname = "with-instrumentation"; + outputs = [ "out" ]; + + buildPhase = '' + dune build --display=short --profile=testnet_postake_medium_curves --instrument-with bisect_ppx src/app/cli/src/mina.exe + ''; + installPhase = '' + mkdir -p $out/bin + mv _build/default/src/app/cli/src/mina.exe $out/bin/mina + ''; + }); + + with-instrumentation = wrapMina self.with-instrumentation-dev { }; + mainnet-pkg = self.mina-dev.overrideAttrs (s: { version = "mainnet"; DUNE_PROFILE = "mainnet"; diff --git a/nix/rust.nix b/nix/rust.nix index 6b420677751..0d68d1d0691 100644 --- a/nix/rust.nix +++ b/nix/rust.nix @@ -8,10 +8,16 @@ let # override stdenv.targetPlatform here, if neccesary }; toolchainHashes = { - "1.67.0" = "sha256-riZUc+R9V35c/9e8KJUE+8pzpXyl0lRXt3ZkKlxoY0g="; - "nightly-2023-02-05" = - "sha256-MM8fdvveBEWzpwjH7u6C0F7qSWGPIMpfZWLgVxSqtxY="; - # copy this line with the correct toolchain name + "1.72" = "sha256-dxE7lmCFWlq0nl/wKcmYvpP9zqQbBitAQgZ1zx9Ooik="; + "nightly-2023-09-01" = "sha256-zek9JAnRaoX8V0U2Y5ssXVe9tvoQ0ERGXfUCUGYdrMA="; + # copy the placeholder line with the correct toolchain name when adding a new toolchain + # That is, + # 1. Put the correct version name; + # + # 2. Put the hash you get in line "got" from the error you obtain, which looks like + # error: hash mismatch in fixed-output derivation '/nix/store/XXXXX' + # specified: sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= + # got: sha256-Q9UgzzvxLi4x9aWUJTn+/5EXekC98ODRU1TwhUs9RnY= "placeholder" = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; }; # rust-toolchain.toml -> { rustc, cargo, rust-analyzer, ... 
} @@ -135,10 +141,10 @@ in version = deps.wasm-bindgen.version; src = final.fetchCrate { inherit pname version; - sha256 = "sha256-0rK+Yx4/Jy44Fw5VwJ3tG243ZsyOIBBehYU54XP/JGk="; + sha256 = "sha256-0u9bl+FkXEK2b54n7/l9JOCtKo+pb42GF9E1EnAUQa0="; }; - cargoSha256 = "sha256-vcpxcRlW1OKoD64owFF6mkxSqmNrvY+y3Ckn5UwEQ50="; + cargoSha256 = "sha256-AsZBtE2qHJqQtuCt/wCAgOoxYMfvDh8IzBPAOkYSYko="; nativeBuildInputs = [ final.pkg-config ]; buildInputs = with final; @@ -150,8 +156,9 @@ in checkInputs = [ final.nodejs ]; - # other tests require it to be ran in the wasm-bindgen monorepo - cargoTestFlags = [ "--test=interface-types" ]; + # other tests, like --test=wasm-bindgen, require it to be run in the + # wasm-bindgen monorepo + cargoTestFlags = [ "--test=reference" ]; }; in rustPlatform.buildRustPackage { @@ -200,4 +207,3 @@ cargoLock.lockFile = ../src/app/trace-tool/Cargo.lock; }; } - diff --git a/rfcs/0006-receipt-chain-proving.md b/rfcs/0006-receipt-chain-proving.md index 935eec9ff3d..49c807a6e0e 100644 --- a/rfcs/0006-receipt-chain-proving.md +++ b/rfcs/0006-receipt-chain-proving.md @@ -49,7 +49,7 @@ module type Receipt_chain_database_intf = sig end ``` -`prove` will provide a merkle list of a proving receipt `h_1` and it's corresponding transaction `t_1` to a resulting_receipt `r_k` and it's corresponding transaction `r_k`, inclusively. Therefore, the output will be in the form of [(t_1, r_1), ... (t_k, r_k)], where $r_i = h(r_{i-1}, i_k)$ for $i = 2...k$ +`prove` will provide a merkle list of a proving receipt `h_1` and its corresponding transaction `t_1` to a resulting_receipt `r_k` and its corresponding transaction `t_k`, inclusively. Therefore, the output will be in the form of [(t_1, r_1), ... (t_k, r_k)], where $r_i = h(t_{i}, r_{i-1})$ for $i = 2...k$ `add` stores a transaction into a client's database as a value. The key is computed by using the transaction payload and the previous receipt_chain_hash. This receipt_chain_hash is computed within the `add` function. As a result, the computed `receipt_chain_hash` is returned diff --git a/rfcs/0019-epoch-ledger-sync.md b/rfcs/0019-epoch-ledger-sync.md index 39a3f473371..c97306b6155 100644 --- a/rfcs/0019-epoch-ledger-sync.md +++ b/rfcs/0019-epoch-ledger-sync.md @@ -62,4 +62,4 @@ This overall approach has a number of drawbacks. For one, it enforces a hard req ## Rationale and alternatives [rationale-and-alternatives]: #rationale-and-alternatives -An alternative to this would be to pull the responsiblity for providing this information out into a 3rd party service outside of the network protocol. If this was done, it would lift the need for every node to store this large amount of data locally and enable them to also synchronize more quickly as they would not need to download the entire epoch ledger but, rather, could just download the accounts and associated merkle proofs they are interested in evaluating VRFs for. However, this comes with a number of other issues, mostly related to high level concerns about the protocol's ability to maintain itself without external 3rd party services, and I cannot speak on those much as I cannot properly weight the implications. +An alternative to this would be to pull the responsibility for providing this information out into a 3rd party service outside of the network protocol.
If this was done, it would lift the need for every node to store this large amount of data locally and enable them to also synchronize more quickly as they would not need to download the entire epoch ledger but, rather, could just download the accounts and associated merkle proofs they are interested in evaluating VRFs for. However, this comes with a number of other issues, mostly related to high level concerns about the protocol's ability to maintain itself without external 3rd party services, and I cannot speak on those much as I cannot properly weigh the implications. diff --git a/rfcs/0023-glossary-terms.md b/rfcs/0023-glossary-terms.md index f85c5cd0169..c01b49930e5 100644 --- a/rfcs/0023-glossary-terms.md +++ b/rfcs/0023-glossary-terms.md @@ -107,7 +107,7 @@ Additionally, the usage of "work" in snark worker can be misconstrued as a conne ### Protocol Transaction **Concept:** A transaction issued by the protocol - currently a fee transfer or coinbase (both are structurally identical). -**Rationale:** In discussions with parties that were not familiar with the protocol, fee transfers were misunderstood as representations of fees associated with a user transaction. As soon as it was explained that these were transactions programatically issued by the protocol rather than a user, it became clear. As such, it is recommend to partition transactions into *user transactions* and *protocol transactions*. +**Rationale:** In discussions with parties that were not familiar with the protocol, fee transfers were misunderstood as representations of fees associated with a user transaction. As soon as it was explained that these were transactions programmatically issued by the protocol rather than a user, it became clear. As such, it is recommended to partition transactions into *user transactions* and *protocol transactions*. **Downsides:** If there will ever be fee transfers issued by users, this will break the proposed structure. However, there currently doesn't seem to be any plans currently to do that. diff --git a/rfcs/0026-transition-caching.md b/rfcs/0026-transition-caching.md index e6a6aadfa07..55fe951ef32 100644 --- a/rfcs/0026-transition-caching.md +++ b/rfcs/0026-transition-caching.md @@ -17,7 +17,7 @@ NOTE: This RFC has been re-scoped to only address duplicate transitions already The goal here is to introduce a new cache to the system: the `Unprocessed_transition_cache`. -`Unprocessed_transition_cache` is scoped explicitly to the `Transition_frontier_controller`. The set stored in this cache represents the set of transitions which have been read from the network but have not yet been processed and added to the transition frontier. Since the lifetime of elements in the set are finite, the `Unprocessed_transition_cache` can be represented as a hash set. It will be the responsiblity of the transition validator to add items to this cache, and the responsibility of the processor to invalidate the cache once transitions are added to the transition frontier. Transitions which are determined to be invalid need to also be invalidated. +`Unprocessed_transition_cache` is scoped explicitly to the `Transition_frontier_controller`. The set stored in this cache represents the set of transitions which have been read from the network but have not yet been processed and added to the transition frontier. Since the lifetime of elements in the set is finite, the `Unprocessed_transition_cache` can be represented as a hash set.
It will be the responsibility of the transition validator to add items to this cache, and the responsibility of the processor to invalidate the cache once transitions are added to the transition frontier. Transitions which are determined to be invalid need to also be invalidated. In order to assist in ensuring that items in the cache are properly invalidated, I recommend the introduction of a `'a Cached.t` type which will track the state of the item in one or more caches. The `Cached` module would provide an interface for performing cache-related actions and would track a boolean value representing whether or not the required actions have been performed. What's special about the `'a Cached.t` type is that it will have a custom finalization handler which will throw an exception if no cache actions have been performed by the time it is garbage collected. This exception is toggled by a debug flag. When the debug flag is off, the finalization handler will merely log a message. diff --git a/rfcs/0048-rosetta-zkapps.md b/rfcs/0048-rosetta-zkapps.md index 4673149c2ba..35bbb5d65d0 100644 --- a/rfcs/0048-rosetta-zkapps.md +++ b/rfcs/0048-rosetta-zkapps.md @@ -54,7 +54,7 @@ Most of the changes will be localized to supporting new zkApps related [Rosetta These new transactions will be present in both the `/block` and `/mempool` endpoints. -Note: GraphQL queries for zkApp transactions can be [generated programatically](https://github.com/MinaProtocol/mina/blob/develop/src/lib/mina_base/parties.ml#L1431) as they are quite large. +Note: GraphQL queries for zkApp transactions can be [generated programmatically](https://github.com/MinaProtocol/mina/blob/develop/src/lib/mina_base/parties.ml#L1431) as they are quite large. **Operations for zkApps** diff --git a/rfcs/0050-genesis-ledger-export.md b/rfcs/0050-genesis-ledger-export.md new file mode 100644 index 00000000000..5b784076353 --- /dev/null +++ b/rfcs/0050-genesis-ledger-export.md @@ -0,0 +1,176 @@ +## Summary + +This RFC describes the procedure to generate a genesis ledger from a +running network, using a node connected to that network. + +## Motivation + +The procedure described here is a part of the hard fork procedure, +which aims at spawning a new network that is a direct continuation of +the mainnet (or any other Mina network, for that matter). To enable +this, the ledger of the old network must be exported in some form and +then fed into the newly created network. Because the new network's +initial state can be fed into nodes in a configuration file, it makes +sense to generate that file directly from the old node. Necessary +manual updates (for instance, to various protocol +constants) can then be made to it, and the new configuration file handed over to +node operators. + +## Detailed design + +The genesis ledger export is achieved using a GraphQL field named +`fork_config`. Asking for this field requires providing a slot or a +state hash of the block that we want to base the exported ledger on. +This field, if asked for, contains a new runtime configuration, +automatically updated with: + +* the dump of the **staged ledger** at the fork point; +* updated values of `Fork_config`, i.e.
the previous state hash, previous +blockchain length and previous global slot; +* the current epoch ledger; +* the current epoch data (total currency and seed); +* the next epoch ledger; +* the next epoch data (total currency and seed); +* the protocol state at the fork point. + +**IMPORTANT**: as of now the `genesis_ledger_timestamp` is **not** +being updated and must be manually set to the right value (which is at +the moment unknown). + +By the fork point above we mean the last block before the +transaction-stop slot, i.e. the slot after which no more transactions +are accepted. + +The configuration thus generated can be saved to a file, modified if +needed and fed directly into a new node, running a different protocol +version, using the `--config-file` flag. As of the moment of writing this, +`compatible` and `berkeley` branches' configuration files are +compatible with each other (see: [PR #13768](https://github.com/MinaProtocol/mina/pull/13768)). +Sadly, that compatibility has since been broken by [PR #14014](https://github.com/MinaProtocol/mina/pull/14014). +We need to either port this change back to `compatible` or create a +migration script which will adapt a `mainnet` config file to the +format required by `berkeley`. The former solution would probably +be better. + +The `fork_config` field has been added to GraphQL in [PR +#13787](https://github.com/MinaProtocol/mina/pull/13787). It needs to +be extended to return the blockchain state for a given block (height +or state hash) so that we can export the desired ledger after the +blockchain has moved on. + +## Drawbacks + +This RFC provides a simple enough procedure to generate the genesis +ledger for the new network. However, it's not without its problems. + +### File size + +At the moment the mainnet has more than 100 000 accounts created. +Each account takes at least 4 lines in the configuration, which adds +up to around 600kB of JSON data. The daemon can take considerable time +at startup to parse it and load its contents into memory. If we move +on with this approach, it might be desirable to make a dedicated +effort to improve the configuration parsing speed, as these files +will only grow larger in subsequent hard forks. Alternatively, we +might want to devise a better (less verbose) storage mechanism for the +genesis ledger. + +### Security concerns + +The generated genesis ledger is prone to malevolent manual +modifications. Beyond containing the hash of the previous ledger, it's +unprotected from tampering. + +One way to improve this is to provide an external program, capable of +computing the hash of the ledger as it will be after the config is loaded +into a node. Users will be able to obtain a raw fork config file from +their nodes. Later, given the official config for the new network, +they will be able to run the program against both files and compute +ledger hashes. The reason why this is needed is that the configuration +file will likely contain some manual updates. For instance the genesis +ledger timestamp will need to be updated manually when the start time +of the new network is known. Further changes may concern genesis +constants and other network configuration. All these changes should be +ignored during the hash computation and only the genesis ledger itself +should be taken into consideration. This way a user can verify that, even +though the official configuration file is not identical to the one they +exported, it still does not contain any changes to the genesis ledger.
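To make the intended check concrete, below is a minimal sketch of what such a verification tool could look like. The tool does not exist yet; hashing the serialized JSON of the `ledger` section stands in for computing the real Merkle root, and all names here are hypothetical.

```ocaml
(* Hypothetical sketch: compare only the genesis ledger sections of two
   fork config files, ignoring every other (legitimately edited) field.
   A real tool would compute the actual Merkle root of the ledger; here
   we hash the serialized JSON of the "ledger" member as a stand-in,
   which assumes both files order keys identically. *)
let ledger_digest (config_file : string) : string =
  let json = Yojson.Safe.from_file config_file in
  let ledger = Yojson.Safe.Util.member "ledger" json in
  Digest.to_hex (Digest.string (Yojson.Safe.to_string ledger))

let () =
  match Sys.argv with
  | [| _; exported; official |] ->
      if String.equal (ledger_digest exported) (ledger_digest official)
      then print_endline "genesis ledgers match"
      else (print_endline "genesis ledgers DIFFER"; exit 1)
  | _ ->
      prerr_endline "usage: check_fork_config EXPORTED.json OFFICIAL.json";
      exit 2
```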
+ +Further protection against tampering with the ledger comes from the +fact that all nodes must use the same one, or they'll be kicked +out of the network. + +## Rationale and alternatives + +The presented way of handling the ledger export is the simplest one +and the easiest to implement. The security concern indicated above +cannot be mitigated with any method currently available. In order to +overcome it, we would have to re-think the whole procedure and somehow +continue the existing network with the changed protocol instead of +creating a new one. + +It seems reasonable to export the ledger in binary form instead, but +currently the node does not persist the staged ledger in any way that +could survive the existing node and could be loaded by another one. +Even if we had such a process, the encoding of the ledger would have +to be compatible between `compatible` and `berkeley`, which could be +difficult to maintain in any binary format. + +Otherwise there's no reasonable alternative to the process described. + +## Prior art + +Some of the existing blockchains, like Tezos, deal with the protocol +upgrade problem by avoiding hard-forking entirely, and therefore +avoiding the ledger export in particular. They achieve it by careful +software design in which the protocol (containing in particular the +consensus mechanism and transaction logic) takes the form of a plugin to the +daemon, which can be loaded and unloaded at runtime. Thus the protocol +update is as simple as loading another plugin at runtime and does not +even require a node restart. + +It would certainly be beneficial to Mina to implement a similar +solution, but this is obviously a huge amount of work (involving +redesigning the whole code base), which makes it infeasible for the +moment. + +## Unresolved questions + +The genesis timestamp of the new network needs to be specified in the +runtime configuration, but it is as of now (and will probably remain +for some time still) unknown. This makes it hard to put it into the +configuration in any automated fashion. Relying on personnel +performing the hard fork to update it is far from ideal, but there +seems to be no better solution available at the moment. + +Also epoch seeds from mainnet are incompatible with those on berkeley. +When epoch ledgers are being exported from a compatible node and +transferred into a berkeley node, the latter cannot load them, because +Base58check fails to decode them. This is a problem we need to overcome +or decide that we won't export the epoch ledgers and assume they're +the same as the genesis ledger for the purpose of the hard fork. + +## Testing + +An automatic integration test will be written to check that the data is +being exported properly. The procedure is to start a fresh network and +generate a couple of transactions. Then the transactions are stopped. +Finally the ledger export is performed and the test compares the +exported state to the current state of the blockchain as obtained +through GraphQL. These checks must take into account the fact that +the state has changed slightly since the transaction stop (a couple of additional +blocks might have been produced). However, all balances should definitely +be the same (after the transaction stop no transactions are allowed, and there +are no fees or coinbase rewards anymore). + +The procedure can also be tested manually as follows: +* Sync up with the mainnet. +* Export the genesis ledger at any point in time.
+* The program mentioned in a previous section can be +used to verify the exported ledger. +* Possibly add an account you control and change everyone's +delegation to point at that account so that you can produce +blocks. +* Start a new network with the exported state. +* The new network should be able to produce blocks. +* All the accounts should have the same balances and +delegates as on the mainnet at the moment of export. diff --git a/rfcs/0050-protocol-versioning.md b/rfcs/0051-protocol-versioning.md similarity index 91% rename from rfcs/0050-protocol-versioning.md rename to rfcs/0051-protocol-versioning.md index 2f657657882..bc2277e09fd 100644 --- a/rfcs/0050-protocol-versioning.md +++ b/rfcs/0051-protocol-versioning.md @@ -4,13 +4,13 @@ Protocol versioning is the system by which we identify different versions of the ## Summary -There are multiple dimensions of compatability between software operating a decentralized protocol. In this RFC, we concretely breakdown those dimensions of compatability into a hierarchy, and then propose a semver-inspired verisioning scheme that utilizes that hierarchy. +There are multiple dimensions of compatibility between software operating a decentralized protocol. In this RFC, we concretely break down those dimensions of compatibility into a hierarchy, and then propose a semver-inspired versioning scheme that utilizes that hierarchy. ## Motivation -The motivation for this comes from a few angles. Firstly, having a versioning scheme for the protocol itself allows developers in the ecosystem to more accurately discuss compatability of different software. For instance, if any 2 separate implementations of the daemon exist, then the protocol version can identify if those 2 implementations are compatible. Tools that process data from the network can also identify the versions of the protocol they are compatible with, or even dynamically support multiple versions of the protocol via configuration. +The motivation for this comes from a few angles. Firstly, having a versioning scheme for the protocol itself allows developers in the ecosystem to more accurately discuss compatibility of different software. For instance, if any 2 separate implementations of the daemon exist, then the protocol version can identify if those 2 implementations are compatible. Tools that process data from the network can also identify the versions of the protocol they are compatible with, or even dynamically support multiple versions of the protocol via configuration. -Besides this, we also want a way to programatically reason about different versions of the protocol from within our software, including having the ability to be aware of protocol versions within the snark proofs that Mina uses. +Besides this, we also want a way to programmatically reason about different versions of the protocol from within our software, including having the ability to be aware of protocol versions within the snark proofs that Mina uses. The solution in this RFC optimizes for simplicity and clarity. @@ -22,7 +22,7 @@ Compatibility of the transaction system can be thought of as the mapping of ledg Compatibility of the networking protocol can be thought of as the set of all RPC messages, wire types, gossip topics, p2p feature set, and participation rules. Participation rules include any rule in the protocol that regards certain behavior as malicious or otherwise not permitted. This ranges from consensus details to bannable offenses and even verification logic for gossip messages.
It's important to capture all of this under the umbrella of networking protocol compatibility since divergences in these details can lead to unintended forks on the chain. -To label versions of our daemon software, we will use the following versioning convention, inspired by semver: `<transaction>.<network>.<implementation>`. In this setup, the version of the transaction system is the most dominant version, since any updates to it necessitate a hardfork, and when reasoning about the logic of the chain state, it is the most significant version number. The prefix of the `<transaction>.<network>` uniquely identifies a particular hard fork version, since the pair of those 2 versions determines the full compatability of an implementation. The leftover `<implementation>` is retained for the usage of individual daemon implementations to use however they see fit. We leave this detail to each implementor of the daemon, as the meaning of these versions is intended to be specific to the implementation. +To label versions of our daemon software, we will use the following versioning convention, inspired by semver: `<transaction>.<network>.<implementation>`. In this setup, the version of the transaction system is the most dominant version, since any updates to it necessitate a hardfork, and when reasoning about the logic of the chain state, it is the most significant version number. The prefix of the `<transaction>.<network>` uniquely identifies a particular hard fork version, since the pair of those 2 versions determines the full compatibility of an implementation. The leftover `<implementation>` is retained for individual daemon implementations to use however they see fit. We leave this detail to each implementor of the daemon, as the meaning of these versions is intended to be specific to the implementation. For the existing daemon implementation in OCaml, which is maintained in the `MinaProtocol/mina` repository, we will use the following versioning convention for the `<implementation>`: `<api>-<patch>`. Here, the `<api>` will be used to denote any breaking API changes for user-facing APIs the daemon supports (CLI, GraphQL, Archive Format). Whenever we add, remove, deprecate, or modify the interface of existing APIs, this version must be incremented. The `<patch>` is used in the same way the patch version is utilized in semver: to signify that there are new backwards compatible bug fixes or improvements. We may also add an additional suffix to the `<patch>` if there is a different variant of the artifact for that version. For instance, if we were testing some optimizations behind a feature flag, but weren't ready to ship it to the stable version of the software (or didn't want to for some reason), then we could maintain a variant build for that artifact by appending a `-opt` suffix to the version. diff --git a/rfcs/0051-verification-key-permissions.md b/rfcs/0052-verification-key-permissions.md similarity index 99% rename from rfcs/0051-verification-key-permissions.md rename to rfcs/0052-verification-key-permissions.md index 0b361ad675d..d1bf1ddd1a5 100644 --- a/rfcs/0051-verification-key-permissions.md +++ b/rfcs/0052-verification-key-permissions.md @@ -10,7 +10,7 @@ In this RFC, we describe a configuration for the verification key permissions th ## Motivation -At the launch of the Berkeley network, the Mina Protocol does not yet guarantee backwards compatability of zkApps in future upgrades. Due to this, it is possible for an immutable contract to break -- any interactions with it that require proofs to be verified against it's verification key will no longer be accepted by the chain.
Similarly, the on-chain features that the contract relied on may have been removed or modified. +At the launch of the Berkeley network, the Mina Protocol does not yet guarantee backwards compatibility of zkApps in future upgrades. Due to this, it is possible for an immutable contract to break -- any interactions with it that require proofs to be verified against its verification key will no longer be accepted by the chain. Similarly, the on-chain features that the contract relied on may have been removed or modified. We wish for zkApps developers to be able to make their contract immutable without putting their contract at risk of breaking upon a future release, rendering any funds locked up in the contract inaccessible. diff --git a/rfcs/0054-limit-zkapp-cmds-per-block.md b/rfcs/0054-limit-zkapp-cmds-per-block.md new file mode 100644 index 00000000000..c108d793bcc --- /dev/null +++ b/rfcs/0054-limit-zkapp-cmds-per-block.md @@ -0,0 +1,191 @@ +## Summary + +During the ITN stress testing it was noticed that the daemon's memory +consumption tends to increase dramatically after a block containing a +large number of zkApp commands. Before appropriate optimizations can +be developed, we need a temporary solution to prevent nodes from crashing +due to insufficient memory. The idea is to limit the number of zkApp +commands that can be included in any single block. + +## Motivation + +By limiting the number of zkApp commands going into blocks we avoid +the aforementioned issue until a proper solution can be devised and +implemented. The root cause of the issue is that proofs contained +within these commands are stored in the scan state and tend to occupy +a lot of space. Fixing these storage issues won't affect the +protocol, so ideally we want a workaround that doesn't affect the +protocol either, so that at a convenient time we can turn it off +without making a fork. + +## Detailed design + +Since the solution should not affect the protocol, it should be +implemented at the mempool/block producer boundary. In the mempool +there is a `transactions` function, which returns a sequence of +transactions from the mempool in the order of decreasing transaction +fees. The `create_diff` function in `Staged_ledger` then takes that +sequence and tries to apply as many transactions from it as can fit +into the block. In the latter function it is possible to simply +count successfully applied zkApp commands and filter out any +transactions which: +- would violate the set zkApp command limit +- or depend on any previously filtered transactions because of + a nonce increase. + +The exact number of zkApps allowed in each block should be set +dynamically, so that we can adjust it without redeploying nodes. +Therefore we are going to provide an authorised GraphQL mutation +to alter the setting at runtime. A sensible default will be compiled +into the binary as well. + +The setting can be stored in the Mina_lib configuration and +initialized when the mempool is created at startup; the authenticated +GraphQL mutation mentioned above will then update the setting in the +configuration at runtime. + +## Drawbacks + +Any non-protocol-level solution to this issue has the drawback that a +malicious node operator could modify their node to turn off the +safeguard. However, because the safeguard only affects block +production, it doesn't really matter unless the malicious agent is +going to produce blocks.
If so, their chance of conducting a +successful DoS attack against the network is proportional to their +stake, but their incentive to do so is **inversely** proportional +to their stake, which means the more capable one is of conducting the +attack, the more they are going to lose in case of success. + +With the safeguard turned on, if the zkApps are coming in faster than +they can be processed, they will stack up in nodes' mempools. +Mempools **will** eventually overflow, which means that either some of +these zkApp commands or some regular user commands will start to +drop. This will likely inflate transaction fees as users will attempt +to get their transactions into increasingly crowded mempools. Also a +lot of transactions will be lost in the process due to mempool +overflow. + +Some payments and delegations may wait a long time for inclusion or +even get dropped if they are created by the same fee payer as a +zkApp command waiting for inclusion due to the limit. This cannot +be helped, unfortunately. + +Another risk arises when we decide to turn off the limitation, because +the underlying issue is fixed. In order to safely turn the limit +off, a node needs to be updated with the fix. Because this will be +a non-breaking change, nodes may be slow to adopt it. According to +rough estimates, if 16% of the stake upgrades and turns the limit +off, they're capable of taking the non-upgraded nodes down with +memory over-consumption and taking over the network. To prevent this +we have to ensure that at least the majority of stakeholders +upgrade as quickly as possible. + +Finally, the limit introduces an attack vector, where a malicious +party can submit `limit + 1` zkApp commands and arbitrarily many more +commands depending on them, so that they are guaranteed not to be +included. They can set up arbitrarily high fees on these commands +which won't be included in order to kick out other users' transactions +from the mempool and increase the overall fees on the network. An +attacker would have to pay the fees for all their included zkApp +commands, but not for the skipped ones. Then they can use another +account to kick out their expensive transactions from the mempool. So +conducting such an attack will still be costly, but not as costly as +it should be. + +## Rationale and alternatives + +This is a temporary solution until the scan state storage can be +optimised to accommodate storing proofs more efficiently. Therefore +it is more important that it's simple and easy to implement than +that it solves the problem in a robust manner. Because the issue endangers +the whole network, some smaller drawbacks are acceptable as long as +the main issue is prevented from happening. + +An alternative would be to assign a more precise measurement of occupied +memory to each command and limit the total amount of memory +occupied by commands within a block. Better still, we could compute +the difference in memory occupied by the scan state before and after +each block and make sure it does not go above a certain limit. This +would, however, complicate the solution and require more time to +develop it, while it still wouldn't properly solve the problem. +Therefore we should strive for a quick solution which already improves +the situation and wait for the proper fix to come. + +## Prior art + +The problem of blockchain networks being unable to process incoming +transactions fast enough is a well-known one and there are several +techniques of dealing with it.
+ +One solution is to limit the block size (and hence indirectly the +number of transactions fitting in a single block). The most notable +example here is Bitcoin, which has a hard block size limit of 1MB. +This is often criticized for limiting the network's throughput +severely, but the restriction remains in place nonetheless, because +the consequences of lifting it would be even worse. + +Mina also has its own block size limit; however, the problem we are +dealing with here is different in that we've got two distinct +categories of commands, only one of which is affected. Unfortunately, +unless we move zkApp commands to a separate mempool, any limit set on +zkApp command throughput will also affect user commands by occupying +mempool space (see Drawbacks above). + +Another solution is more related to execution time, especially that of +smart contracts, which can (in principle) run indefinitely, and +there is no easy way of preventing this without significantly +hindering the expressiveness of a smart contract language +(due to the undecidability of the halting problem). Major blockchains like +Ethereum or Tezos, instead of limiting block size directly, restrict +the number of computational steps (defined by some VM model) necessary +to replay a block. A block which cannot be replayed in the specified +number of steps is automatically considered invalid. + +The operation's execution time is modelled with gas. Each atomic +computation is assigned a gas cost roughly proportional to the time +the VM takes to execute that computation. Simultaneously, a block +is given a hard gas limit and the total gas required by all the +transactions within the block must be below that limit. + +Translating this solution to the discussed problem would involve +modelling memory occupied by each operation in the scan state with +some measure (analogous to gas) and then limiting the maximum value +of operations (expressed in that measure) fitting in a block. This +is a more complex solution than the one proposed here and probably +requires significant time to devise the right model. It also wouldn't +remove the problem of zkApp commands stacking up in the mempool, +although it might make it less severe by setting a more fine-grained +limit. However, considering that it would still be a temporary +solution, it's probably not worth the effort. + +## Unresolved questions + +Are the drawbacks described in this document an acceptable trade-off +for preventing crashes due to out-of-memory issues? Is the +alternative, more fine-grained solution viable? + +## Testing + +The part of the code responsible for applying transactions from the +mempool to the ledger is not properly isolated from the surrounding +code, but it can be isolated and then unit-tested relatively easily. +This is being done in a loop, which gives us an opportunity to test +either any single step of that loop or the loop as a whole (ideally +both). In such tests the most important properties to check would +include: +- if the limit is disabled, zkApp commands are applied normally. +- no zkApp command is applied when the limit is reached. +- no transaction depending on a skipped zkApp command is ever applied. +- the list of applied transactions contains at most the limit of zkApp + commands. +- if there are fewer than the limit of zkApp commands in the mempool, more + signed commands can be applied instead. + +Additionally, an integration test checking inclusion of transactions +from the mempool in a newly created block could be written.
Such a +test should in particular ensure that the limit does not affect block +validation. Note that the limit can be changed dynamically, so we +can initialise a network with all nodes having the same settings and +then change it for some of them, thus examining different +configurations. diff --git a/rfcs/0055-stop-transaction-processing.md b/rfcs/0055-stop-transaction-processing.md new file mode 100644 index 00000000000..48f898f57d6 --- /dev/null +++ b/rfcs/0055-stop-transaction-processing.md @@ -0,0 +1,204 @@ +# Stop processing transactions / stop the network after a certain slot + +This PR describes the feature to stop processing transactions and to stop the +network after a certain slot, to be used in the Berkeley hard fork. + +## Summary + +Transactions come from a client or the gossip network and are processed by BPs +and SNARK workers to be included in blocks. These blocks are propagated through +the network and validated by other participants. + +In this RFC, the procedure to stop processing any new transactions and to +stop the network after a certain slot is described. This is, we define two +slots: the first one is the slot after which any blocks produced will include no +transaction at all and no fee payments, and the second one is the slot after +which no blocks are produced and blocks received are rejected. + +## Motivation + +In a hard fork scenario, we want to halt the preceding network and produce a new +genesis ledger for the succeeding network. This new genesis ledger should be +produced from a stabilised staged ledger from the preceding network. This is, we +define a point in time (slot) where the network continues to operate but with no +"activity". In detail, after this slot, the network continues to produce blocks +but without including any transactions, sets coinbase fees to zero, and ensures +there are no fees for snark work, by including no snark work in the block. This +will run for a certain number of slots, after which the network will stop +producing blocks. This will allow the network to stabilise and produce a new +genesis ledger from the last ledger produced by the network. + +This feature enables part of this procedure, by adding the definition of the +slots and the mechanisms to stop the node from processing transactions and to +stop the networks after those slots. + +## Detailed design + +The procedure to stop processing transactions and producing/validating empty +blocks after a certain slot will be as follows: + +* There will be a configuration parameter set at compile-time that will define + the slot at which the node will stop processing transactions. +* The previous configuration cannot be overridable at runtime, by design, as + this compromises the safety of the daemon software. +* The node (daemon) will stop accepting new transactions from clients after + the configured slot. +* After the configured slot, the block producer will stop including transactions + in blocks, as well as any snark work and coinbase fee will be set to zero. +* The block validator will reject blocks produced after the stop slot that + contain any transaction, snark work or a non-zero coinbase fee. +* The node should start notifying the user every 60 slots (3 hours) when transaction + processing halts in less than 480 slots (24 hours). + +To stop the network after a certain slot, the procedure will be as described +next: + +* There will be a configuration parameter set at compile-time that will define + the slot at which the node will stop the network. 
+* After the configured slot, the block producer will stop producing any blocks. +* The block validator will reject any blocks received after the stop network + slot. +* The node should start notifying the user every 60 slots (3 hours) when block + production/validation halts in less than 480 slots (24 hours). + +Each of these procedures will be described in detail in the following sections. + +### Compile-time configuration + +The configuration parameters `slot_tx_end` and `slot_chain_end` will be set at +compile-time and will define the slot at which the node will stop processing +transactions and the slot at which the network stops, respectively. These +configuration parameters will be optional and will default to `None`. If +`slot_tx_end` is set to `None`, the node will not stop processing transactions. +If `slot_chain_end` is set to `None`, the node will not stop producing or +validating blocks. + +### Client submits transaction + +When a client sends a transaction to the node daemon, the node will check if +the stop transaction slot configuration is set. If so, and the current global +slot is less than the configured stop slot, the transaction will be accepted by +the node and processed as usual. If the current global slot is equal to or greater +than the configured stop slot, the transaction will be rejected. The client will +be notified of the rejection and the reason why the transaction was rejected. +This improves UX by rejecting transactions that will not be included in the +ledger in the preceding network. + +This can be done by adding these checks and subsequent rejection messages to the +GraphQL functions that receive and submit user commands. + +### Block producer + +When the block producer is producing a block, it will check if the stop network +slot configuration is set. If so, and the current global slot is equal to or +greater than the configured stop slot, the block producer will not produce a +block. If the configured stop slot is not set or is greater than the current +global slot, the block producer will then check if the stop transaction slot +configuration is set. If so, and the current global slot is equal to or greater +than the configured stop slot, the block producer will produce a block without +any transactions or snark work and with a coinbase fee of zero. If the +configured stop slot is not set or is greater than the current global slot, the +block producer will produce blocks as usual. + +This can be done by adding these checks to the block production logic. First, +decide whether or not blocks should be produced: if the stop network slot is set +and the current global slot is equal to or greater than it, no block is produced. +If not, return an empty staged ledger diff instead of the +generated one whenever the stop transaction slot is defined and the current +global slot is equal to or greater than it, ultimately resulting in a block +produced with no transactions, no internal commands, no completed snark work, +and a coinbase fee of zero. When doing these checks, the node will also check +for the conditions to emit the info log messages at the timings and conditions +expressed earlier. + +### Block validator + +When the block validator is validating a block, it will check if the stop +network slot configuration is set. If so, and the current global slot is equal +to or greater than the configured stop slot, the block validator will reject the +block.
If the stop network slot is not set or is greater than the current global +slot, the block validator will then check if the stop transaction slot +configuration is set. If so, and the global slot at which the block was produced +is less than the configured stop slot, the block validator will validate the +block as usual. If the stop transaction slot configuration is set and the global +slot of the block is equal to or greater than it, the block validator will reject +blocks that define a staged ledger diff different from the empty one. + +This can be done by adding these checks to the transition handler logic. First, reject any blocks if the stop network slot value is set and the current global +slot is greater than it. Second, if the previous does not apply, check the +staged ledger diff of the transition against the empty staged ledger diff, +instead of doing the usual verification process, when the configured stop transaction +slot is defined and the global slot for which the block was produced is equal to or greater than it. When doing these checks, the node will also check for the +conditions to emit the info log messages at the timings and conditions expressed +earlier. + +## Test plan and functional requirements + +Integration tests will be added to test the behavior of the block producer and the +block validator. The following requirements should be tested: + +* Block producer + * When the stop network slot configuration is set to `None`, the block + producer should produce blocks. + * When the stop network slot configuration is set to `<slot>` and the current + global slot is less than `<slot>`, the block producer should produce blocks. + * When the stop network slot configuration is set to `<slot>` and the current + global slot is greater than or equal to `<slot>`, the block producer should not + produce blocks. + * When the stop transaction slot configuration is set to `None`, the block + producer processes transactions, snark work and coinbase fees as usual. + * When the stop transaction slot configuration is set to `<slot>` and the + current global slot is less than `<slot>`, the block producer processes + transactions, snark work and coinbase fees as usual. + * When the stop transaction slot configuration is set to `<slot>` and the + current global slot is greater than or equal to `<slot>`, the block producer + produces empty blocks (blocks with an empty staged ledger diff). +* Block validator + * When the stop network slot configuration is set to `None`, the block + validator validates blocks as usual. + * When the stop network slot configuration is set to `<slot>` and the current + global slot is less than `<slot>`, the block validator validates blocks as + usual. + * When the stop network slot configuration is set to `<slot>` and the current + global slot is greater than or equal to `<slot>`, the block validator rejects all + blocks. + * When the stop transaction slot configuration is set to `None`, the block + validator validates blocks as usual. + * When the stop transaction slot configuration is set to `<slot>` and the + global slot of the block is less than `<slot>`, the block validator + validates blocks as usual. + * When the stop transaction slot configuration is set to `<slot>` and the + global slot of the block is greater than or equal to `<slot>`, the block + validator rejects blocks that define a staged ledger diff different from + the empty one. +* Node/client + * When the stop transaction slot configuration is set to `None`, the node + processes transactions from clients as usual.
+ * When the stop transaction slot configuration is set to `<slot>` and the + current global slot is less than `<slot>`, the node processes transactions + from clients as usual. + * When the stop transaction slot configuration is set to `<slot>` and the + current global slot is greater than or equal to `<slot>`, the node rejects transactions from clients. + +## Drawbacks + +Non-patched nodes or nodes with the configuration overridden will still be able +to send transactions to the network. These transactions will be included in the +transaction pool but will not be processed by the patched block producers, +alongside other transactions that may have arrived at the transaction pool +before the configured stop slot but haven't been included in a block as of that +slot. This will result in a transaction pool that will not be emptied until the +network stops, and those transactions will not be included in the succeeding +network unless there's a mechanism to port them over to the new network to be +processed and included there. This might result in a bad UX, especially for users +who send transactions to the network before the configured stop slot, never see +them included in the ledger, and then see them disappear from the transaction pool +when the network restarts. +Moreover, non-patched nodes will produce and process transactions as usual after +the transaction stop slot, resulting in these nodes constantly attempting to fork. + +## Rationale and alternatives + +## Prior art + +## Unresolved questions diff --git a/rfcs/0052-hard-fork-data-migration.md b/rfcs/0056-hard-fork-data-migration.md similarity index 100% rename from rfcs/0052-hard-fork-data-migration.md rename to rfcs/0056-hard-fork-data-migration.md diff --git a/rfcs/0057-hardcap-zkapp-commands.md b/rfcs/0057-hardcap-zkapp-commands.md new file mode 100644 index 00000000000..8c8abb00d71 --- /dev/null +++ b/rfcs/0057-hardcap-zkapp-commands.md @@ -0,0 +1,11 @@ +## Summary +Blocks containing a large number of zkApp commands have caused memory issues in the ITN. A *soft* solution has already been released (see `rfcs/0054-limit-zkapp-cmds-per-block.md`) which causes a BP node to reject zkApp transactions from its block candidate that exceed a preconfigured limit (set on either start-up, or through an authenticated GraphQL endpoint). However, we wish for a *hard* solution that will cause a node to reject any incoming block that has zkApp commands which exceed the limit. + +## Motivation +Previously, there was a zkApp Softcap Limit that could be configured either on start-up of the mina node, or through an authenticated GraphQL endpoint. However, this is not safe enough as any block-producer running a node could just recompile the code and change the configuration, circumventing the zkApp command limit. Furthermore, the limit is *soft* in the sense that a mina node will still accept blocks which exceed the configured zkApp command limit. Therefore, another mechanism is required to ensure that any block producers who attempt to bypass the limit will not have their blocks accepted. + +## Detailed design +The limit should be specified in the runtime config for maintainability and ease of release. Unlike in the softcap case, the limit needs to be implemented at the block application level, rather than the block production level, as this change impacts non-BP mina nodes as well. One candidate for the location is the `create_diff` function in `staged_ledger.ml`. There is already a `Validate_and_apply_transactions` section in the function that could be co-opted.
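To sketch what this check could reduce to (the command type below is a simplified stand-in for Mina's `User_command.t`, and the function name is hypothetical), block application would count the zkApp commands in the incoming diff and reject the block over the limit:

```ocaml
(* Hypothetical sketch of the hardcap check at block application time.
   [limit] would come from the runtime config; the type is a placeholder
   for Mina's actual user command type. *)
type command = Signed_command | Zkapp_command

let check_zkapp_hardcap ~(limit : int) (commands : command list) :
    (unit, string) result =
  (* Count only the zkApp commands in the block's staged ledger diff. *)
  let zkapp_count =
    List.length (List.filter (fun c -> c = Zkapp_command) commands)
  in
  if zkapp_count <= limit then Ok ()
  else
    Error
      (Printf.sprintf
         "rejecting block: %d zkApp commands exceeds the hardcap of %d"
         zkapp_count limit)
```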
+ +## Testing +A simple test would be to run two nodes in a local network, with different configurations. Have the first node be a BP without this fix, and another be a non-BP node with this fix (set the limit to zero). Firing an excessive amount of zkApp command transactions at the BP node will cause it to produce a block which exceeds the zkApp command limit. Consequently, the non-BP node should stay constant at its initial block height. diff --git a/rfcs/0058-disable-zkapp-commands.md b/rfcs/0058-disable-zkapp-commands.md new file mode 100644 index 00000000000..922b6d0d73d --- /dev/null +++ b/rfcs/0058-disable-zkapp-commands.md @@ -0,0 +1,8 @@ +## Summary +*Soft* and *hard* limits for zkApp commands have already been implemented (see `rfcs/0054-limit-zkapp-cmds-per-block.md` and `0057-hardcap-zkapp-commands.md`). However, both of these changes still permit the inclusion of zkApp commands into the Mina node's mempool, and their dissemination via gossiping. If we wish to truly disable zkApp commands in the network then a more exhaustive exclusion is required. + +## Detailed design +The change should sit behind a compile-time flag (similar to the ITN `itn_features`). Changing [this code](https://github.com/MinaProtocol/mina/blob/03c403e2c1e57a36de4e5b92f75856c825cb7e7e/src/lib/mina_base/user_command.ml#L405) so that all zkApp commands are treated as malformed will prevent them from being added to the mempool. + +## Testing +The change can be tested by switching on the flag and firing zkApp commands at a node. The node should not accept any of the zkApp commands, nor should any be gossiped to other nodes in the network, which can be checked by querying the GraphQL endpoints. diff --git a/rfcs/0059-new-transaction-model.md b/rfcs/0059-new-transaction-model.md new file mode 100644 index 00000000000..ee154066f1f --- /dev/null +++ b/rfcs/0059-new-transaction-model.md @@ -0,0 +1,183 @@ +# Redesign of the transaction execution model + +## Summary + +This proposes refactoring the transaction execution model, primarily to make it easy to implement Snapps transactions involving an arbitrary number of parties without having circuits that verify corresponding numbers of proofs. I.e., this model will make it possible to, e.g., have a transaction involving 10 snapp accounts while only having a base transaction SNARK circuit that verifies a single proof. + +## Introduction + +Currently, the transaction execution part of the protocol can be thought of as running a state machine where the state consists of various things, roughly + +- current staged ledger +- a signed amount (the fee excess) +- the pending coinbase stack +- the next available token ID + +We propose extending this state to include an optional field called +`current_transaction_local_state` +consisting of +- a hash, called `id` +- a signed amount, called `excess` +- an optional token ID, called `current_token_id` +- a non-empty stack of "parties" (described below), called `remaining_parties` + +## Transactions + +Under this approach, a transaction would semantically be a sequence of "parties".
A "party" is a sequence of tuples `(authorization, predicate, update)` where + +- An authorization is one of + - a proof + - a signature + - nothing +- A predicate is an encoding of a function `Account.t -> bool` +- An update is an encoding of a function `Account.t -> Account.t` + +For example, a normal payment transaction from an account at nonce `nonce` for amount `amount` with fee `fee` would be (in pseduocaml) the sequence + +```ocaml +[ { authorization= Signature ... + ; predicate= (fun a -> a.nonce == nonce) + ; update= (fun a -> {a with balance= a.balance - (amount + fee)}) + } +; { authorization= Nothing + ; predicate= (fun _ -> true) + ; update= (fun a -> {a with balance= a.balance + amount}) + } +] +``` + +A token swap trading `n` of token `A` from `sender_A` for `m` of token `B` from `sender_B`, plus a fee payment of `fee` from `fee_payer` would look like +```ocaml +[ { authorization= Signature ... + ; predicate= (fun a -> a.nonce == nonce_fee_payer) + ; update= (fun a -> {a with balance= a.balance - fee}) + } +; { authorization= Signature ... + ; predicate= (fun a -> a.nonce == nonce_A) + ; update= (fun a -> {a with balance= a.balance - n}) + } +; { authorization= Nothing + ; predicate= (fun _ -> a.token_id == A && a.public_key == sender_B) + ; update= (fun a -> {a with balance= a.balance + n}) + } +; { authorization= Signature ... + ; predicate= (fun _ -> a.token_id == B) + ; update= (fun a -> {a with balance= a.balance - m}) + } +; { authorization= Nothing + ; predicate= (fun _ -> a.token_id == B && a.public_key == sender_A) + ; update= (fun a -> {a with balance= a.balance + m}) + } +] +``` + +The authorizations will be verified against the hash of the whole list of "parties". + +When actually broadcast, transactions would be in an elaborated form containing witness information needed to actually execute them (for example, the account_id of each party), rather than the mere functions that constrain their execution, but this information is not needed inside the SNARK. + +### How transaction execution would work semantically + +Currently, the transitions in our state machine are individual transactions. This proposes extending that with the transitions + +``` +type transition = + | Transaction of Transaction.t + | Step_or_start_party_sequence of step_or_start + +type step_or_start = + | Step + | Start of party list +``` + +It remains to explain how to execute a "party" as a state transition. +In pseudocaml/snarky, it will work as follows + +```ocaml +let apply_step_or_start + (e : execution_state) (instr : step_or_start) + : execution_state + = + let local_state = + match e.current_transaction_local_state, instr with + | None, Step + | Some _, Start _ -> assert false + | None, Start ps -> + {default_local_state with parties=ps; id= hash ps} + | Some s, Step -> s + in + let {authorization; predicate; update}, remaining = + Non_empty_list.uncons local_state.remaining_parties + in + let a, merkle_path = exists ~request:Party_account in + assert (implied_root a merkle_path e.ledger_hash = e.ledger_hash) ; + assert (verify_authorization authorization a s.id) ; + assert (verify_predicate predicate a) ; + assert (auth_sufficient_given_permissions a.permissions authorization update) ; + let fee_excess_change = + match s.current_token_id with + | None -> a.token_id + | Some curr -> + if curr == a.token_id + then Currency.Fee.zero + else ( + (* If we are switching tokens, then we cannot have printed money out of thin air. 
*) + assert (local_state.excess >= 0); + if curr == Token_id.default + then local_state.excess + else 0 (* burn the excess of this non-default token *) + ) + in + let a' = perform_update update a in + let excess = local_state.excess + (a.balance - a'.balance) in + let new_ledger_hash = implied_root a' merkle_path in + match remaining with + | [] -> + assert (excess >= 0); + let fee_excess_change = + fee_excess_change + + (if a.token_id == Token_id.default then excess else 0) + in + { e with current_transaction_local_state= None + ; ledger_hash= new_ledger_hash + ; fee_excess= e.fee_excess + fee_excess_change } + | _::_ -> + { e with current_transaction_local_state= + Some + { local_state with remaining_parties= remaining + ; excess } + ; ledger_hash= new_ledger_hash + ; fee_excess= e.fee_excess + fee_excess_change } +``` + +### How this would boil down into "base" transaction SNARKs +The idea would be to have 3 new base transaction SNARKs corresponding to the 3 forms of authentication. Each would implement the above `apply_step_or_start` function but with the `verify_authorization` specialized to either signature verification, SNARK verification, or nothing. + + +Under this model, executing a transaction works as follows. Let `t = [t1; t2; t3]` + +### Fees and proofs required + +Instead of 2 proofs per transaction as is required now, we will switch to 2 proofs per "party". + +Similarly, we should switch to ordering transactions in the transaction pool by `fee / number of parties`. + +## Benefits of this approach + +The main benefit of this approach is that we can have a small number of base circuits, each of which has at most one verifier inside of it, while still supporting transactions containing arbitrary numbers of parties and proofs. This enables such applications as multi-token swaps and snapp interactions involving arbitrarily many accounts. + +Another benefit is the simplified and unified implementation for transaction application logic (both inside and outside the SNARK). + +## Eliminating all other user-command types + +Ideally, eventually, for simplicity, we will replace the implementation of transaction logic and transaction SNARK for the existing "special case" transactions (of payments and stake delegations) with sequences of "parties" as above. We can still keep the special case variants in the transaction type if desired. + +If we do this in the most straightforward way, a payment would go from occupying one leaf in the scan state to either 2 or 3 (if there is a separate fee payer). However, the proofs corresponding to these leaves would be correspondingly simpler. That said, there probably would be some efficiency loss and so if we want to avoid that, we can make circuits that "unroll the loop" and execute several parties per circuit. + +Specifically, for any sequence of authorization types, we can make a corresponding circuit to execute a sequence of parties with those authorization types. For example, it might be worth having a special circuit for the authorization type sequence `[ Signature; None ]` for a simple payment transaction that executes one party with a Signature authorization (the sender), and then one with no authorization (the receiver). + +## Potential issues + +- Backwards compatibility + + Before changing the special case transactions into the above, we will have to make sure all signers are updated as the signed payload will change. +- Transaction pool sorting + + Currently, transactions in the transaction pool are sorted per sender by nonce.
If general sequences of parties are allowed as transactions, this will not work and we will have to figure out another way to order things. diff --git a/rfcs/0060-networking-refactor.md b/rfcs/0060-networking-refactor.md new file mode 100644 index 00000000000..e41e111e9e5 --- /dev/null +++ b/rfcs/0060-networking-refactor.md @@ -0,0 +1,372 @@ +# Mina Networking Layer Refactor + +## Summary +[summary]: #summary + +This RFC proposes a comprehensive refactor of how our libp2p helper and daemon processes interface. This document will cover a new IPC interface, a model for separation of concerns, and code abstraction details that should give us more performance options and flexibility as we continue to build on top of our existing gossip network implementation. + +NOTE: This RFC is kept abstract of IPC details related to moving towards bitswap. Additions to this IPC design will be discussed in a separate RFC for bitswap after this RFC is completed and agreed upon. + +## Motivation +[motivation]: #motivation + +Over the development lifecycle of Mina, we have migrated between various gossip network systems, and while we are now settled on libp2p as our gossip network toolkit, we are continuing to improve the way in which we use it by utilizing more features to optimize our network traffic and reliability. These improvements will bring even more changes in how our existing OCaml codebase will interact with our gossip network layer. However, at the moment, due to our regular migrations and changes to networking, our gossip network interface inside of OCaml is fractured into 3 layers. There is quite a bit of code that is entirely outdated. Furthermore, the protocol we use to communicate between our Go and OCaml processes has become rather muddy, and as we have learned more about the performance characteristics of the 2 processes, we have realized that we need to make some serious updates to this protocol in order to prevent it from being a bottleneck in our blockchain software. + +## Detailed design +[detailed-design]: #detailed-design + +In order to achieve our goals, this RFC introduces an updated libp2p helper IPC protocol and details the new abstraction/structure of the OCaml side networking interface. + +In service of removing our existing bottlenecks around the IPC protocol, we will be removing stream state awareness from the OCaml side of the code, preferring to have the Go helper process be the only one that dynamically manages streams (including opening, closing, and reusing multiplexed streams). In this world, the OCaml process will be fully abstracted to a request/response level of interacting with other peers on the network. + +We will also be moving the peer management logic outside of OCaml and into the Go helper process. This means the Go process is now responsible for seed management, trustlist/banlist management, and even peer selection for most requests. The OCaml process will still be the main director of peer scoring, but will no longer manage the state of peers itself (and some of the more basic peer scoring properties, such as overall message rate limiting, can just live on the Go side). There will still be edge cases in which the OCaml process will instruct the Go helper process to send requests to specific peers, but for all requests where the OCaml process does not need a specific peer to respond (e.g. bootstrap), the Go helper process will manage the selection logic for those peers.
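To illustrate the level of abstraction this leaves the daemon with, a daemon-side interface might look roughly like the signature below. All names here are illustrative, not the actual interface, and the real code would presumably return Async deferreds rather than the plain results used to keep this sketch dependency-free.

```ocaml
(* Hypothetical sketch of the daemon-side networking interface after the
   refactor: requests, responses and broadcasts only; no stream state and
   no peer bookkeeping, both of which are owned by the Go helper. *)
module type Gossip_net = sig
  type t
  type peer_id = string

  (* Fire-and-forget gossip; the helper handles topics and fan-out. *)
  val broadcast : t -> bytes -> unit

  (* RPC where the helper selects the responding peer (e.g. bootstrap). *)
  val query_any : t -> rpc:string -> bytes -> (bytes, string) result

  (* Edge case: the daemon can still target a specific peer. *)
  val query_peer :
    t -> peer:peer_id -> rpc:string -> bytes -> (bytes, string) result

  (* Peer scoring is still directed by the daemon, but peer state lives
     in the helper. *)
  val report_peer_score : t -> peer:peer_id -> delta:int -> unit
end
```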
+ +NOTE: The scope of the design discussed in this document is the full refactor, including parts which would require a hard fork to implement properly. There is a section at the end of the design section which details how we can develop this design in 2 stages such that we will be able to isolate the network-interface breaking changes that would need to be shipped in a hard fork. + +### Security + +Up front, let's identify the security aspects we are aiming to achieve in this RFC. This RFC will not cover security issues relating to rate limiting (which will be covered by the trust scoring RFC), nor issues relating to the maximum message size (which will be covered by the bitswap RFC). The main security goals we are considering in this RFC are focused on the IPC protocol between the daemon and helper processes. Specifically, the design proposed in this RFC intentionally avoids situations in which adversarial nodes could control the number of IPC messages sent between the daemon and the helper (independent of the number of messages sent over the network). In other words, this design is such that the number of IPC messages exchanged between the processes is O(1) in relation to the number of incoming network messages (this is not true of the existing design). In addition, this design limits synchronized state between the 2 processes, which prevents vulnerabilities in which an adversary may be able to desynchronize state between the daemon and helper processes. + +### IPC + +The new libp2p helper IPC protocol improves upon the previous design in a number of ways. It updates the serialization format so that there is no longer a need to base64 encode/decode messages on each side of the protocol, and it also replaces the singular bidirectional data stream over stdin/stdout with a system with multiple concurrent data streams between the two processes. In order to achieve the latter of these, the libp2p helper process will now need to be aware of message types for messages it receives over the network (see [#8725](https://github.com/MinaProtocol/mina/pull/8725)). + +#### Data Streams + +In order to facilitate staging this work into both soft-fork and hard-fork changesets, we will abstract over the concept of a "data stream" for any unidirectional stream of communication between the helper and daemon processes. In doing so, we can discuss the long-term communication architecture for the IPC interface, but be able to write the code in a way such that we can easily swap out this architecture. The code should be implemented such that it is easy to change which messages are expected and sent over which data streams without modifying the protocol logic itself. This allows us to implement a partial version of the architecture until we are able to take the hard-fork changes. Data streams should also be implemented abstractly from the transport mechanism, so that we can more easily consider upgrades to our transport layer in the future (such as supporting TCP sockets and remote helper processes). The remainder of this section will only focus on the long-term architecture (more details about how this work will be broken up and staged are available in the "Staging the Compatible and Hard Fork Changes" section of this RFC). + +#### Communication Architecture + +The helper and daemon will now exchange messages over a variety of data streams, allowing each process to prioritize data streams differently.
Correctly optimizing this prioritization on the daemon side is important, since OCaml is single threaded by nature (for now). In particular, the daemon needs to be able to prioritize processing and validating certain network messages in order to ensure that the node keeps in sync with the network and forwards relevant information for others to stay in sync. Specifically, the daemon needs to prioritize processing and validating new block gossips so that they can be forwarded to other nodes on the network in a timely manner. + +The transport layer we will use for these data streams will be Unix pipes. The parent daemon process can create all the required Unix pipes for the various data streams, and pass the correct file descriptors (either the write or read descriptors depending on the direction of the pipe) to the child helper process when it initializes. Pipes are considered preferable to shared memory for the data streams since they already provide synchronization primitives for reading/writing and are easier to implement correctly, though shared memory would likely be slightly more optimized. + +Below is a proposed set of data streams we would set up for the helper IPC protocol. Keep in mind that some of these pipes require some form of message type awareness in order to be implemented. We have ongoing work that adds message type awareness to the helper process, but this work requires a hard fork. If we want to split up message-specific pipes before a hard fork, we would need to add support for message peeking to the helper (which would involve making the helper aware of at least part of the encoding format for RPC messages). + +- stdin (used only for initialization message, then closed) +- stdout (used only for helper logging) +- stderr (used only for helper logging) +- stats\_in (publishes helper stats to daemon on an interval) +- block\_gossip\_in (incoming block gossip messages) +- mempool\_gossip\_in (other incoming gossip messages, related to mempool state) +- response\_in (incoming RPC responses) +- request\_in (incoming RPC requests) +- validation\_out (all validations except request validations, which are bundled with responses) +- response\_out (outgoing RPC responses) +- broadcast\_out (outgoing broadcast messages) + +The rough priorities for reading the incoming pipes from the daemon process would be: + +- block\_gossip\_in +- response\_in +- request\_in +- mempool\_gossip\_in + +NOTE: It is critical in the implementation of this that the prioritization scheme we choose here does not allow the mempool gossip pipe to be starved. The main thing to keep in mind to avoid this is to ensure that we do not over-weight reading the incoming requests, so that another node on the network cannot delay (or potentially censor) txns and snarks we are receiving over gossip. One approach towards this could be to limit the parallelism per pipe while keeping the maximum parallel messages we handle from IPC high enough such that we can always schedule new mempool gossip jobs even when there are a lot of requests. + +CONSIDER: Is it important that IPC messages include timestamps so that the daemon and helper processes can perform staleness checks as they read messages? For example: if we haven't read a mempool gossip in a while, and read one, discovering that the first message on the pipe is rather old, should we further prioritize this pipe for a bit until we catch up?
A potential risk of this system is that it would be hard to guarantee that none of the data streams is susceptible to starvation attacks. + +#### Serialization Format + +The new serialization format will be [Cap'N Proto](https://capnproto.org/). There are already [Go](https://github.com/capnproto/go-capnproto2) and [OCaml](https://github.com/capnproto/capnp-ocaml) libraries for the Cap'N Proto serialization format, which generate code for each language based on a common schema definition of the protocol. Using Cap'N Proto instead of JSON will allow us to embed raw binary data in our IPC messages, which will avoid the rather costly and constant base64 encoding/decoding we currently do for all binary data we transfer between the processes. It's possible to keep some pipes in JSON if preferable, but within the current plan, all messages would be converted over to Cap'N Proto to avoid having to support tooling for keeping both serialization formats in sync between the processes. + +NOTE: The [OCaml Cap'N Proto library](https://github.com/capnproto/capnp-ocaml) currently has an inefficient way of handling binary data embedded in Cap'N Proto messages. It uses `Bytes.t` as the backing type for the packed data, and `string` as the type for representing the unpacked data. @mrmr1993 pointed out in the RFC review that we would save 2 memory copies if we used `Bigstring.t` as the backing type for packed data, and slices of that `Bigstring.t` for the unpacked data. These changes are fairly straightforward to make, and can be done in a fork of the library we maintain. + +#### Entrypoints + +The new libp2p helper interface would support separate entrypoints for specific libp2p tasks, which will simplify some of the IPC interface by removing one-off RPC calls from OCaml to Go. Now, there will be 3 entrypoints, 2 of which will briefly run some computation and exit the process with a result over stdout, and the last of which starts the actual helper process we will use to connect to the network. These interfaces will be accessed directly via CLI arguments rather than being triggered by IPC messages. In other words, the IPC system is only active when the helper is run in `gossip_network` mode. + +The supported entrypoints will be: +- `generate_keypair` +- `validate_keypair --keypair={keypair}` +- `gossip_network` + +#### Protocol + +When the libp2p helper process is first started by the daemon (in `gossip_network` mode), the daemon will send an `init(config Config)` message, which is written once over stdin. The information sent in this message could theoretically be passed via the CLI arguments, but doing this would lose some type safety, so we prefer to send this data over as an IPC message. Once this message has been received by the helper process, the helper process will open ports, join the network, and begin participating in the main protocol loop. In this main protocol loop, either process should expect to receive any IPC messages over any data streams at any time.
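+ +As a rough illustration of this initialization flow, here is a minimal OCaml sketch of how the daemon side could create the pipes and spawn the helper; the stream names, fd-passing convention, and process-spawning details are illustrative assumptions, not the final interface: + +```ocaml +(* Hypothetical sketch: the daemon creates one Unix pipe per data stream, + keeps its own end of each, and lets the helper inherit the other end. + How the helper learns which inherited fd is which (a fixed ordering, or + fd numbers included in the init message) is left open here, and writing + the init message over the child's stdin is elided. *) +let spawn_helper () = + (* helper -> daemon stream: the daemon keeps the read end *) + let block_gossip_read, block_gossip_write = Unix.pipe () in + (* daemon -> helper stream: the daemon keeps the write end *) + let validation_read, validation_write = Unix.pipe () in + (* the helper's ends must survive exec *) + List.iter Unix.clear_close_on_exec [ block_gossip_write; validation_read ] ; + let pid = + Unix.create_process "libp2p_helper" + [| "libp2p_helper"; "gossip_network" |] + Unix.stdin Unix.stdout Unix.stderr + in + (* the daemon closes the ends it handed off to the helper *) + Unix.close block_gossip_write ; + Unix.close validation_read ; + (pid, block_gossip_read, validation_write) +```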
+ +Here is a list of the IPC messages that will be supported in each direction, along with some relevant type definitions in Go: + +```txt +Daemon -> Helper + // it's possible to just remove this message as it is just a specialized case of `sendRequests` + sendRequestToPeer(requestId RequestId, to Peer, msgType MsgType, rawData []byte) + sendRequests(requestId RequestId, to AbstractPeerGraph, msgType MsgType, rawData []byte) + sendResponse(requestId RequestId, status ValidationStatus, rawData []byte) + broadcast(msgType MsgType, rawData []byte) + validate(validation ValidationHandle, status ValidationStatus) + +Helper -> Daemon + handleRequest(requestId RequestId, from Peer, rawData []byte) + handleResponse(requestId RequestId, validation ValidationHandle, rawData []byte) + handleGossip(from Peer, validation ValidationHandle, rawData []byte) + stats(stats Stats) +``` + +```go +// == The `Config` struct is sent with the `init` message at the start of the protocol. + +// The following old fields have been completely removed: +// - `metricsPort` (moving over to push-based stats syncing, where we will sync any metrics we want to expose) +// - `unsafeNotTrustIp` (only used as a hack in old integration test framework; having it makes p2p code harder to reason about) +// - `gaterConfig` (moving towards more abstracted interface in which Go manages gating state data) +type Config struct { + networkId string // unique network identifier + privateKey string // libp2p id private key + stateDirectory string // directory to store state in (peerstore and dht will be stored/loaded from here) + listenOn []Multiaddr // interfaces we listen on + externalAddr Multiaddr // interface we advertise for other nodes to connect to + floodGossip bool // enables gossip flooding (should only be turned on for protected nodes hidden behind a sentry node) + directPeers []Multiaddr // forces the node to maintain connections with peers in this list (typically only used for sentry node setups and other specific networking scenarios; these peers are automatically trustlisted) + seedPeers []Multiaddr // list of seed peers to connect to initially (seeds are automatically trustlisted) + maxConnections int // maximum number of connections allowed before the connection manager begins trimming open connections + validationQueueSize int // size of the queue of active pending validation messages + // TODO: peerExchange bool vs minaPeerExchange bool (seems like at least one of these should be deprecated) + // - peerExchange == enable libp2p's concept of peer exchange in the pubsub options + // - minaPeerExchange == write random peers to connecting peers +} + +// == The `Stats` struct is sent on an interval via the `stats` message. +// == It contains metrics and statistics relevant to the helper process, +// == to be further exposed by the daemon as prometheus metrics. + +type MinMaxAvg struct { + min float64 + max float64 + avg float64 +} + +type InOut struct { + in float64 + out float64 +} + +type Stats struct { + storedPeerCount int + connectedPeerCount int + messageSize MinMaxAvg + latency MinMaxAvg + totalBandwidthUsage InOut + totalBandwidthRate InOut +} + +// == A `ValidationStatus` notifies the helper process of whether or not +// == a message was valid (or relevant). 
+ +type ValidationStatus int +const ( + VALIDATION_ACCEPT ValidationStatus = iota + VALIDATION_REJECT + VALIDATION_IGNORE +) + +// == These types define the concept of an `AbstractPeerGraph`, which +// == describes a peer traversal algorithm for the helper to perform +// == when finding a successful response to an RPC query. + +// Alternative (safer) representations are possible, but this is +// simplest to encode in the Cap'N Proto shared schema language. + +// Each node of the graph either allows any peer to query, or it +// identifies a specific node to query. +type AbstractPeerType int +const ( + ANY_PEER AbstractPeerType = iota + SPECIFIC_PEER +) + +// This is essentially an ADT, but we cannot encode an ADT directly +// in Go (though we can use tagged unions when we describe this in +// Cap'N Proto). An equivalent ADT definition would be: +// type abstract_peer_node = +// | AnyPeer +// | SpecificPeer of Peer +type AbstractPeerNode struct { + typ AbstractPeerType + peer Peer // nil unless typ == SPECIFIC_PEER +} + +type AbstractPeerEdge struct { + src int + dst int +} + +// A graph is interpreted by starting (in parallel) at the source +// nodes. When a node is interpreted, a request is sent to the peer +// identified by the node. If the request for a node fails, then the +// algorithm begins interpreting the successors of that node (also in +// parallel). Interpretation halts when either a single request is +// successful, or all requests fail after traversing the entire graph. +type AbstractPeerGraph struct { + sources []int + nodes []AbstractPeerNode + edges []AbstractPeerEdge +} +``` + +#### Query Control Flow + +In contrast to the prior implementation, the query control flow in the new protocol always follows this pattern: + +1) The daemon sends 1 message to the helper to begin the query (this message may instruct the helper to begin sending out 1 or more requests, with control over maximum parallelism). +2) The helper continuously and concurrently runs the following protocol until a successful response is found: + 2.a) The helper picks a peer it has not already queried based on the daemon's request and sends a request to this peer. + 2.b) The helper streams the response back to the daemon. + 2.c) The daemon sends a validation callback to the helper. + +Keeping the peer selection logic on the helper side allows the daemon to avoid asking the helper for peers before it sends the request. Since the trust scoring state is also already on the helper process, the helper can also select peers based on their score (more details on this to come in the trust scoring RFC). The daemon can still instruct the helper process to query specific peers, in which case the daemon will already know of the specific peer and will not need to ask the helper for any additional information. + +#### Validation Control Flow + +The daemon has to validate incoming network messages of all types (gossip, requests, responses). As such, each IPC message from the helper process that is communicating an incoming gossip message or response includes a `ValidationHandle`, and the daemon is expected to send a `validate` message back to the helper to acknowledge the network message with a `ValidationStatus`. Incoming RPC requests are a special case, however. Since the daemon will already send a message back to the helper in the form of a `response` to the incoming RPC request, the `ValidationStatus` is provided there instead.
In this case, a specific `ValidationHandle` is not required, since the `RequestId` already uniquely associates the response with the request being validated. + +In summary, the new validation control flow is: +- gossip and response validation + - `handle{Gossip,Response}` message is sent to daemon + - `validate` is sent back to helper +- request validation + - `handleRequest` message is sent to daemon + - `sendResponse` message is sent back to helper, which contains both the response and validation state + +### Staging the Compatible and Hard Fork Changes + +Some of the key changes proposed in this RFC require a hard fork in order to be released to the network. However, the next hard fork may be a while away. We could just implement this work off of `develop` and wait until the next hard fork to ship it, but this would mean that any immediate improvements we make to the networking code on `compatible` will conflict with our refactor work on `develop`. Overall, it is still beneficial to have this refactor on `compatible` so that we can benefit from it immediately in the soft fork world while keeping the code more or less in line with the future hard fork we want to take. + +Accordingly, in order to break this work up, we can do this refactor in 2 passes: first, perform the main module and organization refactor off of `compatible`, then once that is complete and merged, perform the hard-fork-specific refactor off of `develop`. The `compatible` portion of the refactor can include all changes that do not affect or rely on changes to messages sent over the gossip network. Below is a detailed list of what would be included in each phase of the refactor. + +- `compatible` + - Transport layer abstraction + - OCaml module consolidation w/ new interface + - Daemon/Libp2p protocol refactor (peer abstraction et al) + - Validation, Response, Gossip, and Request pipe split +- `develop` + - Message type awareness + - Message type based pipe split + - Trust scoring based peer-selection (requires trust system) + +### OCaml Implementation + +Structure-wise, the OCaml implementation will continue to model the network interface abstractly so that a dummy implementation may be used for unit testing purposes. We will continue to have an [interface](../src/lib/gossip_net/intf.ml) along with an [existential wrapper](../src/lib/gossip_net/any.ml) that provides indirection over the selected gossip network implementation. A [stub implementation](../src/lib/gossip_net/fake.ml) will also continue to exist. + +In order to maintain reasonable separation of concerns, the libp2p helper implementation of the gossip network interface will be split into 2 main modules. +- `Libp2p` :: direct low-level access to `libp2p_helper` process management and protocol +- `Mina_net` :: high-level networking interface which defines the supported RPCs and publicly exposes networking functionality to the rest of the code + +The RPC interface would continue to be defined under the [current GADT based setup](../src/lib/mina_networking/mina_networking.ml). This type setup will also be extended so that `Rpc.implementation` +modules can be passed in when the gossip network subsystem is initialized. This will be an improvement to the current system in which ad-hoc functions are defined at the [Mina_lib](../src/lib/mina_lib/mina_lib.ml) layer.
This module-based approach will also provide a mechanism through which we can define global validation logic for RPC query responses that will automatically be applied to all RPC queries of that type. RPC queries will still be able to provide their own per-request validation logic in addition to this. + +Below is an example of what the new gossip network interface would look like from the perspective of the rest of the daemon code. Note that it is much more abstract than before, modeling our new design choices regarding migrating state from OCaml to Go. + +```ocaml +module Mina_net : sig + module Config : sig + type t = (* omitted *) + end + + module Gossip_pipes : sig + type t = + { blocks: External_transition.t Strict_pipe.Reader.t + ; txn_pool_diffs: Transaction_pool.Diff.t Strict_pipe.Reader.t + ; snark_pool_diffs: Snark_pool.Diff.t Strict_pipe.Reader.t } + end + + module Stats : sig + type t = (* omitted *) + end + + module Abstract_peer_graph = + Graph.Persistent.Digraph.ConcreteBidirectional (struct + type t = + | AnyPeer + | SpecificPeer of Peer.t + [@@deriving equal, hash] + end) + + type t + + (* We can construct the gossip network subsystem using the configuration, + * the set of RPC implementations, and a state which is shared with the + * handlers of the provided RPC implementations. Once constructed, the + * gossip network handle will be returned along with pipes for reading + * incoming gossip network messages. *) + val create : + Config.t + -> ('state Rpc_intf.t_with_implementation) list + -> 'state + -> (t * Gossip_pipes.t) Deferred.Or_error.t + + val stats : t -> Stats.t + + (* Query operations now have the ability to express additional validation + * logic on a per-request basis, in addition to the RPC-wide validation + * logic that is defined with the RPC implementation. *) + val query_peer : + t + -> Peer.t + -> ('q, 'r) Rpc_intf.rpc + -> 'q + -> ?f:('r Envelope.Incoming.t Deferred.t -> Validation_status.t Deferred.t) + -> 'r Envelope.Incoming.t Deferred.Or_error.t + + val query_peers : + t + -> Abstract_peer_graph.t + -> ('q, 'r) Rpc_intf.rpc + -> 'q + -> ?f:('r Envelope.Incoming.t Deferred.t -> Validation_status.t Deferred.t) + -> 'r Envelope.Incoming.t Deferred.Or_error.t + + val broadcast : t -> Gossip_message.t -> unit Deferred.t + + val ban_peer : t -> Peer.t -> unit Deferred.t +end +``` + +### Go Implementation + +The Go implementation will be fairly similar to how it's structured today. The scope of the state it maintains is more or less the same, and the biggest changes introduced in this RFC affect the OCaml code more. The main work in Go will just be switching it to use Cap'N Proto and the new message format instead of the old one. + +One notable change that can be made is that, since we are moving to a push-based model for libp2p helper metrics, we no longer need to host a prometheus server from Go. However, we will still want the ability to optionally host an http server that exposes the [pprof](https://golang.org/pkg/net/http/pprof/) debugger interface, which we currently support in the metrics server we run. + +## Execution +[execution]: #execution + +In order to execute on this refactor in a fashion where we can make incremental improvements on the networking layer, we will break the work up as follows: + +1. Migrate existing IPC messages to Cap'N Proto. +2. Migrate to Unix pipes; split data streams up, except for per-gossip message data streams (which requires message type awareness). +3. Migrate IPC messages to new protocol design. +4.
Add message type awareness, and split up per-gossip message data streams. + +## Test Plan +[test-plan]: #test-plan + +In order to test this thoroughly, we need to run the software in a realistic networking scenario and exercise all IPC messages. This would involve connecting a node running this upgrade to a testnet, and monitoring the types of IPC messages we transmit while the node is running to ensure we hit them all. We would want to run this on a block producer with some stake, a snark coordinator, and some node that we send transactions through so that we properly test the broadcast logic. Additionally, we should exercise some bans in order to verify that our gating reconfiguration logic works as expected. Otherwise, we will use the monitoring output to inform us of any missed surface area in testing. + +## Drawbacks +[drawbacks]: #drawbacks + +- a refactor of this scope will take some time to test (given historical context for libp2p work, this could be significant) + - COUNTER: we will have to do something like this eventually anyway, better to do it now than later + +## Rationale and alternatives +[rationale-and-alternatives]: #rationale-and-alternatives + +- instead of refactoring our current go process integration, we could replace our go helper with a rust process now that there is better libp2p support in rust + - would alleviate us of our current go issues, and move to a better language that more people on the team know and can contribute to + - certainly fewer bugs, but certainly harder to build + - this would be a lot more work and would likely take even longer to test + - more risk associated with this route +- [ZMQ](https://zeromq.org/) could be an alternative for bounded-queue IPC + - benchmarks seem promising, but more research needs to be done +- Unix sockets could be an alternative to Unix pipes + - has the advantage that we can move processes across devices and the IPC will still work + - more overhead than Unix pipes + - with the data stream generalization, we can always swap this in if and when we decide to move processes around +- [flatbuffers](https://google.github.io/flatbuffers/) could be an alternative serialization format (with some advantages and tradeoffs vs Cap'N Proto) + - there are no existing OCaml libraries for this + +## Unresolved questions +[unresolved-questions]: #unresolved-questions + +- how should reconfiguration work? we currently support that, but should we just restart the helper process instead? diff --git a/rfcs/0061-solidity-snapps.md b/rfcs/0061-solidity-snapps.md new file mode 100644 index 00000000000..9993e885021 --- /dev/null +++ b/rfcs/0061-solidity-snapps.md @@ -0,0 +1,324 @@ +# Overview of solidity features for snapps + +This document aims to examine the features of the solidity smart contract +language, to describe how they can be simulated by snapps, and to propose +changes to the snapp transaction model for those that it currently cannot +simulate. + +This document refers to the features of v0.8.5 of the solidity language, and +makes reference to the snapp parties transaction RFC at MinaProtocol/mina#8068 +(version 95e148b4eef01c6104de21e4c6c7c7465536b9d8 at time of writing). + +## Basic features + +### State variables + +Solidity uses [state variables](https://docs.soliditylang.org/en/v0.8.5/structure-of-a-contract.html#state-variables) +to manage the internal state of a contract.
We intend to simulate this state +with a 'snapp state' formed of [8 field elements](https://github.com/MinaProtocol/mina/blob/b137fbd750d9de1b5dfe009c12de134de0eb7200/src/lib/mina_base/snapp_state.ml#L17). +Where the state holds more data than will fit in 8 field elements, we can +simulate this larger storage by holding a hash of some of these variables in +place of their contents. + +In solidity, 'public' variables can be referenced by other contracts via a +function. We propose using the same method for snapps; see the function section +below for details. + +#### Off-chain storage and snapp state accessibility + +When the variables do not fit within the field elements, the data for the snapp +will not be directly available on-chain, and must be computed or retrieved from +some off-chain source. It is important to provide primitives for revealing +the updated states; otherwise, updating a snapp's state may only reveal a hash, +and the new underlying data may be rendered inaccessible. + +To this end, it may be useful to add support to IPFS for the poseidon hash used +by mina, so that this data can be stored (ephemerally) in IPFS. We will also +discuss a proposal to expose data for state transitions as 'events' associated +with the parties; see the events section below for details. + +### Functions + +[Functions](https://docs.soliditylang.org/en/v0.8.5/structure-of-a-contract.html#functions) +are the primary interface of solidity contracts; in order to interact +with a smart contract, you submit a transaction that calls one of the functions +the contract exposes. These may call other functions from the same contract or +from other contracts. + +We propose simulating functions with snark proofs, where each function call +corresponds to a single snark proof. Our current snark model uses a 'wrapping' +primitive, which allows a single 'verification key' to verify wrapped proofs +witnessing one (or more) of several different 'circuits' (here, function +declarations). Function calls against different snapps require separate +'parties', although multiple calls to functions in the same snapp may be merged +into a single proof and issued as a single 'party' (either by proof composition +or inlining, depending on the use case). + +#### Arguments and returned values + +In order to simulate calling functions with arguments, and returning values +from functions, snapp parties must be able to expose some 'witness' to these +values. The format is determined by the circuit statement, but usually this +will be `hash(arguments, returned_values)`. + +*This is currently not supported by the snapp transaction model RFC.* + +**Proposal:** add an additional field element (`aux_data`) that is passed as +part of the input to the snapp proof, which may be used to bind the function's +input and returned values. + +#### Function calls between snapps + +In order for a snapp to verify that function calls are executed, snapp proofs +must be able to interrogate the other parties included in a transaction. The +current RFC doesn't identify what the proof inputs should be, but describes a +stack of parties (`parties`) and the current state of the stack when a +transaction is reached in the stack (`remaining_parties`). + +**Proposal:** pass `parties`, the stack of parties, as part of the snapp input. + +We should also consider nested function calls, each of which may result in one +or more parties (e.g. to pay one or more receivers, or to make further +function calls).
We can make these conceptually simpler and more composable by +grouping the transactions nested below a party's snapp together, in a hierarchy +of snapps. This will be particularly helpful for snapps which make recursive +calls or deeply nested calls, by letting them avoid walking arbitrarily far +along the stack of parties to find the one they care about. + +**Proposal:** use a stack of stacks for the parties involved in a transaction, +allowing each snapp to access its inner transactions by examining its stack. +For example, a snapp which calls other snapps might have a stack that looks +like +```ocaml +[ transfer_for_fee +; [ snapp1 + ; transfer1 (* Sent by snapp1 *) + ; [ snapp2 (* Called by snapp1 *) + ; transfer2 (* Sent by snapp2 *) + ; [snapp3] ] (* Called by snapp2 *) + ; transfer3 (* Sent by snapp1 *) + ; [snapp4] ] ] (* Called by snapp1 *) +``` +Concretely, this allows snapp1 to access `transfer3` and `snapp4` without +needing to know or care about the transfers and snapps executed by `snapp2`. +In the implementation, this could look something like: +```ocaml +let get_next_party + current_stack (* The stack for the most recent snapp *) + call_stack (* The partially-completed parent stacks *) + = + let next_stack, next_call_stack = + if call_stack = empty_stack then + empty_stack, empty_stack + else + call_stack.pop() + in + (* If the current stack is complete, 'return' to the previous + partially-completed one. + *) + let current_stack, call_stack = + if current_stack = empty_stack then + next_stack, next_call_stack + else + current_stack, call_stack + in + let stack_or_party, next_stack = current_stack.pop() in + let party, remaining_stack = + let stack = + if stack_or_party.is_party() then + (* dummy value for circuit *) + current_stack + else + stack_or_party.as_stack() + in + let popped_value, remaining_stack = stack.pop() in + if stack_or_party.is_party() then + stack_or_party.as_party(), empty_stack + else + popped_value, remaining_stack + in + let party, current_stack, next_stack = + if remaining_stack = empty_stack then + party, next_stack, empty_stack + else + party, remaining_stack, next_stack + in + let call_stack = + if next_stack = empty_stack then call_stack + else call_stack.push(next_stack) + in + party, current_stack, call_stack +``` + +This increases the number of stack operations per party from 1 to 4. + +### Function modifiers + +[Function modifiers](https://docs.soliditylang.org/en/v0.8.5/structure-of-a-contract.html#function-modifiers) +are a language level feature of solidity, and exist solely to avoid unnecessary +function calls. This requires no features at the transaction model level. + +### Events + +[Events](https://docs.soliditylang.org/en/v0.8.5/structure-of-a-contract.html#events) +in solidity are emitted by a smart contract, but are not available for use by +the contracts themselves. They are used to signal state transitions or other +information about the contract, and can be used to expose information without +the need to replay all past contract executions to discover the current state. + +*This is currently not supported by the snapp transaction model RFC.* + +**Proposal:** add an additional field to each party that contains a list of +events generated by executing a snapp, or none if it is a non-snapp party. This +event stack should be passed as part of the input to the snapp, as the output +of hash-consing the list in reverse order. (*TODO-protocol-team: decide on the +maximum number / how the number affects the txn fee / etc.
to avoid abuse.*) + +#### Exposing internal state variables + +As mentioned in the 'state variables' section above, the contents of a snapp's +internal state become unavailable on-chain if that state is larger than the +available 8 field elements. Events give us the opportunity to re-expose this +data on chain, by e.g. emitting a `Set_x(1)` event when updating the value of +the internal variable `x` to `1`, so that the current state of the snapp can be +recovered without off-chain communication with the previous party sending the +snapp. + +This is likely to be an important feature: it's not possible to execute a snapp +without knowing the internal state, and this appears to be the easiest and most +reliable way to ensure that it is available. Without such support, it's +possible and relatively likely for a snapp's state to become unknown / +unavailable, effectively locking the snapp. + +### Errors + +Solidity +[errors](https://docs.soliditylang.org/en/v0.8.5/structure-of-a-contract.html#errors) +are triggered by a `revert` statement, and are able to carry additional +metadata. + +In the current snapp transaction RFC, this matches the behaviour of invalid +proofs, where the errors correspond to an unsatisfiable statement for a +circuit. In this model, we lose the ability to expose the additional metadata +on-chain; however, execution is not on-chain, so the relevant error metadata +can be exposed at proof-generation time instead. + +### Struct and enum types + +[Struct types](https://docs.soliditylang.org/en/v0.8.5/structure-of-a-contract.html#struct-types) +are a language feature of solidity, which is already supported by snarky. +[Enum types](https://docs.soliditylang.org/en/v0.8.5/structure-of-a-contract.html#enum-types) +have also had long-lived support in snarky, although they have seen little use +in practice. + +## Types + +### Value types + +All of the +[value types](https://docs.soliditylang.org/en/v0.8.5/types.html#value-types) +supported by solidity are also supported by snarky. + +### Reference types + +[Reference types](https://docs.soliditylang.org/en/v0.8.5/types.html#reference-types) +in solidity refer to blocks of memory. These don't have a direct analog in +snarks, but can be simulated -- albeit at a much higher computational cost -- +by cryptographic primitives. Many of these primitives are already implemented +in snarky. + +### Mapping types + +[Mapping types](https://docs.soliditylang.org/en/v0.8.5/types.html#mapping-types) +are similar to reference types, but associate some 'key' value with each data +entry. This can be implemented naively on top of a primitive for +`[(key, value)]`, an array of key-value pairs, which is already available in +snarky. + +We currently do not support a primitive for de-duplication: a key may appear +multiple times in a `[(key, value)]` implementation and the prover for a snapp +could choose any of the values associated with the given key in a particular +map. This will require some research into the efficiency of the different +primitives available for this, but has no impact upon the transaction model.
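+ +To make the de-duplication concern concrete, here is a small sketch in plain OCaml (not the snarky API) of the naive `[(key, value)]` encoding; the types are illustrative: + +```ocaml +(* With a plain association list, nothing enforces that a key appears at + most once, so a prover free to choose its own witness could justify + either binding below. *) +let lookup (entries : (int * int) list) (key : int) : int option = + List.find_map (fun (k, v) -> if k = key then Some v else None) entries + +let () = + let m = [ (1, 10); (1, 20) ] in + (* [lookup] happens to return the first binding, but a prover could + equally witness the second value for the same key. *) + assert (lookup m 1 = Some 10) +```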
+ +## Standard library + +### Block / transaction properties + +The +[block and transaction properties](https://docs.soliditylang.org/en/v0.8.5/units-and-global-variables.html#block-and-transaction-properties) +available in solidity are, at time of writing: +* `blockhash(uint blockNumber)` + - Can be easily supported using the protocol state hash, although the timing + is tight for successful execution (receive a block, create the snapp proof, + send it, have it included in the next block). + - **Proposal:** support the previous `protocol_state_hash` in snapp + predicates. +* `block.chainid` + - **Proposal:** expose the chain ID in the genesis constants, allow it to be + used in the snapp predicate. +* `block.coinbase` + - Snapp proofs are generated before the block producer is known. Not possible + to support. +* `block.difficulty` + - Not applicable, block difficulty is fixed. +* `block.gaslimit` + - Not applicable, we don't have a gas model. +* `block.number` + - Available by using `blockchain_length` in the snapp predicate. +* `block.timestamp` + - Available by using `timestamp` in the snapp predicate. +* `gasleft()` + - Not applicable, we don't have a gas model. +* `msg.data` + - Available as part of the snapp input. +* `msg.sender` + - **Proposal:** Expose the party at the head of the parent stack as part of + the snapp input. +* `msg.sig` + - As above. +* `msg.value` + - As above. +* `tx.gasprice` + - Not applicable, we don't have a gas model. +* `tx.origin` + - Can be retrieved from the `parties` stack of parties. May be one or more + parties, depending on the structure of the transaction. + +### Account look-ups + +Solidity supports the following +[accessors on addresses](https://docs.soliditylang.org/en/v0.8.5/units-and-global-variables.html#members-of-address-types): +* `balance` + - Could use the `staged_ledger_hash` snapp predicate (currently disabled) and + perform a merkle lookup. However, this doesn't account for changes made by + previous parties in the same transaction, or previous transactions in the + block. + - **Proposal:** Add a 'lookup' transaction kind that returns the state of an + account using `aux_data`, with filters to select only the relevant data. + Snapps can then 'call' this by including this party as one of their + parties. + - Note: using this will make the snapp transaction fail if the balance of the + account differs from the one used to build the proof at proving time. +* `code`, `codehash` + - Snapp equivalent is the `verification_key`. + - Options are as above for `balance`. If this key is for one of the + snapp-permissioned parties, the key can be assumed to be statically known, + since their snapp proof will be rejected and the transaction reverted if + the key has changed. +* `transfer(uint256 amount)` + - Executed by including a transfer to the party as part of the snapp's party + stack. +* `call`, `delegatecall`, `staticcall` + - Executed by including a snapp party as part of the snapp's party stack. + +### Contract-specific functions + +Solidity supports +[reference to `this` and a `selfdestruct` call](https://docs.soliditylang.org/en/v0.8.5/units-and-global-variables.html#members-of-address-types). + +We can support `this` by checking the address of the party for a particular +snapp proof, or by otherwise including its address in the snapp input. + +We currently do not support account deletion, so it is not possible to +implement an equivalent to `selfdestruct`.
diff --git a/rfcs/0062-bitswap.md b/rfcs/0062-bitswap.md new file mode 100644 index 00000000000..d4a0f4209f2 --- /dev/null +++ b/rfcs/0062-bitswap.md @@ -0,0 +1,281 @@ +# Summary +[summary]: #summary + +This RFC proposes adding Bitswap to our libp2p networking stack in order to address issues related to our current gossip network pub/sub layer. + +# Motivation +[motivation]: #motivation + +Mina has very large messages that are broadcast over the gossip net pub/sub layer. This incurs a high bandwidth cost due to the way our pub/sub rebroadcast cycles work in order to consistently broadcast messages throughout the network. For example, we observe blocks on mainnet as large as ~2mb. This would represent only a single block, and each block broadcast message has a multiplicative cost on bandwidth as it's being broadcast throughout the network. This bandwidth cost also translates into CPU cost due to the cost of hashing incoming messages to check against the de-duplication cache before processing them. We currently observe behaviors where the libp2p helper process can be pegged at 100% CPU on certain hardware setups when there is high pub/sub throughput on the network. Since gossip pub/sub is used not only for blocks, but also transactions and snark work, the broadcasting of each of these simultaneously ends up compounding the issue. + +Implementing Bitswap in our libp2p layer will address this issue by allowing us to immediately reduce our pub/sub message size, while making the larger data referenced by pub/sub messages available upon request. It provides a mechanism for breaking up large data into chunks that can be distributed throughout the network (streamed back from multiple peers), and a system for finding peers on the network who are able to serve that data. + +# Detailed design +[detailed-design]: #detailed-design + +[Bitswap](https://docs.ipfs.io/concepts/bitswap/) is a module provided by [libp2p](https://libp2p.io/) that enables distributed data synchronization over a p2p network, somewhat comparable to how [BitTorrent](https://en.wikipedia.org/wiki/BitTorrent) works. It works by splitting up data into chunks called blocks (we will explicitly refer to these as "Bitswap blocks" to disambiguate them from "blockchain blocks"), which are structured into a DAG with a single root. When a node on the network wants to download some data, it asks its peers to see which (if any) have the root Bitswap block corresponding to that data. If none of the peers have the data, it falls back to querying the gossip network's [DHT](https://docs.ipfs.io/concepts/dht/#kademlia) to find a suitable node that can serve the data. + +In this design, we will lay out an architecture to support Bitswap in our Mina implementation, along with a strategy for migrating Mina blocks into Bitswap to reduce current gossip pub/sub pressure. We limit the scope of migrating Mina data to Bitswap only to blocks for the context of this RFC, but in the future, we will also investigate moving snark work, transactions, and ledger data into Bitswap. Snark work and transactions will likely be modeled similarly to Mina blocks with respect to Bitswap, but ledger data will require some special thought since its Bitswap block representation will have overlapping Bitswap blocks across different ledgers. + +## Bitswap Block Format + +Bitswap blocks are chunks of arbitrary binary data which are content addressed by [IPFS CIDs](https://docs.ipfs.io/concepts/content-addressing/#cid-conversion).
There is no pre-defined maximum size of each Bitswap block, but IPFS uses 256kb, and the maximum recommended size of a Bitswap block is 1mb. Realistically, we want Bitswap blocks to be as small as possible, so we should start at 256kb for our maximum size, but keep the size of Bitswap blocks as a parameter we can tune so that we can optimize for block size vs block count. + +While the Bitswap specification does not care about what data is stored in each block, we do require each block to have a commonly-defined format: + + 1. `[2 bytes]` count of links n + 2. `[n * 32 bytes]` links (each link is a 256-bit hash) + 3. `[up to (maxBlockSize - 2 - 32 * n) bytes]` data + +Hence, a data blob is converted to a tree of blocks. We advertise the "root" block of the tree as the initial block to download for each resource we store in Bitswap, and the libp2p helper process will automatically explore all the child blocks referenced throughout the tree. To construct the full binary blob out of this tree, a breadth-first search (BFS) should be used to traverse the tree. BFS is preferable to DFS (another traversal order) as it allows the blob to be loaded lazily, at each step following links to nodes we already have from the root block (in contrast to the order induced by DFS, where one has to go to the deepest level before emitting a chunk of data). + +For links, a 256-bit version of the Blake2b hash is to be used. The packing algorithm can be implemented so that no padding is used in blocks and at most `n = (blobSize - 32) / (maxBlockSize - 34)` blocks are generated, with `n - 1` blocks of exactly `maxBlockSize` bytes. + +## Bitswap Block Database + +There exist some key constraints in choosing a good solution for the Bitswap block db. Importantly, the block database needs to support concurrent readers and a writer at the same time. Additionally, the database should be optimized for existence checks and reads, as these are the operations we will perform the most frequently against it. Some consistency guarantees (the ability to rely on a happens-before relation between write and read events) are also required. The database would ideally be persisted, so that we can quickly reload the data in the event of a node crash (we want to avoid increasing bootstrap time for a node, as that keeps stake offline after crashes). + +Given these constraints, [LMDB](http://www.lmdb.tech/doc/) is a good choice for the Bitswap block storage. It meets all of the above criteria, including persistence. As a bonus, it's light on extra RAM usage. + +### Database Schema + +| Key | Value | Key bytes | |----------------------|----------------------|-----------------------------| | `status/{cid}` | integer: 0..2 | `<1><32-byte blake2b digest>` | | `block/{cid}` | bitswap block bytes | `<0><32-byte blake2b digest>` | + +Status is an integer taking one of the values: + + * `0` (not all descendant blocks present) + * `1` (all descendant blocks present) + * `2` (delete in process) + +### Additional data to store in the Daemon db + +In addition to data already stored in the DB controlled by the Daemon, we would need to store: + +* `headerHashToRootCid`: relation between header and associated root cid (if known) +* `rootCidsToDelete`: list of root cids marked for deletion (i.e.
root cids related to blocks which were completely removed from the frontier) +* `recentlyCreatedBlocks`: list of recently created blocks for which we didn't receive a confirming upcall + +### Invariants + +The following invariants are maintained for the block storage: + +* For each `status/{rcid}` in DB, there is an entry for `{rcid}` in `headerHashToRootCid` +* For each cid for which `block/{cid}` exists in DB, one of the following holds: + * There exists `status/{cid}` + * There exists `cid'` such that `cid` is in the link list of the bitswap block at `block/{cid'}` +* `status/{rcid}` can progress strictly in the order: `null -> 0 -> 1 -> 2 -> null` + +### Initialization + +Daemon initialization: + +1. Remove keys `k` from `rootCidsToDelete` for which `status/{k}` is absent +2. Remove blocks `b` from `recentlyCreatedBlocks` for which `status/{b.root_cid}` is present and is not equal to `0` +3. Start Helper +4. Send bulk delete request to Helper for keys from `rootCidsToDelete` +5. Send bulk download request to Helper for blocks that are known to the frontier but which do not have `status/{b.root_cid} == 1` +6. Send add resource request for each block in `recentlyCreatedBlocks` + +The Helper has no initialization at all. + +### Helper to Daemon interface + +The Helper receives requests of the following kinds: + +* Delete resources with root cid in list `[cid1, cid2, ...]` + * Sends a single upcall upon deletion of all of these resources +* Download resources with root cid in list `[cid1, cid2, ...]` + * Sends an upcall upon full retrieval of each resource (one per resource) +* Add resource with root cid `{root_cid}` and bitswap blocks `[block1, ..., blockN]` (no checks for hashes are made) + * Sends an upcall confirming the resource was successfully added + +### Synchronization + +Block creation: + +1. Block is added to `recentlyCreatedBlocks` +2. Add resource request is sent to Helper +3. Upon add resource confirmation upcall, block is removed from `recentlyCreatedBlocks` + +Frontier is moved forward and some old blocks get removed: + +1. Remove record for block from `headerHashToRootCid` +2. Add block to `rootCidsToDelete` +3. Delete resource request is sent to Helper +4. Upon delete resource confirmation upcall, block is removed from `rootCidsToDelete` + +A gossip for the new header is received and Daemon decides that the block body corresponding to the header has to be fetched: + +1. Store record in `headerHashToRootCid` for the header +2. Send download request to Helper +3. Upon receipt of the download upcall, the block and header are added to the frontier + a. If the downloaded block arrived late and is no longer of interest, launch the deletion flow as described above + +(We assume the root cid is received along with the header in the gossip.) + +## Migrating Mina Blocks to Bitswap + +To migrate Mina block propagation to Bitswap, we will separate a Mina block into 2 portions: a block header, and a block body. Most of the data in a Mina block is stored inside of the `staged_ledger_diff`. The common data in every Mina block is ~8.06kb (including the `protocol_state_proof`), so using everything __except__ for the `staged_ledger_diff` as the block header seems natural. The `staged_ledger_diff` would then act as the block body for Mina blocks, and would be downloaded/made available via Bitswap rather than broadcast over pub/sub. + +With this change, when blocks are broadcast through the network, only the block header and a root CID for the `staged_ledger_diff` are in the message.
When a node receives a new block header, the node will first verify the `protocol_state_proof` (all public information that needs to be fed in for proof verification will be available in the block header). Once the proof is checked, a node would then download the `staged_ledger_diff` via Bitswap. Once that is downloaded, the node would follow the same pattern as today for generating a breadcrumb by expanding the `staged_ledger` from the parent breadcrumb and the new `staged_ledger_diff`, after which the Mina block will be fully validated. At this point, the breadcrumb is added to the frontier. + +In summary, the proposed changes in order to move Mina blocks into Bitswap are: + +1. Define a separate block header (block w/o `staged_ledger_diff`, with new field `staged_ledger_diff_root_cid`). +2. Verify the relation of the `staged_ledger_diff_root_cid` to the header. +3. Rebroadcast block headers after proofs are checked, but before `staged_ledger_diff`s are verified and the breadcrumb is added to the frontier. +4. Punish block producer public keys if they submit an invalid `staged_ledger_diff` by ignoring all future block headers from that producer within the epoch (do not punish senders, as they may not have banned or checked the `staged_ledger_diff` yet). + +Punishing is done only within the epoch, as otherwise punish lists would accumulate without bound, and real adversaries would nevertheless find a way around it by moving stake to another address for the next epoch. + +## Verifying relation between root CID and header + +One large difference from before is that nodes will rebroadcast the block header to other nodes on the network before the `staged_ledger_diff` is downloaded and verified, in order to avoid increasing block propagation time on the network with the new addition of Bitswap. This change brings some unique problems that we need to solve now, as previously, we wouldn't forward Mina blocks to other nodes until we knew the block was fully valid. + +In the new world, an adversary could broadcast around the same block header and proof, but swap out the `staged_ledger_diff` root Bitswap block CID with different values to attack the network. An adversary can do this both as a block producer and as a man-in-the-middle (MITM). + +To rule out the MITM attack vector, a signature is to be carried along with the header. The signature is made over the pair of root CID and header hash with the block producer's key. No MITM actor can forge the signature, hence the attack becomes infeasible. + +To mitigate the adversarial block producer case, the following tactic is employed: if a node ever downloads a `staged_ledger_diff` which does not achieve the target staged ledger hash after application to the parent staged ledger, that node will ban the block producer public key associated with the block. This significantly raises the cost of attack and makes it in effect pointless. + +# Shipping plan +[shipping]: #shipping + +Shipping of the feature is two-staged, with the first stage being shipped as a soft-fork release and the second stage being shipped as part of a hard fork. + +## Soft-fork + +For the soft-fork stage, here is the anticipated changeset: + +1. Support for sharing blocks via Bitswap +2. New pub/sub topics: + * `mina/block-body/1.0.0` + * `mina/tx/1.0.0` + * `mina/snark-work/1.0.0` +3. Engine to rebroadcast blocks from the new topics to the old `coda/consensus-messages/0.0.1` topic and vice versa + +### Legacy topic management + +Most new nodes will support both old and new topics for broadcast.
Nodes are able to filter subscriptions from other nodes based on what they subscribe to, configured using the [`WithSubscriptionFilter` option](https://github.com/libp2p/go-libp2p-pubsub/blob/55d412efa7f5a734d2f926e0c7c948f0ab4def21/subscription_filter.go#L36). Utilizing this, nodes that support the new topics can filter out the old topic from nodes that support both topics. By filtering the topics like this, nodes running the new version can broadcast new blocks over both topics while avoiding sending the old message format to other nodes which support the new topic. + +In particular, the following method of the filter is to be implemented: + +``` +type SubscriptionFilter interface { + ... + FilterIncomingSubscriptions(peer.ID, []*pb.RPC_SubOpts) ([]*pb.RPC_SubOpts, error) +} +``` +On receiving an incoming subscription for the old topic, we check whether the same peer is already subscribed to all three new topics, and if so, the subscription is filtered out. Node configuration code shall be implemented accordingly, subscribing to the new topics first. + +Over time, when most of the network participants adopt the newer version, only a few specifically configured nodes (including some seeds) will continue servicing the old topic, while most of the network will live entirely on the new topic. + +The Mina node will take two additional arguments: + +* `--pubsub-v1`: `rw`, `ro` or default `none` +* `--pubsub-v2`: default `rw`, `ro` or `none` + +The daemon runs message propagation for the legacy topic as follows: + +1. If `--pubsub-v2=rw` or `--pubsub-v2=ro`, listen to `mina/tx/1.0.0` and `mina/snark-work/1.0.0` + 1. For each valid message if `--pubsub-v1=rw` resend it to `coda/consensus-messages/0.0.1` +2. If `--pubsub-v2=rw` or `--pubsub-v2=ro`, listen to `mina/block/1.0.0` and add headers to frontier +3. For each new block in frontier, publish it to `coda/consensus-messages/0.0.1` if `--pubsub-v1=rw` +4. If `--pubsub-v1=rw` or `--pubsub-v1=ro`, listen to `coda/consensus-messages/0.0.1` + 1. For each valid block message if `--pubsub-v2=rw` resend corresponding header to `mina/block/1.0.0` + 2. For each valid transaction message if `--pubsub-v2=rw` resend it to `mina/tx/1.0.0` + 3. For each valid snark work message if `--pubsub-v2=rw` resend it to `mina/snark-work/1.0.0` + +Releasing of Bitswap will happen in stages: + +1. Release with default parameters `--pubsub-v1=rw` and `--pubsub-v2=rw` +2. After most block producers adopt the new version, change to default `--pubsub-v1=none` +3. Launch a network of "pubsub relays" which will service both `--pubsub-v1=rw` and `--pubsub-v2=rw` until the next hard fork + +### New block topic + +The new block topic presents a new message type comprising: + +1. Block header (as defined above) +2. Block body certificate + +The block body certificate is in turn a data structure with the following fields: + +1. 32-byte block body root +2. Signature + +The block body root is the hash of the root bitswap block containing the block body bytes. + +The signature is a digital signature of the pair `(block body root hash, block header hash)`, made with the secret key corresponding to the block producer's key specified in the block's header. + +The frontier absorbs block headers along with the corresponding block body roots and keeps them as a 1:1 relation. Whenever the same block header is received with a different block body root (and a valid block producer signature), the block producer is banned from producing a block for the given slot.
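+ +For illustration, here is a minimal OCaml sketch of the 1:1 header/body-root tracking and equivocation check described above; the hash types are stand-ins for the real ones, and this is not the actual frontier code: + +```ocaml +(* Maps header hash -> first observed block body root. *) +let body_root_by_header : (string, string) Hashtbl.t = Hashtbl.create 64 + +(* Returns [true] iff this (header, root) pair proves equivocation, + assuming the producer's signature over the pair was already checked. *) +let observe ~header_hash ~body_root = + match Hashtbl.find_opt body_root_by_header header_hash with + | Some existing -> not (String.equal existing body_root) + | None -> + Hashtbl.add body_root_by_header header_hash body_root ; + false +```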
+ +Additionally, if the bitswap block referenced by the block body root is broken, the block producer is banned from producing a block for the given slot. + +In case of a ban for a slot, all of the corresponding data is cleaned out; the ban is local. + +## Hard-fork + +In the next hard fork, the old topic becomes entirely abandoned. + +Also, consider migrating to hash-based message ids as described in [issue #9876](https://github.com/MinaProtocol/mina/issues/9876). + +# Drawbacks +[drawbacks]: #drawbacks + +This adds significant complexity to how the protocol gossips around information. The control flow for validating blocks is more complex than before, and there is new state to synchronize between the processes in the architecture. It also adds new delays to when the full block data will be available to each node (but the tradeoff here is that we are able to more consistently gossip blocks around the network within the same slot in which those blocks are produced). + +# Rationale and alternatives +[rationale-and-alternatives]: #rationale-and-alternatives + +- it would be possible to download larger data from peers via RPC and still reduce the pub/sub message size, though there are some issues with this approach + - it is not guaranteed that any of your peers will have the data you need, in which case you need some alternative mechanism to discover who does have it + - puts a lot of bandwidth pressure on individual peers rather than spreading the load between multiple peers (which helps with both bandwidth pressure and data redundancy for increased availability) +- alternatives to using LMDB as the Bitswap cache + - use [SQLite](https://www.sqlite.org/index.html) + - even though all we need is a key/value db, not a relational db, SQLite is portable and performant + - would require us to enable both the [write-ahead logging](https://sqlite.org/wal.html) and use [memory-mapped I/O](https://www.sqlite.org/mmap.html) features in order to use it the way we would like to + - use the raw Linux filesystem (from @georgeee) + - would use a lot of inodes and file descriptors if we do not build a mechanism that stores multiple key-value pairs in shared files, which could prove tricky to implement + - would need to solve concurrency problems related to concurrent readers/writers, which could be tricky to get correct and have confidence in + + +## Verifying root CID to header relation + +There is another option to verify the relation: include a commitment in the snark not only to the target staged ledger hash, but also the root Bitswap block CID of the `staged_ledger_diff` that brings us to that state. + +The two options compare with one another in the following way: + +| |Snark commitment|Signature | +|--------------------------|----------------|----------| +|_Snark proving time_ |Increased |Unchanged | +|_Computational complexity_|High |Low | +|_Soft-fork_ |Incompatible |Compatible| + +For the _Snark commitment_ option, an adversary needs to generate a snark proof for each `staged_ledger_diff` they want to broadcast to the network, hence the high computational complexity of an attack, which is a desirable property. However, the _Signature_ option is preferred because it is soft-fork compatible and doesn't increase the complexity of the circuit.
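+ +For illustration, here is a sketch of the preferred signature check, written against hypothetical `Hash.combine` and `Signature.verify` helpers rather than the daemon's actual signing API: + +```ocaml +(* The certificate binds the block body root to the header: the producer + signs the pair (body root, header hash), so neither a MITM nor a relay + can substitute a different body root without forging a signature. *) +let certificate_is_valid ~producer_pk ~header_hash ~body_root ~signature = + let message = Hash.combine body_root header_hash in + Signature.verify signature ~public_key:producer_pk ~message +```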
+
+# Appendix
+
+_For reference on the above computation of ~8.06kb for a block without a staged ledger diff, here is a snippet of OCaml code that can be run in `dune utop src/lib/mina_transition`_
+
+```ocaml
+let open Core in
+let open Mina_transition in
+let precomputed_block = External_transition.Precomputed_block.t_of_sexp @@ Sexp.of_string External_transition_sample_precomputed_block.sample_block_sexp in
+let small_precomputed_block = {precomputed_block with staged_ledger_diff = Staged_ledger_diff.empty_diff} in
+let conv (t : External_transition.Precomputed_block.t) =
+  External_transition.create
+    ~protocol_state:t.protocol_state
+    ~protocol_state_proof:t.protocol_state_proof
+    ~staged_ledger_diff:t.staged_ledger_diff
+    ~delta_transition_chain_proof:t.delta_transition_chain_proof
+    ~validation_callback:(Mina_net2.Validation_callback.create_without_expiration ())
+    ()
+in
+Protocol_version.set_current (Protocol_version.create_exn ~major:0 ~minor:0 ~patch:0) ;
+External_transition.Stable.Latest.bin_size_t (conv small_precomputed_block) ;;
+```
diff --git a/rfcs/0063-reducing-daemon-memory-usage.md b/rfcs/0063-reducing-daemon-memory-usage.md
new file mode 100644
index 00000000000..beb48e9452a
--- /dev/null
+++ b/rfcs/0063-reducing-daemon-memory-usage.md
@@ -0,0 +1,205 @@
+## Summary
+[summary]: #summary
+
+This RFC proposes changes to the Berkeley release of the daemon which will bring the daemon's maximum possible memory usage within the current hardware memory requirements of the Mainnet release.
+
+## Motivation
+[motivation]: #motivation
+
+With zkApps enabled, the maximum memory usage of the daemon now exceeds the current hardware requirements (`16GB`) in the event that the network is fully saturated with max-cost zkApp transactions in blocks for an extended period of time. With the planned parameters for Berkeley, we estimate that the maximum memory usage of a daemon is around `58.144633GB`.
+
+## Detailed design
+[detailed-design]: #detailed-design
+
+In order to reduce the memory usage of the daemon, we will offload some of the memory-allocated data to an on-disk cache. The [memory analysis section](#memory-analysis) will show that the majority of memory in the fully saturated max-cost transaction environment comes from ledger proofs, zkApp proofs, and zkApp verification keys. Each of these pieces of data is accessed infrequently by the daemon and, as such, does not need to be stored in RAM for fast access.
+
+Below is a list of all the interactions the daemon has with ledger proofs, zkApp proofs, and zkApp verification keys:
+
+* ledger proofs are stored upon receipt of snark work from the gossip network
+* zkApp proofs and newly deployed zkApp verification keys are stored upon receipt of zkApp transactions from the gossip network
+* ledger proofs, zkApp proofs, and zkApp verification keys are read when a block is produced, in order for them to be included into the staged ledger diff
+* ledger proofs, zkApp proofs, and zkApp verification keys are stored upon receipt of new blocks from the gossip network
+* ledger proofs, zkApp proofs, and zkApp verification keys are read when a block is applied to the ledger
+* ledger proofs, zkApp proofs, and zkApp verification keys are read when serving bootstrap requests to nodes joining the network
+
+In order to write this data to disk, we will use the [Lightning Memory-Mapped Database](http://www.lmdb.tech/doc/) (LMDB for short).
LMDB is a lightweight, portable, and performant memory-map-backed key-value storage database. We will demonstrate that the performance of this database is more than sufficient for our use case in the [impact analysis section](#impact-analysis).
+
+For this optimization, it is not important that the on-disk cache is persisted across daemon runs. Such a feature can be added in the future, and this storage layer can double as a way to better persist data from the snark pool and data references from the persistent frontier. However, for now, we will set up the daemon so that it wipes the existing on-disk cache on every restart, in order to simplify the implementation and avoid having to deal with a potentially corrupted on-disk cache (in the event the daemon or operating system did not shut down properly). This is particularly important given that our choice of LMDB does not provide complete guarantees against data corruption out of the box, because memory maps can lead to partial writes if the kernel panics or is otherwise interrupted before the system is gracefully shut down.
+
+To prevent disk leaks in the cache, we will use GC finalizers on cache references to count the active references the daemon has to information written to the cache. Since the daemon always starts with a fresh on-disk cache, this gives an accurate reference count for any data cached on disk. When a GC finalizer decrements the total reference count of an item stored in the cache to 0, it will delete that item from the cache. With this setup, the on-disk cache can only leak if there is a memory leak within the daemon itself, in which case the daemon is leaking references to the cache.
+
+For identifying cached values by their content, we will use the Sha256 hash function, as provided by the `digestif` OCaml library (which is an existing dependency of the daemon). Sha256 is a performant and sufficiently collision-resistant hashing algorithm for this use case. The usage of Sha256 here means that cache keys will be 32 bytes long, since Sha256 digests are 256 bits in length. To track the refcounts of cache references using the GC, we will maintain, alongside the on-disk cache, a hash table mapping Sha256 digests to the active reference count for each digest. The GC finalizers for cache references will decrement the refcount for their respective digests, removing the refcount entry and deleting the underlying item from the on-disk cache if the refcount becomes 0.
+
+## Memory Analysis
+[memory-analysis]: #memory-analysis
+
+In order to accurately estimate the memory usage of the daemon in the fully saturated max-cost transaction environment, we have written a program, `src/app/disk_caching_stats/disk_caching_stats.exe`. See the [README.md](src/app/disk_caching_stats/README.md) for more information on how the program calculates these estimates.
+
+Below is the output of the script when all of the parameters are tuned to what we have planned for the Berkeley release.
+
+```
+baseline = 3.064569GB
+scan_states = 15.116044GB
+ledger_masks = 2.292873GB
+staged_ledger_diffs = 32.345724GB
+snark_pool = 4.453962GB
+transaction_pool = 0.871383GB
+TOTAL: 58.144555GB
+```
+
+In this output, the baseline is made up of static measurements taken from the daemon, which represent the overhead of running a daemon regardless of throughput or transaction cost.
We have then estimated the expected worst-case memory usage of the scan states, ledger masks, and staged ledger diffs, which make up the overwhelming majority of the data allocated for the frontier. And finally, we estimate the memory footprint of the snark pool and transaction pool.
+
+Now, if we adjust the estimates for the proposed optimizations to store proofs and verification keys on disk (by subtracting their sizes from the memory footprint and replacing them with cache references), we get the following output.
+
+```
+baseline = 3.064569GB
+scan_states = 3.658435GB
+ledger_masks = 0.562126GB
+staged_ledger_diffs = 9.202166GB
+snark_pool = 0.014414GB
+transaction_pool = 0.247903GB
+TOTAL: 16.749612GB
+```
+
+As we can see, this brings the estimate down much closer to the current `16GB` hardware requirement. From here, we can look into additional optimizations to bring it down even further, so that it fits within the current hardware requirements. Such optimizations could include:
+
+* Sharing the prover key allocation across the daemon's subprocesses, reducing the baseline to nearly 1/3rd of what it is now.
+* Persisting the entire staged ledger diff of each block to disk, given we rarely need to read the commands contained within a staged ledger diff after the diff has been applied. The diffs only need to be sent to peers after they've been applied to the ledger, so we can just store the diffs in `bin_prot` format and not bother deserializing them when we serve them to other nodes.
+
+### Impact Analysis
+[impact-analysis]: #impact-analysis
+
+There is a space-time tradeoff here in the sense that, by moving data from RAM to disk, we now need to perform disk I/O and deserialize/serialize data in order to perform reads/writes. So, as part of this design, it is important to show that the performance hit we take for reading/writing data cached on disk is relatively insignificant in the operation of the daemon.
+
+LMDB provides benchmarks against other similar databases [on their website](http://www.lmdb.tech/bench/microbench/). The important piece of data here is that, with a 128MB cache and values of `100,000` bytes in size, LMDB is benched as capable of performing `1,718,213` random reads per second (about `582` nanoseconds per read). Given the number and frequency of reads a daemon will be performing against this on-disk cache, these benchmarks show that reading from LMDB will have a negligible effect on daemon performance. All proofs and verification keys we would read/write to disk are under this `100,000`-byte benchmark size, so the actual performance should be better than this.
+
+Bin_prot serialization benchmarks for the proofs and verification keys have been added to the same program that does the memory impact analysis presented above. In this program, we run 10_000 trials of each operation and take the average of the elapsed time for the entire execution. Below are the results from this program (as run on my local machine).
+
+```
+==========================================================================================
+SERIALIZATION BENCHMARKS Pickles.Side_loaded.Proof.t
+==========================================================================================
+write: 32.424211502075195us (total: 324.24211502075195ms)
+read: 46.872687339782715us (total: 468.72687339782715ms)
+
+==========================================================================================
+SERIALIZATION BENCHMARKS Mina_base.Verification_key_wire.t
+==========================================================================================
+write: 6.0153961181640625us (total: 60.153961181640625ms)
+read: 1.0202760457992552ms (total: 10.202760457992554s)
+
+==========================================================================================
+SERIALIZATION BENCHMARKS Ledger_proof.t
+==========================================================================================
+write: 36.144065856933594us (total: 361.44065856933594ms)
+read: 51.637029647827148us (total: 516.37029647827148ms)
+```
+
+Taking these numbers, we can estimate the relative impact deserialization/serialization will have on the important operations of the daemon.
+
+```
+==========================================================================================
+SERIALIZATION OVERHEAD ESTIMATES
+==========================================================================================
+zkapp command ingest = 457.58285522460938us
+snark work ingest = 142.36927032470706us
+block ingest = 76.7938720703125ms
+block production = 844.14577026367192ms
+```
+
+The estimates for zkapp command and snark work ingest represent the overhead to add a single new max-cost zkapp command or snark work bundle to the on-disk cache. The block ingest estimate represents the cost to add the resources from a max-cost block to disk. This is all computed under the assumption that we take the easy route to implementation and serialize all values before hashing them (so that we can compute the hash from the serialized format), but the design of the implementation actually allows us to avoid doing this. The block production overhead is the amount of time the daemon would spend loading all relevant resources from disk in order to produce a max-cost block.
+
+### Implementation
+
+In order to implement this change, we will create a new `disk_cache_lib` library. There already exists a `cache_lib` library in the codebase, but its design constraints and intent are different (it represents a shared in-memory cache with explicit rules about "consuming" items from the cache). Still, the `disk_cache_lib` library will follow similar abstractions to `cache_lib`, without the concerns for requiring consumption of cache items.
+
+A new `Disk_cache.t` type will be added, representing access to the on-disk cache. This value will be initialized globally at daemon startup, but will only be accessible within the `disk_cache_lib` library itself. A type `'a Disk_cached.t` will be used to represent items that are stored in the on-disk cache, where the type parameter is the underlying type of the value stored on disk. An `'a Disk_cached.t` is initialized from a first-class module that defines the serialization, deserialization, and hashing functionality for the type it stores, with a helper `Disk_cached.Make` functor being provided to abstract over this (the same pattern utilized by `'a Hashtbl.t` and `'a Map.t` from `core`).
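+
+Before the interface mockup, here is a rough sketch of the GC-driven reference counting described above. It is a sketch only: `delete_from_lmdb` stands in for the real LMDB binding, and digests are plain strings here:
+
+```ocaml
+open Core
+
+(* [refcounts] maps a Sha256 digest to the number of live OCaml references
+   to that cached item. When the last reference is collected, the item is
+   removed from the on-disk cache. *)
+let refcounts : int Hashtbl.M(String).t = Hashtbl.create (module String)
+
+let track ~(delete_from_lmdb : string -> unit) (digest : string) handle =
+  Hashtbl.incr refcounts digest ;
+  Gc.Expert.add_finalizer_exn handle (fun _ ->
+      (* Drop the table entry when the count hits zero... *)
+      Hashtbl.decr refcounts digest ~remove_if_zero:true ;
+      (* ...and if no live references remain, delete the cached bytes. *)
+      if not (Hashtbl.mem refcounts digest) then delete_from_lmdb digest )
+```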
+
+Below is a mockup of what the `disk_cache_lib` library would look like, with some commented code detailing the internals of the library which are not exposed.
+
+```ocaml
+open Core
+open Async
+
+module Disk_cache : sig
+  (** Initialize the on-disk cache explicitly before interactions with it take place. *)
+  val initialize : unit -> unit Deferred.t
+
+  (* type t = Lmdb.t *)
+
+  (** Increment the cache ref count, saving a value if the ref count was 0. *)
+  (* val incr : t -> Disk_cached.id -> 'a Bin_prot.Type_class.t -> 'a -> unit *)
+
+  (** Decrement the cache ref count, deleting the value if the ref count becomes 0. *)
+  (* val decr : t -> Disk_cached.id -> unit *)
+
+  (** Read from the cache, crashing if the value cannot be found. *)
+  (* val read : t -> Disk_cached.id -> 'a Bin_prot.Type_class.t -> 'a *)
+end
+
+module Disk_cached : sig
+  module type Element_intf = sig
+    include Binable.S
+
+    val digest : (module Digestif.S with type ctx = 'ctx) -> 'ctx -> t -> 'ctx
+  end
+
+  (* type id = Digestif.Sha256.t *)
+
+  type 'a t (* = T : (module Element_intf with type t = 'a) * id -> 'a t *)
+
+  (** Create a new cached reference from a value. Hashes the incoming value to check if it is already
+      stored in the cache, and stores it in the cache if not. This function does not keep references
+      to the value passed into it so that the value can be garbage collected. The caching library
+      tracks the total number of references created in OCaml to each value stored in the cache, and
+      will automatically delete the value from the cache when all references are garbage collected.
+   *)
+  val create : (module Element_intf with type t = 'a) -> 'a -> 'a t
+
+  (** Reads from the on-disk cache. It is important that the caller does not hold onto the returned
+      value, otherwise they could leak the values read from the cache.
+   *)
+  val get : 'a t -> 'a
+
+  (** Helper functor for wrapping the first-class module logic. *)
+  module Make : functor (Element : Element_intf) -> sig
+    type nonrec t = Element.t t
+
+    val create : Element.t -> t
+
+    val get : t -> Element.t
+  end
+end
+```
+
+It is important that callers of `Disk_cached.get` do not hold onto the returned value after they are done reading it. This could be somewhat enforced through a more complex design, but that doesn't seem necessary at this time.
+
+To use this library, types that we want to cache on disk merely need to implement a `val digest : (module Digestif.S with type ctx = 'ctx) -> 'ctx -> t -> 'ctx` function in addition to the `bin_prot` functions they already implement. The `Disk_cached.Make` helper can (optionally) be called to create a module which abstracts over the details of working with the polymorphic `'a Disk_cached.t` type. This new `X.Disk_cached.t` (`X.t Disk_cache_lib.Disk_cached.t` in longhand) type can be put in place of the `X.t` type in any in-memory structures to remove the normal in-memory references we would be maintaining to an `X.t`. When we deserialize an `X.t` from the network, we must call `X.Disk_cached.create` on that value to transfer it into an `X.Disk_cached.t`, and then throw away the `X.t` we deserialized initially.
+
+
+## Drawbacks
+[drawbacks]: #drawbacks
+
+This approach requires us to perform additional disk I/O in order to offload data from RAM to disk. Our above analysis shows this will have a negligible impact on the performance of a daemon, but it does mean we will use more disk space than before. Based on the estimates presented above, the worst-case additional disk usage will be `~42GB`.
With our initial approach, we mitigate the risk of a disk usage leak by wiping the on-disk cache when the daemon restarts.
+
+## Rationale and alternatives
+[rationale-and-alternatives]: #rationale-and-alternatives
+
+The only real alternative to this approach would be to find some way to optimize the memory impact of the proofs and verification keys without writing them to disk, which would require some form of compression. Given that proofs and verification keys are cryptographic data, and thus have a necessarily high degree of "randomness" in the values they contain, they are not easily compressed via normal techniques. Even then, the space/time tradeoff of compressing this data would be harder to argue for, given that we need to be able to read this data during critical operations of the daemon (block production, block ingest, snark work ingest, and zkapp command ingest).
+
+## Prior art
+[prior-art]: #prior-art
+
+We do not have prior art in the direction of on-disk caching for relieving memory usage. We do have prior art for the OCaml LMDB integration, as we have already integrated LMDB into the Bitswap work we plan to release in a soft fork after Berkeley. We can lean on that prior work here, since we will also be using LMDB for this on-disk cache.
+
+## Unresolved questions
+[unresolved-questions]: #unresolved-questions
+
+* Can we increase the hardware requirements to `32GB` at the point of the Berkeley release?
+* Which of the additional recommended memory optimizations should we take first in order to bring the estimated memory usage well below the current `16GB` memory requirement?
+* How much additional buffer should we leave between our memory estimate and the actual hardware requirements (accounting for RAM spikes and other processes on the system)?
diff --git a/rfcs/0064-deriving-with-generics-snapps.md b/rfcs/0064-deriving-with-generics-snapps.md
new file mode 100644
index 00000000000..dfa3c1192c5
--- /dev/null
+++ b/rfcs/0064-deriving-with-generics-snapps.md
@@ -0,0 +1,229 @@
+## Summary
+[summary]: #summary
+
+This RFC introduces a mechanism for datatype-generic programming (see [motivation](#motivation) for a definition) that is easier to maintain than ppx macros but still powerful enough for numerous use cases in our codebase (GraphQL, bridges between languages, random oracle inputs, etc.). This document additionally describes its specific application to Snapp parties transactions. While this RFC will describe all the ways this approach simplifies our Snapp parties implementation, the scope of work on this project will leave some derivers for future work.
+
+## Motivation
+[motivation]: #motivation
+
+Datatype-generic programming, sometimes referred to as reflection, is a form of abstraction for creating reusable logic that acts on a wide variety of datatypes. Typically this is used to fold and unfold over data structures to "automatically" implement toJson, toString, hash, equals, etc.
+
+Specifically with respect to Snapp transactions: we have complex nested structures that define the new parties transaction, and at the moment we redeclare JSON, GraphQL, JS/OCaml bridges, specifications, and TypeScript definitions in SnarkyJS completely separately. This is error-prone, hard to maintain, and has already introduced bugs in our Snapps implementation even before we've shipped v1.
Using datatype-generic programming, we can define all of these only once, atomically on each primitive of the datatype, never forget to update them when definitions change (the compiler will yell at us), and rely on the datatype-generic machinery to perform the structural fold and unfold for us.
+
+In OCaml, we'd look for some form of datatype-generic programming that allows us to fold and unfold over algebraic datatypes and records. Typically, in Mina's codebase we have used ppx deriving macros: `[@@deriving yojson]` derives a JSON serializer and deserializer, and `[@@deriving sexp]` derives an S-expression parser and printer.
+
+While ppx macros are very powerful, writing custom ppx macros is unfortunately extremely difficult in OCaml, and the results are very hard to maintain.
+
+Luckily, folks at Jane Street have implemented a mechanism to mechanically and generically define folds/unfolds on arbitrary data types in OCaml using the `[@@deriving fields]` macro. It is written with "common OCaml", so anyone familiar with Jane Street libraries in OCaml, i.e. most contributors to the Mina OCaml core, can maintain it.
+
+```ocaml
+(* to_string from the ppx_fields_conv docs *)
+type t = {
+  dir : [ `Buy | `Sell ];
+  quantity : int;
+  price : float;
+  mutable cancelled : bool;
+  symbol : string;
+} [@@deriving fields]
+
+let to_string t =
+  let conv to_s = fun acc f ->
+    (sprintf "%s: %s" (Field.name f) (to_s (Field.get f t))) :: acc
+  in
+  let fs =
+    Fields.fold ~init:[]
+      ~dir:(conv (function `Buy -> "Buy" | `Sell -> "Sell"))
+      ~quantity:(conv Int.to_string)
+      ~price:(conv Float.to_string)
+      ~cancelled:(conv Bool.to_string)
+      ~symbol:(conv Fn.id)
+  in
+  String.concat fs ~sep:", "
+```
+
+This is a step in the right direction, but we can make this more succinct with terser combinators -- (note we use `Fields.make_creator` so we can compose decoders with encoders):
+
+```ocaml
+type t = {
+  dir : [ `Buy | `Sell ];
+  quantity : int;
+  price : float;
+  mutable cancelled : bool;
+  symbol : string;
+} [@@deriving fields]
+
+let to_string t =
+  let open To_string.Prim in
+  String.concat ~sep:", " @@
+    (Fields.make_creator ~init:(To_string.init ())
+      ~dir
+      ~quantity:int
+      ~price:float
+      ~cancelled:bool
+      ~symbol:string |> To_string.finish ())
+```
+
+Further, we can build combinators for horizontally composing derivers, such that:
+
+```ocaml
+(* pseudocode *)
+type t = {
+  dir : [ `Buy | `Sell ];
+  quantity : int;
+  price : float;
+  mutable cancelled : bool;
+  symbol : string;
+} [@@deriving fields]
+
+let to_string, equal =
+  let module D = Derive.Make2(To_string)(To_equal) in
+  let open D.Prim in
+  let dir = both Dir.to_string Dir.equal in
+  Fields.make_creator
+    ~init:(D.init ()) ~dir ~quantity:int ~price:float ~cancelled:bool ~symbol:string
+  |> D.finish
+```
+
+Coupled with combinators, these custom folds are almost powerful enough.
+
+However, sometimes we need to add metadata to our data types in order to faithfully implement some fold. For example, we may want to provide a custom field name in a JSON serializer or documentation for a GraphQL schema.
+
+Rather than pollute our data types, we can settle for one extra, relatively simple companion macro, which I propose we call `[@@deriving ann]`. With it, we can pull out all the custom annotations on the datatype and finally have enough machinery to cleanly implement JSON, GraphQL, specifications, random oracle inputs, TypeScript definitions, and more.
+
+## Detailed design
+[detailed-design]: #detailed-design
+
+### General Framework
+
+See #10132 for a proposed implementation of the machinery in the `fields_deriver` library.
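+
+For intuition, here is a minimal, self-contained sketch of the shape of a deriver module like `To_string` as used in the examples above. This is an illustration only; the real combinators in `fields_deriver` must match the per-field function type that `Fields.make_creator` expects:
+
+```ocaml
+(* A simplified stand-in for a deriver: fold field-by-field into an
+   accumulator of "name: value" strings, then join them at the end. *)
+module To_string = struct
+  type acc = string list
+
+  let init () : acc = []
+
+  let finish (acc : acc) : string = String.concat ", " (List.rev acc)
+
+  (* Build a per-field combinator from a printer for that field's type. *)
+  let prim (to_s : 'a -> string) (name : string) (v : 'a) (acc : acc) : acc =
+    Printf.sprintf "%s: %s" name (to_s v) :: acc
+
+  module Prim = struct
+    let int = prim string_of_int
+    let float = prim string_of_float
+    let bool = prim string_of_bool
+    let string = prim Fun.id
+  end
+end
+```
+
+Under this simplification, `To_string.Prim.int "quantity" 10` has type `acc -> acc`; it is this accumulator-threading shape that lets a `Fields`-generated fold visit every field of a record in turn.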
+
+#### Deriving Annotations
+
+We need to implement a `[@@deriving ann]` macro that takes a structure that looks something like:
+
+```ocaml
+type t =
+  { foo : int (** foo must be greater than zero *)
+  ; reserved : string [@key "_reserved"]
+  }
+[@@deriving ann]
+```
+
+and produces something like (sketch):
+
+```ocaml
+let t_ann : Ann.t Field.Map.t
+```
+
+where there is helper code:
+
+```ocaml
+(* helper util code that we only need once *)
+module Ann = struct
+  type t =
+    { ocaml_doc : string
+    ; key : string
+    (* any other annotations we want to capture *)
+    }
+
+  module Field = struct
+    module T = struct
+      type 'a t = | E : ('a, 'b) Field.t -> 'a t
+      (* ... with a sort function based on the Field name *)
+    end
+
+    module Map = Core_kernel.Map.Make(T)
+  end
+
+  (* wraps your field in the existential wrapper for you and then does a map
+     lookup *)
+  val get : ('a, 'b) Field.t -> Ann.t Field.Map.t -> Ann.t option
+end
+```
+
+Now we can build combinators on top of our field folds/unfolds.
+
+#### Combinators
+
+The combinators look something like this:
+
+```ocaml
+  val int_
+  val string_
+  val bool_
+  val list_
+
+  module Prim = struct
+    val int
+    val string
+    val bool
+    val list
+  end
+```
+
+The derivers in `Prim` are intended to be used directly by the `make_creator` fold/unfold. Example:
+
+```ocaml
+let open Prim in
+Fields.make_creator (init ()) ~foo:int ~bar:string |> finish
+```
+
+The underscore-suffixed versions of the derivers are used whenever types need to be composed -- for example, when using `list`:
+
+```ocaml
+let open Prim in
+Fields.make_creator (init ()) ~foo:int ~bar:(list D.string_) |> finish
+```
+
+More examples are present in the first PR #10132. Suggestions on the naming scheme for these are appreciated, either here or on that PR.
+
+### Applications for Snapps Parties (minimal)
+
+A minimal application of this mechanism to Snapp transactions would be to apply these derivers to all the types involved in the Snapp parties transactions.
+
+The derivers we need at a minimum are:
+`To_json`, `Of_json`, `Graphql_fields`, `Graphql_args`
+
+With these four derivers we can decode and encode JSON and send and receive JSON in GraphQL requests.
+
+When we bridge the `to_json` over to SnarkyJS, we can generate a Snapp transaction and know that it will be accepted by the GraphQL server generated via the `Graphql_fields`/`Graphql_args` schema.
+
+### Applications for Snapps Parties (phase2)
+
+Create derivers for the TypeScript `.d.ts` parties interface types and the OCaml/JavaScript bridge for these data types.
+
+### Other Applications
+
+Other applications to explore are deriving specifications from OCaml doc comments on data structures, and deriving random oracle input `to_input` functions rather than relying on HLists.
+
+## Drawbacks
+[drawbacks]: #drawbacks
+
+To fully adopt this vision, we'll need to rewrite a lot of different pieces within Mina. Luckily, this can be done piecemeal, whenever we decide to allocate effort toward individual sections.
+
+## Rationale and alternatives
+[rationale-and-alternatives]: #rationale-and-alternatives
+
+We could stick with ppx macros, but they are too hard to write. Other sorts of code generators don't fit into our workflow.
+
+We could also not take any sort of datatype-generic approach to dealing with this issue and instead write the same thing manually, or stick with what we've done so far -- however, as mentioned above, we are already running into bugs and are concerned about the maintainability of the current implementation.
In an effort to avoid digging ourselves further into a tech-debt hole, this RFC proposes we adopt this generic programming approach immediately.
+
+## Prior art
+[prior-art]: #prior-art
+
+TODO
+
+In Haskell, generic programming
+
+In Swift, generic programming
+
+## Unresolved questions
+[unresolved-questions]: #unresolved-questions
+
+To resolve during implementation:
+* To what extent do we rewrite the data structures in the bridge using this mechanism, versus keeping it scoped to GraphQL for now?
+
+
diff --git a/scripts/Brewfile b/scripts/Brewfile
index af4aabb93d3..7c95594ff22 100644
--- a/scripts/Brewfile
+++ b/scripts/Brewfile
@@ -14,6 +14,6 @@ brew "openssl@1.1"
 brew "python@3.8"
 brew "zlib"
 brew "libpq"
-brew "postgresql"
-brew "go"
+brew "postgresql@14"
 brew "gnu-sed"
+brew "goenv"
diff --git a/scripts/Brewfile.lock.json b/scripts/Brewfile.lock.json
index d281f7d6fea..133b7e46426 100644
--- a/scripts/Brewfile.lock.json
+++ b/scripts/Brewfile.lock.json
@@ -2,126 +2,201 @@
   "entries": {
     "brew": {
       "bash": {
-        "version": "5.1.4",
+        "version": "5.2.15",
         "bottle": {
           "rebuild": 0,
           "root_url": "https://ghcr.io/v2/homebrew/core",
           "files": {
+            "arm64_ventura": {
+              "cellar": "/opt/homebrew/Cellar",
+              "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:f3a42b9282e6779504034485634a2f3e6e3bddfc70b9990e09e66e3c8c926b7d",
+              "sha256": "f3a42b9282e6779504034485634a2f3e6e3bddfc70b9990e09e66e3c8c926b7d"
+            },
+            "arm64_monterey": {
+              "cellar": "/opt/homebrew/Cellar",
+              "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:5e7e3e3387fc60e907683b437ac6e64879e117a3c5c1421fe6e6257f6aaa3c69",
+              "sha256": "5e7e3e3387fc60e907683b437ac6e64879e117a3c5c1421fe6e6257f6aaa3c69"
+            },
             "arm64_big_sur": {
               "cellar": "/opt/homebrew/Cellar",
-              "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:253a8f71bb8ca1444fa5951caa3e4d0e6f51ca6cd6d7c9fc9f79f0c58dc3e693",
-              "sha256": "253a8f71bb8ca1444fa5951caa3e4d0e6f51ca6cd6d7c9fc9f79f0c58dc3e693"
+              "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:d19858831275271cc8aa9a1a28de6223faa44c6ebbc88e83898fd559de5b627e",
+              "sha256": "d19858831275271cc8aa9a1a28de6223faa44c6ebbc88e83898fd559de5b627e"
             },
-            "big_sur": {
+            "ventura": {
               "cellar": "/usr/local/Cellar",
-              "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:1c7c13309368474e6f7b3afd9c6ba13b213b00caeb9b990e171cf5e097e8e5e1",
-              "sha256": "1c7c13309368474e6f7b3afd9c6ba13b213b00caeb9b990e171cf5e097e8e5e1"
+              "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:fd01a9dbdc56f6313a725cb345a3b991cfdaa9e1a91b08fd9791a0e695b55723",
+              "sha256": "fd01a9dbdc56f6313a725cb345a3b991cfdaa9e1a91b08fd9791a0e695b55723"
             },
-            "catalina": {
+            "monterey": {
               "cellar": "/usr/local/Cellar",
-              "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:2195ea39cf6607ec440addd6aed524c5a66719e998d74d5f9595f594f6593b21",
-              "sha256": "2195ea39cf6607ec440addd6aed524c5a66719e998d74d5f9595f594f6593b21"
+              "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:05a5f9435c9e9ffe8377b03e0ca6b27bbb32cc01aff47dd1692cd8d7e735ab3a",
+              "sha256": "05a5f9435c9e9ffe8377b03e0ca6b27bbb32cc01aff47dd1692cd8d7e735ab3a"
             },
-            "mojave": {
+            "big_sur": {
               "cellar": "/usr/local/Cellar",
-              "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:4a294caec86652221a9901b9d892723a84e60d05bc91155efcb661829b13a898",
-              "sha256": "4a294caec86652221a9901b9d892723a84e60d05bc91155efcb661829b13a898"
+              "url":
"https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:680dd3b37e17cc4fa1af6dd8c51c774dd0c9aa3e594e96527020845516b1ea77", + "sha256": "680dd3b37e17cc4fa1af6dd8c51c774dd0c9aa3e594e96527020845516b1ea77" + }, + "x86_64_linux": { + "cellar": "/home/linuxbrew/.linuxbrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:6185e7cdba0e671528c9f38b104c4af58a670240672f83537bfc95983476fbc2", + "sha256": "6185e7cdba0e671528c9f38b104c4af58a670240672f83537bfc95983476fbc2" } } } }, "yarn": { - "version": "1.22.10", - "bottle": false + "version": "1.22.19", + "bottle": { + "rebuild": 0, + "root_url": "https://ghcr.io/v2/homebrew/core", + "files": { + "all": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/yarn/blobs/sha256:79b90324a5365189a144b786e9bdb3bf32be3823e9041d5f3250ea7b804dcd0b", + "sha256": "79b90324a5365189a144b786e9bdb3bf32be3823e9041d5f3250ea7b804dcd0b" + } + } + } }, "boost": { - "version": "1.75.0_2", + "version": "1.81.0_1", "bottle": { "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:8a4a21f28eea820cdfb2ca94d6a9c2ecad40592b145de06698283dc3c7ae0eeb", + "sha256": "8a4a21f28eea820cdfb2ca94d6a9c2ecad40592b145de06698283dc3c7ae0eeb" + }, + "arm64_monterey": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:da47f5dce669699eb052452fe166e5cd118a6f6d3f64abe4cae53461743a2cc2", + "sha256": "da47f5dce669699eb052452fe166e5cd118a6f6d3f64abe4cae53461743a2cc2" + }, "arm64_big_sur": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:a6ca6c43f67270378ae0400e66095c329ebe90a1989a4a9c4606f1b8e72a692f", - "sha256": "a6ca6c43f67270378ae0400e66095c329ebe90a1989a4a9c4606f1b8e72a692f" + "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:640b02baab8cf76935b79203660de45e0721f1428697b9916327b06e86b9300a", + "sha256": "640b02baab8cf76935b79203660de45e0721f1428697b9916327b06e86b9300a" }, - "big_sur": { + "ventura": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:be8564844a1e5bb58c26287453617458db6e886f85197c8ce35c21cfa74b1bc0", - "sha256": "be8564844a1e5bb58c26287453617458db6e886f85197c8ce35c21cfa74b1bc0" + "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:b3fc7aade48d9a8bec56ac3cc57a3c5ead36d67365cf3447c578cd31ddb8fbee", + "sha256": "b3fc7aade48d9a8bec56ac3cc57a3c5ead36d67365cf3447c578cd31ddb8fbee" }, - "catalina": { + "monterey": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:aef0fade9e8159b572907189bb8dfd828dab94c44e036cdd782c2b3834d218f3", - "sha256": "aef0fade9e8159b572907189bb8dfd828dab94c44e036cdd782c2b3834d218f3" + "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:160aabda5d6497dc72a389dd251becc971e37d4702763b3b45a5c7bbc29f0419", + "sha256": "160aabda5d6497dc72a389dd251becc971e37d4702763b3b45a5c7bbc29f0419" }, - "mojave": { + "big_sur": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:e24d396d90a8db75738cba4543b678c79ef720a96bf2f93688bd2f35fef66d3a", - "sha256": "e24d396d90a8db75738cba4543b678c79ef720a96bf2f93688bd2f35fef66d3a" + "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:51a2646e51a7a304848efa7cca17312c4a3acc5e28ef664037d0675c5c9a1e83", + "sha256": "51a2646e51a7a304848efa7cca17312c4a3acc5e28ef664037d0675c5c9a1e83" + }, + "x86_64_linux": { + "cellar": ":any_skip_relocation", + "url": 
"https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:26a83186402f3625806df9d7f6e41a1188d726d7f21ee5ccbfb3310e763d1ebc", + "sha256": "26a83186402f3625806df9d7f6e41a1188d726d7f21ee5ccbfb3310e763d1ebc" } } } }, "cmake": { - "version": "3.20.1", + "version": "3.26.0", "bottle": { "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:3c39c077ead3f8ccc94727c275ac16af5f75a088844df034d10b34ad85dfb8bf", + "sha256": "3c39c077ead3f8ccc94727c275ac16af5f75a088844df034d10b34ad85dfb8bf" + }, + "arm64_monterey": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:6fb143b21a378921ee86e61d0cf77584e42ead38076f92ea1ebb57dcefb6b85d", + "sha256": "6fb143b21a378921ee86e61d0cf77584e42ead38076f92ea1ebb57dcefb6b85d" + }, "arm64_big_sur": { "cellar": ":any_skip_relocation", - "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:b94fa9c13065ce31259621e1ac1ff8f46c0a6ee606a5944f2562ed86c7fcf2a6", - "sha256": "b94fa9c13065ce31259621e1ac1ff8f46c0a6ee606a5944f2562ed86c7fcf2a6" + "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:714058b6941002393dcadcefb13f5b16ae094724c734977cc6a2dcf2db5484ae", + "sha256": "714058b6941002393dcadcefb13f5b16ae094724c734977cc6a2dcf2db5484ae" }, - "big_sur": { + "ventura": { "cellar": ":any_skip_relocation", - "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:c8b975b0911f9125065459e9b55da2c43fc58485446ec35d8294d2db2ad77972", - "sha256": "c8b975b0911f9125065459e9b55da2c43fc58485446ec35d8294d2db2ad77972" + "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:96a930fa2836355c767057f336521113f419f51c2444ec0cb095a6776170997e", + "sha256": "96a930fa2836355c767057f336521113f419f51c2444ec0cb095a6776170997e" }, - "catalina": { + "monterey": { "cellar": ":any_skip_relocation", - "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:1875ab07ed5843cdc06368ae851ec1232a72bb679f70f816e549acfe5fff6c31", - "sha256": "1875ab07ed5843cdc06368ae851ec1232a72bb679f70f816e549acfe5fff6c31" + "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:ca050ee8541df0df30c3b06bcce0b8be0a37fc16dcfc83fe2c29dd6bf13b8643", + "sha256": "ca050ee8541df0df30c3b06bcce0b8be0a37fc16dcfc83fe2c29dd6bf13b8643" }, - "mojave": { + "big_sur": { "cellar": ":any_skip_relocation", - "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:0af0a3d97a83dcdece0c5a8ba867d6b199b928f1c4e0a325eef785af6b8f2f1e", - "sha256": "0af0a3d97a83dcdece0c5a8ba867d6b199b928f1c4e0a325eef785af6b8f2f1e" + "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:5175a6fee503ce7cd67fd6d23ea589995ac1d0eb8114756315a106b8261affda", + "sha256": "5175a6fee503ce7cd67fd6d23ea589995ac1d0eb8114756315a106b8261affda" + }, + "x86_64_linux": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:a71c04366f7b5fd26d49bd683ae3a2cab717967085fd60ffa8bc8c802a9f9c48", + "sha256": "a71c04366f7b5fd26d49bd683ae3a2cab717967085fd60ffa8bc8c802a9f9c48" } } } }, "gmp": { - "version": "6.2.1", + "version": "6.2.1_1", "bottle": { "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:2436cd120e5678d67c24020a50cbbf7c0220e7ecaac63981335872b9d666bcad", + "sha256": "2436cd120e5678d67c24020a50cbbf7c0220e7ecaac63981335872b9d666bcad" + }, + "arm64_monterey": { + "cellar": ":any", + "url": 
"https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:a43a2ae4c44d90626b835a968a32327c8b8bbf754ec1d2590f8ac656c71dace9", + "sha256": "a43a2ae4c44d90626b835a968a32327c8b8bbf754ec1d2590f8ac656c71dace9" + }, "arm64_big_sur": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:ff4ad8d068ba4c14d146abb454991b6c4f246796ec2538593dc5f04ca7593eec", - "sha256": "ff4ad8d068ba4c14d146abb454991b6c4f246796ec2538593dc5f04ca7593eec" + "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:491220f1ff2c662b96295d931a80702523eeaee681d7305fb02b561e527dcbb8", + "sha256": "491220f1ff2c662b96295d931a80702523eeaee681d7305fb02b561e527dcbb8" + }, + "ventura": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:4c6488dfd53b8287702827a4e6d50569926417f2cd08613d37720de54b6afe0c", + "sha256": "4c6488dfd53b8287702827a4e6d50569926417f2cd08613d37720de54b6afe0c" + }, + "monterey": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:dddc6d8c871c92f6e5fb1249c28768aa2b4b47c38836a69cf787a639cf5eee73", + "sha256": "dddc6d8c871c92f6e5fb1249c28768aa2b4b47c38836a69cf787a639cf5eee73" }, "big_sur": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:6a44705536f25c4b9f8547d44d129ae3b3657755039966ad2b86b821e187c32c", - "sha256": "6a44705536f25c4b9f8547d44d129ae3b3657755039966ad2b86b821e187c32c" + "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:e566452815d2ff5dc66da160bd1cd3d9cf02a17a07284cf0bac46496133383ae", + "sha256": "e566452815d2ff5dc66da160bd1cd3d9cf02a17a07284cf0bac46496133383ae" }, "catalina": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:35e9f82d80708ae8dea2d6b0646dcd86d692321b96effaa76b7fad4d6cffa5be", - "sha256": "35e9f82d80708ae8dea2d6b0646dcd86d692321b96effaa76b7fad4d6cffa5be" + "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:5ee7a460668864c28e541db15420e1480c3d31c5f216797a453a5310106fbc97", + "sha256": "5ee7a460668864c28e541db15420e1480c3d31c5f216797a453a5310106fbc97" }, "mojave": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:00fb998dc2abbd09ee9f2ad733ae1adc185924fb01be8814e69a57ef750b1a32", - "sha256": "00fb998dc2abbd09ee9f2ad733ae1adc185924fb01be8814e69a57ef750b1a32" + "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:b9d7d36c8d263be0e02e17d435350546f9f7008eb21b6e86bf42f719efcba85e", + "sha256": "b9d7d36c8d263be0e02e17d435350546f9f7008eb21b6e86bf42f719efcba85e" }, - "high_sierra": { - "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:54191ce7fa888df64b9c52870531ac0ce2e8cbd40a7c4cdec74cb2c4a421af97", - "sha256": "54191ce7fa888df64b9c52870531ac0ce2e8cbd40a7c4cdec74cb2c4a421af97" + "x86_64_linux": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:786ae29f0c0b06ea86e42bd9c6ac2c49bd5757da037dead7053e8bd612c4cf8c", + "sha256": "786ae29f0c0b06ea86e42bd9c6ac2c49bd5757da037dead7053e8bd612c4cf8c" } } } @@ -132,11 +207,31 @@ "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:51ff39d1b008d1f03d8dfc9d42ed483d64fea632b31f4ccf3dc15ddb2de09794", + "sha256": "51ff39d1b008d1f03d8dfc9d42ed483d64fea632b31f4ccf3dc15ddb2de09794" + }, + "arm64_monterey": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:0958a773e875dfbab2e70e80cd10a0406eed6f92352ae432b44f4bf74dcce35e", + 
"sha256": "0958a773e875dfbab2e70e80cd10a0406eed6f92352ae432b44f4bf74dcce35e" + }, "arm64_big_sur": { "cellar": ":any_skip_relocation", "url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:c90e7baee17d21e0cb594db676912e108f7df68b71509e15d37edfadcd6b12e9", "sha256": "c90e7baee17d21e0cb594db676912e108f7df68b71509e15d37edfadcd6b12e9" }, + "ventura": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:4578068decc9e78f130aff8e714d99a45a7154a51ce5a0e0ec4e40c31dd686bc", + "sha256": "4578068decc9e78f130aff8e714d99a45a7154a51ce5a0e0ec4e40c31dd686bc" + }, + "monterey": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:1a3e9eb276bb35ecb33bcdc50b689f1f7cebe1d014566754c5faa85e72251789", + "sha256": "1a3e9eb276bb35ecb33bcdc50b689f1f7cebe1d014566754c5faa85e72251789" + }, "big_sur": { "cellar": ":any_skip_relocation", "url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:4c18141474072f9fac171680e75c77fa22af016d1cda998a052792980d9ce4f9", @@ -166,98 +261,153 @@ "cellar": ":any_skip_relocation", "url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:bd67af8b9c24fa785a2da2a1d3475305593dbc183331aed657313e4066de3259", "sha256": "bd67af8b9c24fa785a2da2a1d3475305593dbc183331aed657313e4066de3259" + }, + "x86_64_linux": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:f49b09a0cf8b312de84a07f7dee7029a0965277baa080f5e4eb57c1457539325", + "sha256": "f49b09a0cf8b312de84a07f7dee7029a0965277baa080f5e4eb57c1457539325" } } } }, "jemalloc": { - "version": "5.2.1_1", + "version": "5.3.0", "bottle": { "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:33e0c3fbe56642e081018a9674df734d34afdc35af7d03f5dd2b484a804555e3", + "sha256": "33e0c3fbe56642e081018a9674df734d34afdc35af7d03f5dd2b484a804555e3" + }, + "arm64_monterey": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:b7ef9abad498e6eb53fb476fde4396fc9ab99a23092ea14bcf576548e198f9bd", + "sha256": "b7ef9abad498e6eb53fb476fde4396fc9ab99a23092ea14bcf576548e198f9bd" + }, "arm64_big_sur": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:724ab5947e53f571b9fed9e776a1ba22b1d71fe27ce5775553d70e990ef9dc63", - "sha256": "724ab5947e53f571b9fed9e776a1ba22b1d71fe27ce5775553d70e990ef9dc63" + "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:b24e4a9413b347397a10ebc9a7a2d309d88c0f9479c1cdebe6c302acba9a43a9", + "sha256": "b24e4a9413b347397a10ebc9a7a2d309d88c0f9479c1cdebe6c302acba9a43a9" }, - "big_sur": { + "ventura": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:7797788be2da677a8343ac6199e2f180c2e6b627c0b9abc9da133fbc34e86678", - "sha256": "7797788be2da677a8343ac6199e2f180c2e6b627c0b9abc9da133fbc34e86678" + "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:66b5f3a4c4ad9f7801e6ad2e76d1586e7b57e2cc64b24c2684dd1c2af8bc82f3", + "sha256": "66b5f3a4c4ad9f7801e6ad2e76d1586e7b57e2cc64b24c2684dd1c2af8bc82f3" }, - "catalina": { + "monterey": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:b1b211e5bead798c236d478dd74310a97a7b59470f607b608c07222648b08bf5", - "sha256": "b1b211e5bead798c236d478dd74310a97a7b59470f607b608c07222648b08bf5" + "url": 
"https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:27ae29c02d718c38ee5f623c3ef08ad3530a6fd3595d16d2ddadd6552bf32c12", + "sha256": "27ae29c02d718c38ee5f623c3ef08ad3530a6fd3595d16d2ddadd6552bf32c12" }, - "mojave": { + "big_sur": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:d3f6f85e74b08c8c97448e289734df484f884af35cd10ce9d9db43cf721fbf94", - "sha256": "d3f6f85e74b08c8c97448e289734df484f884af35cd10ce9d9db43cf721fbf94" + "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:72aef17aa140b457400c4f2b74d0473bf1160616c3df7cb8604ac2bf734afea5", + "sha256": "72aef17aa140b457400c4f2b74d0473bf1160616c3df7cb8604ac2bf734afea5" }, - "high_sierra": { + "catalina": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:8080c98844153da08346431fe0a0592f6f718cb7a17525f9ffb909c395bc0b6d", - "sha256": "8080c98844153da08346431fe0a0592f6f718cb7a17525f9ffb909c395bc0b6d" + "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:3f5cf334d16ab432bf210c7e171510d0edcd834f939b57bddfd428af5ed248ae", + "sha256": "3f5cf334d16ab432bf210c7e171510d0edcd834f939b57bddfd428af5ed248ae" + }, + "x86_64_linux": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:240b20cc078b21d90c32bd34447952b9b464958b1858ae109f168558993f9278", + "sha256": "240b20cc078b21d90c32bd34447952b9b464958b1858ae109f168558993f9278" } } } }, "libffi": { - "version": "3.3_3", + "version": "3.4.4", "bottle": { "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:66d9dcb218283c43250b04e507b7b96f0cf18fb1017fcaf811729324d11127f7", + "sha256": "66d9dcb218283c43250b04e507b7b96f0cf18fb1017fcaf811729324d11127f7" + }, + "arm64_monterey": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:e7ea0921a053dc81e818c3893887e819ed26c0e231fd306e05e905b51b9ea902", + "sha256": "e7ea0921a053dc81e818c3893887e819ed26c0e231fd306e05e905b51b9ea902" + }, "arm64_big_sur": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:10a6d66c264f9a23d1162e535fe49f27c23f6ef452b4701ed7110f06aaf1e01d", - "sha256": "10a6d66c264f9a23d1162e535fe49f27c23f6ef452b4701ed7110f06aaf1e01d" + "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:8d44b24963c114512934de23cc776a6190f5bcb65db8e6cc65e1b60122571747", + "sha256": "8d44b24963c114512934de23cc776a6190f5bcb65db8e6cc65e1b60122571747" + }, + "ventura": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:a86ed7eb1b02a3d44cd6e75977c910466357a1715743f89be94416d000577133", + "sha256": "a86ed7eb1b02a3d44cd6e75977c910466357a1715743f89be94416d000577133" + }, + "monterey": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:9dd80c4c3d4451cc3216dbf1129a2bddec474aa9266b6bb5c603e0a6cce7605b", + "sha256": "9dd80c4c3d4451cc3216dbf1129a2bddec474aa9266b6bb5c603e0a6cce7605b" }, "big_sur": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:8a7a02cffb368dfdeaeb1176a7a7bcc6402371aee0a30bb001aff3452a4202c6", - "sha256": "8a7a02cffb368dfdeaeb1176a7a7bcc6402371aee0a30bb001aff3452a4202c6" + "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:b5c4e2054802f97a68b8f32d9ff2c6782f9a37223cd0a3b3d2175ecf04740a4f", + "sha256": "b5c4e2054802f97a68b8f32d9ff2c6782f9a37223cd0a3b3d2175ecf04740a4f" }, "catalina": { "cellar": ":any", - "url": 
"https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:66caa8a807684ce5d5173ffc4db1eaa7167eabd634335a2ce3b8ba667efe2686", - "sha256": "66caa8a807684ce5d5173ffc4db1eaa7167eabd634335a2ce3b8ba667efe2686" + "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:1f53646211da139b423eb38f923bc38da1de86b7a68bfc2df5351098fe3c67e3", + "sha256": "1f53646211da139b423eb38f923bc38da1de86b7a68bfc2df5351098fe3c67e3" }, - "mojave": { - "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:1205c19a1d51940726534923db0e1c291b001a3ea541d0694afccad7968343a3", - "sha256": "1205c19a1d51940726534923db0e1c291b001a3ea541d0694afccad7968343a3" + "x86_64_linux": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:dcc9412995b5e319f64796a77b1eb8e684f1d1b6b5d7ac824f434ada692e4ff8", + "sha256": "dcc9412995b5e319f64796a77b1eb8e684f1d1b6b5d7ac824f434ada692e4ff8" } } } }, "libomp": { - "version": "12.0.0", + "version": "15.0.7", "bottle": { "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:8c5c7b912a075e598fb7ae10f2999853343b2662061d92040b1a584cbb3ba7d2", + "sha256": "8c5c7b912a075e598fb7ae10f2999853343b2662061d92040b1a584cbb3ba7d2" + }, + "arm64_monterey": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:1b1aad07e8677744cdaa264419fade98bd1a852894c77d01985053a96b7d1c7d", + "sha256": "1b1aad07e8677744cdaa264419fade98bd1a852894c77d01985053a96b7d1c7d" + }, "arm64_big_sur": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:2d2befd8f1ab88eac44e71bf05b4b03172e4b3352cc21d994898874905efadbe", - "sha256": "2d2befd8f1ab88eac44e71bf05b4b03172e4b3352cc21d994898874905efadbe" + "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:00e04fbe9783ad7751eaa6d2edda92dfbff85131777255a74e364f3217a7a2df", + "sha256": "00e04fbe9783ad7751eaa6d2edda92dfbff85131777255a74e364f3217a7a2df" }, - "big_sur": { + "ventura": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:fe1e5c0fa8ff667deb348e64e695ac355a43da34c020fa983e081ea67cb5f56c", - "sha256": "fe1e5c0fa8ff667deb348e64e695ac355a43da34c020fa983e081ea67cb5f56c" + "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:762c461db6af3cf78983b1eb58aee62699652b96237abf79469c8ac034b2156b", + "sha256": "762c461db6af3cf78983b1eb58aee62699652b96237abf79469c8ac034b2156b" }, - "catalina": { + "monterey": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:33818af9e5fa26153645f63dab95d060fea69757570910d2f86d56eff29a5cf6", - "sha256": "33818af9e5fa26153645f63dab95d060fea69757570910d2f86d56eff29a5cf6" + "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:0b944a6bbe8955e7900882b94f1b0b09030d5791191dc5b0c8b3d5d0895f4b12", + "sha256": "0b944a6bbe8955e7900882b94f1b0b09030d5791191dc5b0c8b3d5d0895f4b12" }, - "mojave": { + "big_sur": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:e6ccdea1356c28931543f73ebcc3fa5693056f40a5b04150fd54908fac17109e", - "sha256": "e6ccdea1356c28931543f73ebcc3fa5693056f40a5b04150fd54908fac17109e" + "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:f92e5b31f86c22c0fe875b50e050c19a89993b36106a9ad2737230ae2cb68069", + "sha256": "f92e5b31f86c22c0fe875b50e050c19a89993b36106a9ad2737230ae2cb68069" + }, + "x86_64_linux": { + "cellar": ":any_skip_relocation", + "url": 
"https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:d2a16a906c029e8405a11924837417ad1008d41bb1877399f494cb872a179f01", + "sha256": "d2a16a906c029e8405a11924837417ad1008d41bb1877399f494cb872a179f01" } } } @@ -268,11 +418,31 @@ "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:9a473cc4469e5f641ff79fac0331c7b86ac22778becd1155a2395e52346116d8", + "sha256": "9a473cc4469e5f641ff79fac0331c7b86ac22778becd1155a2395e52346116d8" + }, + "arm64_monterey": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:493ce4faacb1fba817e73213cde331a68f73531d89260200726cc17c1ca00797", + "sha256": "493ce4faacb1fba817e73213cde331a68f73531d89260200726cc17c1ca00797" + }, "arm64_big_sur": { "cellar": ":any", "url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:ab7029c599665005a9c9ec9e72c74bf4d543fd7a995d9af9cfe9e6c10de79177", "sha256": "ab7029c599665005a9c9ec9e72c74bf4d543fd7a995d9af9cfe9e6c10de79177" }, + "ventura": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:bbb929946689440afc6bb47effbc3e8d70db86e86c381d8ba99c1befc07e5602", + "sha256": "bbb929946689440afc6bb47effbc3e8d70db86e86c381d8ba99c1befc07e5602" + }, + "monterey": { + "cellar": ":any", + "url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:522ac3f26c646f3d276b0c997e1a2771559d4766362d28f16ca1a9585bc20206", + "sha256": "522ac3f26c646f3d276b0c997e1a2771559d4766362d28f16ca1a9585bc20206" + }, "big_sur": { "cellar": ":any", "url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:5afc5678e30a174c1e46f1e905124f2619e6d9815ac776836090c0bff85631d6", @@ -292,35 +462,55 @@ "cellar": ":any", "url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:fc972755eb60f4221d7b32e58fc0f94e99b913fefefc84c4c76dc4bca1c5c445", "sha256": "fc972755eb60f4221d7b32e58fc0f94e99b913fefefc84c4c76dc4bca1c5c445" + }, + "x86_64_linux": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:1ab2c66fc8ae6c1245b49c9bd7a32853c1b348afe7086d4c2d3baf5ea30bbac9", + "sha256": "1ab2c66fc8ae6c1245b49c9bd7a32853c1b348afe7086d4c2d3baf5ea30bbac9" } } } }, "opam": { - "version": "2.0.8", + "version": "2.1.4", "bottle": { "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:4203dd8ed7d01e2e27c226f41cde68f797433b39cea3b32d5f265205aad3c0d9", + "sha256": "4203dd8ed7d01e2e27c226f41cde68f797433b39cea3b32d5f265205aad3c0d9" + }, + "arm64_monterey": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:442fda0ec64b42667e5299217e1053057fed3c0c2f84685302fa8f1fb4fa72c0", + "sha256": "442fda0ec64b42667e5299217e1053057fed3c0c2f84685302fa8f1fb4fa72c0" + }, "arm64_big_sur": { "cellar": ":any_skip_relocation", - "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:83fedf7b107a1cc3ea02a3782e3d830feeec7b8482a8e015707af65c0bb94ac9", - "sha256": "83fedf7b107a1cc3ea02a3782e3d830feeec7b8482a8e015707af65c0bb94ac9" + "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:6462d0f11704126247331049f1e737ae459b8bb11459534a673caf2a4b834938", + "sha256": "6462d0f11704126247331049f1e737ae459b8bb11459534a673caf2a4b834938" }, - "big_sur": { + "ventura": { "cellar": ":any_skip_relocation", - "url": 
"https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:d34e0dcbfa4302960a8f813d4e06c113e24beff31d2fbf8e55e470c5b51ecc0b", - "sha256": "d34e0dcbfa4302960a8f813d4e06c113e24beff31d2fbf8e55e470c5b51ecc0b" + "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:a392de4983f5be70c57469250d82bb81e08ec32f88fec9a755b678ac285b8898", + "sha256": "a392de4983f5be70c57469250d82bb81e08ec32f88fec9a755b678ac285b8898" }, - "catalina": { + "monterey": { "cellar": ":any_skip_relocation", - "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:882bf7f9d3f94fbbc2d5f08019456f533e0a71fd58c0a02650aa5781faefca9a", - "sha256": "882bf7f9d3f94fbbc2d5f08019456f533e0a71fd58c0a02650aa5781faefca9a" + "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:507ad56c58cd33a903932870720154be8a4bac7a53dbf26cbc54ab1e0d200d87", + "sha256": "507ad56c58cd33a903932870720154be8a4bac7a53dbf26cbc54ab1e0d200d87" }, - "mojave": { + "big_sur": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:b7d269a8eacb55dfa391b361711cace261aff40941137d015f1f2fa0a7c8c0e3", + "sha256": "b7d269a8eacb55dfa391b361711cace261aff40941137d015f1f2fa0a7c8c0e3" + }, + "x86_64_linux": { "cellar": ":any_skip_relocation", - "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:e091ed13ebfa241890e0489cdc2645d66c9c189f618466cf8f7576751b381726", - "sha256": "e091ed13ebfa241890e0489cdc2645d66c9c189f618466cf8f7576751b381726" + "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:c2212e56b77c1c3c591ced93249ea9cd12f2a6eeebda161569b1c013938fb2b3", + "sha256": "c2212e56b77c1c3c591ced93249ea9cd12f2a6eeebda161569b1c013938fb2b3" } } } @@ -331,224 +521,319 @@ "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:3ff612c5e44b945c8c0cc6df7d3edb407ca67cddad9c89f9ab99ced494b7a8c2", + "sha256": "3ff612c5e44b945c8c0cc6df7d3edb407ca67cddad9c89f9ab99ced494b7a8c2" + }, + "arm64_monterey": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:2af9bceb60b70a259f236f1d46d2bb24c4d0a4af8cd63d974dde4d76313711e0", + "sha256": "2af9bceb60b70a259f236f1d46d2bb24c4d0a4af8cd63d974dde4d76313711e0" + }, "arm64_big_sur": { - "cellar": ":any_skip_relocation", + "cellar": "/opt/homebrew/Cellar", "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:ffd4491f62201d14b7eca6beff954a2ab265351589cd5b3b79b8bbb414485574", "sha256": "ffd4491f62201d14b7eca6beff954a2ab265351589cd5b3b79b8bbb414485574" }, + "ventura": { + "cellar": "/usr/local/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:c44b1544815518726d280d92d6f6df09bd45e41ad20fd43424725c1c20760be8", + "sha256": "c44b1544815518726d280d92d6f6df09bd45e41ad20fd43424725c1c20760be8" + }, + "monterey": { + "cellar": "/usr/local/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:a6ba80711f98b65d8a2bf2c9278540860415e9b5e545da338a4d94f39d119285", + "sha256": "a6ba80711f98b65d8a2bf2c9278540860415e9b5e545da338a4d94f39d119285" + }, "big_sur": { - "cellar": ":any_skip_relocation", + "cellar": "/usr/local/Cellar", "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:0040b6ebe07f60549800b211343fd5fb3cf83c866d9f62e40f5fb2f38b71e161", "sha256": "0040b6ebe07f60549800b211343fd5fb3cf83c866d9f62e40f5fb2f38b71e161" }, "catalina": { - "cellar": ":any_skip_relocation", + "cellar": "/usr/local/Cellar", "url": 
"https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:80f141e695f73bd058fd82e9f539dc67471666ff6800c5e280b5af7d3050f435", "sha256": "80f141e695f73bd058fd82e9f539dc67471666ff6800c5e280b5af7d3050f435" }, "mojave": { - "cellar": ":any_skip_relocation", + "cellar": "/usr/local/Cellar", "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:0d14b797dba0e0ab595c9afba8ab7ef9c901b60b4f806b36580ef95ebb370232", "sha256": "0d14b797dba0e0ab595c9afba8ab7ef9c901b60b4f806b36580ef95ebb370232" }, "high_sierra": { - "cellar": ":any_skip_relocation", + "cellar": "/usr/local/Cellar", "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:8c6160305abd948b8cf3e0d5c6bb0df192fa765bbb9535dda0b573cb60abbe52", "sha256": "8c6160305abd948b8cf3e0d5c6bb0df192fa765bbb9535dda0b573cb60abbe52" + }, + "x86_64_linux": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:3d9b8bf9b7b4bd08086be1104e3e18afb1c437dfaca03e6e7df8f2710b9c1c1a", + "sha256": "3d9b8bf9b7b4bd08086be1104e3e18afb1c437dfaca03e6e7df8f2710b9c1c1a" } } } }, "openssl@1.1": { - "version": "1.1.1k", + "version": "1.1.1t", "bottle": { "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:e1e08ddd93298ef8776b202e4b2f86fc519bf27a72f7cfb082b69ff2868a0175", + "sha256": "e1e08ddd93298ef8776b202e4b2f86fc519bf27a72f7cfb082b69ff2868a0175" + }, + "arm64_monterey": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:524ec08838d2826793e26b2ed084efdefec931e1aaa6dea01455aa77409b86c8", + "sha256": "524ec08838d2826793e26b2ed084efdefec931e1aaa6dea01455aa77409b86c8" + }, "arm64_big_sur": { "cellar": "/opt/homebrew/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:0a75e0f116c0653bc7a2b422e5dc500e7e51557303aa4fca9c1a28786189c1da", - "sha256": "0a75e0f116c0653bc7a2b422e5dc500e7e51557303aa4fca9c1a28786189c1da" + "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:f80836e1ee1be8f531665451699061dcb02c7e4d10da90330c83d47ee2af88e5", + "sha256": "f80836e1ee1be8f531665451699061dcb02c7e4d10da90330c83d47ee2af88e5" }, - "big_sur": { + "ventura": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:17d94c51ddfa8364baed5f3a754063e1ca75f807194f68d0b976619cf4e69c1a", - "sha256": "17d94c51ddfa8364baed5f3a754063e1ca75f807194f68d0b976619cf4e69c1a" + "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:97676d1a616421e472c46fc7930fa4a9ced514cabc1d66ae0fb8597be09ac802", + "sha256": "97676d1a616421e472c46fc7930fa4a9ced514cabc1d66ae0fb8597be09ac802" }, - "catalina": { + "monterey": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:cb610ecdda346011031b890d7b7c6e1942d7fc08cf083b74f148ec7ffed8c7e1", - "sha256": "cb610ecdda346011031b890d7b7c6e1942d7fc08cf083b74f148ec7ffed8c7e1" + "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:43c00851b8447bd5d1fba3e8140b74ca3d4a5b19343e64ec50bafae376f95454", + "sha256": "43c00851b8447bd5d1fba3e8140b74ca3d4a5b19343e64ec50bafae376f95454" }, - "mojave": { + "big_sur": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:7928c80c309c6ece50b1c0d968a1e54011088cc896d26aa511249978a246bd50", - "sha256": "7928c80c309c6ece50b1c0d968a1e54011088cc896d26aa511249978a246bd50" + "url": 
"https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:c357ccf7ece01905099a0cde58a2bbfb14141edb3aafed7d20391ed6bf726381", + "sha256": "c357ccf7ece01905099a0cde58a2bbfb14141edb3aafed7d20391ed6bf726381" + }, + "x86_64_linux": { + "cellar": "/home/linuxbrew/.linuxbrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:8844b2e735dd6e8bc1395eda1a123c136f90cb8985fcec6a7ae6815b5aad971b", + "sha256": "8844b2e735dd6e8bc1395eda1a123c136f90cb8985fcec6a7ae6815b5aad971b" } } } }, "python@3.8": { - "version": "3.8.9", + "version": "3.8.16", "bottle": { "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:1f466c82b6a1c351b5f991cec4ef8a67434428f45c1444436200f47bb2f0c85b", + "sha256": "1f466c82b6a1c351b5f991cec4ef8a67434428f45c1444436200f47bb2f0c85b" + }, + "arm64_monterey": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:3bc726770581d74e306c96b59113e1d9c9628d7cdcd7a179e455f2351fa05ed6", + "sha256": "3bc726770581d74e306c96b59113e1d9c9628d7cdcd7a179e455f2351fa05ed6" + }, "arm64_big_sur": { "cellar": "/opt/homebrew/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:e0aa205ed6ff34c99c3659490ccbc280c070dc04ac6a8d04960b36ff9076dd2e", - "sha256": "e0aa205ed6ff34c99c3659490ccbc280c070dc04ac6a8d04960b36ff9076dd2e" + "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:f5cd1b15c99ea84472064379445bffebdbbb95d7a900b3329e5bf18c3053aaa8", + "sha256": "f5cd1b15c99ea84472064379445bffebdbbb95d7a900b3329e5bf18c3053aaa8" }, - "big_sur": { + "ventura": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:6111e285226a59c3c3b0f684de2a810deb1b5b5b68e81fdafcb11f0a0b0f6606", - "sha256": "6111e285226a59c3c3b0f684de2a810deb1b5b5b68e81fdafcb11f0a0b0f6606" + "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:7edc200046d2c86bd21340a4da9770eb00f9d08ebf4a2be8e1406a012953ee3e", + "sha256": "7edc200046d2c86bd21340a4da9770eb00f9d08ebf4a2be8e1406a012953ee3e" }, - "catalina": { + "monterey": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:65a3d5fa32b16df0886c7390e992f4948b51ce56d10e57bd05895e5795efe0fd", - "sha256": "65a3d5fa32b16df0886c7390e992f4948b51ce56d10e57bd05895e5795efe0fd" + "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:8dec63a5d442ad9c3d124ee3a58f805e1e914d5013bb09d3608c4ed0d789aca0", + "sha256": "8dec63a5d442ad9c3d124ee3a58f805e1e914d5013bb09d3608c4ed0d789aca0" }, - "mojave": { + "big_sur": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:5d408f56ab185c3e7644e6ac3fe063cc367aa14810050cd2a9297332de97f5a9", - "sha256": "5d408f56ab185c3e7644e6ac3fe063cc367aa14810050cd2a9297332de97f5a9" + "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:c237c8d7d53954f52d3093090d1802b00cc3191f0c17d6848e8d5ee22bc032d6", + "sha256": "c237c8d7d53954f52d3093090d1802b00cc3191f0c17d6848e8d5ee22bc032d6" + }, + "x86_64_linux": { + "cellar": "/home/linuxbrew/.linuxbrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:03296288039955cdfcaa96066df0d5faf68565e0a8681c112a859dbbcd972957", + "sha256": "03296288039955cdfcaa96066df0d5faf68565e0a8681c112a859dbbcd972957" } } } }, "zlib": { - "version": "1.2.11", + "version": "1.2.13", "bottle": { - "rebuild": 0, + "rebuild": 1, 
"root_url": "https://ghcr.io/v2/homebrew/core", "files": { - "arm64_big_sur": { - "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:b480ed6baf10880f61b5a3097fb0921d44466857e1dde53a09e2ae4e378b1a8c", - "sha256": "b480ed6baf10880f61b5a3097fb0921d44466857e1dde53a09e2ae4e378b1a8c" - }, - "big_sur": { + "arm64_ventura": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:b95aa332dfc7c6dfb5e86fd30068f78e2cf87ee0232e5bef0adddae8215f543d", - "sha256": "b95aa332dfc7c6dfb5e86fd30068f78e2cf87ee0232e5bef0adddae8215f543d" + "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:565286ede6cc691fb781b96a76235d714159bf47c7af2cadbca01bffa92bd785", + "sha256": "565286ede6cc691fb781b96a76235d714159bf47c7af2cadbca01bffa92bd785" }, - "catalina": { + "arm64_monterey": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:8ec66cf6faa310712767efc3022fdd16568a79234439f64bf579acb628f893bc", - "sha256": "8ec66cf6faa310712767efc3022fdd16568a79234439f64bf579acb628f893bc" + "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:71825106a1d3cc348f145e58a0f2580f7394c6e747455041551517bb0958b9a6", + "sha256": "71825106a1d3cc348f145e58a0f2580f7394c6e747455041551517bb0958b9a6" }, - "mojave": { + "arm64_big_sur": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:245a43a59c57f83848e7382974bb80a46eac1d53bcaefb1bdebd1f85107d4169", - "sha256": "245a43a59c57f83848e7382974bb80a46eac1d53bcaefb1bdebd1f85107d4169" + "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:5dfa4fd7fb89f0aff96b98965da0af7e01ef6c3b8f4a90f7b2b135e2f757783f", + "sha256": "5dfa4fd7fb89f0aff96b98965da0af7e01ef6c3b8f4a90f7b2b135e2f757783f" }, - "high_sierra": { + "ventura": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:30548658b43cf66979f2756680fbb32d3c19c967e478ceea22d07f536b22bbce", - "sha256": "30548658b43cf66979f2756680fbb32d3c19c967e478ceea22d07f536b22bbce" + "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:39899e784ac736887dd6b5a08740c0a625bcb5da06fa473dede99c67b7fcbccc", + "sha256": "39899e784ac736887dd6b5a08740c0a625bcb5da06fa473dede99c67b7fcbccc" }, - "sierra": { + "monterey": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:f822b4dbab4a15b889316b89248c7b4d15d6af9dc460bf209b9425b0accb7fa3", - "sha256": "f822b4dbab4a15b889316b89248c7b4d15d6af9dc460bf209b9425b0accb7fa3" + "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:ceee8b2e24b0c8e7fbb72d63f7844a0cdf4677771e94c46153190ba11be0f48c", + "sha256": "ceee8b2e24b0c8e7fbb72d63f7844a0cdf4677771e94c46153190ba11be0f48c" }, - "el_capitan": { + "big_sur": { "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:3f912f6f1ce6c586128ebde29756c883b89409e652ca7aa9a29a773c2d4d0915", - "sha256": "3f912f6f1ce6c586128ebde29756c883b89409e652ca7aa9a29a773c2d4d0915" + "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:c7e4e0fed83c7515f658f802604e2b6a0be47f1020d4ddfd2025aa748641fe00", + "sha256": "c7e4e0fed83c7515f658f802604e2b6a0be47f1020d4ddfd2025aa748641fe00" }, - "yosemite": { - "cellar": ":any", - "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:5b969eb38b90a3e31869586df9d62e59d359212b16c6a270aee690dd67caa491", - "sha256": "5b969eb38b90a3e31869586df9d62e59d359212b16c6a270aee690dd67caa491" + "x86_64_linux": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:087e022c50655b9a7cdfd980bcff0764ce0f53f02724d4a9cbb7ba3b68b863a9", 
+ "sha256": "087e022c50655b9a7cdfd980bcff0764ce0f53f02724d4a9cbb7ba3b68b863a9" } } } }, "libpq": { - "version": "13.2", + "version": "15.2", "bottle": { "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:c070425023520a337b84ace4ab2735577b00055bc7e4870c6993b6e6ca93a750", + "sha256": "c070425023520a337b84ace4ab2735577b00055bc7e4870c6993b6e6ca93a750" + }, + "arm64_monterey": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:15e14f40369631580b69778d0a9c92b951f3e969ae40cae9c0b5fadbd8509a26", + "sha256": "15e14f40369631580b69778d0a9c92b951f3e969ae40cae9c0b5fadbd8509a26" + }, "arm64_big_sur": { "cellar": "/opt/homebrew/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:be102bcef1030289e73fe3643c9fd575471df27f4b958e1155abb7a76f21107c", - "sha256": "be102bcef1030289e73fe3643c9fd575471df27f4b958e1155abb7a76f21107c" + "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:f58a19b8834600e6b42595f40c1295dc25d8246c695a798df99b55b189709472", + "sha256": "f58a19b8834600e6b42595f40c1295dc25d8246c695a798df99b55b189709472" }, - "big_sur": { + "ventura": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:eae0a60decded85f7b0af6c880f81d746fc0f0e285eba091b75763e63da946ca", - "sha256": "eae0a60decded85f7b0af6c880f81d746fc0f0e285eba091b75763e63da946ca" + "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:1c588ee96000d09510522991025d15d49ed34b004eb6d4b6b2ad17dbae5956cc", + "sha256": "1c588ee96000d09510522991025d15d49ed34b004eb6d4b6b2ad17dbae5956cc" }, - "catalina": { + "monterey": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:9bf464e2cd8c0c8b07ba1ed8e203427103921ba051fb0db4965c880b0d085339", - "sha256": "9bf464e2cd8c0c8b07ba1ed8e203427103921ba051fb0db4965c880b0d085339" + "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:ca68207e33c0ff6a394a85d2ed7fa0c07aa4fe6f80e21acd321e7ffbe2f214bb", + "sha256": "ca68207e33c0ff6a394a85d2ed7fa0c07aa4fe6f80e21acd321e7ffbe2f214bb" }, - "mojave": { + "big_sur": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:51f2ac5acb1e614e6bc005fb2e975040bf72937f4ac1c70edcaeec3a0d396621", - "sha256": "51f2ac5acb1e614e6bc005fb2e975040bf72937f4ac1c70edcaeec3a0d396621" + "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:66552a11b4f11fc93128ff292487d3c4508ae7d06c909db74131f619b16e9fbe", + "sha256": "66552a11b4f11fc93128ff292487d3c4508ae7d06c909db74131f619b16e9fbe" + }, + "x86_64_linux": { + "cellar": "/home/linuxbrew/.linuxbrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:d13f0d4a667199a5427cba37a5af212ca9676daed78054c1730f0b75426679ee", + "sha256": "d13f0d4a667199a5427cba37a5af212ca9676daed78054c1730f0b75426679ee" } } } }, "postgresql": { - "version": "13.2_1", + "version": "14.7", "bottle": { "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:762067b573bf672b638b6354e1bed5fb675a7d3bb26ec0d6eac1bb1e24e427dd", + "sha256": "762067b573bf672b638b6354e1bed5fb675a7d3bb26ec0d6eac1bb1e24e427dd" + }, + "arm64_monterey": { + "cellar": "/opt/homebrew/Cellar", + "url": 
"https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:0ade8371ec5d58225e90982d5e75a0cada9625eee44d903e3f809d847203d1d4", + "sha256": "0ade8371ec5d58225e90982d5e75a0cada9625eee44d903e3f809d847203d1d4" + }, "arm64_big_sur": { "cellar": "/opt/homebrew/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/postgresql/blobs/sha256:299babccbbf29b9769ab402aca01c4a0c4bc173a19a928e09fe1edabe7461c88", - "sha256": "299babccbbf29b9769ab402aca01c4a0c4bc173a19a928e09fe1edabe7461c88" + "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:97a4a71a9373419b604ecde9de3d6480fc40a4648e56b050cf5d26e6edccd2c9", + "sha256": "97a4a71a9373419b604ecde9de3d6480fc40a4648e56b050cf5d26e6edccd2c9" }, - "big_sur": { + "ventura": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/postgresql/blobs/sha256:67a547842ae49911d301d490e70b5fff1ee27a65cea403abeff3a25d1806e8d6", - "sha256": "67a547842ae49911d301d490e70b5fff1ee27a65cea403abeff3a25d1806e8d6" + "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:adfc715bdc8204a91dee0d20bf5cf04b6ec152f6105b7ae5b4cf006841c19cd1", + "sha256": "adfc715bdc8204a91dee0d20bf5cf04b6ec152f6105b7ae5b4cf006841c19cd1" }, - "catalina": { + "monterey": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/postgresql/blobs/sha256:02af915cc2b5291c5a15b59a74dff255e918e7a6af34dbef53cf6ad264627628", - "sha256": "02af915cc2b5291c5a15b59a74dff255e918e7a6af34dbef53cf6ad264627628" + "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:d46d6770f2069a51b6b20310f46c26490672ec99b4c292b836fdc7ea4bbe4911", + "sha256": "d46d6770f2069a51b6b20310f46c26490672ec99b4c292b836fdc7ea4bbe4911" }, - "mojave": { + "big_sur": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/postgresql/blobs/sha256:37f0b76c0f034d8a6837805eb27da3787c39cf895516a193ad298ea96f68e98a", - "sha256": "37f0b76c0f034d8a6837805eb27da3787c39cf895516a193ad298ea96f68e98a" + "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:af5b8ba17a1f9946396b130edd741088c0c7c7322c23891580ba3d3f0b2c026a", + "sha256": "af5b8ba17a1f9946396b130edd741088c0c7c7322c23891580ba3d3f0b2c026a" + }, + "x86_64_linux": { + "cellar": "/home/linuxbrew/.linuxbrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:6853d14ffd29a1f80dafc76d88583b769f272567cb39f6a9a6c717b73d0c89ac", + "sha256": "6853d14ffd29a1f80dafc76d88583b769f272567cb39f6a9a6c717b73d0c89ac" } } } }, "go": { - "version": "1.16.3", + "version": "1.20.2", "bottle": { "rebuild": 0, "root_url": "https://ghcr.io/v2/homebrew/core", "files": { + "arm64_ventura": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:3517ed5d96d5a40cd4c44a35f7799a239b82d9855a6799c6f60193768f9825cd", + "sha256": "3517ed5d96d5a40cd4c44a35f7799a239b82d9855a6799c6f60193768f9825cd" + }, + "arm64_monterey": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:3517ed5d96d5a40cd4c44a35f7799a239b82d9855a6799c6f60193768f9825cd", + "sha256": "3517ed5d96d5a40cd4c44a35f7799a239b82d9855a6799c6f60193768f9825cd" + }, "arm64_big_sur": { "cellar": "/opt/homebrew/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:e7c1efdd09e951eb46d01a3200b01e7fa55ce285b75470051be7fef34f4233ce", - "sha256": "e7c1efdd09e951eb46d01a3200b01e7fa55ce285b75470051be7fef34f4233ce" + "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:3517ed5d96d5a40cd4c44a35f7799a239b82d9855a6799c6f60193768f9825cd", + "sha256": 
"3517ed5d96d5a40cd4c44a35f7799a239b82d9855a6799c6f60193768f9825cd" }, - "big_sur": { + "ventura": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:ea37f33fd27369612a3e4e6db6adc46db0e8bdf6fac1332bf51bafaa66d43969", - "sha256": "ea37f33fd27369612a3e4e6db6adc46db0e8bdf6fac1332bf51bafaa66d43969" + "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:ea6a2d446679fd9bc460425f3fce2e1c4a27504c6e6ad2cd9c8f7380fc75988a", + "sha256": "ea6a2d446679fd9bc460425f3fce2e1c4a27504c6e6ad2cd9c8f7380fc75988a" }, - "catalina": { + "monterey": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:69c28f5e60612801c66e51e93d32068f822b245ab83246cb6cb374572eb59e15", - "sha256": "69c28f5e60612801c66e51e93d32068f822b245ab83246cb6cb374572eb59e15" + "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:ea6a2d446679fd9bc460425f3fce2e1c4a27504c6e6ad2cd9c8f7380fc75988a", + "sha256": "ea6a2d446679fd9bc460425f3fce2e1c4a27504c6e6ad2cd9c8f7380fc75988a" }, - "mojave": { + "big_sur": { "cellar": "/usr/local/Cellar", - "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:bf1e90ed1680b8ee1acb49f2f99426c8a8ac3e49efd63c7f3b41e57e7214dd19", - "sha256": "bf1e90ed1680b8ee1acb49f2f99426c8a8ac3e49efd63c7f3b41e57e7214dd19" + "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:ea6a2d446679fd9bc460425f3fce2e1c4a27504c6e6ad2cd9c8f7380fc75988a", + "sha256": "ea6a2d446679fd9bc460425f3fce2e1c4a27504c6e6ad2cd9c8f7380fc75988a" + }, + "x86_64_linux": { + "cellar": "/home/linuxbrew/.linuxbrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:dabdff435af1ef8289dbfe4313cb190f5c61cb46b3b845b37d79beaf38c2434b", + "sha256": "dabdff435af1ef8289dbfe4313cb190f5c61cb46b3b845b37d79beaf38c2434b" } } } @@ -581,6 +866,143 @@ } } } + }, + "gnu-sed": { + "version": "4.9", + "bottle": { + "rebuild": 0, + "root_url": "https://ghcr.io/v2/homebrew/core", + "files": { + "arm64_ventura": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:5abaf39c16d02125db97d14cd36a96cf1a20a87821199cb38a55134fd4e0aaef", + "sha256": "5abaf39c16d02125db97d14cd36a96cf1a20a87821199cb38a55134fd4e0aaef" + }, + "arm64_monterey": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:20ae3f853a32e7f7f0f340e8c751ab7350888a655bfe7c5c20e5746c61a24fd7", + "sha256": "20ae3f853a32e7f7f0f340e8c751ab7350888a655bfe7c5c20e5746c61a24fd7" + }, + "arm64_big_sur": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:d7c89842a90d03dbb497bc1ded17b7d732fe20eaf69613fd4abb48820ab80895", + "sha256": "d7c89842a90d03dbb497bc1ded17b7d732fe20eaf69613fd4abb48820ab80895" + }, + "ventura": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:a1ac59a9a6fa20c6c904e047df3ee4d0b4e57c0a5df3821b17b8cd82bcc67b5a", + "sha256": "a1ac59a9a6fa20c6c904e047df3ee4d0b4e57c0a5df3821b17b8cd82bcc67b5a" + }, + "monterey": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:f5e2460ad86516b2517f1e77d672a4fd6ad30b158c470cccbb3b6464f228674d", + "sha256": "f5e2460ad86516b2517f1e77d672a4fd6ad30b158c470cccbb3b6464f228674d" + }, + "big_sur": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:c1c63d995d132a82fadc80b470eecfe816cb86c8cd716f01de5f003bc1199fcc", + "sha256": "c1c63d995d132a82fadc80b470eecfe816cb86c8cd716f01de5f003bc1199fcc" + }, + 
"catalina": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:fb5ee7317d987d9ac7f2ee357736a9bc594c88b5fbbca4f6a65046f1c2898c44", + "sha256": "fb5ee7317d987d9ac7f2ee357736a9bc594c88b5fbbca4f6a65046f1c2898c44" + }, + "x86_64_linux": { + "cellar": "/home/linuxbrew/.linuxbrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:8abd5b48de6b706c1ce7c2f7b8775420f63078ba294bd5ad801e458776228bbc", + "sha256": "8abd5b48de6b706c1ce7c2f7b8775420f63078ba294bd5ad801e458776228bbc" + } + } + } + }, + "postgresql@14": { + "version": "14.7", + "bottle": { + "rebuild": 0, + "root_url": "https://ghcr.io/v2/homebrew/core", + "files": { + "arm64_ventura": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:762067b573bf672b638b6354e1bed5fb675a7d3bb26ec0d6eac1bb1e24e427dd", + "sha256": "762067b573bf672b638b6354e1bed5fb675a7d3bb26ec0d6eac1bb1e24e427dd" + }, + "arm64_monterey": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:0ade8371ec5d58225e90982d5e75a0cada9625eee44d903e3f809d847203d1d4", + "sha256": "0ade8371ec5d58225e90982d5e75a0cada9625eee44d903e3f809d847203d1d4" + }, + "arm64_big_sur": { + "cellar": "/opt/homebrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:97a4a71a9373419b604ecde9de3d6480fc40a4648e56b050cf5d26e6edccd2c9", + "sha256": "97a4a71a9373419b604ecde9de3d6480fc40a4648e56b050cf5d26e6edccd2c9" + }, + "ventura": { + "cellar": "/usr/local/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:adfc715bdc8204a91dee0d20bf5cf04b6ec152f6105b7ae5b4cf006841c19cd1", + "sha256": "adfc715bdc8204a91dee0d20bf5cf04b6ec152f6105b7ae5b4cf006841c19cd1" + }, + "monterey": { + "cellar": "/usr/local/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:d46d6770f2069a51b6b20310f46c26490672ec99b4c292b836fdc7ea4bbe4911", + "sha256": "d46d6770f2069a51b6b20310f46c26490672ec99b4c292b836fdc7ea4bbe4911" + }, + "big_sur": { + "cellar": "/usr/local/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:af5b8ba17a1f9946396b130edd741088c0c7c7322c23891580ba3d3f0b2c026a", + "sha256": "af5b8ba17a1f9946396b130edd741088c0c7c7322c23891580ba3d3f0b2c026a" + }, + "x86_64_linux": { + "cellar": "/home/linuxbrew/.linuxbrew/Cellar", + "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:6853d14ffd29a1f80dafc76d88583b769f272567cb39f6a9a6c717b73d0c89ac", + "sha256": "6853d14ffd29a1f80dafc76d88583b769f272567cb39f6a9a6c717b73d0c89ac" + } + } + } + }, + "goenv": { + "version": "2.0.6", + "bottle": { + "rebuild": 0, + "root_url": "https://ghcr.io/v2/homebrew/core", + "files": { + "arm64_ventura": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4", + "sha256": "b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4" + }, + "arm64_monterey": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4", + "sha256": "b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4" + }, + "arm64_big_sur": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4", + "sha256": 
"b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4" + }, + "ventura": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:82a9a7d404efa1f809605b3f5cc4ad99c55b0819a7088d00c8294effee7de3a9", + "sha256": "82a9a7d404efa1f809605b3f5cc4ad99c55b0819a7088d00c8294effee7de3a9" + }, + "monterey": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:82a9a7d404efa1f809605b3f5cc4ad99c55b0819a7088d00c8294effee7de3a9", + "sha256": "82a9a7d404efa1f809605b3f5cc4ad99c55b0819a7088d00c8294effee7de3a9" + }, + "big_sur": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:82a9a7d404efa1f809605b3f5cc4ad99c55b0819a7088d00c8294effee7de3a9", + "sha256": "82a9a7d404efa1f809605b3f5cc4ad99c55b0819a7088d00c8294effee7de3a9" + }, + "x86_64_linux": { + "cellar": ":any_skip_relocation", + "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4", + "sha256": "b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4" + } + } + } } } }, @@ -593,6 +1015,22 @@ "CLT": "12.4.0.0.1.1610135815", "Xcode": "12.0", "macOS": "10.15.7" + }, + "big_sur": { + "HOMEBREW_VERSION": "4.0.6-126-g1a72b86", + "HOMEBREW_PREFIX": "/usr/local", + "Homebrew/homebrew-core": "api", + "CLT": "13.2.0.0.1.1638488800", + "Xcode": "13.2.1", + "macOS": "11.7.4" + }, + "ventura": { + "HOMEBREW_VERSION": "4.0.6-147-gb3684e5", + "HOMEBREW_PREFIX": "/opt/homebrew", + "Homebrew/homebrew-core": "api", + "CLT": "14.2.0.0.1.1668646533", + "Xcode": "14.2", + "macOS": "13.2.1" } } } diff --git a/scripts/generate-community-keys.sh b/scripts/generate-community-keys.sh new file mode 100755 index 00000000000..9e8543957dc --- /dev/null +++ b/scripts/generate-community-keys.sh @@ -0,0 +1,53 @@ +#!/bin/bash +set -u + +# This script depends on the mina daemon package, zip, and pwgen +# to generate unique passwords for each key, and output a zip file containing: +# the password along with the public and private keypairs +# For convenience, the script finally zips together all of the individual zip files, +# plus a txt file containing just the public keys +# Set the prefix to the node name (like "community", "seed" or "block-producer") +# and set count as the number of keys of this type to generate (e.g. "./generate-community-keys.sh bp 5" produces 3 keys, "bp-1" through "bp-5") +PREFIX=$1 +COUNT=$2 + +mkdir "${PREFIX}" +cd "${PREFIX}" + +for i in $(seq 1 ${COUNT}); do + + NODE="${PREFIX}-${i}" + PASS="${NODE}-password.txt" + + mkdir "./${NODE}" + + export MINA_PRIVKEY_PASS=$(pwgen --no-vowels --secure --ambiguous 64 1) + echo "${MINA_PRIVKEY_PASS}" > "${NODE}/${PASS}" + + KEY="${NODE}-key" + PUB="${NODE}-key.pub" + ZIP="${NODE}.zip" + + echo "Generating key for ${NODE}" + mina advanced generate-keypair --privkey-path "${NODE}/${KEY}" # 2> /dev/null + + echo "Copying public key for use in ledgers:" + cp "${NODE}/${PUB}" . + + echo "Generating zip file ${ZIP}" + zip -r "${ZIP}" "${NODE}" + + echo "Cleaning up ${NODE} directory" + rm -rf ${NODE} +done + +echo "Combining .pub files into one ${PREFIX}-keys.txt and cleaning up" +cat ${PREFIX}-*.pub > ${PREFIX}-keys.txt +cp ${PREFIX}-keys.txt ../ +rm -rf ${PREFIX}-*.pub + +cd .. +echo "All keys generated successfully! 
Combining into one zip file" +zip -r "${PREFIX}.zip" "${PREFIX}" + +rm -rf "${PREFIX}" diff --git a/scripts/macos-setup.sh b/scripts/macos-setup.sh index 1e0a0e82a45..28307fafa70 100755 --- a/scripts/macos-setup.sh +++ b/scripts/macos-setup.sh @@ -2,6 +2,6 @@ set -x #echo on set -eu -# Kept for backward compatability +# Kept for backward compatibility ./scripts/macos-setup-brew.sh diff --git a/scripts/ocamlmerlin b/scripts/ocamlmerlin deleted file mode 100755 index c1280d40855..00000000000 --- a/scripts/ocamlmerlin +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -# Created at http://ellenandpaulsnewstartup.com - we're hiring! - -script=$(basename $0) - -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -$DIR/run-in-docker "$script" ${@} <&0 diff --git a/scripts/opam b/scripts/opam deleted file mode 100755 index c1280d40855..00000000000 --- a/scripts/opam +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -# Created at http://ellenandpaulsnewstartup.com - we're hiring! - -script=$(basename $0) - -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -$DIR/run-in-docker "$script" ${@} <&0 diff --git a/scripts/publish-macos.sh b/scripts/publish-macos.sh index 95bc120cb8d..46e2dbdc6b5 100755 --- a/scripts/publish-macos.sh +++ b/scripts/publish-macos.sh @@ -7,13 +7,13 @@ set +u GITHASH=$(git rev-parse --short=8 HEAD) GITBRANCH=$(git rev-parse --symbolic-full-name --abbrev-ref HEAD | sed 's!/!-!; s!_!-!' ) -# Make Portable Binary +# Make Portable Binary make macos-portable -# Download JFrog CLI +# Download JFrog CLI curl -fL https://getcli.jfrog.io | sh -# Configure JFrog CLI +# Configure JFrog CLI ./jfrog rt config --url $ARTIFACTORY_URL --user $ARTIFACTORY_USER --apikey $ARTIFACTORY_API_KEY --interactive=false # Upload Artifact to Artifactory diff --git a/scripts/rebuild-deb.sh b/scripts/rebuild-deb.sh index 5ebc600e32e..7b99ed1973b 100755 --- a/scripts/rebuild-deb.sh +++ b/scripts/rebuild-deb.sh @@ -86,7 +86,7 @@ if ${MINA_BUILD_MAINNET} # only builds on mainnet-like branches then echo "---- Built all packages including mainnet, devnet, and the sidecar" else - echo "---- Not a mainnet-like branch, only built berkeley and beyond packages" + echo "---- Not a mainnet-like branch, only built berkeley and beyond packages" fi ls -lh mina*.deb diff --git a/scripts/release-docker.sh b/scripts/release-docker.sh index a07d1ebfae8..d45edb5e664 100755 --- a/scripts/release-docker.sh +++ b/scripts/release-docker.sh @@ -59,7 +59,7 @@ case "${DEB_CODENAME##*=}" in esac IMAGE="--build-arg image=${IMAGE}" -# Determine profile for mina name. To preserve backward compatibility standard profile is default. +# Determine profile for mina name. To preserve backward compatibility standard profile is default. case "${DEB_PROFILE}" in standard) DOCKER_DEB_PROFILE="" @@ -162,7 +162,7 @@ else fi if [[ -z "$NOUPLOAD" ]] || [[ "$NOUPLOAD" -eq 0 ]]; then - + # push to GCR docker push "${TAG}" diff --git a/scripts/run-in-docker b/scripts/run-in-docker deleted file mode 100755 index 329b1ba9a8f..00000000000 --- a/scripts/run-in-docker +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash - -set -euo pipefail - -# Created at http://ellenandpaulsnewstartup.com - we're hiring! - -function fix_dir_stdin { - sed -e "s!$DIR!/home/opam/app!g" -} - -function fix_dir_stdout { - sed -e "s!/home/opam/app!$DIR!g" | sed -e "s!/home/opam/.opam/.*/bin!$DIR/scripts!g" -} - -# Replace any filenames with the in-container filenames (stdin) -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )"/.. 
&& pwd )" -ARGS=${@} -ARGS=$(echo ${ARGS} | fix_dir_stdin) - -MYUID=$(id -u) -DOCKERNAME="codabuilder-$MYUID" - -NAME=$(docker ps -q --filter "name=$DOCKERNAME") - -if [ -t 0 ] ; -then - docker exec -it "${NAME}" ${ARGS} ; -else - # Replace any in-container filenames with host filesnames (stdout + stderr) - { cat <&0 | fix_dir_stdin | docker exec -i "${NAME}" ${ARGS} 2>&1 1>&3 3>&- | fix_dir_stdout; } 3>&1 1>&2 | fix_dir_stdout -fi diff --git a/scripts/select-opam-deps.sh b/scripts/select-opam-deps.sh deleted file mode 100755 index 0069d704a9b..00000000000 --- a/scripts/select-opam-deps.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -# -# Selects the correct versions of particular opam deps -# - -set -eou pipefail - -RES=() -for lib in $@; do - RES+=($(cat opam.export | grep '"'"$lib"'\.' | awk -F'"' '{ print $2 }')) -done - -echo "${RES[@]}" - diff --git a/scripts/testone.sh b/scripts/testone.sh index c41e264c575..5a718e198fc 100755 --- a/scripts/testone.sh +++ b/scripts/testone.sh @@ -3,7 +3,7 @@ set -e # File assumes tat you are running the program at the program at the root directory of the coda repo -if [[ "$#" -eq "0" ]]; then +if [[ "$#" -eq "0" ]]; then echo "This script needs at least one argument, TEST-FILE, to run" exit 1 fi @@ -22,9 +22,9 @@ DIRPATH=$(dirname "$TEST_FILE") LIBRARY_NAME=$(basename "$DIRPATH") TEST_RUNNER_PROG="$DIRPATH/.$LIBRARY_NAME.inline-tests/inline_test_runner_$LIBRARY_NAME.exe" -if [[ "$#" -eq "1" ]]; then - TEST_CASE="$TEST_FILE" -else +if [[ "$#" -eq "1" ]]; then + TEST_CASE="$TEST_FILE" +else TEST_CASE="$TEST_FILE:$2" fi ( ulimit -s 65532 || true ) && \ diff --git a/scripts/thread-timing/README.md b/scripts/thread-timing/README.md new file mode 100644 index 00000000000..51013d9bfef --- /dev/null +++ b/scripts/thread-timing/README.md @@ -0,0 +1,38 @@ +# Thread Timing + +Thread execution timing is provided by the [O1trace module](../../src/lib/o1trace). With it, the Mina daemon is instrumented in a hierarchical fashion, where there is one root through (called "Mina") under which there is a tree of descendant threads. As opposed to naive timing techniques which time the delay it takes for a thread to execute, this timing technique times the amount of time spent actually executing the thread in a way that allows us to generate dashboards showing the amount of time spent per second in various threads of the daemon. + +Thread timings are inclusive of descendants. This means that, for a given thread A that has children B and C, the amount of time that is reported for the execution of A includes the exection time of B and C. Therefore, when reasoning about the actual time spent in A, but not in B or C, we need to subtract the execution of B and C to find the leftover time spent in A that wasn't in a child thread. This makes writing Grafana charts for thread timing annoying and brittle. Luckily, there are scripts which can be used to automate the generation of the Grafana charts. + +## Generating Charts + +Generating charts boils down to 3 steps. + +1. Acquire a snapshot of thread hierarchy from a running daemon. +2. Execute the chart generating script, passing in the thread hierarchy captured in step 1, and specifying the options for the chart you want to generate. +3. Import the scripts JSON output into a Grafana chart. + +### Acquiring thread heiarchy snapshots + +Because O1trace does not have a PPX, the hierarchy of all the threads in the daemon are not known at compile time. 
Furthermore, different classes of daemons may have slightly different thread hierarchies (e.g. block producers and snark coordinators run additional subsystems that other nodes do not). Because of this, it is important to consider which node you actually want to take a thread snapshot from. In general, it works well to take thread hierarchy snapshots from block production nodes and to generate dashboards from those when investigating seeds and block producer nodes. For snark coordinators and archive nodes, it may be best to generate separate dashboards from their hierarchies in order to include their special subsystems in the charts. + +The thread hierarchy can be dumped from a running daemon using the command `mina advanced thread-graph`. It is important to check that the node has finished bootstrap before dumping the thread hierarchy, as some threads will be missing until bootstrap has completed and we enter the participation phase. + +### Executing chart scripts + +TODO: The current chart-generating scripts need to be merged together and updated to take CLI parameters in order to be more usable. For now, I've been modifying the last line of each script to configure the chart I want to generate. I also need to edit the call to `pydot.graph_from_dot_file` to load the correct thread hierarchy graph for the chart I'm generating. In the future, there will be only one script with a nice CLI interface for configuring these options. + +There are two scripts for generating charts: `aggregate-thread-graph.py` and `single-thread-graph.py`. `aggregate-thread-graph.py` generates a chart which averages all thread timing metrics across the nodes in a testnet. `single-thread-graph.py` sums all of the thread timing metrics across the nodes in a testnet (this was mainly useful when only one node ran thread timing metrics, and is not as useful outside of that context). The feature currently missing from the chart-generating scripts is generating charts for a dashboard where we can select a specific node and see its metrics (this is easy to add, though). + +### Importing charts to Grafana + +The charts rely on a couple of dashboard variables in order to work properly. You need a `testnet` variable, to configure the network the chart will query, and a `sample_range` variable, to configure the aggregation range for the metric. `sample_range` needs to be configurable because different Prometheus instances have different scraping intervals. The goal is to choose the smallest `sample_range` that works with the query. For our Prometheus instances, this is usually `2m`, `3m`, or `5m`. The `sample_range` has to be large enough to include enough samples for the `rate` operator to determine the per-second rate of change of the thread timing metrics. This can incur a small delay in the chart, but it should not be significant. + +To import a chart to Grafana: + +1. Add a new panel to the dashboard you want to import to. +2. On the dashboard view, open the dropdown for the panel (by clicking on the panel title) and select "Inspect > Panel JSON". +3. In the right-hand view that pops up, take note of the value set in the `id` field (we will need it in a second). Select and delete all of the JSON data for the panel. +4. Paste in the JSON output from the script you ran to generate the chart. +5. Now the annoying part: look for the `id` field of the JSON object you just pasted.
Update the `id` field to match the `id` of the JSON object you deleted in step 3. Failing to do this can corrupt your dashboard, because Grafana ends up overwriting other charts that share the same id in a buggy way. I hope to find a better workflow for importing charts in the future that circumvents this chart id behavior. +6. Click "Apply". diff --git a/scripts/thread-timing/aggregate-thread-graph.py b/scripts/thread-timing/aggregate-thread-graph.py new file mode 100644 index 00000000000..c2e9947c5d9 --- /dev/null +++ b/scripts/thread-timing/aggregate-thread-graph.py @@ -0,0 +1,116 @@ +from collections import defaultdict +import json +import pydot + +# TODO: when a node has multiple successors, we need to subtract out the other threads + + +def thread_time_query(thread): + return ( + 'avg(sum by(app) (rate(Mina_Daemon_time_spent_in_thread_%s_ms{testnet="$testnet"}[$sample_range])))' + % thread + ) + + +def isolated_thread_time_query(child_index, thread): + if not thread in child_index: + raise Exception('thread not found in graph: %s' % thread) + query = thread_time_query(thread) + child_queries = [ + "sum(%s or vector(0))" % thread_time_query(child) + for child in child_index[thread] + ] + if len(child_queries) > 0: + return "sum(%s or vector(0))-(%s)" % (query, "+".join(child_queries)) + else: + return query + + +def grafana_thread_target(child_index, thread, isolate): + expr = isolated_thread_time_query(child_index, thread) if isolate else thread_time_query(thread) + return { + # "datasource": {"type": "prometheus", "uid": "grafanacloud-prom"}, + "datasource": None, + "exemplar": True, + "expr": expr, + "hide": False, + "interval": "", + "legendFormat": thread, + "refId": thread, + } + + +def grafana_chart_targets(child_index, root, max_depth=None): + is_leaf = (max_depth == 0) + + targets = [] + targets.append(grafana_thread_target(child_index, root, isolate=not is_leaf)) + + if not is_leaf: + for child in child_index[root]: + targets.extend(grafana_chart_targets(child_index, child, max_depth - 1 if max_depth != None else None)) + + return targets + + +def grafana_panel(child_index, root, max_depth=None): + targets = grafana_chart_targets(child_index, root, max_depth) + defaults = { + "custom": { + "drawStyle": "line", + "lineInterpolation": "smooth", + "barAlignment": 0, + "lineWidth": 1, + "fillOpacity": 100, + "gradientMode": "none", + "spanNulls": False, + "showPoints": "never", + "pointSize": 5, + "stacking": {"mode": "normal", "group": root}, + "axisPlacement": "auto", + "axisLabel": "", + "scaleDistribution": {"type": "linear"}, + "hideFrom": {"tooltip": False, "viz": False, "legend": False}, + "thresholdsStyle": {"mode": "off"}, + "lineStyle": {"fill": "solid"}, + }, + "color": {"mode": "palette-classic"}, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + {"color": "green", "value": None}, + {"color": "red", "value": 80}, + ], + }, + "min": 0, + "unit": "ms", + } + return { + "id": 0, + "gridPos": {"h": 10, "w": 24, "x": 0, "y": 14}, + "type": "timeseries", + "title": root, + # "datasource": {"type": "prometheus", "uid": "grafanacloud-prom"}, + "datasource": None, + "defaults": defaults, + "fieldConfig": { + "defaults": defaults, + "overrides": [] + }, + "options": { + "tooltip": {"mode": "single", "sort": "none"}, + "legend": {"displayMode": "list", "placement": "bottom", "calcs": []}, + }, + "targets": targets, + } + +graph = pydot.graph_from_dot_file("whale-threads.dot")[0] +child_index = {} +for v in 
graph.get_nodes(): + child_index[v.obj_dict['name']] = [] +for e in graph.get_edges(): + (pred, succ) = e.obj_dict["points"] + child_index[pred].append(succ) + +print(json.dumps(grafana_panel(child_index, "transition_router", max_depth=1))) diff --git a/scripts/thread-timing/single-thread-graph.py b/scripts/thread-timing/single-thread-graph.py new file mode 100644 index 00000000000..c58647caf46 --- /dev/null +++ b/scripts/thread-timing/single-thread-graph.py @@ -0,0 +1,116 @@ +from collections import defaultdict +import json +import pydot + +# TODO: when a node has multiple successors, we need to subtract out the other threads + + +def thread_time_query(thread): + return ( + 'sum by(app) (rate(Mina_Daemon_time_spent_in_thread_%s_ms{testnet="$testnet",app="$app"}[$sample_range]))' + % thread + ) + + +def isolated_thread_time_query(child_index, thread): + if not thread in child_index: + raise Exception('thread not found in graph: %s' % thread) + query = thread_time_query(thread) + child_queries = [ + "sum(%s or vector(0))" % thread_time_query(child) + for child in child_index[thread] + ] + if len(child_queries) > 0: + return "sum(%s or vector(0))-(%s)" % (query, "+".join(child_queries)) + else: + return query + + +def grafana_thread_target(child_index, thread, isolate): + expr = isolated_thread_time_query(child_index, thread) if isolate else thread_time_query(thread) + return { + # "datasource": {"type": "prometheus", "uid": "grafanacloud-prom"}, + "datasource": None, + "exemplar": True, + "expr": expr, + "hide": False, + "interval": "", + "legendFormat": thread, + "refId": thread, + } + + +def grafana_chart_targets(child_index, root, max_depth=None): + is_leaf = (max_depth == 0) + + targets = [] + targets.append(grafana_thread_target(child_index, root, isolate=not is_leaf)) + + if not is_leaf: + for child in child_index[root]: + targets.extend(grafana_chart_targets(child_index, child, max_depth - 1 if max_depth != None else None)) + + return targets + + +def grafana_panel(child_index, root, max_depth=None): + targets = grafana_chart_targets(child_index, root, max_depth) + defaults = { + "custom": { + "drawStyle": "line", + "lineInterpolation": "smooth", + "barAlignment": 0, + "lineWidth": 1, + "fillOpacity": 100, + "gradientMode": "none", + "spanNulls": False, + "showPoints": "never", + "pointSize": 5, + "stacking": {"mode": "normal", "group": root}, + "axisPlacement": "auto", + "axisLabel": "", + "scaleDistribution": {"type": "linear"}, + "hideFrom": {"tooltip": False, "viz": False, "legend": False}, + "thresholdsStyle": {"mode": "off"}, + "lineStyle": {"fill": "solid"}, + }, + "color": {"mode": "palette-classic"}, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + {"color": "green", "value": None}, + {"color": "red", "value": 80}, + ], + }, + "min": 0, + "unit": "ms", + } + return { + "id": 0, + "gridPos": {"h": 10, "w": 24, "x": 0, "y": 14}, + "type": "timeseries", + "title": root, + # "datasource": {"type": "prometheus", "uid": "grafanacloud-prom"}, + "datasource": None, + "defaults": defaults, + "fieldConfig": { + "defaults": defaults, + "overrides": [] + }, + "options": { + "tooltip": {"mode": "single", "sort": "none"}, + "legend": {"displayMode": "list", "placement": "bottom", "calcs": []}, + }, + "targets": targets, + } + +graph = pydot.graph_from_dot_file("coordinator-threads.dot")[0] +child_index = {} +for v in graph.get_nodes(): + child_index[v.obj_dict['name']] = [] +for e in graph.get_edges(): + (pred, succ) = e.obj_dict["points"] + 
child_index[pred].append(succ) + +print(json.dumps(grafana_panel(child_index, "serve_client_rpcs", max_depth=None))) diff --git a/src/app/archive_blocks/README.md b/src/app/archive_blocks/README.md new file mode 100644 index 00000000000..718e7a48467 --- /dev/null +++ b/src/app/archive_blocks/README.md @@ -0,0 +1,20 @@ +archive_blocks +============== + +The `archive_blocks` app adds blocks in either "precomputed" or +"extensional" format to the archive database. + +Precomputed blocks are stored in the bucket `mina_network_block_data` +on Google Cloud Storage. Blocks are named NETWORK-HEIGHT-STATEHASH.json. +Example: mainnet-100000-3NKLvMCimUjX1zjjiC3XPMT34D1bVQGzkKW58XDwFJgQ5wDQ9Tki.json. + +Extensional blocks are extracted from other archive databases using +the `extract_blocks` app. + +As many blocks as are available can be added at a time, but all blocks +must be in the same format. + +Except for blocks from the original mainnet, both precomputed and +extensional blocks have a version in their JSON representation. That +version must match the corresponding OCaml type in the code when this +app was built. diff --git a/src/app/archive_blocks/archive_blocks.ml b/src/app/archive_blocks/archive_blocks.ml index caa4203939e..7851ca04518 100644 --- a/src/app/archive_blocks/archive_blocks.ml +++ b/src/app/archive_blocks/archive_blocks.ml @@ -1,4 +1,4 @@ -(* archive_blocks.ml -- archive precomputed or extensional blocks to Postgresql *) +(* archive_blocks.ml *) open Core_kernel open Async diff --git a/src/app/cli/src/cli_entrypoint/dune b/src/app/cli/src/cli_entrypoint/dune index d212ebdccea..ab5dda7feec 100644 --- a/src/app/cli/src/cli_entrypoint/dune +++ b/src/app/cli/src/cli_entrypoint/dune @@ -41,6 +41,7 @@ currency signature_lib mina_base + mina_base.import error_json genesis_ledger_helper consensus @@ -52,6 +53,9 @@ genesis_constants blake2 mina_metrics + transaction_witness + snark_work_lib + transaction_snark mina_compile_config node_error_service mina_user_error diff --git a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml index 3349abd2e69..d4b56fd38ed 100644 --- a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml +++ b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml @@ -160,9 +160,12 @@ let setup_daemon logger = flag "--block-producer-key" ~aliases:[ "block-producer-key" ] ~doc: (sprintf - "KEYFILE Private key file for the block producer. You cannot \ + "DEPRECATED: Use environment variable `MINA_BP_PRIVKEY` instead. \ + Private key file for the block producer. Providing this flag or \ + the environment variable will enable block production. You cannot \ provide both `block-producer-key` and `block-producer-pubkey`. \ - (default: don't produce blocks). %s" + (default: use environment variable `MINA_BP_PRIVKEY`, if provided, \ + or else don't produce any blocks) %s" receiver_key_warning ) (optional string) and block_production_pubkey = @@ -172,8 +175,8 @@ let setup_daemon logger = (sprintf "PUBLICKEY Public key for the associated private key that is being \ tracked by this daemon. You cannot provide both \ - `block-producer-key` and `block-producer-pubkey`. (default: don't \ - produce blocks). %s" + `block-producer-key` (or `MINA_BP_PRIVKEY`) and \ + `block-producer-pubkey`. 
(default: don't produce blocks) %s" receiver_key_warning ) (optional public_key_compressed) and block_production_password = @@ -494,6 +497,11 @@ let setup_daemon logger = ~aliases:[ "log-precomputed-blocks" ] (optional_with_default false bool) ~doc:"true|false Include precomputed blocks in the log (default: false)" + and start_filtered_logs = + flag "--start-filtered-logs" (listed string) + ~doc: + "LOG-FILTER Include filtered logs for the given filter. May be passed \ + multiple times" and block_reward_threshold = flag "--minimum-block-reward" ~aliases:[ "minimum-block-reward" ] ~doc: @@ -975,21 +983,39 @@ let setup_daemon logger = Unix.putenv ~key:Secrets.Keypair.env ~data:password ) block_production_password ; let%bind block_production_keypair = - match (block_production_key, block_production_pubkey) with - | Some _, Some _ -> + match + ( block_production_key + , block_production_pubkey + , Sys.getenv "MINA_BP_PRIVKEY" ) + with + | Some _, Some _, _ -> Mina_user_error.raise "You cannot provide both `block-producer-key` and \ `block_production_pubkey`" - | None, None -> + | None, Some _, Some _ -> + Mina_user_error.raise + "You cannot provide both `MINA_BP_PRIVKEY` and \ + `block_production_pubkey`" + | None, None, None -> Deferred.return None - | Some sk_file, _ -> + | None, None, Some base58_privkey -> + let kp = + Private_key.of_base58_check_exn base58_privkey + |> Keypair.of_private_key_exn + in + Deferred.return (Some kp) + (* CLI argument takes precedence over env variable *) + | Some sk_file, None, (Some _ | None) -> + [%log warn] + "`block-producer-key` is deprecated. Please set \ + `MINA_BP_PRIVKEY` environment variable instead." ; let%map kp = Secrets.Keypair.Terminal_stdin.read_exn ~should_prompt_user:false ~which:"block producer keypair" sk_file in Some kp - | _, Some tracked_pubkey -> + | None, Some tracked_pubkey, None -> let%map kp = Secrets.Wallets.get_tracked_keypair ~logger ~which:"block producer keypair" @@ -1391,10 +1417,11 @@ Pass one of -peer, -peer-list-file, -seed, -peer-list-url.|} ; ~work_reassignment_wait ~archive_process_location ~log_block_creation ~precomputed_values ~start_time ?precomputed_blocks_path ~log_precomputed_blocks - ~upload_blocks_to_gcloud ~block_reward_threshold ~uptime_url - ~uptime_submitter_keypair ~uptime_send_node_commit ~stop_time - ~node_status_url ~graphql_control_port:itn_graphql_port - ~simplified_node_stats () ) + ~start_filtered_logs ~upload_blocks_to_gcloud + ~block_reward_threshold ~uptime_url ~uptime_submitter_keypair + ~uptime_send_node_commit ~stop_time ~node_status_url + ~graphql_control_port:itn_graphql_port ~simplified_node_stats + () ) in { mina ; client_trustlist @@ -1773,6 +1800,52 @@ let internal_commands logger = Prover.prove_from_input_sexp prover sexp >>| ignore | `Eof -> failwith "early EOF while reading sexp" ) ) ) + ; ( "run-snark-worker-single" + , Command.async + ~summary:"Run snark-worker on a sexp provided on a single line of stdin" + (let open Command.Let_syntax in + let%map_open filename = + flag "--file" (required string) + ~doc:"File containing the s-expression of the snark work to execute" + in + fun () -> + let open Deferred.Let_syntax in + let logger = Logger.create () in + Parallel.init_master () ; + match%bind + Reader.with_file filename ~f:(fun reader -> + [%log info] "Created reader for %s" filename ; + Reader.read_sexp reader ) + with + | `Ok sexp -> ( + let%bind worker_state = + Snark_worker.Prod.Inputs.Worker_state.create + ~proof_level:Genesis_constants.Proof_level.compiled + 
~constraint_constants: + Genesis_constants.Constraint_constants.compiled () + in + let sok_message = + { Mina_base.Sok_message.fee = Currency.Fee.of_mina_int_exn 0 + ; prover = Quickcheck.random_value Public_key.Compressed.gen + } + in + let spec = + [%of_sexp: + ( Transaction_witness.t + , Ledger_proof.t ) + Snark_work_lib.Work.Single.Spec.t] sexp + in + match%map + Snark_worker.Prod.Inputs.perform_single worker_state + ~message:sok_message spec + with + | Ok _ -> + [%log info] "Successfully worked" + | Error err -> + [%log error] "Work didn't work: $err" + ~metadata:[ ("err", Error_json.error_to_yojson err) ] ) + | `Eof -> + failwith "early EOF while reading sexp") ) ; ( "run-verifier" , Command.async ~summary:"Run verifier on a proof provided on a single line of stdin" diff --git a/src/app/cli/src/init/client.ml b/src/app/cli/src/init/client.ml index eaf9844129d..1558fb6aa4a 100644 --- a/src/app/cli/src/init/client.ml +++ b/src/app/cli/src/init/client.ml @@ -1034,13 +1034,13 @@ let pending_snark_work = (Array.map ~f:(fun bundle -> Array.map bundle.workBundle ~f:(fun w -> - let f = w.fee_excess in + let fee_excess_left = w.fee_excess.feeExcessLeft in { Cli_lib.Graphql_types.Pending_snark_work.Work .work_id = w.work_id ; fee_excess = Currency.Amount.Signed.of_fee - (to_signed_fee_exn f.sign - (Currency.Amount.to_fee f.fee_magnitude) ) + (to_signed_fee_exn fee_excess_left.sign + fee_excess_left.feeMagnitude ) ; supply_increase = w.supply_increase ; source_first_pass_ledger_hash = w.source_first_pass_ledger_hash @@ -1512,7 +1512,7 @@ let create_account = in let pk_string = Public_key.Compressed.to_base58_check - response.createAccount.public_key + response.createAccount.account.public_key in printf "\n😄 Added new account!\nPublic key: %s\n" pk_string ) ) @@ -1529,7 +1529,7 @@ let create_hd_account = in let pk_string = Public_key.Compressed.to_base58_check - response.createHDAccount.public_key + response.createHDAccount.account.public_key in printf "\n😄 created HD account with HD-index %s!\nPublic key: %s\n" (Mina_numbers.Hd_index.to_string hd_index) @@ -1563,7 +1563,7 @@ let unlock_account = in let pk_string = Public_key.Compressed.to_base58_check - response.unlockAccount.public_key + response.unlockAccount.account.public_key in printf "\n🔓 Unlocked account!\nPublic key: %s\n" pk_string | Error e -> diff --git a/src/app/cli/src/init/graphql_queries.ml b/src/app/cli/src/init/graphql_queries.ml index 50e0a9da10c..f9534625159 100644 --- a/src/app/cli/src/init/graphql_queries.ml +++ b/src/app/cli/src/init/graphql_queries.ml @@ -45,7 +45,7 @@ module Create_account = {| mutation ($password: String!) @encoders(module: "Encoders"){ createAccount(input: {password: $password}) { - public_key: publicKey + account: account { public_key : publicKey } } } |}] @@ -55,7 +55,7 @@ module Create_hd_account = {| mutation ($hd_index: UInt32!) @encoders(module: "Encoders"){ createHDAccount(input: {index: $hd_index}) { - public_key: publicKey + account : account { public_key: publicKey } } } |}] @@ -65,7 +65,7 @@ module Unlock_account = {| mutation ($password: String!, $public_key: PublicKey!) 
@encoders(module: "Encoders"){ unlockAccount(input: {password: $password, publicKey: $public_key }) { - public_key: publicKey + account: account { public_key: publicKey } } } |}] @@ -109,10 +109,18 @@ query pendingSnarkWork { source_second_pass_ledger_hash: sourceSecondPassLedgerHash target_second_pass_ledger_hash: targetSecondPassLedgerHash fee_excess: feeExcess { - sign - fee_magnitude: feeMagnitude + feeTokenLeft + feeExcessLeft { + sign + feeMagnitude + } + feeTokenRight + feeExcessRight { + sign + feeMagnitude + } } - supply_increase: supplyIncrease + supply_increase : supplyIncrease work_id: workId } } @@ -207,10 +215,10 @@ module Pooled_user_commands = query user_commands($public_key: PublicKey) @encoders(module: "Encoders"){ pooledUserCommands(publicKey: $public_key) @bsRecord { id - isDelegation + kind nonce - from - to_: to + feePayer { public_key: publicKey } + receiver { public_key: publicKey } amount fee memo diff --git a/src/app/decoder/decoder.ml b/src/app/decoder/decoder.ml deleted file mode 100644 index 33ce15390c6..00000000000 --- a/src/app/decoder/decoder.ml +++ /dev/null @@ -1,8 +0,0 @@ -open Mina_base - -let () = - if Array.length Sys.argv < 2 then failwith "MISSING ARGUMENT" - else - let state_hash_b58 = Sys.argv.(1) in - State_hash.of_base58_check_exn state_hash_b58 - |> State_hash.to_yojson |> Yojson.Safe.to_string |> print_endline diff --git a/src/app/decoder/dune b/src/app/decoder/dune deleted file mode 100644 index 0b5bd9fd6c8..00000000000 --- a/src/app/decoder/dune +++ /dev/null @@ -1,5 +0,0 @@ -(executable - (name decoder) - (libraries mina_base yojson) - (instrumentation (backend bisect_ppx)) - (preprocess (pps ppx_version))) diff --git a/src/app/delegation_backend/src/delegation_backend/submit.go b/src/app/delegation_backend/src/delegation_backend/submit.go index 26c95403932..273b3e02c20 100644 --- a/src/app/delegation_backend/src/delegation_backend/submit.go +++ b/src/app/delegation_backend/src/delegation_backend/submit.go @@ -34,22 +34,30 @@ func writeErrorResponse(app *App, w *http.ResponseWriter, msg string) { func (ctx *AwsContext) S3Save(objs ObjectsToSave) { for path, bs := range objs { - _, err := ctx.Client.HeadObject(ctx.Context, &s3.HeadObjectInput{ - Bucket: ctx.BucketName, - Key: aws.String(ctx.Prefix + "/" + path), - }) - if err == nil { - ctx.Log.Warnf("object already exists: %s", path) + fullKey := aws.String(ctx.Prefix + "/" + path) + if strings.HasPrefix(path, "blocks/") { + _, err := ctx.Client.HeadObject(ctx.Context, &s3.HeadObjectInput{ + Bucket: ctx.BucketName, + Key: fullKey, + }) + if err == nil { + //block already exists, skipping + continue + } + if !strings.Contains(err.Error(), "NotFound") { + ctx.Log.Warnf("S3Save: Error when checking if block exists, but will continue with block save: %s, error: %v", path, err) + } } - _, err = ctx.Client.PutObject(ctx.Context, &s3.PutObjectInput{ + ctx.Log.Debugf("S3Save: saving %s", path) + _, err := ctx.Client.PutObject(ctx.Context, &s3.PutObjectInput{ Bucket: ctx.BucketName, - Key: aws.String(ctx.Prefix + "/" + path), + Key: fullKey, Body: bytes.NewReader(bs), ContentMD5: nil, }) if err != nil { - ctx.Log.Warnf("Error while saving metadata: %v", err) + ctx.Log.Warnf("S3Save: Error while saving metadata: %v", err) } } } @@ -175,7 +183,7 @@ func (h *SubmitH) ServeHTTP(w http.ResponseWriter, r *http.Request) { metaBytes, err1 := req.MakeMetaToBeSaved(remoteAddr) if err1 != nil { - h.app.Log.Errorf("Error while marshaling JSON for metaToBeSaved: %v", err) + h.app.Log.Errorf("Error while 
marshaling JSON for metaToBeSaved: %v", err1)
 w.WriteHeader(500)
 writeErrorResponse(h.app, &w, "Unexpected server error")
 return
diff --git a/src/app/dhall_types/dump_dhall_types.ml b/src/app/dhall_types/dump_dhall_types.ml deleted file mode 100644 index 8efdf2011c3..00000000000 --- a/src/app/dhall_types/dump_dhall_types.ml +++ /dev/null @@ -1,39 +0,0 @@
-(* dump_dhall_types.ml -- dump Dhall types for runtime config and related types *)
-
-open Core
-
-(* Dhall types (as OCaml values) and their names to be used in Dhall *)
-let types_and_files =
-  [ (Runtime_config.Json_layout.dhall_type, "runtime_config") ]
-
-let print_dhall_type (ty, nm) =
-  let s = Ppx_dhall_type.Dhall_type.to_string ty in
-  let dhall_file = nm ^ ".dhall" in
-  let oc = Caml.open_out dhall_file in
-  Printf.fprintf oc "-- GENERATED FILE\n\n%!" ;
-  Printf.fprintf oc "let %s : Type = %s in %s" nm s nm ;
-  Caml.close_out oc ;
-  ignore
-    (Unix.create_process ~prog:"dhall"
-       ~args:[ "format"; "--inplace"; dhall_file ] )
-
-let main ~output_dir () =
-  let output_dir =
-    Option.value_map ~default:(Sys.getcwd ()) ~f:Fn.id output_dir
-  in
-  Sys.chdir output_dir ;
-  List.iter types_and_files ~f:print_dhall_type
-
-let () =
-  Command.(
-    run
-      (let open Let_syntax in
-      basic ~summary:"Dump Dhall types to files"
-        (let%map output_dir =
-           Param.flag "--output-dir"
-             ~doc:
-               "Directory where the Dhall files will be created (default: \ current directory)"
-             Param.(optional string)
-         in
-         main ~output_dir )))
diff --git a/src/app/dhall_types/dune b/src/app/dhall_types/dune deleted file mode 100644 index ed95793d486..00000000000 --- a/src/app/dhall_types/dune +++ /dev/null @@ -1,19 +0,0 @@
-(executable
- (package dhall_types)
- (name dump_dhall_types)
- (public_name dump_dhall_types)
- (libraries
-   ;; opam libraries
-   base
-   core_kernel
-   async_kernel
-   core
-   stdio
-   base.caml
-   ;; local libraries
-   mina_runtime_config
-   ppx_dhall_type
- )
- (preprocessor_deps ../../config.mlh)
- (instrumentation (backend bisect_ppx))
- (preprocess (pps ppx_version ppx_let)))
diff --git a/src/app/disk_caching_stats/README.md b/src/app/disk_caching_stats/README.md new file mode 100644 index 00000000000..321a86a8196 --- /dev/null +++ b/src/app/disk_caching_stats/README.md @@ -0,0 +1,7 @@
+# Disk Caching Stats
+
+This program computes the expected worst-case memory usage of the daemon before and after applying the disk caching changes proposed in [RFC 56: Reducing Daemon Memory Usage](rfcs/0056-reducing-daemon-memory-usage.md).
+
+This program measures the GC-allocated size of various data structures used by the daemon, taking care that every value is a unique allocation with no shared references inside it. To achieve this, values are transported back and forth via bin_prot, simulating the same behavior the daemon has when it reads and deserializes data from the network. These measurements are then used to estimate the expected worst-case memory footprint of larger data structures in the system, such as the mempools and the frontier. Expected sharing of references across these larger data structures is subtracted directly from the estimates.
+
+The program takes no arguments and performs the calculation when run. The `Params` module contains the parameters of the computation; edit them and recompile the program to produce new results.
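[Editor's note: the measurement trick the README describes is small enough to show inline. Below is a minimal, self-contained sketch, assuming only `core` and `ppx_jane` (for `[%bin_type_class: ...]`); `bin_copy` and `heap_bytes` paraphrase the `Values.bin_copy` and `Sizes.count` helpers defined in `disk_caching_stats.ml` in the hunk that follows, and the `string list` payload is an illustrative stand-in for the daemon's real data structures.]

```ocaml
open Core

(* Round-trip a value through bin_prot so the copy is a fresh, unshared
   allocation -- the same behavior the daemon exhibits when it deserializes
   data arriving from the network. *)
let bin_copy (type a) ~(bin_class : a Bin_prot.Type_class.t) (x : a) : a =
  let size = bin_class.writer.size x in
  let buf = Bigstring.create size in
  assert (bin_class.writer.write buf ~pos:0 x = size) ;
  bin_class.reader.read buf ~pos_ref:(ref 0)

(* Bytes reachable from [x] on the OCaml heap. *)
let heap_bytes x = Obj.(reachable_words @@ repr x) * (Sys.word_size / 8)

let () =
  (* Toy payload, purely for illustration. *)
  let fresh =
    bin_copy
      ~bin_class:[%bin_type_class: string list]
      (List.init 1_000 ~f:Int.to_string)
  in
  printf "unique footprint: %d bytes\n" (heap_bytes fresh)
```

Because the copy comes out of bin_prot rather than sharing structure with its input, `Obj.reachable_words` sees every block exactly as the daemon would after reading the value off the wire.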
diff --git a/src/app/disk_caching_stats/disk_caching_stats.ml b/src/app/disk_caching_stats/disk_caching_stats.ml new file mode 100644 index 00000000000..626623f0b42 --- /dev/null +++ b/src/app/disk_caching_stats/disk_caching_stats.ml @@ -0,0 +1,813 @@
+open Core
+
+(* number of operations to do when performing benchmarks *)
+let bench_count = 10_000
+
+module Const = struct
+  let k = 290
+
+  let ledger_depth = 30
+
+  let scan_state_depth = 7
+
+  let scan_state_delay = 2
+
+  (* 2*k for best tip path (including root history), k for duplicate block producers *)
+  let est_blocks_in_frontier = 3 * k
+
+  (* k for best tip path (excluding root history), k for duplicate block producers *)
+  let est_scan_states = 2 * k
+
+  let max_accounts_modified_per_signed_command = 2
+end
+
+(* things we can change in the protocol *)
+module Params = struct
+  let max_zkapp_txn_account_updates = 6
+
+  let max_zkapp_commands_per_block = 8
+
+  let max_signed_commands_per_block = 128 - max_zkapp_commands_per_block
+
+  let max_zkapp_events = 100
+
+  let max_zkapp_actions = 100
+
+  let max_txn_pool_size = 3000
+
+  let max_accounts_modified_per_zkapp_command =
+    1 + max_zkapp_txn_account_updates
+
+  let max_accounts_modified_per_block =
+    (max_accounts_modified_per_zkapp_command * max_zkapp_commands_per_block)
+    + Const.max_accounts_modified_per_signed_command
+      * max_signed_commands_per_block
+end
+
+(* dummy values used for RAM usage benchmarking *)
+module Values = struct
+  let bin_copy (type a) ~(bin_class : a Bin_prot.Type_class.t) (x : a) =
+    let size = bin_class.writer.size x in
+    let buf = Bigstring.create size in
+    assert (bin_class.writer.write buf ~pos:0 x = size) ;
+    bin_class.reader.read buf ~pos_ref:(ref 0)
+
+  let field () : Snark_params.Tick.Field.t =
+    bin_copy ~bin_class:Snark_params.Tick.Field.bin_t
+      Snark_params.Tick.Field.zero
+
+  let amount () : Currency.Amount.t =
+    bin_copy ~bin_class:Currency.Amount.Stable.Latest.bin_t Currency.Amount.zero
+
+  let balance () : Currency.Balance.t =
+    bin_copy ~bin_class:Currency.Balance.Stable.Latest.bin_t
+      Currency.Balance.zero
+
+  let fee () : Currency.Fee.t =
+    bin_copy ~bin_class:Currency.Fee.Stable.Latest.bin_t Currency.Fee.zero
+
+  let length () : Mina_numbers.Length.t =
+    bin_copy ~bin_class:Mina_numbers.Length.Stable.Latest.bin_t
+      Mina_numbers.Length.zero
+
+  let account_nonce () : Mina_numbers.Account_nonce.t =
+    bin_copy ~bin_class:Mina_numbers.Account_nonce.Stable.Latest.bin_t
+      Mina_numbers.Account_nonce.zero
+
+  let global_slot_since_genesis () : Mina_numbers.Global_slot_since_genesis.t =
+    bin_copy
+      ~bin_class:Mina_numbers.Global_slot_since_genesis.Stable.Latest.bin_t
+      Mina_numbers.Global_slot_since_genesis.zero
+
+  let global_slot_span () : Mina_numbers.Global_slot_span.t =
+    bin_copy ~bin_class:Mina_numbers.Global_slot_span.Stable.Latest.bin_t
+      Mina_numbers.Global_slot_span.zero
+
+  let zkapp_version () : Mina_numbers.Zkapp_version.t =
+    bin_copy ~bin_class:Mina_numbers.Zkapp_version.Stable.Latest.bin_t
+      Mina_numbers.Zkapp_version.zero
+
+  let signed_command_memo () : Mina_base.Signed_command_memo.t =
+    bin_copy ~bin_class:Mina_base.Signed_command_memo.Stable.Latest.bin_t
+      Mina_base.Signed_command_memo.empty
+
+  let zkapp_uri () : string =
+    bin_copy ~bin_class:String.bin_t (String.init 255 ~f:(Fn.const 'z'))
+
+  let token_symbol () : Mina_base.Account.Token_symbol.t =
+    bin_copy ~bin_class:Mina_base.Account.Token_symbol.Stable.Latest.bin_t
+      (String.init Mina_base.Account.Token_symbol.max_length ~f:(Fn.const 'z'))
+
+  let 
token_id () : Mina_base.Token_id.t = + bin_copy ~bin_class:Mina_base.Token_id.Stable.Latest.bin_t + Mina_base.Token_id.default + + let timing_info () : Mina_base.Account_update.Update.Timing_info.t = + bin_copy + ~bin_class:Mina_base.Account_update.Update.Timing_info.Stable.Latest.bin_t + Mina_base.Account_update.Update.Timing_info.dummy + + let state_hash () : Mina_base.State_hash.t = + bin_copy ~bin_class:Mina_base.State_hash.Stable.Latest.bin_t + Mina_base.State_hash.dummy + + let permissions () : Mina_base.Permissions.t = + bin_copy ~bin_class:Mina_base.Permissions.Stable.Latest.bin_t + Mina_base.Permissions.user_default + + let precondition_numeric (type a) (f : unit -> a) : + a Mina_base.Zkapp_precondition.Numeric.t = + Check { lower = f (); upper = f () } + + let precondition_hash (type a) (f : unit -> a) : + a Mina_base.Zkapp_precondition.Hash.t = + Check (f ()) + + let preconditions () : Mina_base.Account_update.Preconditions.t = + { network = + { snarked_ledger_hash = precondition_hash field + ; blockchain_length = precondition_numeric length + ; min_window_density = precondition_numeric length + ; total_currency = precondition_numeric amount + ; global_slot_since_genesis = + precondition_numeric global_slot_since_genesis + ; staking_epoch_data = + { ledger = + { hash = precondition_hash field + ; total_currency = precondition_numeric amount + } + ; seed = precondition_hash field + ; start_checkpoint = precondition_hash field + ; lock_checkpoint = precondition_hash field + ; epoch_length = precondition_numeric length + } + ; next_epoch_data = + { ledger = + { hash = precondition_hash field + ; total_currency = precondition_numeric amount + } + ; seed = precondition_hash field + ; start_checkpoint = precondition_hash field + ; lock_checkpoint = precondition_hash field + ; epoch_length = precondition_numeric length + } + } + ; account = Mina_base.Zkapp_precondition.Account.accept + ; valid_while = + Check + { lower = global_slot_since_genesis () + ; upper = global_slot_since_genesis () + } + } + + let keypair () : Signature_lib.Keypair.t = Signature_lib.Keypair.create () + + let private_key () : Signature_lib.Private_key.t = (keypair ()).private_key + + let public_key_uncompressed () : Signature_lib.Public_key.t = + (keypair ()).public_key + + let public_key () : Signature_lib.Public_key.Compressed.t = + Signature_lib.Public_key.compress (keypair ()).public_key + + let verification_key : unit -> Mina_base.Verification_key_wire.t = + let vk = + let `VK vk, `Prover _ = + Transaction_snark.For_tests.create_trivial_snapp + ~constraint_constants:Genesis_constants.Constraint_constants.compiled + () + in + vk + in + fun () -> + bin_copy ~bin_class:Mina_base.Verification_key_wire.Stable.Latest.bin_t vk + + let side_loaded_proof : unit -> Pickles.Side_loaded.Proof.t = + let proof = + let num_updates = 1 in + let _ledger, zkapp_commands = + Snark_profiler_lib.create_ledger_and_zkapps ~min_num_updates:num_updates + ~num_proof_updates:num_updates ~max_num_updates:num_updates () + in + let cmd = List.hd_exn zkapp_commands in + let update = + List.nth_exn (Mina_base.Zkapp_command.all_account_updates_list cmd) 1 + in + match update.authorization with + | Proof proof -> + proof + | _ -> + failwith "woops" + in + fun () -> + bin_copy ~bin_class:Pickles.Side_loaded.Proof.Stable.Latest.bin_t + (Pickles.Side_loaded.Proof.of_proof proof) + + let ledger_proof () : Ledger_proof.t = + bin_copy ~bin_class:Ledger_proof.Stable.Latest.bin_t + (Ledger_proof.For_tests.mk_dummy_proof + 
(Mina_state.Snarked_ledger_state.genesis + ~genesis_ledger_hash: + (Mina_base.Frozen_ledger_hash.of_ledger_hash + Mina_base.Ledger_hash.empty_hash ) ) ) + + let one_priced_proof () : + Ledger_proof.t One_or_two.t Network_pool.Priced_proof.t = + { proof = `One (ledger_proof ()) + ; fee = { prover = public_key (); fee = fee () } + } + + let two_priced_proofs () : + Ledger_proof.t One_or_two.t Network_pool.Priced_proof.t = + { proof = `Two (ledger_proof (), ledger_proof ()) + ; fee = { prover = public_key (); fee = fee () } + } + + let receipt_chain_hash () : Mina_base.Receipt.Chain_hash.t = + bin_copy ~bin_class:Mina_base.Receipt.Chain_hash.Stable.Latest.bin_t + Mina_base.Receipt.Chain_hash.empty + + let account () : Mina_base.Account.t = + { public_key = public_key () + ; token_id = token_id () + ; token_symbol = token_symbol () + ; balance = balance () + ; nonce = account_nonce () + ; receipt_chain_hash = receipt_chain_hash () + ; delegate = Some (public_key ()) + ; voting_for = state_hash () + ; timing = + Mina_base.Account.Timing.Timed + { initial_minimum_balance = balance () + ; cliff_time = global_slot_since_genesis () + ; cliff_amount = amount () + ; vesting_period = global_slot_span () + ; vesting_increment = amount () + } + ; permissions = permissions () + ; zkapp = + Some + { app_state = + Pickles_types.Vector.init Mina_base.Zkapp_state.Max_state_size.n + ~f:(fun _ -> field ()) + ; verification_key = Some (verification_key ()) + ; zkapp_uri = zkapp_uri () + ; zkapp_version = zkapp_version () + ; action_state = + Pickles_types.Vector.init Pickles_types.Nat.N5.n ~f:(fun _ -> + field () ) + ; last_action_slot = global_slot_since_genesis () + ; proved_state = false + } + } + + let ledger_mask ?(n = Params.max_accounts_modified_per_block) () : + Mina_ledger.Ledger.t = + let ledger = + Mina_ledger.Ledger.create_ephemeral ~depth:Const.ledger_depth () + in + List.init n ~f:Fn.id + |> List.iter ~f:(fun i -> + Mina_ledger.Ledger.set_at_index_exn ledger i (account ()) ) ; + ledger + + let ledger_witness n : Mina_ledger.Sparse_ledger.t = + let ledger_mask = ledger_mask ~n () in + let ids = ref [] in + Mina_ledger.Ledger.iteri ledger_mask ~f:(fun _ acc -> + ids := Mina_base.Account.identifier acc :: !ids ) ; + Mina_ledger.Sparse_ledger.of_ledger_subset_exn ledger_mask !ids + + let zkapp_command_witness () : Mina_ledger.Sparse_ledger.t = + ledger_witness Params.max_accounts_modified_per_zkapp_command + + let signed_command_witness () : Mina_ledger.Sparse_ledger.t = + ledger_witness Const.max_accounts_modified_per_signed_command + + let signed_command' () : Mina_base.Signed_command.t = + { payload = + { common = + { fee = fee () + ; fee_payer_pk = public_key () + ; nonce = account_nonce () + ; valid_until = global_slot_since_genesis () + ; memo = signed_command_memo () + } + ; body = Payment { receiver_pk = public_key (); amount = amount () } + } + ; signer = public_key_uncompressed () + ; signature = (field (), private_key ()) + } + + let signed_command () : Mina_base.User_command.t = + Mina_base.User_command.Signed_command (signed_command' ()) + + let zkapp_account_update () : Mina_base.Account_update.t = + { body = + { public_key = public_key () + ; token_id = token_id () + ; update = + { app_state = + Pickles_types.Vector.init Mina_base.Zkapp_state.Max_state_size.n + ~f:(fun _ -> Mina_base.Zkapp_basic.Set_or_keep.Set (field ())) + ; delegate = Set (public_key ()) + ; verification_key = Set (verification_key ()) + ; permissions = Set (permissions ()) + ; zkapp_uri = Set (zkapp_uri ()) + ; 
token_symbol = Set (token_symbol ())
+          ; timing = Set (timing_info ())
+          ; voting_for = Set (state_hash ())
+          }
+      ; balance_change =
+          (* TODO: ensure uniqueness *) Currency.Amount.Signed.zero
+      ; increment_nonce = false
+      ; events = [ Array.init Params.max_zkapp_events ~f:(fun _ -> field ()) ]
+      ; actions =
+          [ Array.init Params.max_zkapp_actions ~f:(fun _ -> field ()) ]
+      ; call_data = field ()
+      ; preconditions = preconditions ()
+      ; use_full_commitment = false
+      ; implicit_account_creation_fee = false
+      ; may_use_token = No
+      ; authorization_kind = Proof (field ())
+      }
+    ; authorization = Proof (side_loaded_proof ())
+    }
+
+  let zkapp_command' () : Mina_base.Zkapp_command.t =
+    { fee_payer =
+        { body =
+            { public_key = public_key ()
+            ; fee = fee ()
+            ; valid_until = Some (global_slot_since_genesis ())
+            ; nonce = account_nonce ()
+            }
+        ; authorization = (field (), private_key ())
+        }
+    ; account_updates =
+        List.init Params.max_zkapp_txn_account_updates ~f:(Fn.const ())
+        |> List.fold_left ~init:[] ~f:(fun acc () ->
+               Mina_base.Zkapp_command.Call_forest.cons
+                 (zkapp_account_update ()) acc )
+    ; memo = signed_command_memo ()
+    }
+
+  let zkapp_command () : Mina_base.User_command.t =
+    Mina_base.User_command.Zkapp_command (zkapp_command' ())
+
+  let pending_coinbase_stack () : Mina_base.Pending_coinbase.Stack.t =
+    bin_copy
+      ~bin_class:Mina_base.Pending_coinbase.Stack_versioned.Stable.Latest.bin_t
+      Mina_base.Pending_coinbase.Stack.empty
+
+  let local_state () : Mina_state.Local_state.t =
+    bin_copy ~bin_class:Mina_state.Local_state.Stable.Latest.bin_t
+      (Mina_state.Local_state.dummy ())
+
+  let fee_excess () : Mina_base.Fee_excess.t =
+    bin_copy ~bin_class:Mina_base.Fee_excess.Stable.Latest.bin_t
+      Mina_base.Fee_excess.empty
+
+  let base_work varying witness :
+      Transaction_snark_scan_state.Transaction_with_witness.t =
+    { transaction_with_info = { previous_hash = field (); varying = varying () }
+    ; state_hash = (state_hash (), field ())
+    ; statement =
+        (*Transaction_snark.Statement.Stable.V2.t*)
+        { source =
+            { first_pass_ledger = field ()
+            ; second_pass_ledger = field ()
+            ; pending_coinbase_stack = pending_coinbase_stack ()
+            ; local_state = local_state ()
+            }
+        ; target =
+            { first_pass_ledger = field ()
+            ; second_pass_ledger = field ()
+            ; pending_coinbase_stack = pending_coinbase_stack ()
+            ; local_state = local_state ()
+            }
+        ; connecting_ledger_left = field ()
+        ; connecting_ledger_right = field ()
+        ; supply_increase =
+            (* TODO: ensure uniqueness *) Currency.Amount.Signed.zero
+        ; fee_excess = fee_excess ()
+        ; sok_digest = ()
+        }
+    ; init_stack = Base (pending_coinbase_stack ())
+    ; first_pass_ledger_witness = witness ()
+    ; second_pass_ledger_witness = witness ()
+    ; block_global_slot = global_slot_since_genesis ()
+    }
+
+  let zkapp_command_base_work () :
+      Transaction_snark_scan_state.Transaction_with_witness.t =
+    base_work
+      (fun () ->
+        Command
+          (Zkapp_command
+             { accounts =
+                 List.init Params.max_accounts_modified_per_zkapp_command
+                   ~f:(fun _ ->
+                     let a = account () in
+                     (Mina_base.Account.identifier a, Some a) )
+             ; command =
+                 { status = Applied; data = zkapp_command' () }
+                 (* the worst case is that no new accounts are created and they are all cached, so we leave this empty *)
+             ; new_accounts = []
+             } ) )
+      zkapp_command_witness
+
+  let signed_command_base_work () :
+      Transaction_snark_scan_state.Transaction_with_witness.t =
+    base_work
+      (fun () ->
+        Command
+          (Signed_command
+             { common =
+                 { user_command =
+                     { status = Applied; data = signed_command' () 
} + } + ; body = + Payment + { new_accounts = + [ Mina_base.Account.identifier (account ()) ] + } + } ) ) + signed_command_witness + + let sok_message () : Mina_base.Sok_message.t = + Mina_base.Sok_message.create ~fee:(fee ()) ~prover:(public_key ()) + + let merge_work () : + Transaction_snark_scan_state.Ledger_proof_with_sok_message.t = + (ledger_proof (), sok_message ()) +end + +module Sizes = struct + let count (type a) (x : a) = + Obj.(reachable_words @@ repr x) * (Sys.word_size / 8) + + let verification_key = count @@ Values.verification_key () + + let side_loaded_proof = count @@ Values.side_loaded_proof () + + let ledger_proof = count @@ Values.ledger_proof () + + let one_priced_proof = count @@ Values.one_priced_proof () + + let two_priced_proof = count @@ Values.two_priced_proofs () + + let signed_command = count @@ Values.signed_command () + + let zkapp_command = count @@ Values.zkapp_command () + + let ledger_mask = count @@ Values.ledger_mask () + + let zkapp_command_base_work = count @@ Values.zkapp_command_base_work () + + let signed_command_base_work = count @@ Values.signed_command_base_work () + + let merge_work = count @@ Values.merge_work () + + type size_params = + { side_loaded_proof : int + ; ledger_proof : int + ; one_priced_proof : int + ; two_priced_proof : int + ; signed_command : int + ; zkapp_command : int + ; ledger_mask : int + ; zkapp_command_base_work : int + ; signed_command_base_work : int + ; merge_work : int + } + [@@deriving sexp] + + let pre_fix = + { side_loaded_proof + ; ledger_proof + ; one_priced_proof + ; two_priced_proof + ; signed_command + ; zkapp_command + ; ledger_mask + ; zkapp_command_base_work + ; signed_command_base_work + ; merge_work + } + + let post_fix = + let cache_ref_size = Sys.word_size / 8 in + (* ledger witness (x2) + toplevel accounts list on applied command *) + let num_accounts_in_zkapp_command_base_work = + Params.max_accounts_modified_per_zkapp_command * 3 + in + (* ledger witness (x2) + 1 new account *) + let num_accounts_in_signed_command_base_work = + (Const.max_accounts_modified_per_signed_command * 2) + 1 + in + { pre_fix with + side_loaded_proof = cache_ref_size + ; ledger_proof = + cache_ref_size (* replace ledger proofs with content id references *) + ; one_priced_proof = one_priced_proof - ledger_proof + cache_ref_size + ; two_priced_proof = + two_priced_proof - (ledger_proof * 2) + (cache_ref_size * 2) + (* replace zkapps proofs and verification keys in commands *) + ; zkapp_command = + zkapp_command + - (side_loaded_proof + verification_key - (cache_ref_size * 2)) + * Params.max_zkapp_txn_account_updates + (* replace verification keys in ledger masks *) + ; ledger_mask = + ledger_mask + - (verification_key - cache_ref_size) + * Params.max_accounts_modified_per_block + (* replace side loaded proofs and verification keys from commands embedded in base work, and verification keys in accounts loaded *) + ; zkapp_command_base_work = + zkapp_command_base_work + - (side_loaded_proof + verification_key - (cache_ref_size * 2)) + * Params.max_zkapp_txn_account_updates + - (verification_key - cache_ref_size) + * num_accounts_in_zkapp_command_base_work + (* replace verification keys loaded in accounts of signed command base work *) + ; signed_command_base_work = + signed_command_base_work + - (verification_key - cache_ref_size) + * num_accounts_in_signed_command_base_work + (* replace ledger proofs in merge work *) + ; merge_work = merge_work - ledger_proof + cache_ref_size + } +end + +module Timer : sig + type t + + val 
init : unit -> t + + val time : t -> (unit -> 'a) -> 'a + + val total : t -> Time.Span.t + + val average : t -> Time.Span.t +end = struct + type t = { mutable total : Time.Span.t; mutable samples : int } + [@@deriving fields] + + let init () = { total = Time.Span.zero; samples = 0 } + + let time t f = + let start = Time.now () in + let x = f () in + let elapsed = Time.(abs_diff (now ()) start) in + t.total <- Time.Span.(t.total + elapsed) ; + t.samples <- t.samples + 1 ; + x + + let average t = + Time.Span.of_ns (Time.Span.to_ns t.total /. Int.to_float t.samples) +end + +type serial_bench_measurements = + { write : Time.Span.t; read : Time.Span.t; hash : Time.Span.t } + +let print_header name = + Printf.printf + "==========================================================================================\n\ + %s\n\ + ==========================================================================================\n" + name + +let print_timer name timer = + Printf.printf + !"%s: %{Time.Span} (total: %{Time.Span})\n" + name (Timer.average timer) (Timer.total timer) + +let serial_bench (type a) ~(name : string) + ~(bin_class : a Bin_prot.Type_class.t) ~(gen : a Quickcheck.Generator.t) + ~(equal : a -> a -> bool) ?(size = 0) () = + print_header (Printf.sprintf "SERIALIZATION BENCHMARKS %s" name) ; + let write_timer = Timer.init () in + let read_timer = Timer.init () in + let hash_timer = Timer.init () in + for i = 1 to bench_count do + let random = Splittable_random.State.of_int i in + let sample = Quickcheck.Generator.generate ~size ~random gen in + let size = bin_class.writer.size sample in + let buf = Bigstring.create size in + let final_pos = + Timer.time write_timer (fun () -> + bin_class.writer.write buf ~pos:0 sample ) + in + assert (final_pos = size) ; + Timer.time hash_timer (fun () -> + ignore (Digestif.SHA256.digest_bigstring buf : Digestif.SHA256.t) ) ; + let result = + Timer.time read_timer (fun () -> + bin_class.reader.read buf ~pos_ref:(ref 0) ) + in + assert (equal sample result) + done ; + print_timer "write" write_timer ; + print_timer "read" read_timer ; + print_timer "hash" hash_timer ; + { write = Timer.average write_timer + ; read = Timer.average read_timer + ; hash = Timer.average hash_timer + } + +let compute_ram_usage (sizes : Sizes.size_params) = + let format_gb size = Int.to_float size /. (1024.0 **. 3.0) in + (* + let format_kb size = (Int.to_float size /. 1024.0) in + Printf.printf "verification key = %fKB, side_loaded_proof = %fKB, account update = %fKB\n, command = %fKB, %d\n" + (format_kb Sizes.verification_key) + (format_kb Sizes.side_loaded_proof) + (format_kb Sizes.zkapp_account_update) + (format_kb Sizes.zkapp_command) + Params.max_zkapp_txn_account_updates ; + *) + (* this baseline measurement was taken from a fresh daemon, and serves to show the general overhead a daemon has before bootstrapping *) + let baseline = + let prover = Int.of_float (1.04 *. 1024.0 *. 1024.0 *. 
1024.0) in
+    let verifier = 977 * 1024 * 1024 in
+    let vrf_evaluator = 127 * 1024 * 1024 in
+    let daemon = 966 * 1024 * 1024 in
+    (* the libp2p baseline was taken from a seed running on a real network *)
+    let libp2p_helper = 3312128 in
+    prover + verifier + vrf_evaluator + daemon + libp2p_helper
+  in
+  (* TODO: actually measure the entire scan state instead of estimating *)
+  let scan_states =
+    (* for the deltas, the zkapp commands and ledger proofs are shared references to the staged ledger diff we deserialize from the network *)
+    (* we assume accounts loaded are not shared since they can all be loaded from the on-disk ledger separately *)
+    let deltas =
+      let base =
+        Params.max_zkapp_commands_per_block
+        * (sizes.zkapp_command_base_work - sizes.zkapp_command)
+        + (Params.max_signed_commands_per_block * sizes.signed_command_base_work)
+      in
+      let merge =
+        ( Params.max_zkapp_commands_per_block
+        + Params.max_signed_commands_per_block - 1 )
+        * (sizes.merge_work - sizes.ledger_proof)
+      in
+      (* the deltas apply for all but the root scan state *)
+      (Const.est_scan_states - 1) * (base + merge)
+    in
+    (* for the root, we cannot subtract out shared references, since the data in the root can be from bootstrap *)
+    (* after k blocks, some references can be shared from root history, but not necessarily all *)
+    let root =
+      let base =
+        (Params.max_zkapp_commands_per_block * sizes.zkapp_command_base_work)
+        + (Params.max_signed_commands_per_block * sizes.signed_command_base_work)
+      in
+      let merge i = Int.pow 2 (Const.scan_state_depth - i) * sizes.merge_work in
+      List.init (Const.scan_state_depth + 1) ~f:Fn.id
+      |> List.sum
+           (module Int)
+           ~f:(fun i -> (Const.scan_state_delay + 1) * (base + merge i))
+    in
+    root + deltas
+  in
+  let ledger_masks = Const.k * sizes.ledger_mask in
+  let staged_ledger_diffs =
+    (* TODO: coinbases, fee transfers *)
+    let zkapp_commands_size_per_block =
+      Params.max_zkapp_commands_per_block * sizes.zkapp_command
+    in
+    let signed_commands_size_per_block =
+      Params.max_signed_commands_per_block * sizes.signed_command
+    in
+    Const.est_blocks_in_frontier
+    * (zkapp_commands_size_per_block + signed_commands_size_per_block)
+  in
+  let snark_pool =
+    Printf.printf "snark pool references = %d\n"
+      ( (128 * (Const.scan_state_delay + 1))
+      + (128 * (Const.est_scan_states - 1)) ) ;
+    (* NB: the scan state is split up into (depth+1)+(delay+1) trees, but with different layers
+       being built across each tree, they squash down into (delay+1) full trees of work referenced *)
+    (* the size of works referenced per squashed tree; 127 bundles of 2 proofs, 1 bundle of 1
+       proof for the root (under the assumption every block is full) *)
+    let referenced_size_per_squashed_tree =
+      (127 * sizes.two_priced_proof) + sizes.one_priced_proof
+    in
+    (* the size of work referenced by the root of the frontier *)
+    let root_referenced_size =
+      (Const.scan_state_delay + 1) * referenced_size_per_squashed_tree
+    in
+    (* the size of delta references added by each full block in the frontier after the root *)
+    let delta_referenced_size = referenced_size_per_squashed_tree in
+    root_referenced_size + ((Const.est_scan_states - 1) * delta_referenced_size)
+  in
+  (* TODO: measure the actual network pool memory footprint instead of estimating *)
+  let transaction_pool = Params.max_txn_pool_size * sizes.zkapp_command in
+  let usage_categories =
+    [ ("baseline", baseline)
+    ; ("scan_states", scan_states)
+    ; ("ledger_masks", ledger_masks)
+    ; ("staged_ledger_diffs", staged_ledger_diffs)
+    ; ("snark_pool", 
snark_pool) + ; ("transaction_pool", transaction_pool) + ] + in + List.iter usage_categories ~f:(fun (name, size) -> + Printf.printf "%s = %fGB\n" name (format_gb size) ) ; + let total_size = + List.sum (module Int) usage_categories ~f:(fun (_, size) -> size) + in + Printf.printf "TOTAL: %fGB\n" (format_gb total_size) + +let () = + print_header "PRE FIX SIZES" ; + Printf.printf !"%{sexp: Sizes.size_params}\n" Sizes.pre_fix ; + compute_ram_usage Sizes.pre_fix ; + Printf.printf "\n" ; + print_header "POST FIX SIZES" ; + Printf.printf !"%{sexp: Sizes.size_params}\n" Sizes.post_fix ; + compute_ram_usage Sizes.post_fix ; + Printf.printf "\n" ; + let side_loaded_proof_serial_times = + serial_bench ~name:"Pickles.Side_loaded.Proof.t" + ~bin_class:Pickles.Side_loaded.Proof.Stable.Latest.bin_t + ~gen:(Quickcheck.Generator.return (Values.side_loaded_proof ())) + ~equal:Pickles.Side_loaded.Proof.equal () + in + Printf.printf "\n" ; + let verification_key_serial_times = + serial_bench ~name:"Mina_base.Verification_key_wire.t" + ~bin_class:Mina_base.Verification_key_wire.Stable.Latest.bin_t + ~gen:(Quickcheck.Generator.return (Values.verification_key ())) + ~equal:Mina_base.Verification_key_wire.equal () + in + Printf.printf "\n" ; + let ledger_proof_serial_times = + serial_bench ~name:"Ledger_proof.t" + ~bin_class:Ledger_proof.Stable.Latest.bin_t + ~gen:(Quickcheck.Generator.return (Values.ledger_proof ())) + ~equal:Ledger_proof.equal () + in + Printf.printf "\n" ; + print_header "SERIALIZATION OVERHEAD ESTIMATES" ; + Printf.printf + !"zkapp command ingest = %{Time.Span}\n" + (Time.Span.of_ns + ( Int.to_float Params.max_zkapp_txn_account_updates + *. Time.Span.to_ns + Time.Span.( + side_loaded_proof_serial_times.write + + side_loaded_proof_serial_times.hash + + verification_key_serial_times.write + + verification_key_serial_times.hash) ) ) ; + Printf.printf + !"snark work ingest = %{Time.Span}\n" + (Time.Span.of_ns + ( 2.0 + *. Time.Span.to_ns + Time.Span.( + ledger_proof_serial_times.write + ledger_proof_serial_times.hash) + ) ) ; + Printf.printf + !"block ingest = %{Time.Span}\n" + (let zkapps = + Time.Span.of_ns + ( Int.to_float + ( Params.max_zkapp_commands_per_block + * Params.max_zkapp_txn_account_updates ) + *. Time.Span.to_ns + Time.Span.( + side_loaded_proof_serial_times.write + + side_loaded_proof_serial_times.hash + + verification_key_serial_times.write + + verification_key_serial_times.hash) ) + in + let snark_works = + Time.Span.of_ns + ( 2.0 + *. Int.to_float Params.max_zkapp_commands_per_block + *. Time.Span.to_ns + Time.Span.( + ledger_proof_serial_times.write + ledger_proof_serial_times.hash) + ) + in + Time.Span.(zkapps + snark_works) ) ; + Printf.printf + !"block production = %{Time.Span}\n" + (let zkapps = + Time.Span.of_ns + ( Int.to_float + ( Params.max_zkapp_commands_per_block + * Params.max_zkapp_txn_account_updates ) + *. Time.Span.to_ns + Time.Span.( + side_loaded_proof_serial_times.read + + verification_key_serial_times.read) ) + in + let snark_works = + Time.Span.of_ns + ( 2.0 + *. Int.to_float Params.max_zkapp_commands_per_block + *. 
Time.Span.to_ns ledger_proof_serial_times.read ) )
+   in
+   Time.Span.(zkapps + snark_works) )
diff --git a/src/app/disk_caching_stats/dune b/src/app/disk_caching_stats/dune new file mode 100644 index 00000000000..9f469c1c780 --- /dev/null +++ b/src/app/disk_caching_stats/dune @@ -0,0 +1,11 @@
+(executable
+ (name disk_caching_stats)
+ (libraries pickles pickles_types pickles.backend snark_params crypto_params network_pool mina_base
+   signature_lib one_or_two currency ledger_proof mina_state mina_base_import mina_wire_types
+   mina_numbers random_oracle random_oracle_input kimchi_pasta_basic data_hash_lib with_hash
+   kimchi_pasta mina_ledger transaction_snark_scan_state mina_transaction_logic transaction_snark
+   sgn snark_profiler_lib genesis_constants digestif bigarray-compat
+   ; --
+   core core_kernel base base.caml bin_prot splittable_random sexplib0)
+ (instrumentation (backend bisect_ppx))
+ (preprocess (pps ppx_jane ppx_version)))
diff --git a/src/app/display_public_key/display_public_key.ml b/src/app/display_public_key/display_public_key.ml deleted file mode 100644 index 6a61d95736b..00000000000 --- a/src/app/display_public_key/display_public_key.ml +++ /dev/null @@ -1,21 +0,0 @@
-open Async
-open Signature_lib
-
-let main privkey_path =
-  let%map { public_key; _ } =
-    Secrets.Keypair.Terminal_stdin.read_exn ~which:"Mina keypair" privkey_path
-  in
-  printf "%s\n%!"
-    (Public_key.Compressed.to_base58_check (Public_key.compress public_key))
-
-let cmd =
-  Command.async
-    ~summary:
-      "Display the latest-verison public key corresponding to the given \ private key."
-    Command.(
-      let open Let_syntax in
-      let%map path = Cli_lib.Flag.privkey_read_path in
-      fun () -> main path)
-
-let () = Command.run cmd
diff --git a/src/app/display_public_key/dune b/src/app/display_public_key/dune deleted file mode 100644 index 1d94105d63d..00000000000 --- a/src/app/display_public_key/dune +++ /dev/null @@ -1,24 +0,0 @@
-(executable
- (package display_public_key)
- (name display_public_key)
- (public_name display_public_key)
- (modes native)
- (libraries
-   ;; opam libraries
-   base
-   async.async_command
-   async_kernel
-   async_unix
-   async
-   core_kernel
-   core
-   base.caml
-   ;; local libraries
-   bounded_types
-   cli_lib
-   signature_lib
-   secrets
- )
- (instrumentation (backend bisect_ppx))
- (preprocess (pps ppx_version ppx_let ppx_sexp_conv ppx_optcomp))
- (flags -short-paths -w @a-4-29-40-41-42-44-45-48-58-59-60))
diff --git a/src/app/extract_blocks/README.md b/src/app/extract_blocks/README.md new file mode 100644 index 00000000000..bbaecdbb29b --- /dev/null +++ b/src/app/extract_blocks/README.md @@ -0,0 +1,12 @@
+extract_blocks
+==============
+
+The `extract_blocks` app pulls out individual blocks from an archive
+database in "extensional" format. Such blocks can be added to other
+archive databases using the `archive_blocks` app.
+
+Blocks are extracted into files with name <state-hash>.json.
+
+The app offers the choice to extract all canonical blocks, or a
+subchain specified with starting state hash, or a subchain specified
+with starting and ending state hashes.
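[Editor's note: the subchain case the README describes amounts to a walk along parent pointers. A minimal illustrative sketch follows; the `parent_of` map and `subchain` function are invented for this sketch, and the real app resolves parent relationships with SQL queries against the archive database.]

```ocaml
open Core

(* Hypothetical in-memory stand-in for the archive db: state hash -> parent
   state hash. *)
let subchain ~(parent_of : string String.Map.t) ~start_hash ~end_hash =
  (* Walk parent pointers from [end_hash] back to [start_hash], returning the
     subchain oldest-first, or [None] if [start_hash] is not an ancestor of
     [end_hash]. *)
  let rec go acc hash =
    if String.equal hash start_hash then Some (hash :: acc)
    else
      match Map.find parent_of hash with
      | Some parent ->
          go (hash :: acc) parent
      | None ->
          None
  in
  go [] end_hash
```

Walking backward from the ending hash means a unique path is guaranteed (each block has one parent), whereas walking forward from the starting hash would have to branch over every child.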
diff --git a/src/app/extract_blocks/extract_blocks.ml b/src/app/extract_blocks/extract_blocks.ml index 3605538ae44..5e3574139a3 100644 --- a/src/app/extract_blocks/extract_blocks.ml +++ b/src/app/extract_blocks/extract_blocks.ml @@ -1,4 +1,5 @@
 (* extract_blocks.ml -- dump extensional blocks from archive db *)
+
 [@@@coverage exclude_file]
 
 open Core_kernel
diff --git a/src/app/heap_usage/values.ml b/src/app/heap_usage/values.ml index 7fa5590ad3c..ca65b9545d9 100644 --- a/src/app/heap_usage/values.ml +++ b/src/app/heap_usage/values.ml @@ -242,7 +242,7 @@ let scan_state_merge_node :
   let sok_msg : Mina_base.Sok_message.t =
     { fee = Currency.Fee.zero; prover = sample_pk_compressed }
   in
-  let proof = Mina_base.Proof.transaction_dummy in
+  let proof = Lazy.force Mina_base.Proof.transaction_dummy in
   let statement =
     let without_sok =
       Quickcheck.random_value ~seed:(`Deterministic "no sok left")
@@ -258,7 +258,7 @@ let scan_state_merge_node :
     { fee = Currency.Fee.zero; prover = sample_pk_compressed }
   in
   (* so the left, right proofs differ, don't want sharing *)
-  let proof = Mina_base.Proof.blockchain_dummy in
+  let proof = Lazy.force Mina_base.Proof.blockchain_dummy in
   let statement =
     let without_sok =
       Quickcheck.random_value ~seed:(`Deterministic "no sok right")
diff --git a/src/app/missing_blocks_auditor/README.md b/src/app/missing_blocks_auditor/README.md new file mode 100644 index 00000000000..e5db4ee64a3 --- /dev/null +++ b/src/app/missing_blocks_auditor/README.md @@ -0,0 +1,10 @@
+missing_blocks_auditor
+======================
+
+The `missing_blocks_auditor` app looks for blocks without parent
+blocks in an archive database.
+
+The app also looks for blocks marked as pending that are lower (have a
+lesser height) than the highest (most recent) canonical block. Such
+blocks can occur when new blocks are added while earlier blocks are
+still missing from the database.
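[Editor's note: a minimal sketch of the two checks the README describes, over a hypothetical in-memory view of the blocks table; the field names and the `block` record are invented for illustration, and the real app performs these checks with SQL against the archive database. A real audit would also exempt the genesis block, which legitimately has no parent.]

```ocaml
open Core

(* Hypothetical in-memory stand-in for a row of the archive's blocks table. *)
type block =
  { state_hash : string
  ; parent_hash : string
  ; height : int
  ; status : [ `Canonical | `Pending | `Orphaned ]
  }

(* Blocks whose parent is absent from the database. *)
let missing_parents blocks =
  let known =
    String.Set.of_list (List.map blocks ~f:(fun b -> b.state_hash))
  in
  List.filter blocks ~f:(fun b -> not (Set.mem known b.parent_hash))

(* Pending blocks at a lesser height than the highest canonical block. *)
let pending_below_highest_canonical blocks =
  match
    List.filter_map blocks ~f:(fun b ->
        match b.status with `Canonical -> Some b.height | _ -> None )
    |> List.max_elt ~compare:Int.compare
  with
  | None ->
      []
  | Some highest ->
      List.filter blocks ~f:(fun b ->
          match b.status with `Pending -> b.height < highest | _ -> false )
```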
diff --git a/src/app/missing_blocks_auditor/missing_blocks_auditor.ml b/src/app/missing_blocks_auditor/missing_blocks_auditor.ml index 5bce995ea28..e6fd538f1b5 100644 --- a/src/app/missing_blocks_auditor/missing_blocks_auditor.ml +++ b/src/app/missing_blocks_auditor/missing_blocks_auditor.ml @@ -1,4 +1,4 @@ -(* missing_blocks_auditor.ml -- report missing blocks from an archive db *) +(* missing_blocks_auditor.ml *) open Core_kernel open Async diff --git a/src/app/print_blockchain_snark_vk/.ocamlformat b/src/app/print_blockchain_snark_vk/.ocamlformat new file mode 120000 index 00000000000..8e0a87983aa --- /dev/null +++ b/src/app/print_blockchain_snark_vk/.ocamlformat @@ -0,0 +1 @@ +../../.ocamlformat \ No newline at end of file diff --git a/src/app/print_blockchain_snark_vk/blockchain_snark_vk.json b/src/app/print_blockchain_snark_vk/blockchain_snark_vk.json new file mode 100644 index 00000000000..c621a566694 --- /dev/null +++ b/src/app/print_blockchain_snark_vk/blockchain_snark_vk.json @@ -0,0 +1 @@ +{"commitments":{"sigma_comm":[["0x16A2CBB00F33D627AAEFB19EAF5424680EF37F77C87558AF424130A927D08BAC","0x089F70D3C7690C8421C293C01F7A67EB2997E5B96AF11408D88884A6AD328E37"],["0x22CFAD2E118F91BE8E133664585947E25A282017E007700D67F0D72999639BCC","0x31AA847B558743C3CAD651B085327F0B67E1C1BFEA2FF472976589ADD952E6D4"],["0x02FB6661EE11262AFC7C8B0389A9B1E4ED18263C3F2468AAF035A07ECC61C9EB","0x30705D77AB82D6BAAC8E23640BBAB40F7E7FB53826CD5011575CAE52C6D0517D"],["0x0FD615E77CF392BE8285848DF60D4F87350ABCD65200FC4932E309133D145D05","0x3E75EA0E3810A97D8F1D1F8A6B0F9AE5D0F68AE34A4D0EFC29F00AB3BF6D480E"],["0x20C89F37CF8E43831F077DFF9AB9A676907BF9A0F64E418A643667808AB72DAB","0x389F98A86437D8A7F667ABB9B0B0DEACBD7E420B583B795363EBCECDBF1C187B"],["0x13548AEE4F67B7B90418512F400B79A69E156A801DE46233C756F9B67A9ABDD1","0x25F561C9531A0C83099AC722322A39DC653BC314D63A971624D1D3315B3DA0D5"],["0x20216D064B7466C42626F66F3F141997E6F29D17CC6EBA8FF91E1E03D31FE555","0x0082380869AFA6A961738C93BCEEA3042F3ECEA143A585B565A2BF6BD78A1224"]],"coefficients_comm":[["0x23C8C8C9BB27867647BE92C92080742C40F8E82D4D95F90D333515B5C8B0D22F","0x2C96AF550D63E6495763C63CFAAABA971144E2E1CB33649A8B2EE7DF5CB2D3B6"],["0x08060FD15238AE40E726A04EA5C1AE41D8A1D267C9AF7355A8D6C70607139922","0x13D36CAD9C78FD0AF514AB7ECAC433592389AE84283024B1D4C85D2E8C952B28"],["0x3AE007735587246066F71A1F09801C359E393CBF49DBDFEDD01E61AA88ECAF0F","0x15882EBC62C0E9BC93A14F2CB2ECE43402063B50C09CE735E56D51447689D2C6"],["0x3DCBFD7F258F598AEFB560A1B3BCE632B4AC20534FEFF3B35C82082FE708B236","0x254668B50AB44E074902E0DB981FDF045FC614C1E61EBDDA27C06DF9131FC71A"],["0x364F1609C5A72CC0A6F5966400AE1A5AA4B7684805B46D0EE09001F7618DD614","0x0F3E45A3DEC2B73BEC5E91ED78BB750B87C02A2339E5A40C2D51FAA0EE1D5B7E"],["0x13AE7A1AAFBCB5E04667043BCE07807A7699F733C2CF8F805D6E383CFB8AFD75","0x1D5FBA42DDFF5FB1CFE7EEC500F5A6D7B3CF9617F6C344EBA9FAD9138285CDB3"],["0x3E32E6702761E653043644E9DC5D75FE7A351B764CD90D72DD1176DB1A541121","0x0454C684E407E066394B31CFCCBAF22844B7E17D162EFE860030960A38AD2B2E"],["0x1AE2CE814A97742F41565FEB5881767A2BCF55981A3C8ED33BAE069CBE652FCA","0x14DC3F97387117CCA4884F28DCC82C9CF1B7101623B481FD3D1588F568B3E26B"],["0x0A10FA40BB6C61E8185412EE2BAE86C51D19EA7276593DFA7FA3FABB0345521F","0x3A8ACF73B5EF4E52ED5DC96404A60C69A067B84FE77211C5407A48437BD5CF89"],["0x21B2C2D62891139A91056B9D298DA8713B7ADA51963B864A5A2376771A1AA766","0x1AC7782A588865397F219624D70D11A109283E82B9CD50FFEE23753461E755FE"],["0x2763E7A5B2C387147A0381B50D5C504A5012B7F7CA474C2B4A351011B9BBD036","0x13DEA6F4AEBDC606331746A886756C7EA2
66A52F60B45DE5544A04BFDB277455"],["0x32596E43A053571EE478A3106CABFE9ECB29437F78A978409B4DDE33FE637103","0x3D76AF5EE3EFF37E666087AC2827A8BD0D9690BF440FF24434DA3E6AFF7A2AF4"],["0x1D73FE7224F38A37B2C69E22FA6750BABAED17B0F9998877B33E488D9063CE8E","0x3E24CEADB1BDA92A0DBDA0F90DF3B8FBD7C6F7ABCC039E9D41AB6916A799F645"],["0x2FDF5D887BC70465AFAC06B7A43632732B5AF0B933EA045D35E99D532BD44CAF","0x211A76FD7B76DF3E693CAA0BBB5C178D5DDE827AB6A902AF04AB39D8040E29DF"],["0x0D29BA887D54D915CFB321573463A3CAF5C354848A51FFD6D4FFC0E0B4464D39","0x232829C5C662E87CD033AFB3E84E86EC342F4942AC9D75123A21172EE06CF567"]],"generic_comm":["0x363662743B4E693E18C0CF5FB651DF1205BB27FABCD93ADF54ECD973B21B921B","0x116FBA051A4A146C88BCB0F2B56309429CD63514EEEFBE6EA0B39927E72BB20C"],"psm_comm":["0x0A8B3EF5670C6367C721EDAA40AF511C18B1602A4732FEA01124D5D949304324","0x1DBE04516C4A33CDFBBD1F54F413B9F21B5D41B6CD668B249879A2688693E51B"],"complete_add_comm":["0x1E859218F11F787CE75C06FD5303457CBD307BDEEB693CC66A235CB85B314D4B","0x228167E190903072E8F34BD7AF61A0C02DE0BC3D54FF8760A2BCBFDD6A880688"],"mul_comm":["0x3EC97D3A8CD405A92B31B67184817925B99B1527065A28677AEAAEC37CC7B9C5","0x3844006206FF29A55DBB44A3D06E46610639E24E960B4BC32A663EEC4D04C689"],"emul_comm":["0x114772020FAF5E6660D7D75B666B7121829027A866A8214B42899E824D820CB9","0x01F7FC015E2F0C5E02E34F0FD6FBA0FCE01E40EA183F0F6F7C197553524A96B9"],"endomul_scalar_comm":["0x04C30A9B6412594ECD5EEFCA20D4B759BBC52B08868E01F74FDC82B557A76ADD","0x019413D8112950CB93D20BA29452DC281FFE1A692706C34BD148E331F844D244"]},"index":{"domain":{"log_size_of_group":14,"group_gen":"0x1E5587687024253BB079B38D9C5371594958E496C605D3BD898B34D068AFBEE7"},"max_poly_size":32768,"public":40,"prev_challenges":2,"srs":null,"evals":{"sigma_comm":[{"unshifted":[["Finite",["0x16A2CBB00F33D627AAEFB19EAF5424680EF37F77C87558AF424130A927D08BAC","0x089F70D3C7690C8421C293C01F7A67EB2997E5B96AF11408D88884A6AD328E37"]]],"shifted":null},{"unshifted":[["Finite",["0x22CFAD2E118F91BE8E133664585947E25A282017E007700D67F0D72999639BCC","0x31AA847B558743C3CAD651B085327F0B67E1C1BFEA2FF472976589ADD952E6D4"]]],"shifted":null},{"unshifted":[["Finite",["0x02FB6661EE11262AFC7C8B0389A9B1E4ED18263C3F2468AAF035A07ECC61C9EB","0x30705D77AB82D6BAAC8E23640BBAB40F7E7FB53826CD5011575CAE52C6D0517D"]]],"shifted":null},{"unshifted":[["Finite",["0x0FD615E77CF392BE8285848DF60D4F87350ABCD65200FC4932E309133D145D05","0x3E75EA0E3810A97D8F1D1F8A6B0F9AE5D0F68AE34A4D0EFC29F00AB3BF6D480E"]]],"shifted":null},{"unshifted":[["Finite",["0x20C89F37CF8E43831F077DFF9AB9A676907BF9A0F64E418A643667808AB72DAB","0x389F98A86437D8A7F667ABB9B0B0DEACBD7E420B583B795363EBCECDBF1C187B"]]],"shifted":null},{"unshifted":[["Finite",["0x13548AEE4F67B7B90418512F400B79A69E156A801DE46233C756F9B67A9ABDD1","0x25F561C9531A0C83099AC722322A39DC653BC314D63A971624D1D3315B3DA0D5"]]],"shifted":null},{"unshifted":[["Finite",["0x20216D064B7466C42626F66F3F141997E6F29D17CC6EBA8FF91E1E03D31FE555","0x0082380869AFA6A961738C93BCEEA3042F3ECEA143A585B565A2BF6BD78A1224"]]],"shifted":null}],"coefficients_comm":[{"unshifted":[["Finite",["0x23C8C8C9BB27867647BE92C92080742C40F8E82D4D95F90D333515B5C8B0D22F","0x2C96AF550D63E6495763C63CFAAABA971144E2E1CB33649A8B2EE7DF5CB2D3B6"]]],"shifted":null},{"unshifted":[["Finite",["0x08060FD15238AE40E726A04EA5C1AE41D8A1D267C9AF7355A8D6C70607139922","0x13D36CAD9C78FD0AF514AB7ECAC433592389AE84283024B1D4C85D2E8C952B28"]]],"shifted":null},{"unshifted":[["Finite",["0x3AE007735587246066F71A1F09801C359E393CBF49DBDFEDD01E61AA88ECAF0F","0x15882EBC62C0E9BC93A14F2CB2ECE43402063B50C09CE735E56D51447689
D2C6"]]],"shifted":null},{"unshifted":[["Finite",["0x3DCBFD7F258F598AEFB560A1B3BCE632B4AC20534FEFF3B35C82082FE708B236","0x254668B50AB44E074902E0DB981FDF045FC614C1E61EBDDA27C06DF9131FC71A"]]],"shifted":null},{"unshifted":[["Finite",["0x364F1609C5A72CC0A6F5966400AE1A5AA4B7684805B46D0EE09001F7618DD614","0x0F3E45A3DEC2B73BEC5E91ED78BB750B87C02A2339E5A40C2D51FAA0EE1D5B7E"]]],"shifted":null},{"unshifted":[["Finite",["0x13AE7A1AAFBCB5E04667043BCE07807A7699F733C2CF8F805D6E383CFB8AFD75","0x1D5FBA42DDFF5FB1CFE7EEC500F5A6D7B3CF9617F6C344EBA9FAD9138285CDB3"]]],"shifted":null},{"unshifted":[["Finite",["0x3E32E6702761E653043644E9DC5D75FE7A351B764CD90D72DD1176DB1A541121","0x0454C684E407E066394B31CFCCBAF22844B7E17D162EFE860030960A38AD2B2E"]]],"shifted":null},{"unshifted":[["Finite",["0x1AE2CE814A97742F41565FEB5881767A2BCF55981A3C8ED33BAE069CBE652FCA","0x14DC3F97387117CCA4884F28DCC82C9CF1B7101623B481FD3D1588F568B3E26B"]]],"shifted":null},{"unshifted":[["Finite",["0x0A10FA40BB6C61E8185412EE2BAE86C51D19EA7276593DFA7FA3FABB0345521F","0x3A8ACF73B5EF4E52ED5DC96404A60C69A067B84FE77211C5407A48437BD5CF89"]]],"shifted":null},{"unshifted":[["Finite",["0x21B2C2D62891139A91056B9D298DA8713B7ADA51963B864A5A2376771A1AA766","0x1AC7782A588865397F219624D70D11A109283E82B9CD50FFEE23753461E755FE"]]],"shifted":null},{"unshifted":[["Finite",["0x2763E7A5B2C387147A0381B50D5C504A5012B7F7CA474C2B4A351011B9BBD036","0x13DEA6F4AEBDC606331746A886756C7EA266A52F60B45DE5544A04BFDB277455"]]],"shifted":null},{"unshifted":[["Finite",["0x32596E43A053571EE478A3106CABFE9ECB29437F78A978409B4DDE33FE637103","0x3D76AF5EE3EFF37E666087AC2827A8BD0D9690BF440FF24434DA3E6AFF7A2AF4"]]],"shifted":null},{"unshifted":[["Finite",["0x1D73FE7224F38A37B2C69E22FA6750BABAED17B0F9998877B33E488D9063CE8E","0x3E24CEADB1BDA92A0DBDA0F90DF3B8FBD7C6F7ABCC039E9D41AB6916A799F645"]]],"shifted":null},{"unshifted":[["Finite",["0x2FDF5D887BC70465AFAC06B7A43632732B5AF0B933EA045D35E99D532BD44CAF","0x211A76FD7B76DF3E693CAA0BBB5C178D5DDE827AB6A902AF04AB39D8040E29DF"]]],"shifted":null},{"unshifted":[["Finite",["0x0D29BA887D54D915CFB321573463A3CAF5C354848A51FFD6D4FFC0E0B4464D39","0x232829C5C662E87CD033AFB3E84E86EC342F4942AC9D75123A21172EE06CF567"]]],"shifted":null}],"generic_comm":{"unshifted":[["Finite",["0x363662743B4E693E18C0CF5FB651DF1205BB27FABCD93ADF54ECD973B21B921B","0x116FBA051A4A146C88BCB0F2B56309429CD63514EEEFBE6EA0B39927E72BB20C"]]],"shifted":null},"psm_comm":{"unshifted":[["Finite",["0x0A8B3EF5670C6367C721EDAA40AF511C18B1602A4732FEA01124D5D949304324","0x1DBE04516C4A33CDFBBD1F54F413B9F21B5D41B6CD668B249879A2688693E51B"]]],"shifted":null},"complete_add_comm":{"unshifted":[["Finite",["0x1E859218F11F787CE75C06FD5303457CBD307BDEEB693CC66A235CB85B314D4B","0x228167E190903072E8F34BD7AF61A0C02DE0BC3D54FF8760A2BCBFDD6A880688"]]],"shifted":null},"mul_comm":{"unshifted":[["Finite",["0x3EC97D3A8CD405A92B31B67184817925B99B1527065A28677AEAAEC37CC7B9C5","0x3844006206FF29A55DBB44A3D06E46610639E24E960B4BC32A663EEC4D04C689"]]],"shifted":null},"emul_comm":{"unshifted":[["Finite",["0x114772020FAF5E6660D7D75B666B7121829027A866A8214B42899E824D820CB9","0x01F7FC015E2F0C5E02E34F0FD6FBA0FCE01E40EA183F0F6F7C197553524A96B9"]]],"shifted":null},"endomul_scalar_comm":{"unshifted":[["Finite",["0x04C30A9B6412594ECD5EEFCA20D4B759BBC52B08868E01F74FDC82B557A76ADD","0x019413D8112950CB93D20BA29452DC281FFE1A692706C34BD148E331F844D244"]]],"shifted":null},"xor_comm":null,"range_check0_comm":null,"range_check1_comm":null,"foreign_field_add_comm":null,"foreign_field_mul_comm":null,"rot_comm":null},"shifts":["0x000000
0000000000000000000000000000000000000000000000000000000001","0x00B9CDC8FD0BD4B27E2A74AF7AEBD5734D52D75BDF85EBF1CAD03413E914A2E3","0x007CF68160D84012626E0046A932AD12E68B3394D6E2A001A537FFB40D3527C6","0x0077D45AECB939AE97A3952B48189964AA209609F19BE4A4B89F339A33440F6D","0x0077C7E54505D4771F6AF1FED2195500481EF1F3C0397B0AC819E678BD2309B4","0x00B3AF68ECC6AE7A4727F0708EDF4736BE1C99281FA380846E42264C62407484","0x00381CA4536FC0ED935D50A74A87136F1A0675B618898DBCE67E564AB20174A1"],"lookup_index":null,"zk_rows":3},"data":{"constraints":16384}} \ No newline at end of file diff --git a/src/app/print_blockchain_snark_vk/dune b/src/app/print_blockchain_snark_vk/dune new file mode 100644 index 00000000000..3b1137fd543 --- /dev/null +++ b/src/app/print_blockchain_snark_vk/dune @@ -0,0 +1,18 @@ +(executable + (name print_blockchain_snark_vk) + (libraries + blockchain_snark) + (instrumentation (backend bisect_ppx)) + (preprocess (pps ppx_version))) + +(rule + (deps print_blockchain_snark_vk.exe) + (targets blockchain_snark_vk.json.corrected) + (action + (with-stdout-to %{targets} + (run %{deps})))) + +(rule + (alias runtest) + (action + (diff blockchain_snark_vk.json blockchain_snark_vk.json.corrected))) diff --git a/src/app/print_blockchain_snark_vk/dune-project b/src/app/print_blockchain_snark_vk/dune-project new file mode 100644 index 00000000000..7b17fb2d308 --- /dev/null +++ b/src/app/print_blockchain_snark_vk/dune-project @@ -0,0 +1 @@ +(lang dune 3.3) diff --git a/src/app/print_blockchain_snark_vk/print_blockchain_snark_vk.ml b/src/app/print_blockchain_snark_vk/print_blockchain_snark_vk.ml new file mode 100644 index 00000000000..c111eacabd3 --- /dev/null +++ b/src/app/print_blockchain_snark_vk/print_blockchain_snark_vk.ml @@ -0,0 +1,41 @@ +open Core_kernel + +module Config = struct + let constraint_constants = Genesis_constants.Constraint_constants.compiled + + let proof_level = Genesis_constants.Proof_level.Full +end + +let () = Format.eprintf "Generating transaction snark circuit..@." + +let before = Time.now () + +module Transaction_snark_instance = Transaction_snark.Make (Config) + +let after = Time.now () + +let () = + Format.eprintf "Generated transaction snark circuit in %s.@." + (Time.Span.to_string_hum (Time.diff after before)) + +let () = Format.eprintf "Generating blockchain snark circuit..@." + +let before = Time.now () + +module Blockchain_snark_instance = +Blockchain_snark.Blockchain_snark_state.Make (struct + let tag = Transaction_snark_instance.tag + + include Config +end) + +let after = Time.now () + +let () = + Format.eprintf "Generated blockchain snark circuit in %s.@." 
+ (Time.Span.to_string_hum (Time.diff after before)) + +let () = + Lazy.force Blockchain_snark_instance.Proof.verification_key + |> Pickles.Verification_key.to_yojson |> Yojson.Safe.to_string + |> Format.print_string diff --git a/src/app/proposer_simulation/sim.py b/src/app/proposer_simulation/sim.py deleted file mode 100644 index 8e71e0928b8..00000000000 --- a/src/app/proposer_simulation/sim.py +++ /dev/null @@ -1,58 +0,0 @@ -import random -import matplotlib.pyplot as plt -import numpy as np -from matplotlib.animation import FuncAnimation - -f = 0.99 - -threshold = lambda x: 1 - (1 - f) ** x - -def normalize(xs): - s = sum(xs) - return [ x / s for x in xs ] - -coinbase_amount = 0.1 - -def simulate(stakes, total_money, slots): - total_money = float(total_money) - winners = [] - for i in range(slots): - per_slot_winners = [] - for id, s in enumerate(stakes): - if random.random() < threshold(s / total_money): - per_slot_winners.append(id) - # Simulate the deterministic tie-breaking - if per_slot_winners: - winners.append(random.choice(per_slot_winners)) - - new_stakes = stakes[:] - for w in winners: - total_money += coinbase_amount - new_stakes[w] += coinbase_amount - return (new_stakes, total_money) - -figure = plt.figure() -stakes = 100 * [0.5] + [ 50 ] -total_money = float(sum(stakes)) -bars = plt.bar(np.arange(len(stakes)), [s / total_money for s in stakes]) -plt.ylim(top=1) - -sim = lambda (s, g): simulate(s, g, 2160) -def iterate(f, n, x): - for i in range(n): - x = f(x) - return x - -def update(frame): - global stakes - global total_money - res = iterate(sim, 1, (stakes, total_money)) - stakes = res[0] - total_money = res[1] - print max(stakes) / total_money - for i, b in enumerate(bars): - b.set_height(stakes[i] / total_money) - -animation = FuncAnimation(figure, update, interval=10) - -plt.show() diff --git a/src/app/replayer/replayer.ml b/src/app/replayer/replayer.ml index d59abbadb30..1a2729b3684 100644 --- a/src/app/replayer/replayer.ml +++ b/src/app/replayer/replayer.ml @@ -547,7 +547,7 @@ let zkapp_command_to_transaction ~logger ~pool (cmd : Sql.Zkapp_command.t) : let (authorization : Control.t) = match body.authorization_kind with | Proof _ -> - Proof Proof.transaction_dummy + Proof (Lazy.force Proof.transaction_dummy) | Signature -> Signature Signature.dummy | None_given -> diff --git a/src/app/test_executive/dune b/src/app/test_executive/dune index c972a5ea15f..986f4e3bd7a 100644 --- a/src/app/test_executive/dune +++ b/src/app/test_executive/dune @@ -1,67 +1,69 @@ (executable (name test_executive) (libraries - ;; opam libraries - async_kernel - async - core - uri - yojson - core_kernel - cmdliner - base.base_internalhash_types - base.caml - async_unix - unsigned_extended - stdio - sexplib0 - ;; local libraries - protocol_version - mina_wire_types - with_hash - data_hash_lib - kimchi_backend - kimchi_backend.pasta - kimchi_backend.pasta.basic - pickles - pickles_types - random_oracle_input - genesis_constants - bash_colors - integration_test_lib - signature_lib - mina_signature_kind - mina_base - mina_stdlib - mina_transaction - file_system - currency - mina_runtime_config - secrets - integration_test_cloud_engine - mina_generators - logger - random_oracle - mina_numbers - transaction_snark - snark_params - pickles.backend - pipe_lib - mina_base.import - key_gen - integers - user_command_input - participating_state - graph_algorithms - visualization - sgn - zkapp_command_builder - network_pool - zkapps_examples - cache_dir - snarky.backendless - bounded_types - block_time - ) 
- (instrumentation (backend bisect_ppx)) - (preprocess (pps ppx_mina ppx_jane ppx_deriving_yojson ppx_mina ppx_version))) + ;; opam libraries + async_kernel + async + core + uri + yojson + core_kernel + cmdliner + base.base_internalhash_types + base.caml + async_unix + unsigned_extended + stdio + sexplib0 + ;; local libraries + protocol_version + mina_wire_types + with_hash + data_hash_lib + kimchi_backend + kimchi_backend.pasta + kimchi_backend.pasta.basic + pickles + pickles_types + random_oracle_input + genesis_constants + bash_colors + integration_test_lib + signature_lib + mina_signature_kind + mina_base + mina_stdlib + mina_transaction + file_system + currency + mina_runtime_config + secrets + integration_test_cloud_engine + integration_test_local_engine + mina_generators + logger + random_oracle + mina_numbers + transaction_snark + snark_params + pickles.backend + pipe_lib + mina_base.import + key_gen + integers + user_command_input + participating_state + graph_algorithms + visualization + sgn + zkapp_command_builder + network_pool + zkapps_examples + cache_dir + snarky.backendless + bounded_types + block_time) + (instrumentation + (backend bisect_ppx)) + (preprocess + (pps ppx_mina ppx_jane ppx_deriving_yojson ppx_mina ppx_version))) diff --git a/src/app/test_executive/test_executive.ml b/src/app/test_executive/test_executive.ml index 863b68fb30d..0c5071c1b49 100644 --- a/src/app/test_executive/test_executive.ml +++ b/src/app/test_executive/test_executive.ml @@ -47,7 +47,9 @@ let validate_inputs ~logger inputs (test_config : Test_config.t) : else Deferred.return () let engines : engine list = - [ ("cloud", (module Integration_test_cloud_engine : Intf.Engine.S)) ] + [ ("cloud", (module Integration_test_cloud_engine : Intf.Engine.S)) + ; ("local", (module Integration_test_local_engine : Intf.Engine.S)) + ] let tests : test list = [ ( "peers-reliability" diff --git a/src/app/test_executive/zkapps.ml b/src/app/test_executive/zkapps.ml index 16afdace331..864bc882438 100644 --- a/src/app/test_executive/zkapps.ml +++ b/src/app/test_executive/zkapps.ml @@ -378,7 +378,8 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct | Proof _ -> { other_p with authorization = - Control.Proof Mina_base.Proof.blockchain_dummy + Control.Proof + (Lazy.force Mina_base.Proof.blockchain_dummy) } | _ -> other_p ) @@ -759,6 +760,8 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct (Network.Node.get_ingress_uri node) zkapp_command_insufficient_fee "Insufficient fee" ) in + let%bind () = wait_for t (Wait_condition.blocks_to_be_produced 1) in + let%bind () = Malleable_error.lift (after (Time.Span.of_sec 30.0)) in (* Won't be accepted until the previous transactions are applied *) let%bind () = section_hard "Send a zkApp transaction to update all fields" diff --git a/src/app/validation/.credo.exs b/src/app/validation/.credo.exs deleted file mode 100644 index c489dd88edd..00000000000 --- a/src/app/validation/.credo.exs +++ /dev/null @@ -1,186 +0,0 @@ -# This file contains the configuration for Credo and you are probably reading -# this after creating it with `mix credo.gen.config`. -# -# If you find anything wrong or unclear in this file, please report an -# issue on GitHub: https://github.com/rrrene/credo/issues -# -%{ - # - # You can have as many configs as you like in the `configs:` field. - configs: [ - %{ - # - # Run any config using `mix credo -C `. If no config name is given - # "default" is used. 
- # - name: "default", - # - # These are the files included in the analysis: - files: %{ - # - # You can give explicit globs or simply directories. - # In the latter case `**/*.{ex,exs}` will be used. - # - included: [ - "lib/", - "src/", - "test/", - "web/", - "apps/*/lib/", - "apps/*/src/", - "apps/*/test/", - "apps/*/web/" - ], - excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"] - }, - # - # Load and configure plugins here: - # - plugins: [], - # - # If you create your own checks, you must specify the source files for - # them here, so they can be loaded by Credo before running the analysis. - # - requires: [], - # - # If you want to enforce a style guide and need a more traditional linting - # experience, you can change `strict` to `true` below: - # - strict: true, - # - # To modify the timeout for parsing files, change this value: - # - parse_timeout: 5000, - # - # If you want to use uncolored output by default, you can change `color` - # to `false` below: - # - color: true, - # - # You can customize the parameters of any check by adding a second element - # to the tuple. - # - # To disable a check put `false` as second element: - # - # {Credo.Check.Design.DuplicatedCode, false} - # - checks: [ - # - ## Consistency Checks - # - {Credo.Check.Consistency.ExceptionNames, []}, - {Credo.Check.Consistency.LineEndings, []}, - {Credo.Check.Consistency.ParameterPatternMatching, []}, - {Credo.Check.Consistency.SpaceAroundOperators, []}, - {Credo.Check.Consistency.SpaceInParentheses, []}, - {Credo.Check.Consistency.TabsOrSpaces, []}, - - # - ## Design Checks - # - # You can customize the priority of any check - # Priority values are: `low, normal, high, higher` - # - {Credo.Check.Design.AliasUsage, - [priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]}, - # You can also customize the exit_status of each check. - # If you don't want TODO comments to cause `mix credo` to fail, just - # set this value to 0 (zero). 
- # {Credo.Check.Design.TagTODO, [exit_status: 2]}, - # {Credo.Check.Design.TagFIXME, []}, - {Credo.Check.Design.TagTODO, false}, - {Credo.Check.Design.TagFIXME, false}, - - # - ## Readability Checks - # - {Credo.Check.Readability.AliasOrder, []}, - {Credo.Check.Readability.FunctionNames, []}, - {Credo.Check.Readability.LargeNumbers, []}, - {Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]}, - {Credo.Check.Readability.ModuleAttributeNames, []}, - {Credo.Check.Readability.ModuleDoc, []}, - {Credo.Check.Readability.ModuleNames, []}, - {Credo.Check.Readability.ParenthesesInCondition, []}, - {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []}, - {Credo.Check.Readability.PredicateFunctionNames, []}, - {Credo.Check.Readability.PreferImplicitTry, []}, - {Credo.Check.Readability.RedundantBlankLines, []}, - {Credo.Check.Readability.Semicolons, []}, - {Credo.Check.Readability.SpaceAfterCommas, []}, - {Credo.Check.Readability.StringSigils, []}, - {Credo.Check.Readability.TrailingBlankLine, []}, - {Credo.Check.Readability.TrailingWhiteSpace, []}, - {Credo.Check.Readability.UnnecessaryAliasExpansion, []}, - {Credo.Check.Readability.VariableNames, []}, - - # - ## Refactoring Opportunities - # - {Credo.Check.Refactor.CondStatements, []}, - {Credo.Check.Refactor.CyclomaticComplexity, []}, - {Credo.Check.Refactor.FunctionArity, []}, - {Credo.Check.Refactor.LongQuoteBlocks, []}, - {Credo.Check.Refactor.MapInto, false}, - {Credo.Check.Refactor.MatchInCondition, []}, - {Credo.Check.Refactor.NegatedConditionsInUnless, []}, - {Credo.Check.Refactor.NegatedConditionsWithElse, []}, - {Credo.Check.Refactor.Nesting, []}, - {Credo.Check.Refactor.UnlessWithElse, []}, - {Credo.Check.Refactor.WithClauses, []}, - - # - ## Warnings - # - {Credo.Check.Warning.BoolOperationOnSameValues, []}, - {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []}, - {Credo.Check.Warning.IExPry, []}, - {Credo.Check.Warning.IoInspect, []}, - {Credo.Check.Warning.LazyLogging, false}, - {Credo.Check.Warning.MixEnv, false}, - {Credo.Check.Warning.OperationOnSameValues, []}, - {Credo.Check.Warning.OperationWithConstantResult, []}, - {Credo.Check.Warning.RaiseInsideRescue, []}, - {Credo.Check.Warning.UnusedEnumOperation, []}, - {Credo.Check.Warning.UnusedFileOperation, []}, - {Credo.Check.Warning.UnusedKeywordOperation, []}, - {Credo.Check.Warning.UnusedListOperation, []}, - {Credo.Check.Warning.UnusedPathOperation, []}, - {Credo.Check.Warning.UnusedRegexOperation, []}, - {Credo.Check.Warning.UnusedStringOperation, []}, - {Credo.Check.Warning.UnusedTupleOperation, []}, - {Credo.Check.Warning.UnsafeExec, []}, - - # - # Checks scheduled for next check update (opt-in for now, just replace `false` with `[]`) - - # - # Controversial and experimental checks (opt-in, just replace `false` with `[]`) - # - {Credo.Check.Readability.StrictModuleLayout, false}, - {Credo.Check.Consistency.MultiAliasImportRequireUse, false}, - {Credo.Check.Consistency.UnusedVariableNames, false}, - {Credo.Check.Design.DuplicatedCode, false}, - {Credo.Check.Readability.AliasAs, false}, - {Credo.Check.Readability.MultiAlias, false}, - {Credo.Check.Readability.Specs, false}, - {Credo.Check.Readability.SinglePipe, false}, - {Credo.Check.Readability.WithCustomTaggedTuple, false}, - {Credo.Check.Refactor.ABCSize, false}, - {Credo.Check.Refactor.AppendSingleItem, false}, - {Credo.Check.Refactor.DoubleBooleanNegation, false}, - {Credo.Check.Refactor.ModuleDependencies, false}, - {Credo.Check.Refactor.NegatedIsNil, false}, - {Credo.Check.Refactor.PipeChainStart, 
false}, - {Credo.Check.Refactor.VariableRebinding, false}, - {Credo.Check.Warning.LeakyEnvironment, false}, - {Credo.Check.Warning.MapGetUnsafePass, false}, - {Credo.Check.Warning.UnsafeToAtom, false} - - # - # Custom checks can be created using `mix credo.gen.check`. - # - ] - } - ] -} diff --git a/src/app/validation/.formatter.exs b/src/app/validation/.formatter.exs deleted file mode 100644 index d2cda26eddc..00000000000 --- a/src/app/validation/.formatter.exs +++ /dev/null @@ -1,4 +0,0 @@ -# Used by "mix format" -[ - inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] -] diff --git a/src/app/validation/README.md b/src/app/validation/README.md deleted file mode 100644 index a2a542198d0..00000000000 --- a/src/app/validation/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# Intro - -Compile application only: `mix compile` -Compile and perform static analysis (check for type-errors): `mix dialyzer` -Compile and execute test suite: `mix test` -Compile, execute test suite, and generate test coverage html: `MIX_ENV=test mix coveralls.html` -Compile and run application locally: `mix run` -Compile and run inside interactive elixir session: `iex -S mix` -Compile and start interactive elixir session inside application scope, without actually starting the application: `iex -S mix run --no-start` - -# Debugging - -``` -$ iex -S mix run --no-start -# start the OTP observer: -iex(.)> :observer.start() -# start the GUI debugger: -iex(.)> :debugger.start() -# add modules to debugger (repeat for all modules you want to debug): -iex(.)> :int.ni(SomeModuleYouWantToDebug) -# start the application (the arguments are currently unused, so they don't matter): -iex(.)> CodaValidation.start([], []) -``` - -# TODO - -- Fix auth token TTL expiration (refresh on interval?) -- Design spot providers and unify with log providers -- Implement aggregate statistics (as opposed to scalar statistics) -- Implement statistics subscribing to statistics (switch to defining list of data sources; check for cycles) -- Implement planning (given resources and validation requests, plan all process requests) - - May involve dynamic request graph? -- Implement discord alerting backend -- Implement statistic exports (Prometheus interface; exported statistics selected by validation queries) -- Implement core statistics - - Approximate frontier - - Block acceptance rate - - Aggregate block acceptance rate - - Common prefix - - Aggregate frontier? 
-- Abstract over cloud provider
-- Dynamic resource classification
diff --git a/src/app/validation/config/config.exs b/src/app/validation/config/config.exs
deleted file mode 100644
index 4b131672744..00000000000
--- a/src/app/validation/config/config.exs
+++ /dev/null
@@ -1,20 +0,0 @@
-import Config
-
-config :logger,
-  level: String.to_atom(System.get_env("LOG_LEVEL") || "info"),
-  handle_otp_reports: true,
-  handle_sasl_reports: true
-
-config :logger, :console,
-  format: {PrettyConsoleLog, :format},
-  metadata: [:pid, :context]
-
-config :coda_validation,
-  project_id: "o1labs-192920",
-  region: "us-east1",
-  cluster: "coda-infra-east",
-  testnet: "regeneration"
-
-if File.exists?("config/local.exs") do
-  import_config "local.exs"
-end
diff --git a/src/app/validation/lib/architecture/alert_server.ex b/src/app/validation/lib/architecture/alert_server.ex
deleted file mode 100644
index 8428bea6a46..00000000000
--- a/src/app/validation/lib/architecture/alert_server.ex
+++ /dev/null
@@ -1,10 +0,0 @@
-defmodule Architecture.AlertServer do
-  @moduledoc """
-  One day, this will be the AlertServer which manages alerts from validations. For now, it's a
-  no-op.
-  """
-
-  def validation_error(_, _, _) do
-    IO.puts("woopsie")
-  end
-end
diff --git a/src/app/validation/lib/architecture/junction.ex b/src/app/validation/lib/architecture/junction.ex
deleted file mode 100644
index d12feaf6e57..00000000000
--- a/src/app/validation/lib/architecture/junction.ex
+++ /dev/null
@@ -1,29 +0,0 @@
-defmodule Architecture.Junction do
-  @moduledoc """
-  Mixin for defining junctions. Junctions are a pattern around `Registry`s which provide a
-  highly-parallel pub/sub system.
-  """
-
-  defmacro __using__(_params) do
-    quote do
-      def child_spec do
-        Registry.child_spec(
-          keys: :duplicate,
-          name: __MODULE__,
-          partitions: System.schedulers_online()
-        )
-      end
-
-      def subscribe(key) do
-        Registry.register(__MODULE__, key, [])
-      end
-
-      def broadcast(key, msg) do
-        Registry.dispatch(__MODULE__, key, fn entries ->
-          msg = {:subscription, key, msg}
-          Enum.each(entries, fn {pid, _} -> GenServer.cast(pid, msg) end)
-        end)
-      end
-    end
-  end
-end
diff --git a/src/app/validation/lib/architecture/log_filter.ex b/src/app/validation/lib/architecture/log_filter.ex
deleted file mode 100644
index 3351ef592ef..00000000000
--- a/src/app/validation/lib/architecture/log_filter.ex
+++ /dev/null
@@ -1,185 +0,0 @@
-defmodule Architecture.LogFilter do
-  @moduledoc "A generalized language for representing log filters."
-
-  @type selection :: [String.t()]
-
-  # TODO: remove global_restriction support in favor of local restrictions on messages?
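-  # Rendering sketch (illustrative, using the constructors and rendering
-  # functions below): the filter
-  #
-  #   equals(["labels", "k8s-pod/role"], "block-producer")
-  #
-  # renders as "labels"."k8s-pod/role"="block-producer", and adjoined filters
-  # are rendered joined by AND.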
-  @type t ::
-          {:global_restriction, String.t()}
-          | {:equals | :contains, selection, String.t()}
-          | {:adjunction | :disjunction, [t]}
-          | String.t()
-          | nil
-
-  # Simple Constructors
-
-  @spec global_restriction(String.t()) :: t
-  def global_restriction(str), do: {:global_restriction, str}
-
-  @spec equals(selection, String.t()) :: t
-  def equals(sel, str), do: {:equals, sel, str}
-
-  @spec contains(selection, String.t()) :: t
-  def contains(sel, str), do: {:contains, sel, str}
-
-  # Filter Combinators
-
-  @spec adjoin(t, t) :: t
-  def adjoin(nil, b), do: b
-  def adjoin(a, nil), do: a
-  # IMPLIED: def adjoin(nil, nil), do: nil
-  def adjoin({:adjunction, a}, {:adjunction, b}), do: {:adjunction, a ++ b}
-  def adjoin({:adjunction, a}, b), do: {:adjunction, a ++ [b]}
-  def adjoin(a, {:adjunction, b}), do: {:adjunction, [a | b]}
-  def adjoin(a, b), do: {:adjunction, [a, b]}
-
-  @spec adjoin([t] | nil) :: t
-  def adjoin(nil), do: nil
-  def adjoin([]), do: nil
-  def adjoin(ls) when is_list(ls), do: Enum.reduce(ls, &adjoin(&2, &1))
-
-  @spec disjoin(t, t) :: t
-  def disjoin(nil, b), do: b
-  def disjoin(a, nil), do: a
-  # IMPLIED: def disjoin(nil, nil), do: nil
-  def disjoin({:disjunction, a}, {:disjunction, b}), do: {:disjunction, a ++ b}
-  def disjoin({:disjunction, a}, b), do: {:disjunction, a ++ [b]}
-  def disjoin(a, {:disjunction, b}), do: {:disjunction, [a | b]}
-  def disjoin(a, b), do: {:disjunction, [a, b]}
-
-  @spec disjoin([t] | nil) :: t
-  def disjoin(nil), do: nil
-  def disjoin([]), do: nil
-  def disjoin(ls) when is_list(ls), do: Enum.reduce(ls, &disjoin(&2, &1))
-
-  @spec nest(String.t()) :: String.t()
-  defp nest(str), do: "(#{str})"
-
-  # Filter Rendering
-  # TODO: split this off into a LogEngine abstraction
-  # TODO: apply stack driver specific optimizations (e.g. `a = "a" OR a = "b"` can be optimized to `a = ("a" OR "b")`)
-
-  @spec render_string(String.t()) :: String.t()
-  defp render_string(str) when is_binary(str) do
-    escaped_str = String.replace(str, "\"", "\\\"")
-    "\"#{escaped_str}\""
-  end
-
-  # TODO: custom selection part escape logic (will reduce the size of rendered filters)
-  # for now, we just wrap all selection parts in strings to avoid special escape logic
-  @spec render_selection(selection) :: String.t()
-  defp render_selection(sel), do: sel |> Enum.map(&render_string/1) |> Enum.join(".")
-
-  @spec render_comparison(selection, String.t()) :: String.t()
-  defp render_comparison(sel, str), do: "#{render_selection(sel)}=#{render_string(str)}"
-
-  @spec render_conjunction([t], String.t()) :: String.t()
-  defp render_conjunction(ts, joinder) do
-    ts
-    |> Enum.map(&render_inner/1)
-    |> Enum.join("#{joinder}")
-  end
-
-  @spec render_inner(t) :: String.t()
-  defp render_inner(nil), do: ""
-  defp render_inner(str) when is_binary(str), do: str
-  defp render_inner({:global_restriction, str}), do: render_string(str)
-  defp render_inner({:equals, sel, str}), do: render_comparison(sel, str)
-  defp render_inner({:contains, sel, str}), do: render_comparison(sel, str)
-  defp render_inner({:adjunction, ts}), do: nest(render_conjunction(ts, " AND "))
-  defp render_inner({:disjunction, ts}), do: nest(render_conjunction(ts, " OR "))
-
-  @spec render(t) :: String.t()
-  def render({:adjunction, ts}), do: render_conjunction(ts, "\n")
-  def render(t), do: render_inner(t)
-end
-
-defmodule Architecture.LogFilter.Language do
-  @moduledoc "A DSL for constructing log filters."
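-  # Usage sketch (the operators are parsed below; selectors and values are
-  # illustrative, mirroring Coda.Resources.CodaNode):
-  #
-  #   filter do
-  #     resource.labels.container_name == "coda"
-  #     labels["k8s-pod/role"] == "block-producer"
-  #   end
-  #
-  # `==` compiles to equals/2, `<~>` to contains/2, `and`/`or` to
-  # adjoin/disjoin, and bare strings to global_restriction/1; multiple
-  # top-level expressions are adjoined.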
-
-  defmodule SyntaxError do
-    defexception [:syntax, :error]
-
-    def message(%__MODULE__{syntax: syntax, error: error}) do
-      # "#{error} [offending syntax: `#{Macro.to_string(syntax)}`]"
-      "#{error} [offending syntax: `#{Macro.to_string(syntax)}`; `#{inspect(syntax)}`]"
-    end
-  end
-
-  @spec parse_string(Macro.t()) :: Macro.t()
-  defp parse_string(x) when is_binary(x), do: Macro.escape(x)
-  defp parse_string({:<<>>, _, _} = x), do: x
-
-  defp parse_string(syntax) do
-    raise SyntaxError,
-      syntax: syntax,
-      error: "right hand side of `==` or `<~>` must be a string"
-  end
-
-  @spec parse_selection(Macro.t()) :: Architecture.LogFilter.selection()
-  defp parse_selection({{:., _, [head, tail]}, _, []}) when is_atom(tail),
-    do: parse_selection(head) ++ [to_string(tail)]
-
-  defp parse_selection({{:., _, [Access, :get]}, _, [head, tail]}),
-    do: parse_selection(head) ++ [to_string(tail)]
-
-  defp parse_selection({base, _, _}) when is_atom(base), do: [to_string(base)]
-
-  defp parse_selection(syntax) do
-    raise SyntaxError,
-      syntax: syntax,
-      error: "invalid log data selector (left hand side of comparison operators)"
-  end
-
-  @spec parse_filter(Macro.t()) :: Macro.t()
-  defp parse_filter({:=, _, _} = syntax) do
-    raise SyntaxError,
-      syntax: syntax,
-      error: "`=` not supported in log filters; use `==` for equality instead"
-  end
-
-  defp parse_filter({:==, _, [a, b]}) do
-    quote do:
-            unquote(Architecture.LogFilter).equals(
-              unquote(parse_selection(a)),
-              unquote(parse_string(b))
-            )
-  end
-
-  defp parse_filter({:<~>, _, [a, b]}) do
-    quote do:
-            unquote(Architecture.LogFilter).contains(
-              unquote(parse_selection(a)),
-              unquote(parse_string(b))
-            )
-  end
-
-  defp parse_filter({:and, _, [a, b]}) do
-    quote do:
-            unquote(Architecture.LogFilter).adjoin(
-              unquote(parse_filter(a)),
-              unquote(parse_filter(b))
-            )
-  end
-
-  defp parse_filter({:or, _, [a, b]}) do
-    quote do:
-            unquote(Architecture.LogFilter).disjoin(
-              unquote(parse_filter(a)),
-              unquote(parse_filter(b))
-            )
-  end
-
-  defp parse_filter(syntax) do
-    quote do: unquote(Architecture.LogFilter).global_restriction(unquote(parse_string(syntax)))
-  rescue
-    SyntaxError ->
-      reraise SyntaxError, [syntax: syntax, error: "invalid filter syntax"], __STACKTRACE__
-  end
-
-  defmacro filter(do: {:__block__, _, body}) do
-    quote do: unquote(Architecture.LogFilter).adjoin(unquote(Enum.map(body, &parse_filter/1)))
-  end
-
-  defmacro filter(do: filter), do: parse_filter(filter)
-end
diff --git a/src/app/validation/lib/architecture/log_provider.ex b/src/app/validation/lib/architecture/log_provider.ex
deleted file mode 100644
index d0260707394..00000000000
--- a/src/app/validation/lib/architecture/log_provider.ex
+++ /dev/null
@@ -1,131 +0,0 @@
-defmodule Architecture.LogProvider do
-  @moduledoc "Behaviour for log providers."
-
-  alias Architecture.ResourceSet
-  alias Cloud.Google.Subscription
-
-  @type t :: module
-  # TODO
-  @type log :: any
-
-  @callback resource_class() :: module()
-  @callback log_filter() :: Architecture.LogFilter.t()
-
-  defmacro __using__(_params) do
-    quote do
-      @behaviour unquote(__MODULE__)
-      import Architecture.LogFilter.Language
-      require Architecture.LogFilter.Language
-    end
-  end
-
-  def log_filter(log_provider, resource_db) do
-    resource_filter = ResourceSet.filter(resource_db)
-    Architecture.LogFilter.adjoin(resource_filter, log_provider.log_filter())
-  end
-
-  defmodule Junction do
-    @moduledoc "Junction for logs."
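-    # Keys are {log_provider, resource} pairs. Usage sketch (names
-    # illustrative; see the Broker below for the real call sites):
-    #
-    #   Junction.subscribe(Coda.Providers.BlockProduced, resource)
-    #   Junction.broadcast(Coda.Providers.BlockProduced, resource, message)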
- - use Architecture.Junction - - def subscribe(log_provider, resource), do: subscribe({log_provider, resource}) - def broadcast(log_provider, resource, msg), do: broadcast({log_provider, resource}, msg) - end - - defmodule Spec do - @moduledoc "Specification of a log provider to execute." - - use Class - - defclass( - conn: Cloud.Google.pubsub_conn(), - subscription: Subscription.t(), - log_provider: module - ) - end - - defmodule Broker do - @moduledoc "Interpreter and message broker for executing log providers." - - # Each provider has 1 sink pub/sub pipeline associated with it. Provider ingests gcloud - # subscriptions and forwards to junction if the associated resource exists in the resource - # set. - require Logger - alias Architecture.LogProvider - - @spec child_spec(LogProvider.Spec.t()) :: Supervisor.child_spec() - def child_spec(%LogProvider.Spec{} = spec) do - %{ - id: __MODULE__, - type: :worker, - start: {__MODULE__, :start_link, [spec]}, - restart: :permanent, - modules: [__MODULE__, spec.log_provider] - } - end - - @spec start_link(LogProvider.Spec.t()) :: {:ok, pid} - def start_link(spec) do - {:ok, spawn_link(fn -> init(spec) end)} - end - - @spec init(LogProvider.Spec.t()) :: no_return - def init(spec) do - Logger.metadata(context: __MODULE__) - Process.register(self(), String.to_atom("#{__MODULE__}:#{spec.log_provider}")) - run(spec) - end - - @spec run(LogProvider.Spec.t()) :: nil - def run(spec) do - Subscription.pull_and_process(spec.conn, spec.subscription, fn msg -> handle_message(msg, spec.log_provider) end) - run(spec) - end - - # TODO: properly define message schema as a type - @spec handle_message(map,module) :: :ok - def handle_message(message,log_provider) do - resource = - try do - # TODO: implement dynamic resource classification - Coda.ResourceClassifier.classify_resource(message) - rescue - e -> - Logger.error("failed to classify resource") - reraise e, __STACKTRACE__ - end - - LogProvider.Junction.broadcast(log_provider, resource, message) - end - end - - defmodule MainSupervisor do - @moduledoc "Main supervisor for spawning and monitoring log providers." - - alias Architecture.LogProvider - - use Supervisor - - def start_link(log_provider_specs) do - Supervisor.start_link(__MODULE__, log_provider_specs, name: __MODULE__) - end - - @impl true - def init(log_provider_specs) do - children = [ - LogProvider.Junction.child_spec() - | Enum.map(log_provider_specs, &broker_child_spec/1) - ] - - Supervisor.init(children, strategy: :one_for_one) - end - - defp broker_child_spec(spec) do - Supervisor.child_spec( - {LogProvider.Broker, spec}, - id: "LogProvider.Broker:#{spec.log_provider}" - ) - end - end -end diff --git a/src/app/validation/lib/architecture/resource.ex b/src/app/validation/lib/architecture/resource.ex deleted file mode 100644 index cd70b7b9d93..00000000000 --- a/src/app/validation/lib/architecture/resource.ex +++ /dev/null @@ -1,79 +0,0 @@ -defmodule Architecture.Resource do - @moduledoc "Behaviour and mixin for resources." 
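-  # Definition sketch (a hypothetical node resource, modeled on
-  # Coda.Resources.CodaNode elsewhere in this app):
-  #
-  #   defmodule MyNode do
-  #     use Architecture.Resource
-  #
-  #     defresource(name: String.t())
-  #
-  #     @impl true
-  #     def global_filter, do: filter(do: resource.labels.container_name == "coda")
-  #
-  #     @impl true
-  #     def local_filter(%__MODULE__{name: name}),
-  #       do: filter(do: labels["k8s-pod/app"] == "#{name}")
-  #   end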
- - import Util.ForMacros - - use Class - defclass([]) - - @type class :: Class.t() - - @spec is_resource_class?(module) :: boolean - def is_resource_class?(mod), do: Class.is_subclass?(mod, __MODULE__) - - @callback global_filter :: Architecture.LogFilter.t() - @callback local_filter(any) :: Architecture.LogFilter.t() - - defmacro __using__(_params) do - quote do - @behaviour unquote(__MODULE__) - @before_compile unquote(__MODULE__) - require unquote(__MODULE__) - import unquote(__MODULE__) - require unquote(Architecture.LogFilter.Language) - import unquote(Architecture.LogFilter.Language) - end - end - - defmacro __before_compile__(env) do - if not Class.is_class?(env.module) do - raise "must create a class" - end - - # full_global_filter = - # case Module.get_attribute(env.module, :extends) do - # nil -> quote do: global_filter() - # parent -> quote do: unquote(parent).full_global_filter() ++ global_filter() - # end - - quote do - def full_global_filter do - # unquote(full_global_filter) - "TODO" - end - end - end - - @spec defresource(Keyword.t(Macro.t())) :: Macro.t() - defmacro defresource(fields) when is_list(fields) do - ensure_not_already_class(__CALLER__.module) - inject_class(__MODULE__, fields) - end - - @spec defresource(module, Keyword.t(Macro.t())) :: Macro.t() - defmacro defresource(super_class, fields) when is_list(fields) do - ensure_not_already_class(__CALLER__.module) - super_class = resolve_module(__CALLER__, super_class) - - if not Class.is_class?(super_class) or not is_resource_class?(super_class) do - raise "superclass must be a resource" - end - - inject_class(super_class, fields) - end - - @spec ensure_not_already_class(module) :: nil - defp ensure_not_already_class(mod) do - if Class.is_class?(mod) do - raise "cannot define resource because #{inspect(mod)} is already a class" - end - end - - @spec inject_class(module, Keyword.t(Macro.t())) :: Macro.t() - defp inject_class(super_class, fields) do - quote do - use unquote(Class) - defclass(unquote(super_class), unquote(fields)) - end - end -end diff --git a/src/app/validation/lib/architecture/resource_set.ex b/src/app/validation/lib/architecture/resource_set.ex deleted file mode 100644 index 67df69cdc47..00000000000 --- a/src/app/validation/lib/architecture/resource_set.ex +++ /dev/null @@ -1,128 +0,0 @@ -defmodule Architecture.ResourceSet do - @moduledoc """ - An indexed, queryable set of resources. Supports computing filters for all resources in the set. 
- """ - - alias Architecture.Resource - - use Class - - @type resource_id :: integer - - # NB: - # with this design, uniqueness is only guaranteed within a single set, not across sets - # (should be fine for now, needs to use a max_resource_id agent if use-case changes) - defclass( - max_resource_id: integer, - resources: %{optional(resource_id) => Resource.t()}, - class_index: %{optional(Resource.class()) => [resource_id]} - ) - - @spec empty :: t - def empty, - do: %__MODULE__{ - max_resource_id: -1, - resources: %{}, - class_index: %{} - } - - @spec build([Class.instance()]) :: t - def build(resources), do: Enum.reduce(resources, empty(), &insert(&2, &1)) - - # insertion is private to enforce correct usage - # (please see NB at defclass if considering exposing publicly) - @spec insert(t, Class.instance()) :: t - defp insert(set, resource) do - id = set.max_resource_id + 1 - resource_class = Class.class_of(resource) - all_classes = [resource_class | Class.all_super_classes(resource_class)] - # there's no reason to index the root object or root resource - # TODO: Class.super_classes_up_to() - indexable_classes = Enum.filter(all_classes, &(&1 != Class.Object and &1 != Resource)) - resources = Map.put(set.resources, id, resource) - - class_index = - Enum.reduce( - indexable_classes, - set.class_index, - &Map.update(&2, &1, [], fn ids -> [id | ids] end) - ) - - %{set | max_resource_id: id, resources: resources, class_index: class_index} - end - - @spec all_resources(t) :: [Resource.t()] - def all_resources(set), do: Map.values(set.resources) - - @spec all_resource_classes(t) :: [Class.t()] - def all_resource_classes(set), do: Map.keys(set.class_index) - - @spec select(t, Class.t()) :: t - def select(set, class), do: slice(set, Map.get(set.class_index, class, [])) - - @spec slice(t, [resource_id]) :: t - defp slice(set, ids) do - {resources, _} = Map.split(set.resources, ids) - - class_index = - Enum.map(set.class_index, fn {class, index} -> - {class, Enum.filter(index, &Enum.member?(ids, &1))} - end) - |> Map.new() - - %__MODULE__{set | resources: resources, class_index: class_index} - end - - @spec filter(t) :: Architecture.LogFilter.t() - def filter(set) do - import Architecture.LogFilter - - # need to crawl a tree of class relationships in order to build the proper filter expression - # for each resource, nest with least descriptive class on the outer, and most descriptive class inner - # eg: - # - # ( - # CodaNode.global_filter - # AND ( - # ( - # BlockProducer.global_filter - # AND ((BlockProducer.local_filter AND CodaNode.local_filter) OR ...)) - # OR (...) 
- # OR (CodaNode.local_filter OR ...)) - # - # the disjunction of local filter, resource specific filters gets pushed to the most descriptive class's layer, and - # the adjunctions are nested in a tree following the subclass relationships up to Architecture.Resource - - resource_filters_by_class = - all_resources(set) - |> Enum.reduce(%{}, fn resource, map -> - resource_class = Class.class_of(resource) - - local_filter = - Class.inheritance_chain!(resource_class, Resource) - |> Enum.reverse() - |> Enum.map(fn class -> class.local_filter(Class.downcast!(resource, class)) end) - |> adjoin() - - if local_filter != nil do - Map.update(map, resource_class, [local_filter], &(&1 ++ [local_filter])) - else - map - end - end) - - Class.Hiearchy.compute(Resource, all_resource_classes(set)) - |> Class.Hiearchy.reduce_depth_first_exclusive( - nil, - fn class, child_filters -> - resource_filters = Map.get(resource_filters_by_class, class) - - adjoin( - class.global_filter(), - disjoin(disjoin(resource_filters), child_filters) - ) - end, - &disjoin/1 - ) - end -end diff --git a/src/app/validation/lib/architecture/statistic.ex b/src/app/validation/lib/architecture/statistic.ex deleted file mode 100644 index 485077fd9d2..00000000000 --- a/src/app/validation/lib/architecture/statistic.ex +++ /dev/null @@ -1,183 +0,0 @@ -defmodule Architecture.Statistic do - @moduledoc "Behaviour for statistics." - - alias Architecture.LogProvider - alias Architecture.Resource - alias Architecture.ResourceSet - alias Architecture.Timer - - @type t :: module - @type message :: any - - @callback providers :: [module] # log providers or other statistics - @callback resources(ResourceSet.t()) :: ResourceSet.t() - @callback init(Resource.t()) :: struct - @callback update(Resource.t(), state) :: state when state: struct - @callback handle_message(Resource.t(), state, {module, t()}, message()) :: state - when state: struct - - # a statistic can depend on a statistic provider, so cycles are possible - @spec check_cycle([module],MapSet.t()) :: :ok - def check_cycle(providers,seen) do - Enum.each(providers, - fn prov -> - if MapSet.member?(seen,prov) do - raise "Found a Statistics provider cycle containing #{prov}" - end - # a Log_provider has no provider dependencies - if Util.has_behaviour?(prov,Architecture.Statistic) do - check_cycle(prov.providers,MapSet.put(seen,prov)) - end - end) - :ok - end - - defmacro __using__(_params) do - quote do - @behaviour unquote(__MODULE__) - end - end - - defmodule Junction do - @moduledoc "Junction for statistic states." - - use Architecture.Junction - - def subscribe(statistic, resource), do: subscribe({statistic, resource}) - def broadcast(statistic, resource, msg), do: broadcast({statistic, resource}, msg) - end - - defmodule Spec do - @moduledoc "Specification of a statistic to execute." - - use Class - - defclass( - statistic: module, - resource_db: ResourceSet.t() - ) - end - - defmodule Broker do - @moduledoc "Interpreter and message broker for executing statistics." - - alias Architecture.Statistic - require Logger - - defmodule Params do - @moduledoc "Statistic broker parameters." 
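-      # :mod is the statistic module being executed; :resource is the
-      # resource instance the statistic is computed for.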
- - defstruct [:mod, :resource] - end - - use GenServer - - def start_link(params) do - Logger.info("starting #{__MODULE__} for #{params.resource.name}") - - GenServer.start_link( - __MODULE__, - params, - name: String.to_atom("#{__MODULE__}:#{params.mod}:#{params.resource.name}") - ) - end - - def update(server), do: GenServer.call(server, :update) - - @impl true - def init(params) do - Logger.metadata(context: __MODULE__) - Logger.info("subscribing to providers", process_module: __MODULE__) - Enum.each(params.mod.providers(), - fn provider -> - cond do - Util.has_behaviour?(provider,Architecture.LogProvider) -> - LogProvider.Junction.subscribe(provider, params.resource) - Util.has_behaviour?(provider,Architecture.Statistic) -> - Statistic.Junction.subscribe(provider, params.resource) - true -> - raise "#{provider} does not have the behaviour of either LogProvider or Statistic" - end - end) - state = params.mod.init(params.resource) - {:ok, {params,state}} - end - - @impl true - def handle_cast({:subscription, provider, message}, {params, state}) do - state = params.mod.handle_message(params.resource, state, provider, message) - Statistic.Junction.broadcast(params.mod, params.resource, state) - {:noreply, {params, state}} - end - - @impl true - def handle_call(:tick, _from, {params, state}) do - state = params.mod.update(params.resource, state) - Statistic.Junction.broadcast(__MODULE__, state) - {:reply, :ok, {params, state}} - end - end - - defmodule MainSupervisor do - @moduledoc "Main supervisor for spawning and monitoring statistics." - - alias Architecture.Statistic - - use Supervisor - - # TODO: make configurable per statistic - @default_update_interval 20_000 - - def start_link(statistics_spec) do - Supervisor.start_link(__MODULE__, statistics_spec, name: __MODULE__) - end - - def init(stat_specs) do - # Logger.metadata(context: __MODULE__) - - all_broker_child_specs = Enum.flat_map(stat_specs, &broker_child_specs/1) - children = [Statistic.Junction.child_spec() | all_broker_child_specs] - - Supervisor.init(children, strategy: :one_for_one) - end - - @spec broker_child_specs(Statistic.Spec.t()) :: [Supervisor.child_spec()] - def broker_child_specs(%Statistic.Spec{} = stat_spec) do - if not Util.has_behaviour?(stat_spec.statistic, Architecture.Statistic) do - raise "#{inspect(stat_spec.statistic)} must be a Statistic" - end - - stat_spec.statistic.resources(stat_spec.resource_db) - |> ResourceSet.all_resources() - |> Enum.map(fn resource -> - # We construct the following supervision tree for each statistic we compute: - # - # Timer.CoSupervisor - # / \ - # Statistic.Broker Timer - # (executing stat_spec.statistic) - - # TEMP HACK: existence of resource.id is an unreasonable assumption - - server_params = %Statistic.Broker.Params{ - mod: stat_spec.statistic, - resource: resource - } - - supervisor_params = %Timer.CoSupervisor.Params{ - sidecar_mod: Statistic.Broker, - sidecar_arg: server_params, - update_interval: @default_update_interval - } - - broker_id = "Statistic.Broker:#{stat_spec.statistic}:#{resource.name}" - timer_cosup_id = "Timer.CoSupervisor:#{broker_id}" - - Supervisor.child_spec( - {Timer.CoSupervisor, supervisor_params}, - id: timer_cosup_id - ) - end) - end - end -end diff --git a/src/app/validation/lib/architecture/timer.ex b/src/app/validation/lib/architecture/timer.ex deleted file mode 100644 index caf52fe249f..00000000000 --- a/src/app/validation/lib/architecture/timer.ex +++ /dev/null @@ -1,123 +0,0 @@ -defmodule Architecture.Timer do - @moduledoc "A generic 
timer process which can send :tick messages to a GenServer on an interval." - - require Logger - - defmodule TargetUnavailableError do - defexception [] - - def message(_), do: "timer target is unavailable" - end - - @spec child_spec(get_target: function, update_interval: integer) :: Supervisor.child_spec() - def child_spec(get_target: get_target, update_interval: update_interval) do - %{ - id: __MODULE__, - type: :worker, - start: {__MODULE__, :start_link, [get_target, update_interval]}, - restart: :permanent, - shutdown: :brutal_kill, - modules: [__MODULE__] - } - end - - @spec start_link(function, integer) :: {:ok, pid} - def start_link(get_target, update_interval) do - pid = spawn_link(fn -> run(get_target, update_interval) end) - {:ok, pid} - end - - @spec run(function, integer) :: no_return - def run(get_target, update_interval) do - Logger.metadata(context: __MODULE__) - Logger.info("fetching target #{inspect(get_target)}") - - case get_target.() do - # if the target can't be found, crash and let our supervisor choose how to restart us to try again - nil -> - Logger.error("target is unavailable") - raise TargetUnavailableError - - target_pid -> - Logger.info("target found") - tick_loop(target_pid, update_interval) - end - end - - @spec tick_loop(pid, integer) :: no_return - def tick_loop(target_pid, update_interval) do - :timer.sleep(update_interval) - Logger.info("sending tick") - :ok = GenServer.call(target_pid, :tick) - tick_loop(target_pid, update_interval) - end - - defmodule CoSupervisor do - @moduledoc """ - A simple supervisor which can monitor another process alongside a timer. - This is useful for wrapping the target process of a timer. - """ - - use Supervisor - - defmodule Params do - @moduledoc "Supervisor parameters." - - use Class - - defclass( - sidecar_mod: module, - sidecar_arg: any, - update_interval: pos_integer - ) - end - - # # this type is missing from the standard library; corresponds with the return value of Supervisor.which_children - # @type supervisor_child:: {Supervisor.term | :undefined, Supervisor.child | :restarting, :worker | :supervisor, :supervisor.modules()} - - # @spec is_sidecar_child(module, supervisor_child) :: boolean - # def is_active_sidecar_child(sidecar_mod, {_id, _child, _type, modules}) do - - @spec find_sidecar(pid, module) :: pid | nil - def find_sidecar(supervisor_pid, sidecar_mod) do - sidecar_child_data = - Supervisor.which_children(supervisor_pid) - |> Enum.find(fn {_id, child_pid, _type, modules} -> - Enum.member?(modules, sidecar_mod) and child_pid != :restarting - end) - - case sidecar_child_data do - nil -> nil - {_id, sidecar_pid, _type, _modules} -> sidecar_pid - end - rescue - e -> - Logger.error("exception while getting target: #{inspect(e)}") - nil - end - - def start_link(params) do - Supervisor.start_link(__MODULE__, params) - end - - @impl true - def init(%Params{} = params) do - # in order to make the timer restart with the sidecar, but only restart the timer when it - # fails, we choose the rest_for_one strategy and put the sidecar in front of the timer - strategy = :rest_for_one - - supervisor_pid = self() - - children = [ - {params.sidecar_mod, params.sidecar_arg}, - {Architecture.Timer, - [ - get_target: fn -> find_sidecar(supervisor_pid, params.sidecar_mod) end, - update_interval: params.update_interval - ]} - ] - - Supervisor.init(children, strategy: strategy) - end - end -end diff --git a/src/app/validation/lib/architecture/validation.ex b/src/app/validation/lib/architecture/validation.ex deleted file mode 
100644
index 01a20c97f98..00000000000
--- a/src/app/validation/lib/architecture/validation.ex
+++ /dev/null
@@ -1,116 +0,0 @@
-defmodule Architecture.Validation do
-  @moduledoc "Behaviour for validations."
-
-  alias Architecture.AlertServer
-  alias Architecture.Resource
-  alias Architecture.ResourceSet
-  alias Architecture.Statistic
-  alias Architecture.Validation
-
-  require Logger
-
-  @callback statistic :: module
-  @callback validate({module, Resource.t()}, any) :: :valid | {:invalid, String.t()}
-
-  defmacro __using__(_params) do
-    quote do
-      @behaviour unquote(__MODULE__)
-    end
-  end
-
-  defmodule Spec do
-    @moduledoc "Specification of a validation to execute."
-
-    use Class
-
-    defclass(
-      validation: module,
-      resource_db: ResourceSet.t()
-    )
-  end
-
-  defmodule Broker do
-    @moduledoc "Interpreter and message broker for executing validations."
-
-    use GenServer
-
-    def child_spec([mod, resource]) do
-      %{
-        type: :worker,
-        start: {__MODULE__, :start_link, [mod, resource]},
-        restart: :permanent,
-        modules: [__MODULE__, mod]
-      }
-    end
-
-    def start_link(mod, resource) do
-      GenServer.start_link(
-        __MODULE__,
-        {mod, resource},
-        name: String.to_atom("#{__MODULE__}:#{mod}:#{resource.name}")
-      )
-    end
-
-    @impl true
-    def init({mod, resource}) do
-      Logger.metadata(context: __MODULE__)
-      Logger.info("initializing")
-
-      # TODO: pluralize
-      # validations = Enum.map(...)
-      {:ok, {mod, resource}, {:continue, nil}}
-    end
-
-    @impl true
-    def handle_continue(nil, {mod, resource}) do
-      Logger.info("subscribing to #{mod.statistic()}")
-      Statistic.Junction.subscribe(mod.statistic(), resource)
-      {:noreply, {mod, resource}}
-    end
-
-    @impl true
-    def handle_cast({:subscription, {statistic, _res}, state}, {mod, resource}) do
-      Logger.info("received new state from statistic #{statistic}")
-
-      case mod.validate({statistic, resource}, state) do
-        :valid ->
-          Logger.info("validation successful")
-          {:noreply, {mod, resource}}
-
-        {:invalid, reason} ->
-          Logger.info("validation failed: #{reason}")
-          AlertServer.validation_error(mod, resource, reason)
-          {:noreply, {mod, resource}}
      end
-    end
-  end
-
-  defmodule MainSupervisor do
-    @moduledoc "Main supervisor for spawning and monitoring validations."
-
-    use Supervisor
-
-    def start_link(validation_specs) do
-      Supervisor.start_link(__MODULE__, validation_specs, name: __MODULE__)
-    end
-
-    @impl true
-    def init(validation_specs) do
-      children =
-        Enum.flat_map(validation_specs, fn spec ->
-          ResourceSet.all_resources(spec.resource_db)
-          |> Enum.map(&broker_child_spec(spec.validation, &1))
-        end)
-
-      Supervisor.init(children, strategy: :one_for_one)
-    end
-
-    defp broker_child_spec(validation, resource) do
-      # TEMP HACK: existence of resource.id is an unreasonable assumption
-      Supervisor.child_spec(
-        {Validation.Broker, [validation, resource]},
-        id: "Validation.Broker:#{validation}:#{resource.name}"
-      )
-    end
-  end
-end
diff --git a/src/app/validation/lib/class.ex b/src/app/validation/lib/class.ex
deleted file mode 100644
index 4730526ddf4..00000000000
--- a/src/app/validation/lib/class.ex
+++ /dev/null
@@ -1,217 +0,0 @@
-# OOP! (sin methods, constructors, mutation, abstractions -- ok, maybe drop the P and just call it OO)
-# TODO: make this less fragile (needs to check that the modules it applies functions to are actually classes first)
-defmodule Class do
-  @moduledoc "A lightweight, immutable, record subtyping relationship and definition system."
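-  # Definition sketch (hypothetical Animal/Dog classes; see the Spec modules
-  # in this app for real uses of defclass/1 and defclass/2):
-  #
-  #   defmodule Animal do
-  #     use Class
-  #     defclass(name: String.t())
-  #   end
-  #
-  #   defmodule Dog do
-  #     use Class
-  #     defclass(Animal, breed: String.t())
-  #   end
-  #
-  # Instances are plain structs, and Class.instance_of?(dog, Animal) holds
-  # for any dog = %Dog{...}.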
-
-  import Util
-  import Util.ForMacros
-
-  defmodule NotASubclassError do
-    defexception [:class, :expected_subclass_of]
-
-    def message(%__MODULE__{class: class, expected_subclass_of: expected_subclass_of}) do
-      "#{class} is not a subclass of #{expected_subclass_of}"
-    end
-  end
-
-  @type t :: module
-  @type instance :: struct
-
-  # Root object (which all classes are a subclass of)
-
-  defmodule Object do
-    @moduledoc "The root record type which all class instances are a subtype of."
-
-    defstruct []
-    @type t :: %__MODULE__{}
-    def parent_class, do: nil
-    def __class_fields, do: []
-  end
-
-  # Class helper functions
-
-  @spec is_class?(module) :: boolean
-  # for some reason, behaviours set from macros do not seem to persist, so this doesn't work:
-  # def is_class?(mod), do: has_behaviour?(mod, __MODULE__)
-  def is_class?(mod) do
-    if module_exists?(mod) do
-      function_exported?(mod, :__class_fields, 0)
-    else
-      has_behaviour?(mod, __MODULE__)
-    end
-  end
-
-  @spec is_subclass?(module, module) :: boolean
-  def is_subclass?(c1, c2) when c1 == c2, do: true
-  def is_subclass?(Object, _), do: false
-  def is_subclass?(c1, c2), do: is_subclass?(c1.parent_class(), c2)
-
-  @spec class_of(instance) :: t
-  def class_of(instance) when is_struct(instance), do: instance.__struct__
-
-  @spec instance_of?(instance, t) :: boolean
-  def instance_of?(instance, class) when is_struct(instance) do
-    is_subclass?(class_of(instance), class)
-  end
-
-  @spec downcast!(instance, t) :: instance
-  def downcast!(instance, class) do
-    cond do
-      class == class_of(instance) ->
-        instance
-
-      not instance_of?(instance, class) ->
-        raise NotASubclassError, class: class_of(instance), expected_subclass_of: class
-
-      true ->
-        struct!(class, Map.take(instance, Keyword.keys(class.__class_fields)))
-    end
-  end
-
-  @spec all_super_classes(t) :: [t]
-  def all_super_classes(Object), do: []
-
-  def all_super_classes(class) do
-    parent_class = class.parent_class()
-    [parent_class | all_super_classes(parent_class)]
-  end
-
-  @doc """
-  Computes the inheritance chain from `c1` up to `c2`, including `c1` and excluding `c2` (i.e. `[c1,c2)`).
-  Returns `nil` if `c1` is not a subclass of `c2`.
-  """
-  @spec inheritance_chain(c1 :: t, c2 :: t) :: [t] | nil
-  def inheritance_chain(c1, c2) when c1 == c2, do: []
-  def inheritance_chain(Object, _), do: nil
-  def inheritance_chain(c1, c2), do: [c1 | inheritance_chain(c1.parent_class(), c2)]
-
-  @doc """
-  Same as `inheritance_chain/2`, but explosive!
-  If `c1` is not a subclass of `c2`, a `Class.NotASubclassError` exception is raised.
-  """
-  @spec inheritance_chain!(c1 :: t, c2 :: t) :: [t]
-  def inheritance_chain!(c1, c2) do
-    case inheritance_chain(c1, c2) do
-      nil -> raise NotASubclassError, class: c1, expected_subclass_of: c2
-      chain -> chain
-    end
-  end
-
-  # Class hierarchy
-
-  defmodule Hiearchy do
-    @moduledoc "Tree structure for representing class hierarchies."
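-    # Shape sketch: a class paired with its child subtrees, e.g. for the
-    # resources in this app:
-    #
-    #   {Architecture.Resource,
-    #    [{Coda.Resources.CodaNode, [{Coda.Resources.BlockProducer, []}]}]}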
- - @type t :: {Class.t(), [t]} - - def compute(root_class, leaf_classes) do - # insert into index - insert = fn index, class -> - Map.update(index, class.parent_class(), MapSet.new([class]), &MapSet.put(&1, class)) - end - - index = - Enum.reduce(leaf_classes, %{root_class => MapSet.new()}, fn leaf_class, index -> - Class.inheritance_chain!(leaf_class, root_class) - |> Enum.reduce(index, &insert.(&2, &1)) - end) - - compute_from_index!(root_class, index) - end - - defp compute_from_index!(target_class, index) do - children = Map.get(index, target_class, MapSet.new()) - {target_class, Enum.map(MapSet.to_list(children), &compute_from_index!(&1, index))} - end - - @spec reduce_depth_first(t | Class.t(), any, function, function) :: any - def reduce_depth_first({leaf, []}, init, lift, _) when is_atom(leaf), do: lift.(leaf, init) - - def reduce_depth_first({root, _} = node, init, lift, merge), - do: lift.(root, reduce_depth_first_exclusive(node, init, lift, merge)) - - @spec reduce_depth_first_exclusive(t | Class.t(), any, function, function) :: any - def reduce_depth_first_exclusive({_, []}, init, _, _), do: init - - def reduce_depth_first_exclusive({_, leaves}, init, lift, merge) do - leaves - |> Enum.map(&reduce_depth_first(&1, init, lift, merge)) - |> merge.() - end - end - - # Class definition mixin - - @callback parent_class :: module - @callback __class_fields :: Keyword.t(Macro.t()) - - # TODO: There should be a less complicated way of doing this. - # I wanted to use mixin params, but it was too messy to differentiate between fields and the parent class to extend. - defmacro __using__(_params) do - quote do - import unquote(__MODULE__) - require unquote(__MODULE__) - @before_compile unquote(__MODULE__) - @behaviour unquote(__MODULE__) - Module.register_attribute(__MODULE__, :super_class, []) - Module.register_attribute(__MODULE__, :class_fields, []) - end - end - - # TODO: handle field collisions - defmacro __before_compile__(env) do - super_class = Module.get_attribute(env.module, :super_class) - class_fields = Module.get_attribute(env.module, :class_fields) - - if is_nil(super_class) or is_nil(class_fields) do - raise ArgumentError, "must specify class when using #{__MODULE__} mixin" - end - - quote do - @impl true - def parent_class, do: unquote(super_class) - @impl true - def __class_fields, do: unquote(Macro.escape(class_fields)) - end - end - - defmacro defclass(fields) when is_list(fields) do - inject_class_info(__CALLER__, Object, fields) - end - - defmacro defclass(super_class, fields) when is_list(fields) do - Code.ensure_compiled(resolve_module(__CALLER__, super_class)) - - if not Class.is_class?(resolve_module(__CALLER__, super_class)) do - raise "cannot extend #{super_class} because it is not a class" - end - - inject_class_info(__CALLER__, super_class, fields) - end - - defp inject_class_info(env, super_class, local_fields) do - # TODO: sanity check module definition thus far? - - super_class_module = resolve_module(env, super_class) - super_class_fields = super_class_module.__class_fields - class_fields = super_class_fields ++ local_fields - class_field_names = Keyword.keys(class_fields) - - # %__MODULE__{ class_fields... 
} - class_type = - {:%, [], - [ - {:__MODULE__, [], Elixir}, - {:%{}, [], class_fields} - ]} - - quote do - @enforce_keys unquote(class_field_names) - defstruct unquote(class_field_names) - @type t :: unquote(class_type) - - @super_class unquote(Macro.escape(super_class)) - @class_fields unquote(Macro.escape(class_fields)) - end - end -end diff --git a/src/app/validation/lib/cloud/google.ex b/src/app/validation/lib/cloud/google.ex deleted file mode 100644 index 2ef431a1e09..00000000000 --- a/src/app/validation/lib/cloud/google.ex +++ /dev/null @@ -1,41 +0,0 @@ -# TODO: fully separate this from the rest of the project -# currently, it depends on Coda directly for configuration, and log providers depend on this directly - -defmodule Cloud.Google do - @moduledoc "Google Cloud interface." - - alias GoogleApi.Logging.V2, as: Logging - alias GoogleApi.PubSub.V1, as: PubSub - - @type pubsub_conn :: PubSub.Connection.t() - @type logging_conn :: Logging.Connection.t() - - defmodule ApiError do - defexception [:message, :error] - - def message(%__MODULE__{message: message, error: error}) do - "#{message}: #{inspect(error)}" - end - end - - defmodule Connections do - @moduledoc "Collection of connections for communicating with the Google Cloud API" - - use Class - - defclass( - pubsub: Cloud.Google.pubsub_conn(), - logging: Cloud.Google.logging_conn() - ) - end - - @spec connect :: Connections.t() - def connect do - {:ok, token} = Goth.Token.for_scope("https://www.googleapis.com/auth/cloud-platform") - - %Connections{ - pubsub: PubSub.Connection.new(token.token), - logging: Logging.Connection.new(token.token) - } - end -end diff --git a/src/app/validation/lib/cloud/google/log_pipeline.ex b/src/app/validation/lib/cloud/google/log_pipeline.ex deleted file mode 100644 index 0159741d534..00000000000 --- a/src/app/validation/lib/cloud/google/log_pipeline.ex +++ /dev/null @@ -1,35 +0,0 @@ -defmodule Cloud.Google.LogPipeline do - @moduledoc """ - A log pipelines consists of 3 cloud resources representing a data pipeline of logs to consume from - Google StackDriver. A log pipeline contains a pub sub topic, a subscription to that topic, and a - log sink which is feeding messages into the pub sub topic. - """ - - alias Cloud.Google.LogSink - alias Cloud.Google.Subscription - alias Cloud.Google.Topic - - use Class - - defclass( - name: String.t(), - topic: Topic.t(), - subscription: Subscription.t(), - log_sink: LogSink.t() - ) - - @spec create(Cloud.Google.pubsub_conn(), Cloud.Google.logging_conn(), String.t(), String.t()) :: - t - def create(pubsub_conn, logging_conn, name, filter) do - topic = Topic.create(pubsub_conn, name) - subscription = Subscription.create(pubsub_conn, name, topic) - log_sink = LogSink.create(logging_conn, name, topic, filter) - - %__MODULE__{ - name: name, - topic: topic, - subscription: subscription, - log_sink: log_sink - } - end -end diff --git a/src/app/validation/lib/cloud/google/log_sink.ex b/src/app/validation/lib/cloud/google/log_sink.ex deleted file mode 100644 index 2986d7bf88e..00000000000 --- a/src/app/validation/lib/cloud/google/log_sink.ex +++ /dev/null @@ -1,54 +0,0 @@ -defmodule Cloud.Google.LogSink do - @moduledoc "Wrapper for interacting with GoogleCloud logging sinks." 
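-  # Creation sketch (conns as returned by Cloud.Google.connect/0; the name
-  # and filter are illustrative):
-  #
-  #   topic = Cloud.Google.Topic.create(conns.pubsub, "blocks-produced")
-  #   sink = Cloud.Google.LogSink.create(conns.logging, "blocks-produced", topic, filter)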
-
-  import Util
-  alias GoogleApi.Logging.V2, as: Logging
-  import Logging.Api.Sinks
-  alias Logging.Model.LogSink
-
-  @type t :: LogSink.t()
-
-  # only supports creation, currently
-  @spec create(Cloud.Google.logging_conn(), String.t(), Cloud.Google.Topic.t(), String.t()) :: t
-  @spec create(
-          Cloud.Google.logging_conn(),
-          String.t(),
-          Cloud.Google.Topic.t(),
-          String.t(),
-          boolean
-        ) ::
-          t
-  def create(conn, name, topic, filter, already_attempted \\ false) do
-    sink_destination = "pubsub.googleapis.com/#{topic.name}"
-
-    sink_body = %LogSink{
-      description: "validation log sink",
-      name: name,
-      destination: sink_destination,
-      filter: filter
-    }
-
-    case logging_sinks_create(conn, "projects", Coda.project_id(), body: sink_body) do
-      {:ok, sink} ->
-        sink
-
-      {:error, env} ->
-        json = Jason.decode!(env.body)
-
-        if not already_attempted and json["error"]["status"] == "ALREADY_EXISTS" do
-          IO.puts("log sink #{name} already existed, destroying and recreating")
-
-          logging_sinks_delete(conn, "projects", Coda.project_id(), name)
-          |> ok_or_error(Cloud.Google.ApiError, "failed to create sink")
-
-          create(conn, name, topic, filter, true)
-        else
-          raise Cloud.Google.ApiError,
-            message: "failed to create sink",
-            error: json["error"]
        end
-
-        # |> ok_or_error(Cloud.Google.ApiError, "failed to create sink")
-    end
-  end
-end
diff --git a/src/app/validation/lib/cloud/google/subscription.ex b/src/app/validation/lib/cloud/google/subscription.ex
deleted file mode 100644
index 9b52cee265e..00000000000
--- a/src/app/validation/lib/cloud/google/subscription.ex
+++ /dev/null
@@ -1,107 +0,0 @@
-defmodule Cloud.Google.Subscription do
-  @moduledoc "Wrapper for interacting with GoogleCloud pub sub subscriptions."
-
-  alias GoogleApi.PubSub.V1, as: PubSub
-  alias PubSub.Model.AcknowledgeRequest
-  alias PubSub.Model.ExpirationPolicy
-  alias PubSub.Model.PullRequest
-  alias PubSub.Model.PullResponse
-  alias PubSub.Model.Subscription
-
-  import Util
-  import PubSub.Api.Projects
-  require Logger
-
-  @type t :: Subscription.t()
-
-  @spec short_name(t) :: String.t()
-  def short_name(subscription) do
-    subscription.name |> String.split("/") |> List.last()
-  end
-
-  @spec get(Cloud.Google.pubsub_conn(), String.t()) :: t | nil
-  def get(conn, name) do
-    pubsub_projects_subscriptions_get(conn, Coda.project_id(), name)
-    |> ok_or_nil()
-  end
-
-  @spec create(Cloud.Google.pubsub_conn(), String.t(), Cloud.Google.Topic.t()) :: t
-  def create(conn, name, topic) do
-    case get(conn, name) do
-      nil ->
-        subscription_body = %Subscription{
-          topic: topic.name,
-          messageRetentionDuration: "3600s",
-          # 1 day
-          expirationPolicy: %ExpirationPolicy{ttl: "86400s"}
-        }
-
-        pubsub_projects_subscriptions_create(conn, Coda.project_id(), name,
-          body: subscription_body
-        )
-        |> ok_or_error(Cloud.Google.ApiError, "failed to create subscription")
-
-      subscription ->
-        subscription
-    end
-  end
-
-  @spec pull_raw(Cloud.Google.pubsub_conn(), t) :: PullResponse.t()
-  def pull_raw(conn, subscription) do
-    pull_request = %PullRequest{maxMessages: 20}
-
-    Logger.info("pulling subscription #{short_name(subscription)}")
-
-    pubsub_projects_subscriptions_pull(
-      conn,
-      Coda.project_id(),
-      short_name(subscription),
-      body: pull_request
-    )
-    |> ok_or_error(Cloud.Google.ApiError, "failed to pull subscription")
-  end
-
-  @spec acknowledge(Cloud.Google.pubsub_conn(), t, PullResponse.t() | [String.t()]) :: any
-  def acknowledge(conn, subscription, %PullResponse{receivedMessages: messages}) do
-    acknowledge(conn, subscription,
Enum.map(messages, & &1.ackId)) - end - - def acknowledge(conn, subscription, ack_ids) when is_list(ack_ids) do - ack_request = %AcknowledgeRequest{ackIds: ack_ids} - - pubsub_projects_subscriptions_acknowledge( - conn, - Coda.project_id(), - short_name(subscription), - body: ack_request - ) - |> ok_or_error(Cloud.Google.ApiError, "failed to acknowledge subscription messages") - end - - # TODO: allow this to send intermediate acknowledgements and handle failures in f? - @spec pull_and_process(Cloud.Google.pubsub_conn(), t, function) :: no_return - def pull_and_process(conn, subscription, f) do - response = pull_raw(conn, subscription) - - if response.receivedMessages != nil do - count = length(response.receivedMessages) - Logger.info("beginning to process #{count} messages") - - ack_ids = - Enum.map(response.receivedMessages, fn received_message -> - message_data = - received_message.message.data - |> Base.decode64!() - |> Jason.decode!() - - f.(message_data) - received_message.ackId - end) - - acknowledge(conn, subscription, ack_ids) - Logger.info("processed and acknowledged #{count} messages") - else - Logger.info("no messages received: #{inspect(response)}") - end - end -end diff --git a/src/app/validation/lib/cloud/google/topic.ex b/src/app/validation/lib/cloud/google/topic.ex deleted file mode 100644 index d236ad1b040..00000000000 --- a/src/app/validation/lib/cloud/google/topic.ex +++ /dev/null @@ -1,40 +0,0 @@ -defmodule Cloud.Google.Topic do - @moduledoc "Wrapper for interacting with GoogleCloud pub sub topics." - - alias GoogleApi.PubSub.V1, as: PubSub - - require Logger - import Util - import PubSub.Api.Projects - - @type t :: PubSub.Model.Topic.t() - - @spec get(Cloud.Google.pubsub_conn(), String.t()) :: t | nil - def get(conn, name) do - case pubsub_projects_topics_get(conn, Coda.project_id(), name) do - {:ok, topic} -> - topic - - {:error, error} -> - Logger.warn( - "got error looking up object from api; assuming that means it's not there for now -- #{ - inspect(error) - }" - ) - - nil - end - end - - @spec create(Cloud.Google.pubsub_conn(), String.t()) :: t - def create(conn, name) do - case get(conn, name) do - nil -> - pubsub_projects_topics_create(conn, Coda.project_id(), name, body: %{}) - |> ok_or_error(Cloud.Google.ApiError, "failed to create topic") - - topic -> - topic - end - end -end diff --git a/src/app/validation/lib/coda.ex b/src/app/validation/lib/coda.ex deleted file mode 100644 index 960c519aaa0..00000000000 --- a/src/app/validation/lib/coda.ex +++ /dev/null @@ -1,9 +0,0 @@ -defmodule Coda do - @moduledoc "Coda network validation definitions." - - def project_id, do: Application.fetch_env!(:coda_validation, :project_id) - def testnet, do: Application.fetch_env!(:coda_validation, :testnet) - def region, do: Application.fetch_env!(:coda_validation, :region) - def cluster, do: Application.fetch_env!(:coda_validation, :cluster) - -end diff --git a/src/app/validation/lib/coda/application.ex b/src/app/validation/lib/coda/application.ex deleted file mode 100644 index cef348ffcc9..00000000000 --- a/src/app/validation/lib/coda/application.ex +++ /dev/null @@ -1,122 +0,0 @@ -defmodule Coda.Application do - @moduledoc """ - The root application. Responsible for initializing the process tree. 
- """ - - alias Architecture.LogFilter - alias Architecture.ResourceSet - alias Cloud.Google.LogPipeline - alias Coda.Resources - - import LogFilter.Language - - use Application - - def resource_db_entries do - win_rates = Coda.Validations.Configuration.whale_win_rates - [ - # class, id, expected_win_rate - Resources.BlockProducer.build("whale", 1, Enum.fetch!(win_rates,0)), - Resources.BlockProducer.build("whale", 2, Enum.fetch!(win_rates,1)), - Resources.BlockProducer.build("whale", 3, Enum.fetch!(win_rates,2)), - Resources.BlockProducer.build("whale", 4, Enum.fetch!(win_rates,3)), - Resources.BlockProducer.build("whale", 5, Enum.fetch!(win_rates,4)) - # Resources.BlockProducer.build("fish", 1), - # Resources.BlockProducer.build("fish", 2), - # Resources.BlockProducer.build("fish", 3) - ] - end - - def start(_, _) do - {:ok, _started} = Application.ensure_all_started(:goth) - :httpc.set_options(pipeline_timeout: 1000) - - api_conns = Cloud.Google.connect() - - # TODO: derive all of this from validations + resource query - resource_db = ResourceSet.build(resource_db_entries()) - - # validation_requests = [Validations.GlobalBlockAcceptanceRate] - # requirements ==> - # Validations.GlobalBlockAcceptanceRate - # > Statistics.GlobalBlockAcceptanceRate - # > forall(r : resources). - # {Statistics.BlockAcceptanceRate, r} - # > {Statistics.BlocksProduced, r} - # > {Providers.BlockProduced, r} - # > Statistics.GlobalFrontier - # > forall(r2 : resources). - # {Statistics.Frontier, r2} - # > {Providers.FrontierDiffApplied, r2} - # validations ==> [ - # Validations.GlobalBlockAcceptanceRate - # ] - # statistics ==> [ - # Statistics.GlobalBlockAcceptanceRate, - # {Statistic.BlockAcceptanceRate, resources}, - # {Statistics.BlocksProduced, resources}, - # Statistics.GlobalFrontier, - # {Statistics.Frontier, resources} - # ] - # providers ==> [ - # {Providers.BlockProduced, resources} - # {Providers.BlockFrontierDiffApplied, resources} - # ] - - resource_filter = Architecture.LogProvider.log_filter(Coda.Providers.BlockProduced, resource_db) - - global_filter = filter do - resource.labels.project_id == "#{Coda.project_id()}" - resource.labels.location == "#{Coda.region()}" - resource.labels.cluster_name == "#{Coda.cluster()}" - resource.labels.namespace_name == "#{Coda.testnet()}" - end - - log_filter = Architecture.LogFilter.adjoin(global_filter, resource_filter) - - IO.puts("LOG FILTER:") - IO.puts(LogFilter.render(log_filter)) - IO.puts("===========") - - log_pipeline = - LogPipeline.create( - api_conns.pubsub, - api_conns.logging, - "blocks-produced", - LogFilter.render(log_filter) - ) - - validations_spec = [ - %Architecture.Validation.Spec{ - validation: Coda.Validations.BlockProductionRate, - resource_db: resource_db - } - ] - - statistics_spec = [ - # in theory, resource db queries can be performed separately for each stat config - %Architecture.Statistic.Spec{ - statistic: Coda.Statistics.BlockProductionRate, - resource_db: resource_db, - } - ] - - log_providers_spec = [ - %Architecture.LogProvider.Spec{ - log_provider: Coda.Providers.BlockProduced, - subscription: log_pipeline.subscription, - conn: api_conns.pubsub - } - ] - - children = [ - # {Architecture.AlertServer, []}, - {Architecture.LogProvider.MainSupervisor, log_providers_spec}, - {Architecture.Statistic.MainSupervisor, statistics_spec}, - {Architecture.Validation.MainSupervisor, validations_spec} - ] - - # TODO: should the strategy here be :one_for_rest? 
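-    # (OTP's name for that strategy is :rest_for_one; with it, a log provider
-    # crash would also restart the statistic and validation supervisors
-    # started after it, while :one_for_one restarts each child independently.)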
- Supervisor.start_link(children, name: __MODULE__, strategy: :one_for_one) - end -end diff --git a/src/app/validation/lib/coda/providers/block_produced.ex b/src/app/validation/lib/coda/providers/block_produced.ex deleted file mode 100644 index 92013414d4c..00000000000 --- a/src/app/validation/lib/coda/providers/block_produced.ex +++ /dev/null @@ -1,11 +0,0 @@ -defmodule Coda.Providers.BlockProduced do - @moduledoc "Log provider for block production." - - use Architecture.LogProvider - - def resource_class, do: Coda.Resources.BlockProducer - - def log_filter do - filter(do: "Successfully produced a new block") - end -end diff --git a/src/app/validation/lib/coda/providers/transaction_pool_diff_received.ex b/src/app/validation/lib/coda/providers/transaction_pool_diff_received.ex deleted file mode 100644 index c3f6f05b951..00000000000 --- a/src/app/validation/lib/coda/providers/transaction_pool_diff_received.ex +++ /dev/null @@ -1,10 +0,0 @@ -defmodule Coda.Providers.TransactionPoolDiffReceived do - @moduledoc "Log provider for transaction pool diffs receipt." - - use Architecture.LogProvider - def resource_class, do: Coda.Resources.BlockProducer - - def log_filter do - filter(do: "Received transaction-pool diff $txns from $sender") - end -end diff --git a/src/app/validation/lib/coda/resource_classifier.ex b/src/app/validation/lib/coda/resource_classifier.ex deleted file mode 100644 index 9917dc21d84..00000000000 --- a/src/app/validation/lib/coda/resource_classifier.ex +++ /dev/null @@ -1,25 +0,0 @@ -defmodule Coda.ResourceClassifier do - @moduledoc """ - Resource classification for coda networks. - - It is a temporary flaw in the design that this is necessary, but I believe that this can be folded - into the resource abstraction (with some thought). - """ - - alias Coda.Resources - - def classify_resource(message) do - # TODO: make robust - labels = message["labels"] - - if labels["k8s-pod/role"] == "block-producer" do - app = labels["k8s-pod/app"] # e.g., whale-block-producer-4 - id = String.split(app,"-") |> List.last |> String.to_integer - win_rate = Enum.fetch!(Coda.Validations.Configuration.whale_win_rates,id - 1) - # class, id, expected_win_rate - Resources.BlockProducer.build(labels["k8s-pod/class"], id, win_rate) - else - Resources.CodaNode.build(labels["k8s-pods/name"]) - end - end -end diff --git a/src/app/validation/lib/coda/resources/block_producer.ex b/src/app/validation/lib/coda/resources/block_producer.ex deleted file mode 100644 index ae29180acb3..00000000000 --- a/src/app/validation/lib/coda/resources/block_producer.ex +++ /dev/null @@ -1,29 +0,0 @@ -defmodule Coda.Resources.BlockProducer do - @moduledoc "BlockProducer resource definition." 
- - use Architecture.Resource - - defresource(Coda.Resources.CodaNode, - class: String.t(), - id: pos_integer(), - expected_win_rate: float() - ) - - @spec build(String.t(), pos_integer(), float()) :: t() - def build(class, id, expected_win_rate) do - %__MODULE__{ - name: "#{class}-block-producer-#{id}", - class: class, - id: id, - expected_win_rate: expected_win_rate - } - end - - @impl true - def global_filter do - filter(do: labels["k8s-pod/role"] == "block-producer") - end - - @impl true - def local_filter(_), do: nil -end diff --git a/src/app/validation/lib/coda/resources/coda_node.ex b/src/app/validation/lib/coda/resources/coda_node.ex deleted file mode 100644 index ad70f194b70..00000000000 --- a/src/app/validation/lib/coda/resources/coda_node.ex +++ /dev/null @@ -1,22 +0,0 @@ -defmodule Coda.Resources.CodaNode do - @moduledoc "CodaNode resource definition." - - use Architecture.Resource - - defresource(name: String.t()) - - @spec build(String.t()) :: t - def build(name), do: %__MODULE__{name: name} - - @impl true - def global_filter do - filter do - resource.labels.container_name == "coda" - end - end - - @impl true - def local_filter(%__MODULE__{name: name}) do - filter(do: labels["k8s-pod/app"] == "#{name}") - end -end diff --git a/src/app/validation/lib/coda/statistics/block_production_rate.ex b/src/app/validation/lib/coda/statistics/block_production_rate.ex deleted file mode 100644 index a3b96ef792d..00000000000 --- a/src/app/validation/lib/coda/statistics/block_production_rate.ex +++ /dev/null @@ -1,59 +0,0 @@ -defmodule Coda.Statistics.BlockProductionRate do - @moduledoc "A scalar statistic that monitors the block production rate of a block producer" - - alias Architecture.ResourceSet - alias Architecture.Statistic - - use Statistic - - require Logger - - @impl true - def providers, do: [Coda.Providers.BlockProduced] - @impl true - def resources(resource_db), - do: ResourceSet.select(resource_db, Coda.Resources.BlockProducer) - - defmodule State do - @moduledoc "State for Coda.Statistics.BlockProductionRate" - - use Class - - defclass( - start_time: Time.t(), - elapsed_time: Time.t(), - last_updated: Time.t(), - blocks_produced: pos_integer() - ) - end - - @type state :: State.t() - - @impl true - def init(_resource) do - start_time = Time.utc_now() - {:ok, zero_time} = Time.new(0, 0, 0, 0) - - %State{ - start_time: start_time, - elapsed_time: zero_time, - last_updated: start_time, - blocks_produced: 0 - } - end - - defp update_time(state) do - now = Time.utc_now() - ms_since_last_update = Time.diff(now, state.last_updated, :millisecond) - elapsed_time = Time.add(state.elapsed_time, ms_since_last_update, :millisecond) - %State{state | last_updated: now, elapsed_time: elapsed_time} - end - - @impl true - def update(_resource, state), do: update_time(state) - - @impl true - def handle_message(_resource, state, {Coda.Providers.BlockProduced,_}, _message) do - %State{state | blocks_produced: state.blocks_produced + 1} - end -end diff --git a/src/app/validation/lib/coda/validations/block_production_rate.ex b/src/app/validation/lib/coda/validations/block_production_rate.ex deleted file mode 100644 index 916efe7a3da..00000000000 --- a/src/app/validation/lib/coda/validations/block_production_rate.ex +++ /dev/null @@ -1,47 +0,0 @@ -defmodule Coda.Validations.BlockProductionRate do - @moduledoc """ - Validates that a block producer's block production rate matches is within an acceptable_margin of - the expected rate. 
- """ - - use Architecture.Validation - - import Coda.Validations.Configuration - - require Logger - - @impl true - def statistic, do: Coda.Statistics.BlockProductionRate - - @impl true - def validate({Coda.Statistics.BlockProductionRate,resource}, state) do - # implication - - if Time.compare(state.elapsed_time,grace_window(state)) == :lt do - :valid - else - tm = state.elapsed_time - elapsed_sec = tm.hour * 60 * 60 + tm.minute * 60 + tm.second - slots_elapsed = elapsed_sec / slot_time() - - slot_production_ratio = state.blocks_produced / slots_elapsed - - # putting the call to acceptable_margin() here make dialyzer happy - margin = acceptable_margin() - - cond do - slot_production_ratio >= 1 -> - {:invalid, "unexpected, slot production ratio is 1 or greater"} - - slot_production_ratio < resource.expected_win_rate - margin -> - {:invalid, "not producing enough blocks"} - - slot_production_ratio > resource.expected_win_rate + margin -> - {:invalid, "producing more blocks than expected"} - - true -> - :valid - end - end - end -end diff --git a/src/app/validation/lib/coda/validations/configuration.ex b/src/app/validation/lib/coda/validations/configuration.ex deleted file mode 100644 index 9a50bbd7bb8..00000000000 --- a/src/app/validation/lib/coda/validations/configuration.ex +++ /dev/null @@ -1,18 +0,0 @@ -defmodule Coda.Validations.Configuration do - @moduledoc """ - Configuration parameters used by the validations - TODO: Read these parameters from a file at application startup - """ - - # in milliseconds - def slot_time, do: 3 * 60 * 1000 - - # in milliseconds: 20 * 60 * 1000 - def grace_window(_state), do: Time.from_iso8601!("00:20:00") - - def acceptable_margin, do: 0.05 - - # dummy values, not based on actual stake - def whale_win_rates, do: [0.15,0.20,0.05,0.08,0.3] - -end diff --git a/src/app/validation/lib/pretty_console_log.ex b/src/app/validation/lib/pretty_console_log.ex deleted file mode 100644 index 12dec44ff16..00000000000 --- a/src/app/validation/lib/pretty_console_log.ex +++ /dev/null @@ -1,64 +0,0 @@ -defmodule PrettyConsoleLog do - @moduledoc """ - Provides an alternative message format for elixir's `Logger`. Processes in the system may register - a `:context` metadata value which will be logged along with logs from that process. - """ - - defp format_pid(pid) do - # pids are opaque, and can't be inspected; this hack attempts to parse the inspect format to shorten it some - # TODO: remove sigil for improved portability - pid_regex = ~r/#PID<([\d\.]+)>/ - [_, addr] = Regex.run(pid_regex, inspect(pid)) - addr - end - - defp format_timestamp({_date, {hr, mn, sc, ms}}), do: "#{hr}:#{mn}:#{sc}:#{ms}" - - defp format_message(msg), do: String.replace(to_string(msg), "\n", "\n ") - - defp format_template(timestamp, level, pid, message) do - "#{format_timestamp(timestamp)} [#{level}] #{format_pid(pid)}: #{format_message(message)}" - end - - defp format!(level, message, timestamp, pid: pid, context: mod) do - # turning a module to a string this way avoids the extra namespacing elixir does in the normal Module.to_string/1 - mod_str = Module.split(mod) |> Enum.join(".") - base_str = format_template(timestamp, level, pid, message) - "#{base_str} {#{mod_str}}\n" - end - - defp format!(level, message, timestamp, pid: pid) do - format_template(timestamp, level, pid, message) <> "\n" - end - - def format(level, message, timestamp, metadata) do - format!(level, message, timestamp, metadata) - rescue - e -> - "!!! 
failed to format log message: #{Exception.format(:error, e)} (#{inspect(timestamp)} [#{ - level - }] #{message} #{inspect(metadata)})\n" - end - - # def format_log_message(level, message, timestamp, metadata) do - # {_date, {hr, mn, sc, ms}} = timestamp - - # module = - # Keyword.get(metadata, :process_module, UNKNOWN) - # |> Module.split() - # |> Enum.join(".") - - # pid_tag = - # if Keyword.has_key?(metadata, :pid) do - - # [_, x, y, z] = Regex.run(~r/#PID<(\d+).(\d+).(\d+)>/, inspect(metadata[:pid])) - # "(#{x}.#{y}.#{z})" - # else - # "" - # end - - # "#{hr}:#{mn}:#{sc}.#{ms} [#{level}] #{module}#{pid_tag}: #{message}\n" - # rescue - # err -> "could not format log message: #{inspect({level, message, timestamp, metadata})} -- #{inspect(err)}\n" - # end -end diff --git a/src/app/validation/lib/util.ex b/src/app/validation/lib/util.ex deleted file mode 100644 index 6348eac1ae4..00000000000 --- a/src/app/validation/lib/util.ex +++ /dev/null @@ -1,71 +0,0 @@ -# generic elixir utility functions (stuff that should be in the standard lib really) -defmodule Util do - @moduledoc "Provides project wide utility functions." - - @spec ok_or_error(result :: {:ok, return} | {:error, any}, exn :: module, String.t()) :: - return - when return: any - def ok_or_error({:ok, x}, _, _), do: x - - def ok_or_error({:error, error}, exn, error_msg) do - raise exn, message: error_msg, error: error - end - - @spec ok_or(result :: {:ok, any} | {:error, any}, f :: function) :: any - def ok_or({:ok, x}, _), do: x - def ok_or({:error, _}, f), do: f.() - - @spec ok_or_nil(result :: {:ok, return} | {:error, any}) :: return | nil when return: any - def ok_or_nil({:ok, x}), do: x - def ok_or_nil({:error, _}), do: nil - - @doc "Tests if a module exists. If called inside a macro, this will check if the module is compiled yet without triggering the module adding the module to the current dependency graph." - @spec module_exists?(module) :: boolean - def module_exists?(mod) do - mod.__info__(:attributes) - true - rescue - UndefinedFunctionError -> - false - end - - @doc "Tests to see if `mod` implements the given `behaviour`. Works both in and out of macros." - @spec has_behaviour?(module, module) :: boolean - def has_behaviour?(mod, behaviour) do - # if the module is compiled, then we expect the behaviour to be available via reflection attributes - Keyword.get(mod.__info__(:attributes), :behaviour, []) - |> Enum.member?(behaviour) - rescue - # triggered by [:__info__] if still compiling (in macro) or does not exist - UndefinedFunctionError -> - # if the module is still being compiled, we can inspect the modules actual attributes in more detail - behaviours = Module.get_attribute(mod, :behaviour) - Enum.member?(behaviours, behaviour) - # triggered by Module.get_attribute if already compiled (not in macro) or does not exist - ArgumentError -> - # at this point, the module just doesn't exist - false - - end - - defmodule ForMacros do - @moduledoc "Macro specific utility functions." 
- - @spec resolve_module(Macro.Env.t(), module() | Macro.t()) :: module() - def resolve_module(_, mod) when is_atom(mod) do - Code.ensure_compiled(mod) - mod - end - - def resolve_module(env, ast) do - {mod, []} = Code.eval_quoted(ast, [], env) - - if not is_atom(mod) do - raise ArgumentError, "failed to resolve super class module" - end - - Code.ensure_compiled(mod) - mod - end - end -end diff --git a/src/app/validation/mix.exs b/src/app/validation/mix.exs deleted file mode 100644 index 5e852fb18ff..00000000000 --- a/src/app/validation/mix.exs +++ /dev/null @@ -1,58 +0,0 @@ -defmodule CodaValidation.MixProject do - use Mix.Project - - def project do - [ - app: :coda_validation, - name: "CodaValidation", - version: "0.1.0", - elixir: "~> 1.10", - start_permanent: true, - deps: deps(), - aliases: aliases(), - dialyzer: [ - plt_add_deps: :app_tree, - plt_file: {:no_warn, "priv/plts/dialyzer.plt"} - ], - docs: [ - main: "CodaValidation", - extras: ["README.md"] - ], - test_coverage: [tool: ExCoveralls] - ] - end - - def application do - [ - mod: {Coda.Application, []}, - extra_applications: [:sasl, :logger] - ] - end - - defp deps do - [ - {:google_api_logging, "~> 0.28.0"}, - {:google_api_pub_sub, "~> 0.23.0"}, - {:goth, "~> 1.2.0"}, - {:credo, "~> 1.4", only: [:dev, :test], runtime: false}, - {:dialyxir, "~> 1.0", only: [:dev, :test], runtime: false}, - {:doctor, "~> 0.14.0", only: [:dev], runtime: false}, - {:ex_doc, "~> 0.22", only: [:dev], runtime: false}, - {:excoveralls, "~> 0.10", only: [:test]}, - {:hackney, "~> 1.16.0"} - ] - end - - defp aliases do - [ - test: "test --no-start", - run: "run --no-halt", - check: [ - "compile --warnings-as-errors", - "format --check-formatted", - "dialyzer", - "credo" - ] - ] - end -end diff --git a/src/app/validation/mix.lock b/src/app/validation/mix.lock deleted file mode 100644 index 1bd675bae64..00000000000 --- a/src/app/validation/mix.lock +++ /dev/null @@ -1,33 +0,0 @@ -%{ - "bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"}, - "certifi": {:hex, :certifi, "2.5.2", "b7cfeae9d2ed395695dd8201c57a2d019c0c43ecaf8b8bcb9320b40d6662f340", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm", "3b3b5f36493004ac3455966991eaf6e768ce9884693d9968055aeeeb1e575040"}, - "credo": {:hex, :credo, "1.4.0", "92339d4cbadd1e88b5ee43d427b639b68a11071b6f73854e33638e30a0ea11f5", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "1fd3b70dce216574ce3c18bdf510b57e7c4c85c2ec9cad4bff854abaf7e58658"}, - "decimal": {:hex, :decimal, "1.8.1", "a4ef3f5f3428bdbc0d35374029ffcf4ede8533536fa79896dd450168d9acdf3c", [:mix], [], "hexpm", "3cb154b00225ac687f6cbd4acc4b7960027c757a5152b369923ead9ddbca7aec"}, - "dialyxir": {:hex, :dialyxir, "1.0.0", "6a1fa629f7881a9f5aaf3a78f094b2a51a0357c843871b8bc98824e7342d00a5", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "aeb06588145fac14ca08d8061a142d52753dbc2cf7f0d00fc1013f53f8654654"}, - "doctor": {:hex, :doctor, "0.14.0", "fde322c58f09131559c4868cee98cc4a2e598e9ed0af1a3411cc6c0bea221bf1", [:mix], [{:decimal, "~> 1.8", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "1b2f5f1ae9c97a6cc6f0afe2620d5a69318c478716631b0622a56f8ce746ec80"}, - "earmark": {:hex, :earmark, "1.4.5", 
"62ffd3bd7722fb7a7b1ecd2419ea0b458c356e7168c1f5d65caf09b4fbdd13c8", [:mix], [], "hexpm", "b7d0e6263d83dc27141a523467799a685965bf8b13b6743413f19a7079843f4f"}, - "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, - "ex_doc": {:hex, :ex_doc, "0.22.1", "9bb6d51508778193a4ea90fa16eac47f8b67934f33f8271d5e1edec2dc0eee4c", [:mix], [{:earmark, "~> 1.4.0", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm", "d957de1b75cb9f78d3ee17820733dc4460114d8b1e11f7ee4fd6546e69b1db60"}, - "excoveralls": {:hex, :excoveralls, "0.12.3", "2142be7cb978a3ae78385487edda6d1aff0e482ffc6123877bb7270a8ffbcfe0", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "568a3e616c264283f5dea5b020783ae40eef3f7ee2163f7a67cbd7b35bcadada"}, - "google_api_logging": {:hex, :google_api_logging, "0.28.0", "1e92bc592cf532e6ae186769338dcc4242a511f9ca55b7dc5f0678c24415908e", [:mix], [{:google_gax, "~> 0.2", [hex: :google_gax, repo: "hexpm", optional: false]}], "hexpm", "b084247262e59034b641f03399bb3fcb3b0b4c0230857126e6b2af85b46fc3c1"}, - "google_api_pub_sub": {:hex, :google_api_pub_sub, "0.23.0", "9637cd902e6536c4dedb6c9d4287341dfda5531250dfa80c5d0c960b1b2d0999", [:mix], [{:google_gax, "~> 0.2", [hex: :google_gax, repo: "hexpm", optional: false]}], "hexpm", "d492f5f01727bd3a06434ed40432b673a9bbded436044b365c8d7c53d50f2486"}, - "google_gax": {:hex, :google_gax, "0.3.2", "3746309dcf0979312ca8809f8a9f8acb007cad2ee2934406544c8a6d7282e82b", [:mix], [{:poison, ">= 3.0.0 and < 5.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm", "98516c995d2bde23e65ccbf3cc70645051f755392e7a6dc60d22fd09621ad386"}, - "goth": {:hex, :goth, "1.2.0", "92d6d926065a72a7e0da8818cc3a133229b56edf378022c00d9886c4125ce769", [:mix], [{:httpoison, "~> 0.11 or ~> 1.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:joken, "~> 2.0", [hex: :joken, repo: "hexpm", optional: false]}], "hexpm", "4974932ab3b782c99a6fdeb0b968ddd61436ef14de5862bd6bb0227386c63b26"}, - "hackney": {:hex, :hackney, "1.16.0", "5096ac8e823e3a441477b2d187e30dd3fff1a82991a806b2003845ce72ce2d84", [:rebar3], [{:certifi, "2.5.2", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.1", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.3.0", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.6", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "3bf0bebbd5d3092a3543b783bf065165fa5d3ad4b899b836810e513064134e18"}, - "httpoison": {:hex, :httpoison, "1.6.2", "ace7c8d3a361cebccbed19c283c349b3d26991eff73a1eaaa8abae2e3c8089b6", [:mix], [{:hackney, "~> 1.15 and >= 1.15.2", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "aa2c74bd271af34239a3948779612f87df2422c2fdcfdbcec28d9c105f0773fe"}, - "idna": {:hex, :idna, "6.0.1", "1d038fb2e7668ce41fbf681d2c45902e52b3cb9e9c77b55334353b222c2ee50c", [:rebar3], [{:unicode_util_compat, "0.5.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", 
"a02c8a1c4fd601215bb0b0324c8a6986749f807ce35f25449ec9e69758708122"}, - "jason": {:hex, :jason, "1.2.1", "12b22825e22f468c02eb3e4b9985f3d0cb8dc40b9bd704730efa11abd2708c44", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "b659b8571deedf60f79c5a608e15414085fa141344e2716fbd6988a084b5f993"}, - "joken": {:hex, :joken, "2.2.0", "2daa1b12be05184aff7b5ace1d43ca1f81345962285fff3f88db74927c954d3a", [:mix], [{:jose, "~> 1.9", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "b4f92e30388206f869dd25d1af628a1d99d7586e5cf0672f64d4df84c4d2f5e9"}, - "jose": {:hex, :jose, "1.10.1", "16d8e460dae7203c6d1efa3f277e25b5af8b659febfc2f2eb4bacf87f128b80a", [:mix], [], "hexpm", "3c7ddc8a9394b92891db7c2771da94bf819834a1a4c92e30857b7d582e2f8257"}, - "makeup": {:hex, :makeup, "1.0.3", "e339e2f766d12e7260e6672dd4047405963c5ec99661abdc432e6ec67d29ef95", [:mix], [{:nimble_parsec, "~> 0.5", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "2e9b4996d11832947731f7608fed7ad2f9443011b3b479ae288011265cdd3dad"}, - "makeup_elixir": {:hex, :makeup_elixir, "0.14.1", "4f0e96847c63c17841d42c08107405a005a2680eb9c7ccadfd757bd31dabccfb", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f2438b1a80eaec9ede832b5c41cd4f373b38fd7aa33e3b22d9db79e640cbde11"}, - "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"}, - "mime": {:hex, :mime, "1.3.1", "30ce04ab3175b6ad0bdce0035cba77bba68b813d523d1aac73d9781b4d193cf8", [:mix], [], "hexpm", "6cbe761d6a0ca5a31a0931bf4c63204bceb64538e664a8ecf784a9a6f3b875f1"}, - "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"}, - "nimble_parsec": {:hex, :nimble_parsec, "0.6.0", "32111b3bf39137144abd7ba1cce0914533b2d16ef35e8abc5ec8be6122944263", [:mix], [], "hexpm", "27eac315a94909d4dc68bc07a4a83e06c8379237c5ea528a9acff4ca1c873c52"}, - "parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm", "17ef63abde837ad30680ea7f857dd9e7ced9476cdd7b0394432af4bfc241b960"}, - "poison": {:hex, :poison, "4.0.1", "bcb755a16fac91cad79bfe9fc3585bb07b9331e50cfe3420a24bcc2d735709ae", [:mix], [], "hexpm", "ba8836feea4b394bb718a161fc59a288fe0109b5006d6bdf97b6badfcf6f0f25"}, - "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"}, - "tesla": {:hex, :tesla, "1.3.3", "26ae98627af5c406584aa6755ab5fc96315d70d69a24dd7f8369cfcb75094a45", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, "~> 1.3", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "~> 4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, 
{:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "2648f1c276102f9250299e0b7b57f3071c67827349d9173f34c281756a1b124c"}, - "unicode_util_compat": {:hex, :unicode_util_compat, "0.5.0", "8516502659002cec19e244ebd90d312183064be95025a319a6c7e89f4bccd65b", [:rebar3], [], "hexpm", "d48d002e15f5cc105a696cf2f1bbb3fc72b4b770a184d8420c8db20da2674b38"}, -} diff --git a/src/app/validation/test/architecture_tests/log_filter_test.exs b/src/app/validation/test/architecture_tests/log_filter_test.exs deleted file mode 100644 index 9c56f6b3f01..00000000000 --- a/src/app/validation/test/architecture_tests/log_filter_test.exs +++ /dev/null @@ -1,85 +0,0 @@ -defmodule ArchitectureTests.LogFilterTest do - use ExUnit.Case, async: true - doctest Architecture.LogFilter - - import Architecture.LogFilter - import Architecture.LogFilter.Language - require Architecture.LogFilter.Language - - defp test_combinator(c1, c2, a, b) do - assert c2.(a, nil) == c2.(nil, a) - assert c2.(b, nil) == c2.(nil, b) - assert c1.([a, nil]) == c1.([nil, a]) - assert c1.([b, nil]) == c1.([nil, b]) - assert c1.([a, b]) == c2.(a, b) - assert c1.([b, a]) == c2.(b, a) - end - - test "filter combinators" do - x = equals(["a"], "x") - y = equals(["b"], "y") - z = equals(["c"], "z") - test_combinator(&adjoin/1, &adjoin/2, x, y) - test_combinator(&adjoin/1, &adjoin/2, adjoin(x, y), z) - test_combinator(&adjoin/1, &adjoin/2, disjoin(x, y), z) - test_combinator(&disjoin/1, &disjoin/2, x, y) - test_combinator(&disjoin/1, &disjoin/2, adjoin(x, y), z) - test_combinator(&disjoin/1, &disjoin/2, disjoin(x, y), z) - end - - test "simple dsl construction" do - example = - filter do - a.b == "test123" - xYz == "logs" - x["~~"].z <~> "indices" or z.y == "nothing" - "world" - end - - assert example == - adjoin([ - equals(["a", "b"], "test123"), - equals(["xYz"], "logs"), - disjoin( - contains(["x", "~~", "z"], "indices"), - equals(["z", "y"], "nothing") - ), - global_restriction("world") - ]) - end - - test "dsl comparison rhs interpolation support" do - y = "asdf" - example = filter(do: x == "#{y}123") - - assert example == equals(["x"], "asdf123") - end - - test "StackDriver rendering" do - example1 = - filter do - a.b == "x" - c <~> "z" - a.c["_z"] == "123" or a["~"] == "5" - a == "x" and b == "y" - "global" - end - - assert render(example1) == - """ - "a"."b"="x" - "c"="z" - ("a"."c"."_z"="123" OR "a"."~"="5") - "a"="x" - "b"="y" - "global"\ - """ - - example2 = disjoin(equals(["a"], "x"), equals(["a"], "z")) - - assert render(example2) == - """ - ("a"="x" OR "a"="z")\ - """ - end -end diff --git a/src/app/validation/test/architecture_tests/resource_set_test.exs b/src/app/validation/test/architecture_tests/resource_set_test.exs deleted file mode 100644 index 98235c4e2f9..00000000000 --- a/src/app/validation/test/architecture_tests/resource_set_test.exs +++ /dev/null @@ -1,103 +0,0 @@ -defmodule ArchitectureTests.ResourceSetTest do - use ExUnit.Case, async: true - doctest Architecture.ResourceSet - - alias Architecture.Resource - alias Architecture.ResourceSet - - import Architecture.LogFilter.Language - require Architecture.LogFilter.Language - - test "filter calculation" do - defmodule A do - use Resource - require Architecture.LogFilter.Language - defresource(x: integer) - @impl true - def global_filter do - filter do - name <~> "a" - end - end - - @impl true - def local_filter(%__MODULE__{x: x}) do - filter do - data.a == "#{x}" - end - end - end - - defmodule B do - use Resource - require 
Architecture.LogFilter.Language - defresource(A, y: integer) - @impl true - def global_filter do - filter do - name <~> "b" - end - end - - @impl true - def local_filter(%__MODULE__{y: y}) do - filter do - data.b == "#{y}" - end - end - end - - defmodule C do - use Resource - require Architecture.LogFilter.Language - defresource(A, z: integer) - @impl true - def global_filter do - filter do - name <~> "c" - end - end - - @impl true - def local_filter(%__MODULE__{z: z}) do - filter do - data.c == "#{z}" - end - end - end - - defmodule Test do - import Architecture.LogFilter.Language - require Architecture.LogFilter.Language - - def run do - resources = [ - %A{x: 0}, - %B{x: 1, y: 2}, - %B{x: 2, y: 3}, - %C{x: 3, z: 0} - ] - - resource_filter = - resources - |> ResourceSet.build() - |> ResourceSet.filter() - - expected_filter = - filter do - name <~> "a" - - data.a == "0" or - (name <~> "b" and - ((data.a == "1" and data.b == "2") or - (data.a == "2" and data.b == "3"))) or - (name <~> "c" and (data.a == "3" and data.c == "0")) - end - - assert resource_filter == expected_filter - end - end - - Test.run() - end -end diff --git a/src/app/validation/test/architecture_tests/statistic_test.exs b/src/app/validation/test/architecture_tests/statistic_test.exs deleted file mode 100644 index 83fcbac1e6e..00000000000 --- a/src/app/validation/test/architecture_tests/statistic_test.exs +++ /dev/null @@ -1,134 +0,0 @@ -# statistic_test.exs -- tests for the Architecture.Statistic module - -alias Architecture.Statistic - -# dummy statistics for cycle-checking -# N.B.: putting these inside a module causes, which would be neater, -# causes problems with `has_behaviour?/2` - -defmodule Stat0 do - use Statistic - - @impl true - def resources(_resource_db), do: :ok - @impl true - def init(_resource), do: :ok - @impl true - def update(_resource, _state), do: :ok - @impl true - def handle_message(_resource, _state, _, _log), do: :ok - - @impl true - def providers, do: [] -end - -defmodule Stat1 do - use Statistic - - @impl true - def resources(_resource_db), do: :ok - @impl true - def init(_resource), do: :ok - @impl true - def update(_resource, _state), do: :ok - @impl true - def handle_message(_resource, _state, _, _log), do: :ok - - @impl true - def providers, do: [Stat0] -end - -defmodule Stat2 do - use Statistic - - @impl true - def resources(_resource_db), do: :ok - @impl true - def init(_resource), do: :ok - @impl true - def update(_resource, _state), do: :ok - @impl true - def handle_message(_resource, _state, _, _log), do: :ok - - @impl true - def providers, do: [Stat0,Stat1] -end - -defmodule Stat3 do - use Statistic - - @impl true - def resources(_resource_db), do: :ok - @impl true - def init(_resource), do: :ok - @impl true - def update(_resource, _state), do: :ok - @impl true - def handle_message(_resource, _state, _, _log), do: :ok - - @impl true - def providers, do: [Stat4] -end - -defmodule Stat4 do - use Statistic - - @impl true - def resources(_resource_db), do: :ok - @impl true - def init(_resource), do: :ok - @impl true - def update(_resource, _state), do: :ok - @impl true - def handle_message(_resource, _state, _, _log), do: :ok - - @impl true - def providers, do: [Stat0,Stat1,Stat3] -end - - -defmodule ArchitectureTests.StatisticTest do - use ExUnit.Case, async: true - - doctest Architecture.Statistic - - import Architecture.Statistic - - # tests with dummy statistics - - test "statistics with no cycle", _context do - statistics = [Stat0,Stat1,Stat2] - check_cycle(statistics,MapSet.new) - 
end - - test "statistics with cycle starting at Stat3", _context do - try do - # Stat3 depends on Stat4 depends on Stat3 - statistics = [Stat3] - check_cycle(statistics,MapSet.new) - assert(false) - rescue - _ -> :ok - end - end - - test "statistics with cycle starting at Stat4", _context do - try do - # Stat4 depends on Stat3 depends on Stat4 - statistics = [Stat4] - check_cycle(statistics,MapSet.new) - assert(false) - rescue - _ -> :ok - end - end - - # test with real statistics - - test "real statistics check for cycles", _context do - # add all statistics as roots here - statistics = [Coda.Statistics.BlockProductionRate] - check_cycle(statistics,MapSet.new) - end - -end diff --git a/src/app/validation/test/architecture_tests/timer_test.exs b/src/app/validation/test/architecture_tests/timer_test.exs deleted file mode 100644 index 900a1c60484..00000000000 --- a/src/app/validation/test/architecture_tests/timer_test.exs +++ /dev/null @@ -1,43 +0,0 @@ -defmodule ArchitectureTests.TimerTest do - use ExUnit.Case, async: true - doctest Architecture.Timer - - alias Architecture.Timer - - defmodule TickRecipient do - use GenServer - - def start_link([expected_ticks, notify_pid]) do - GenServer.start_link( - __MODULE__, - {expected_ticks, notify_pid} - ) - end - - def init(args), do: {:ok, args} - - def handle_call(:tick, _caller, {expected_ticks, notify_pid}) do - if expected_ticks == 0 do - send(notify_pid, :success) - end - - {:reply, :ok, {expected_ticks - 1, notify_pid}} - end - end - - test "co-supervision test" do - update_interval = 1_000 - num_ticks = 5 - timeout = update_interval * (num_ticks + 2) - - {:ok, pid} = - Timer.CoSupervisor.start_link(%Timer.CoSupervisor.Params{ - sidecar_mod: TickRecipient, - sidecar_arg: [num_ticks, self()], - update_interval: update_interval - }) - - assert_receive(:success, timeout) - :ok = Supervisor.stop(pid) - end -end diff --git a/src/app/validation/test/class_test.exs b/src/app/validation/test/class_test.exs deleted file mode 100644 index e4486ebff29..00000000000 --- a/src/app/validation/test/class_test.exs +++ /dev/null @@ -1,222 +0,0 @@ -defmodule ClassTest do - # we do not want to run this test in parallel as it performs runtime module redefinition - use ExUnit.Case, async: false - doctest Class - - test "basic properties of class inheritance and instance relationships" do - defmodule A do - use Class - defclass(x: integer) - end - - defmodule B do - use Class - defclass(A, y: String.t()) - end - - defmodule C do - use Class - defclass(B, z: atom) - end - - defmodule D do - use Class - defclass(B, k: module) - end - - defmodule Test do - import Class - - def run_test do - a = %A{x: 1} - b = %B{x: 2, y: "b"} - c = %C{x: 3, y: "c", z: :test} - d = %D{x: 4, y: "d", k: __MODULE__} - - assert is_class?(A) - assert is_class?(B) - assert is_class?(C) - assert is_class?(D) - - assert is_subclass?(B, A) - assert is_subclass?(C, A) - assert is_subclass?(D, A) - assert is_subclass?(C, B) - assert is_subclass?(D, B) - - assert class_of(a) == A - assert class_of(b) == B - assert class_of(c) == C - assert class_of(d) == D - - assert instance_of?(a, A) - assert not instance_of?(a, B) - assert not instance_of?(a, C) - assert not instance_of?(a, D) - assert instance_of?(b, A) - assert instance_of?(b, B) - assert not instance_of?(b, C) - assert not instance_of?(b, D) - assert instance_of?(c, A) - assert instance_of?(c, B) - assert instance_of?(c, C) - assert not instance_of?(c, D) - assert instance_of?(d, A) - assert instance_of?(d, B) - assert not instance_of?(d, 
C) - assert instance_of?(d, D) - end - end - - Test.run_test() - end - - test "instance downcasting" do - defmodule A do - use Class - defclass(x: integer) - end - - defmodule B do - use Class - defclass(A, y: integer) - end - - defmodule C do - use Class - defclass(B, z: integer) - end - - defmodule Test do - alias Class.NotASubclassError - import Class - - def run_test do - a = %A{x: 1} - b = %B{x: 1, y: 2} - c = %C{x: 1, y: 2, z: 3} - - assert downcast!(a, A) == a - assert_raise NotASubclassError, fn -> downcast!(a, B) end - assert_raise NotASubclassError, fn -> downcast!(a, C) end - assert downcast!(b, A) == a - assert downcast!(b, B) == b - assert_raise NotASubclassError, fn -> downcast!(b, C) end - assert downcast!(c, A) == a - assert downcast!(c, B) == b - assert downcast!(c, C) == c - end - end - - Test.run_test() - end - - test "hiearchy" do - defmodule A do - use Class - defclass(x: integer) - end - - defmodule B do - use Class - defclass(A, y: integer) - end - - defmodule C do - use Class - defclass(B, z: integer) - end - - defmodule D do - use Class - defclass(B, k: integer) - end - - defmodule E do - use Class - defclass(A, d: integer) - end - - defmodule F do - use Class - defclass(E, r: integer) - end - - defmodule G do - use Class - defclass(D, w: integer) - end - - defmodule H do - use Class - defclass(F, c: integer) - end - - defmodule Test do - import Class.Hiearchy - - def run_test do - assert compute(A, [B]) == - {A, - [ - {B, []} - ]} - - assert compute(A, [C]) == - {A, - [ - {B, - [ - {C, []} - ]} - ]} - - assert compute(A, [C]) == compute(A, [B, C]) - - assert compute(A, [H]) == - {A, - [ - {E, - [ - {F, - [ - {H, []} - ]} - ]} - ]} - - assert compute(A, [C, G, H]) == - {A, - [ - {B, - [ - {C, []}, - {D, - [ - {G, []} - ]} - ]}, - {E, - [ - {F, - [ - {H, []} - ]} - ]} - ]} - - str = - reduce_depth_first( - compute(A, [C, G, H]), - "", - fn root, acc -> "#{List.last(Module.split(root))}:#{acc}" end, - fn ls -> "(#{Enum.join(ls, ",")})" end - ) - - assert str == "A:(B:(C:,D:(G:)),E:(F:(H:)))" - end - end - - Test.run_test() - end -end diff --git a/src/app/validation/test/test_helper.exs b/src/app/validation/test/test_helper.exs deleted file mode 100644 index 7c9d912e4e2..00000000000 --- a/src/app/validation/test/test_helper.exs +++ /dev/null @@ -1,2 +0,0 @@ -Code.compiler_options(ignore_module_conflict: true) -ExUnit.start() diff --git a/src/app/zkapp_test_transaction/README.md b/src/app/zkapp_test_transaction/README.md new file mode 100644 index 00000000000..48d3288cc12 --- /dev/null +++ b/src/app/zkapp_test_transaction/README.md @@ -0,0 +1,773 @@ +# ZkApp test transaction tool + +A tool to generate zkapp transactions that can be sent to a mina test network. For more information on zkapps, check out the following resources: https://docs.minaprotocol.com/en/zkapps. +The work-in-progress spec [here](https://o1-labs.github.io/snapps-txns-reference-impl/target/doc/snapps_txn_reference_impl/index.html) proposes the structure and behavior of mina zkapp transactions. + +The smart contract used in the tool (of the kind users might write using snarkyJS) is intended only for testing, as it performs no operation on the state and simply accepts any update. The tool provides options to deploy this smart contract to a mina account and make various updates to the account. + +#### Usage + +The tool generates a graphQL `sendZkapp` mutation that can be sent to the graphQL server the daemon starts by default at port 3085.
One can use the UI to interact with the local graphQL server mounted at http://localhost:3085/graphql and paste the graphQL object that the tool prints. + +The commands provided by this tool are: + +```shell +$mina-zkapp-test-transaction -help +ZkApp test transaction + + zkapp_test_transaction.exe SUBCOMMAND + +=== subcommands === + + create-zkapp-account Generate a zkApp transaction that creates a + zkApp account + upgrade-zkapp Generate a zkApp transaction that updates the + verification key + transfer-funds Generate a zkApp transaction that makes + multiple transfers from one account + update-state Generate a zkApp transaction that updates + zkApp state + update-zkapp-uri Generate a zkApp transaction that updates the + zkApp uri + update-sequence-state Generate a zkApp transaction that updates + zkApp state + update-token-symbol Generate a zkApp transaction that updates + token symbol + update-permissions Generate a zkApp transaction that updates the + permissions of a zkApp account + test-zkapp-with-genesis-ledger Generate a trivial zkApp transaction and + genesis ledger with verification key for + testing + version print version information + help explain a given subcommand (perhaps + recursively) +``` + +### Example usage + +#### 1. Create a zkapp account / Deploy the test smart contract + +The `create-zkapp-account` command takes the following input to create a zkapp account and deploy the test smart contract. + +```shell +$mina-zkapp-test-transaction create-zkapp-account -help +Generate a zkApp transaction that creates a zkApp account + + zkapp_test_transaction.exe create-zkapp-account + +=== flags === + + --fee-payer-key KEYFILE Private key file for the fee payer of the + transaction (should already be in the ledger) + --nonce NN Nonce of the fee payer account + --receiver-amount NN Receiver amount in Mina + --zkapp-account-key KEYFILE Private key file to create a new zkApp account + [--debug] Debug mode, generates transaction snark + [--fee FEE] Amount you are willing to pay to process the + transaction (default: 1) (minimum: 0.003) + [--memo STRING] Memo accompanying the transaction + [-help] print this help text and exit + (alias: -?) +``` + +For example: + +```shell +$mina-zkapp-test-transaction create-zkapp-account --fee-payer-key my-fee-payer --nonce 0 --receiver-amount 2 --zkapp-account-key my-zkapp-key +``` + +generates the following graphQL object: a zkapp transaction given as input to the `sendZkapp` mutation. A zkapp transaction is a list of parties, where each [party](https://o1-labs.github.io/snapps-txns-reference-impl/target/doc/snapps_txn_reference_impl/party/index.html) is an update performed on an account. + +The zkapp transaction here has three parties: + +1. The fee payer party, which specifies who pays the transaction fees and how much. +2. A party that pays the account creation fee for the new zkapp account, which in this case is the same as the fee payer. +3. A party to create a new zkapp account, set its verification key to the one associated with the test smart contract, and update the `editState` and `editSequenceState` permissions to use proofs as [authorization](https://o1-labs.github.io/snapps-txns-reference-impl/target/doc/snapps_txn_reference_impl/party/enum.PartyAuthorization.html). + +The authorization used in each of the parties here is a signature from the respective account, i.e., the updates on these accounts are authorized as per the accounts' permissions.
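+Besides pasting the generated object into the GraphiQL UI, it can also be submitted to the daemon's GraphQL endpoint directly over HTTP. Below is a minimal sketch, assuming `curl` and `jq` are installed, the daemon serves GraphQL on the default port 3085, and the mutation printed by the tool has been saved to a file named `zkapp_mutation.graphql` (a hypothetical file name used only for this example): + +```shell +# Wrap the raw mutation text in a JSON {"query": ...} envelope (-R reads raw +# input, -s slurps the whole file into one string), then POST it to the +# daemon's GraphQL endpoint. +jq -Rs '{query: .}' zkapp_mutation.graphql | curl -s -X POST -H 'Content-Type: application/json' -d @- http://localhost:3085/graphql +``` + +The generated graphQL object for the example command above is: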
+ +``` +mutation MyMutation { + __typename + sendZkapp( + input: { + parties: { + feePayer: { + body: { + publicKey: "B62qpfgnUm7zVqi8MJHNB2m37rtgMNDbFNhC2DpMmmVpQt8x6gKv9Ww" + update: { + appState: [null, null, null, null, null, null, null, null] + delegate: null + verificationKey: null + permissions: null + zkappUri: null + tokenSymbol: null + timing: null + votingFor: null + } + balanceChange: "1000000000" + events: [] + sequenceEvents: [] + callData: "0" + callDepth: 0 + protocolStatePrecondition: { + snarkedLedgerHash: null + timestamp: null + blockchainLength: null + minWindowDensity: null + totalCurrency: null + globalSlotSinceHardFork: null + globalSlotSinceGenesis: null + stakingEpochData: { + ledger: { hash: null, totalCurrency: null } + seed: null + startCheckpoint: null + lockCheckpoint: null + epochLength: null + } + nextEpochData: { + ledger: { hash: null, totalCurrency: null } + seed: null + startCheckpoint: null + lockCheckpoint: null + epochLength: null + } + } + accountPrecondition: "0" + } + authorization: "7mXEzaufjDaZ3BkxpCnFziJt4L99gHzUPA13pGGTspt4gndnnxaMUvdQXprzQ3u4MEQixHb2iDbaKFUZSWgHEo7xyTa5f6Rh" + } + otherParties: [ + { + body: { + publicKey: "B62qpfgnUm7zVqi8MJHNB2m37rtgMNDbFNhC2DpMmmVpQt8x6gKv9Ww" + tokenId: "wSHV2S4qX9jFsLjQo8r1BsMLH2ZRKsZx6EJd1sbozGPieEC4Jf" + update: { + appState: [null, null, null, null, null, null, null, null] + delegate: null + verificationKey: null + permissions: null + zkappUri: null + tokenSymbol: null + timing: null + votingFor: null + } + balanceChange: { magnitude: "10000000000", sgn: "Negative" } + incrementNonce: true + events: [] + sequenceEvents: [] + callData: "0" + callDepth: 0 + protocolStatePrecondition: { + snarkedLedgerHash: null + timestamp: null + blockchainLength: null + minWindowDensity: null + totalCurrency: null + globalSlotSinceHardFork: null + globalSlotSinceGenesis: null + stakingEpochData: { + ledger: { hash: null, totalCurrency: null } + seed: null + startCheckpoint: null + lockCheckpoint: null + epochLength: null + } + nextEpochData: { + ledger: { hash: null, totalCurrency: null } + seed: null + startCheckpoint: null + lockCheckpoint: null + epochLength: null + } + } + accountPrecondition: { + balance: null + nonce: { lower: "1", upper: "1" } + receiptChainHash: null + publicKey: null + delegate: null + state: [null, null, null, null, null, null, null, null] + sequenceState: null + provedState: null + } + useFullCommitment: false + } + authorization: { + proof: null + signature: "7mXA83gWTqA3cEPPcNuNnMneJV6nxTjrQ1QPciJWxijkPYgqP2uD3zytzi6VD1jV1ZkcJkCsfjT82t1PuoxJ93Leu3YB5bRi" + } + } + { + body: { + publicKey: "B62qmQDtbNTymWXdZAcp4JHjfhmWmuqHjwc6BamUEvD8KhFpMui2K1Z" + tokenId: "wSHV2S4qX9jFsLjQo8r1BsMLH2ZRKsZx6EJd1sbozGPieEC4Jf" + update: { + appState: [null, null, null, null, null, null, null, null] + delegate: null + verificationKey: { + data: 
"4X1tWgjtpm8S5PqrYuTtmeUXriBHh6M8mDrXq3LGNnAVMCk6b5LkA9pzFk6h2D5FeuSppqURqfvUB6G4hrhAHJRzrr9sgbGvDdyKXCDuvzBL7bSDNfSDrRMGJ8QAX6cJbDYptwhKLEzv8DKKshvnzsMeMUxDwikSSpemhwALrASAS5RWdD7Sa79FsYgWduVTLk8P99WUgqP9zwj79PDWPp7QdMJomPL1bXT8Jr1YSu8xPZhDDxhpWgZXeEs2hHyJ1iz9xphSGQ3ayTDMHiuaiucT3zdmBCvKaNDuq2mDMRQ4TS6vzHReY5o3j6iBcRPyhEKmJQVWWgtZJ6J2ZY1cdYvCpi34vRYVTHuwYzYPpaE2i2F2jCJCs3Jrytdk1vfRX6BL9RxDtB1Ac1Wanbte8AZqHmd2yyY1rMZx13xpPBwdcH75d77htn3HRgW5LyuTwydNEU6fd4nVp9RPg5zFd6Y8de4HHNzwLjFXkLxN2QHMqCCNTCWALndFn3dzgkXJj8JTAYS4kxrJBiQQt97YBgdWXb1ZxKTTBcQwK23chuyaQSTzfH4fWQHTtqGxpwXfTorvrbkoiky6TXrfYkwiJHbt8S6g4bgbbBwLL9hRDAzumRSUDmEUjZVL3sLrBQa5kkyS5WUCb1EiaeRoETY8pSHKSAbNC5oDT8teQXrSkzyd86tH5EPMtCVuT3qBai1ZTjEHkVZNqDerFW4nzmKvCReVPLG4GWw6kmW1vxBY7B8zTvVKeNDzy6HkQRBYcmWU5fVvMpaDXYJetn16dmEGv5vx9vtjSsX7YRY7GdVuagS3FeFUSpX7g49LiDQPdxk26dXjsQonwoBYqE2MGyTuhf8vfyWaqs9iFV35LEDB5PP9ijV8qRyMZj52jPUEjrJueXWd89MUbpkabPQBbR2GNJaZCbcG6XkVc46HQjGw6wE6GhfpEVGPhupNZA4xwd51sR46sKfDnQeqoxzQRrYYyXEZM4Qu2c12XrZLQjsn9tN96JTCCgKTR9nesmKvxFBwxeWDFhoPv9UHUwphPvcp1uVd77W36pzjH7M3frotakNpx2UNC7H9cd5McAeYheoSBY6eUPm9EY7AvvFFEXiq18Nbt2omc327BVMkx3A62i6yTRJW2k7sNXhs4TBt4rVrYEHusvZsmNhby8rLPSB7ktwBMcrCGCeUFvu1da59NPvchjVw2d4TwT9uSuUaQGm2Tvu3TPsWpH1fx5akLMsSj534g3Zu8JNoDPrjD8RJ2WAK2m1gnEhW8jZeJypaj8JWeF4jBnW6UrcrpavXJbUZMmxHoEqsax21baB3nMudEbLFp4zaDUqXr9PqE9ZPueFqabXZHxRBFZ3CgdvMWHtAsh6dMvFb2tbxTLdcffW1bZBzVDccxW4PJWASs2ahVaNzNDNgkb1CAZaeBm8GDRS7izafdxJdE14VbLBg6PQZ5LnrCrLmcfuocq7GPRShKdDXSFfDLHwvpXbWQKpsdd3XTZyeT6TBQuKzKwAekrZwKsGSYdVWTVZJG3brAqqLTA8regvKneSSStBG6HoV9dsMrLMJh4ugr4bkP1MqqHBEwHD82kBPTpkU9VioboDJDHpMHGHzb169MmMTRD9qy85wTUbnEXM7cYUVz2VGqXYn9kbu95WLHhd9FKpDkvDMrgVVPzH9iuoHP8JSLT6WipawTiJSKjJZp6DtgdiTCwDBYfzHyj3KWpj5ogB2TBK96nEULLg8vkmtU4Xuw8SLr2RTdtYc699HHrUHqwneeLro2Mhu1ve6rm9dwpDbiNzfejijUpvJZi8VGh1G6Y1UN54soHRKdrN25yAgc3QTsvdSyyMs4gwk63mcjHQ8vKXd4f8Fpn1v3ZoXZ5xb1J1cbUgp7wYaPzWfe7i3ycC7DqcaUdubKCopyLynUPjkJ5omdJDZQZcs5tEHUFdBZEjBQe78u9yy5bgDknzkpnEkp8SzvKZdwFYbpigrro77XiJsvS5bj88i8tijx6hh2Ut4avj6aENzdLedLCL6b5ycX1jJnajLLeRbyPbhLz3rLvE6vrWuxZHQtAufEAZ11vyX7rxSRsvxMBzAcFo5z9oAQkHBsCYEU5eaEpsiaZER6qXauCxo1qi4NhFrjxDuJWp8irhFoR3Bqf4sccQjwcYhShZtyHyYEkZgFKcnJktF6s4xVMTidifw33bqfEk64q5a2UK4hxsL5RLmff8rqwCM57Fh7HwtRVNqHzCk4bxixpVYcc6eyB256WYQZar2JBJNMYDbYYCr97xANeTWJUxgZfdHjwoFRbyXibW83A24FkX6BmJ9NEqC5f15JG3Gv6EkN47EDu3UeaEgatTiqrJhwAjVSpKbYKBaKk11yJqzaH6GqD7JqyjS9oH4hAfx8pRWtH3bsRHWCh5JrKnXxtZon1Lm6tfCVFqgKmDpcVzh7NAHzx7cc6CbFv6y4SoqEmPZhHgewCFph1orcPeNzMx4BM3oBixF3mWZc3YD9UB" + hash: "19079293979474920563146704039152670161084248765333687110610215570697279088632" + } + permissions: { + editState: "Proof" + send: "Signature" + receive: "None" + setDelegate: "Signature" + setPermissions: "Signature" + setVerificationKey: "Signature" + setZkappUri: "Signature" + editSequenceState: "Proof" + setTokenSymbol: "Signature" + incrementNonce: "Signature" + setVotingFor: "Signature" + } + zkappUri: null + tokenSymbol: null + timing: null + votingFor: null + } + balanceChange: { magnitude: "10000000000", sgn: "Positive" } + incrementNonce: false + events: [] + sequenceEvents: [] + callData: "0" + callDepth: 0 + protocolStatePrecondition: { + snarkedLedgerHash: null + timestamp: null + blockchainLength: null + minWindowDensity: null + totalCurrency: null + globalSlotSinceHardFork: null + globalSlotSinceGenesis: null + stakingEpochData: { + ledger: { hash: null, totalCurrency: null } + seed: null + startCheckpoint: null + lockCheckpoint: null + epochLength: null + } + nextEpochData: { + ledger: { hash: null, totalCurrency: null } + 
seed: null + startCheckpoint: null + lockCheckpoint: null + epochLength: null + } + } + accountPrecondition: { + balance: null + nonce: null + receiptChainHash: null + publicKey: null + delegate: null + state: [null, null, null, null, null, null, null, null] + sequenceState: null + provedState: null + } + useFullCommitment: true + } + authorization: { + proof: null + signature: "7mXM47EwEYXagt1AvxVqUjteeEJeFwi2yLx51j1SxZzjtJiGC1EALuL85MmgYLFU1MyHkQahnuwMW4nM9MfJoyQgpDGy18QR" + } + } + ] + memo: "E4YM2vTHhWEg66xpj52JErHUBU4pZ1yageL4TVDDpTTSsv8mK6YaH" + } + } + ) +} +``` + +Send the generated graphQL object to the local daemon via the GraphiQL interface at http://localhost:3085/graphql ![Screenshot](res/deploy-zkapp.png) + + +After the transaction is sent and included in a block, a new zkapp account with the verification key of the test smart contract gets created. The account information can be queried through the graphQL `account` query. + +``` +query MyQuery { + account(publicKey: "B62qmQDtbNTymWXdZAcp4JHjfhmWmuqHjwc6BamUEvD8KhFpMui2K1Z") { + nonce + balance { + total + } + verificationKey { + hash + verificationKey + } + permissions { + editSequenceState + editState + incrementNonce + receive + send + setDelegate + setPermissions + setZkappUri + setTokenSymbol + setVerificationKey + setVotingFor + } + } +} +``` + + +Query result: + +```json +{ + "data": { + "account": { + "balance": { + "total": "1000000000" + }, + "verificationKey": { + "hash": "11292887885696531659094127423705404064892721380499236041832155935416728493189" + }, + "permissions": { + "editSequenceState": "Proof", + "editState": "Proof", + "incrementNonce": "Signature", + "receive": "None", + "send": "Signature", + "setDelegate": "Signature", + "setPermissions": "Signature", + "setZkappUri": "Signature", + "setTokenSymbol": "Signature", + "setVerificationKey": "Signature", + "setVotingFor": "Signature" + }, + "nonce": "0" + } + } +} +``` +![Screenshot](res/account-after-deploy.png) + +#### 2. Update zkapp state + +A zkapp transaction to update the 8 field elements representing the on-chain state of a smart contract: + +```shell +$mina-zkapp-test-transaction update-state -help +Generate a zkApp transaction that updates zkApp state + + zkapp_test_transaction.exe update-state + +=== flags === + + --fee-payer-key KEYFILE Private key file for the fee + payer of the transaction + (should already be in the + ledger) + --nonce NN Nonce of the fee payer account + --zkapp-account-key KEYFILE Private key file to create a + new zkApp account + [--debug] Debug mode, generates + transaction snark + [--fee FEE] Amount you are willing to pay + to process the transaction + (default: 1) (minimum: 0.003) + [--memo STRING] Memo accompanying the + transaction + [--zkapp-state String(hash)|Integer(field element)] ... a list of 8 elements + that represent the zkApp state + (Use empty string for no-op) + [-help] print this help text and exit + (alias: -?) + +``` + +For example: + +```shell +$mina-zkapp-test-transaction update-state --fee-payer-key my-fee-payer --nonce 2 --zkapp-account-key my-zkapp-key --fee 5 --zkapp-state 1 --zkapp-state 2 --zkapp-state 3 --zkapp-state 4 --zkapp-state 5 --zkapp-state 6 --zkapp-state 7 --zkapp-state 8 +``` + +The zkapp transaction here has two parties: + +1. The fee payer party, which specifies who pays the transaction fees and how much. +2. A party that updates the `app_state` of the zkapp account.
The authorization required to update the state is a proof (as set by the deploy-zkapp transaction above: `editState: Proof`). + +``` +mutation MyMutation { + __typename + sendZkapp( + input: { + parties: { + feePayer: { + body: { + publicKey: "B62qpfgnUm7zVqi8MJHNB2m37rtgMNDbFNhC2DpMmmVpQt8x6gKv9Ww" + update: { + appState: [null, null, null, null, null, null, null, null] + delegate: null + verificationKey: null + permissions: null + zkappUri: null + tokenSymbol: null + timing: null + votingFor: null + } + balanceChange: "5000000000" + events: [] + sequenceEvents: [] + callData: "0" + callDepth: 0 + protocolStatePrecondition: { + snarkedLedgerHash: null + timestamp: null + blockchainLength: null + minWindowDensity: null + totalCurrency: null + globalSlotSinceHardFork: null + globalSlotSinceGenesis: null + stakingEpochData: { + ledger: { hash: null, totalCurrency: null } + seed: null + startCheckpoint: null + lockCheckpoint: null + epochLength: null + } + nextEpochData: { + ledger: { hash: null, totalCurrency: null } + seed: null + startCheckpoint: null + lockCheckpoint: null + epochLength: null + } + } + accountPrecondition: "2" + } + authorization: "7mX8xyRkwtjoNA6B5VCjLGVQQUhgLrm5X7vLA9yj4aMUKDWKb9HgMxuX2Jw41qLFjvN8BiVVJJXG8syyteva9V52VW5RFChq" + } + otherParties: [ + { + body: { + publicKey: "B62qmQDtbNTymWXdZAcp4JHjfhmWmuqHjwc6BamUEvD8KhFpMui2K1Z" + tokenId: "wSHV2S4qX9jFsLjQo8r1BsMLH2ZRKsZx6EJd1sbozGPieEC4Jf" + update: { + appState: ["1", "2", "3", "4", "5", "6", "7", "8"] + delegate: null + verificationKey: null + permissions: null + zkappUri: null + tokenSymbol: null + timing: null + votingFor: null + } + balanceChange: { magnitude: "0", sgn: "Positive" } + incrementNonce: false + events: [] + sequenceEvents: [] + callData: "0" + callDepth: 0 + protocolStatePrecondition: { + snarkedLedgerHash: null + timestamp: null + blockchainLength: null + minWindowDensity: null + totalCurrency: null + globalSlotSinceHardFork: null + globalSlotSinceGenesis: null + stakingEpochData: { + ledger: { hash: null, totalCurrency: null } + seed: null + startCheckpoint: null + lockCheckpoint: null + epochLength: null + } + nextEpochData: { + ledger: { hash: null, totalCurrency: null } + seed: null + startCheckpoint: null + lockCheckpoint: null + epochLength: null + } + } + accountPrecondition: { + balance: null + nonce: null + receiptChainHash: null + publicKey: null + delegate: null + state: [null, null, null, null, null, null, null, null] + sequenceState: null + provedState: null + } + useFullCommitment: true + } + authorization: { + proof: "((statement((proof_state((deferred_values((plonk((alpha((inner(47bb6705393526bd 6581bc048e9b45f2))))(beta(730a79fba81492fe aaadcbdbfbdd2e25))(gamma(7f769a01c05ea04f 40d0475144ef5ef9))(zeta((inner(9fd7abe5ce342946 e12524963e1e9aa3))))))(combined_inner_product(Shifted_value 0x0B5A024F24DC0DFA55644175215EC184C602A772F95B4E5B130EC8CC796AEF65))(b(Shifted_value 0x377EF314D370B71C8CA559A4CB29D573F1181D72AEF1CCFFB51F8FA0E51D869A))(xi((inner(79079e8b18710b33 1e5c6312df06262e))))(bulletproof_challenges(((prechallenge((inner(edccb02c5890ba14 1f0eb7207bfe4037)))))((prechallenge((inner(ca4954892eb925ff 7acde776e6141ebc)))))((prechallenge((inner(3d27eca5c732f6b9 f671602a6be84a74)))))((prechallenge((inner(859a59e3f618e276 50b49e8755669ed6)))))((prechallenge((inner(2275b0bfeb34ea63 bd4bae6a352c9964)))))((prechallenge((inner(bd7d28854682e588 1ea13020295bef65)))))((prechallenge((inner(a56a2f4e53e4f537 b86735b5ec9a33af)))))((prechallenge((inner(6a131510213f8c00
3274f639e3cf2259)))))((prechallenge((inner(fc2b9aa3033e5d03 f07a18efd6cba792)))))((prechallenge((inner(bcd77936aac56d37 44fa75b2ede4f628)))))((prechallenge((inner(8389c9c50d04224f c6f3c8244ff226c6)))))((prechallenge((inner(27e135394394447e eab0ef6a9e67f209)))))((prechallenge((inner(d2e179ff7b3d0328 9d93e82ff43c8af7)))))((prechallenge((inner(e4c20a8ebdf382e9 e89b545a09d9a36a)))))((prechallenge((inner(809104238a0bf122 ef7514e2087bc7e3)))))((prechallenge((inner(7a07b84f821fa743 6807fd5023a67218)))))))(which_branch"\000")))(sponge_digest_before_evaluations(8efbb0ed2eab8b1f 02d024fa6aa9ad2d 8bd7cddc8ae0872c 0395fb502bd860a7))(me_only((sg(0x3E097678282E5759C7AF116DCBF82815D79C02F1587374D1F7AC88FDA48F1760 0x24D5FF8AC5705F12ABE261D273726562A939848F0F64126C9E627EBC81103EEF))(old_bulletproof_challenges((((prechallenge((inner(3382b3c9ace6bf6f 79974358f9761863)))))((prechallenge((inner(dd3a2b06e9888797 dd7ae6402944a1c7)))))((prechallenge((inner(c6e8e530f49c9fcb 07ddbb65cda09cdd)))))((prechallenge((inner(532c59a287691a13 a921bcb02a656f7b)))))((prechallenge((inner(e29c77b18f10078b f85c5f00df6b0cee)))))((prechallenge((inner(1dbda72d07b09c87 4d1b97e2e95f26a0)))))((prechallenge((inner(9c75747c56805f11 a1fe6369facef1e8)))))((prechallenge((inner(5c2b8adfdbe9604d 5a8c718cf210f79b)))))((prechallenge((inner(22c0b35c51e06b48 a6888b7340a96ded)))))((prechallenge((inner(9007d7b55e76646e c1c68b39db4e8e12)))))((prechallenge((inner(4445e35e373f2bc9 9d40c715fc8ccde5)))))((prechallenge((inner(429882844bbcaa4e 97a927d7d0afb7bc)))))((prechallenge((inner(99ca3d5bfffd6e77 efe66a55155c4294)))))((prechallenge((inner(4b7db27121979954 951fa2e06193c840)))))((prechallenge((inner(2cd1ccbeb20747b3 5bd1de3cf264021d))))))(((prechallenge((inner(3382b3c9ace6bf6f 79974358f9761863)))))((prechallenge((inner(dd3a2b06e9888797 dd7ae6402944a1c7)))))((prechallenge((inner(c6e8e530f49c9fcb 07ddbb65cda09cdd)))))((prechallenge((inner(532c59a287691a13 a921bcb02a656f7b)))))((prechallenge((inner(e29c77b18f10078b f85c5f00df6b0cee)))))((prechallenge((inner(1dbda72d07b09c87 4d1b97e2e95f26a0)))))((prechallenge((inner(9c75747c56805f11 a1fe6369facef1e8)))))((prechallenge((inner(5c2b8adfdbe9604d 5a8c718cf210f79b)))))((prechallenge((inner(22c0b35c51e06b48 a6888b7340a96ded)))))((prechallenge((inner(9007d7b55e76646e c1c68b39db4e8e12)))))((prechallenge((inner(4445e35e373f2bc9 9d40c715fc8ccde5)))))((prechallenge((inner(429882844bbcaa4e 97a927d7d0afb7bc)))))((prechallenge((inner(99ca3d5bfffd6e77 efe66a55155c4294)))))((prechallenge((inner(4b7db27121979954 951fa2e06193c840)))))((prechallenge((inner(2cd1ccbeb20747b3 5bd1de3cf264021d))))))))))))(pass_through((app_state())(sg())(old_bulletproof_challenges())))))(prev_evals((evals(((public_input 
0x209F99F14602C5213455318805B48BB275F82DEEA3B0070AD4D1CBF8C97CB8A4)(evals((w((0x0DFB2399966A4561C7CB7E4B4CD5E0140058483BA430EAB9A47CBF74742EF96E)(0x269AA18BD92AF17B2783F9EC214E7C365CA00BC8799D47ADDB648E028DD6AC9E)(0x280E28751B98C448DCBFD6144FCA665B865094C4B8F1B57D3592DA546798F5E4)(0x03E0C0829023E4F2C505B7FA54DC32CE4E1E101143FA747B50C73FA4AEBC4504)(0x06C07DFF0CCCC91257221CB1744DFC17F7392D80A3BA0CAE8345933EA01960DD)(0x1DF1FE2E780D422D99208A49D139815C284E75FC41F36E8C8AF7270326AECDCA)(0x33619BAA1436CD4DC6278C31640653DF8998B56949DE8E89A6834D7012908163)(0x3F2AAA0AB4F26B597217F62904B4CA2EBE3ADB7752EC00CDB85ECBD002BBF431)(0x233F2292628A1C5AA680D948FE3B66A203EF61F9F0964A693B5D0DB7518115E2)(0x29BCDBFB5A5511043E6066BA21758CA1AFA09C02AF06084EB6549507A9CBFDB6)(0x2A5A21F0C23D00136C349EDC7C309EBFF1498CA582A907B2943D9B79A0A2FB61)(0x1D36DDDE925AD1D53DFAE0F4326565999AAC45DD669ADA5688633196CCC33E36)(0x397FDA23B893399F680F3E53E1A581354ACDFECF0C6451A626FFC0DB44EDC274)(0x25828CB1CC66BFD04CDE75D17C06176C54DBA5A88DDAF2570BEB172052B69A4C)(0x3749EF008FF411C6693A5F852A56A0919820F9B9B2587D90D3C185F5B7E939E0)))(z(0x0B9B0D3EC7C150A988D0446AE7118E9A7883A7F7D3981E721D35F19F707FD73E))(s((0x2D1180D9C35F85E9B0B6022795A5C7928704D3054E88A6D20DA62285366860FB)(0x206223308A639E4DBA03FA10B2017895C89205D72A0974AE721994099C184373)(0x00482F379D871FE9828B714F24E1327941C758641B0276A6A705BF872A4287AB)(0x12583E33E17094C663E5F0A679C310AADCC0221EF17042C455D8E5DC542378BB)(0x0D5BB6C07071C4E4BA116E4C41E0A7A0B3B85AF2FE56466F17218C5302D87892)(0x07DF915253457B9B1FDB632CD40E491B7F1CB5CF3849D201A3618820BAFBF87B)))(generic_selector(0x0047B88B4AD29021BEE8E8EAC93AE840CACE047B76F10A6F1F724DDA485EB245))(poseidon_selector(0x25446537BC04998A3D669E4170C5338ED7D86CBC1E11226E66FCB50A935F7207)))))((public_input 0x2CF7F8E2B4FDDE8935CA90167238EE9AFABD80F20610FE9351D905F9B06FA2D6)(evals((w((0x119A147ED0D62B4106CCB41BC8BF7AB345821C55CAFAB0F65A4E5C23C7AAC26F)(0x158E77A0F4F0F96120A826C3B53B12CAD8AECE823426DE603945E293A1DAE513)(0x36AC431FE2FDD9E15BF5D99979E79AA91C9A14DA1CE65E68673B80C5EAFA1DDC)(0x0CC373294822BD21337E3703D1CE4CFECA9C9640FA7634B7140198C694D94E04)(0x3C336B63BFB9D659F6EF17B6D99FDEAFB84CA675E0E0CD49890202463E30DA9B)(0x355C8602EA2DD2FCB88628F404C986B29E46570EC6D0D4585976D91F0892661F)(0x10B24AB6A1CF68439B7C55FF3968C911FB7FEA8AEE1A9BD45379D9339DEB9AD0)(0x12B9915BA94E300F2671E43765D3FC15C8A4A66F43B3815E3E41C8CCA7053AB7)(0x31DA76EC16499FBA426A26E9580F2D4C1E220947F2A96D163B9A7C043FC4E830)(0x01CCCFF61B30273899CF7CEDC3B944636BD60AACBD48152090F313E805D205F2)(0x1280CE187A158204FCDC27816596C869406170772222EDBCCC6979B81771C618)(0x30881D9A06201BB68780D0E84889CF3B89C1A83A22BFD87039F78521FC20F60F)(0x20E92D8A4DCA9D10AD0A23DB9406B6DD64E3EDFE87CD34960A7072D4A20B7FBD)(0x253AEF91F6403508A7452A4D16FCC157F60CCE638FF237251D70727B9EA053FC)(0x1C416AF24F287C409D0A0ACD8826A2A6028FEA49DB141759F230117336D4F21F)))(z(0x23AD740FB8654C91DA7D0100AEE5123055D064A9E7A257FF2BB090547314329E))(s((0x3396B0F8E5FB0559EF6FC5BC7531D085D541199E6C936C54B466968BF25C2E6A)(0x03649574ABC2F8D05857F2B54E0035C91FEE96D0A736777280A91B8D1510FFC6)(0x2FC51E5150A06F577C44C18AC83B5D6CAF5AE3894870607EA325B0207D0A81A8)(0x09DCFD0BC34E50517DF9E7FA42DCB74C4748A09E5C87CAB51B2C52DBD1B71061)(0x05F72EFE891ED145C95218B8461BB0E80ABC64BEAFAF2BF59E8BD992CA375C3D)(0x25A3D700E2BCEC288CF6F06452065295569B845203B562B5E7AD2287EF1C4E58)))(generic_selector(0x1F28F75A2071192AA9096C6745AD8C615BC20EE1A30A267E329F857D490830C1))(poseidon_selector(0x1273BE7EDB37C17D59DDBD7630C05B70AE08CD84FDB803B904D0BA79B4B5E7E1)))))))(ft_eval1 
0x0D870A24AA0ABC36DF4946A8C5F1E3F53FD71940B405CC2C95146BD5D8B481BF)))(proof((messages((w_comm(((0x3EB961762CBF37EB8B1CE4C712E4B6BCA2643641C15CB95B308F3DCBEC94F654 0x392235BCEF5850EF29F91BDC98651D6F039FE6A87284EF072D19E8E0DC9A6636))((0x0D9FCD13341F489E585E0C884D46FC760BEA03CDFBF7D02036997FC36C8B24D5 0x292DED9FD63E77A680EE5D489FEADD63F2025FE6E9866E5F02E91A1F13B8C3D2))((0x3511D2352A3193E1F1DBD02146CE73A9DFD2F43B13C298202EBB7DD1CE9FE8AB 0x126ED827FAACB58B42FABEDA8C29F198454FACACE61B58FE7343F8DFAE8C43D5))((0x1E9E8645DDDF1E89F0B19B43DBDF8BE4F12E45A7662BBB86CE2C7001016D94D4 0x39AB2C36B4F5EE68F0A407B247BC1DAD68BA423D6D1FCA396E1091276893092D))((0x3355DC918B82719EC1F41C239530515F475F1A634AD5551C51AD154D38312597 0x24AB079ECB0B016156DA239F1264DAB77EE6508C7FE8489C22871BBCAB407BB8))((0x251ADF82F76F0EEE378D86F81F7A29A66B86A768D6D8EB8E9BCFC6EC8582FE9E 0x25C035A5F02F6ADBE86D369B56461F185708D77309BBAE7B89C14A49E30B4332))((0x0BC73A4775C1D362E3CDAE3CD71A70B1625B1B6B7A9C92E15C837977D31F79B8 0x17E0C0FC9C5D78223BF9998177DC15B6641DF74CC81EF07D66785CAB6A8E086F))((0x28C7F187782BB91A271A1B68C2C2E341D9D50946C5566F0EAD6C84B0DC2E671E 0x363C4A073D492017D1061BF54A4644C406C12D59BB66E040E38CA5D2919E85C1))((0x245C90BAAB0A011C30305A3C6DADC1F5B2BE32ECDF91B801FDA0DE05C8D1B034 0x371346CE91B16C1FB603F4B6EAA71D3E31B52F9ADA8A884178944B5D365702EE))((0x38994D1A3D539B451D695B3A1D77680952CC03B26871F24D68D4591A91EB8F4C 0x0AEF7EF6E236429CCC16724FBA629627782C48C26AF086E4CF2E24450487267A))((0x3E81CB6DD34F19FDD407860866D914207EF98F812935A9E10D4E6A57E918A6DD 0x14EE09EFAC44327ACA65F1928525B6AE719642B623BCF4A826A0721E91AB3E03))((0x2C7A25231D64C252281A668CB21149D66FFD3C89CC29291700AEC20823598742 0x2831585963E6D6D544ED409473A4721EFE239D6924A2D6239981B3F13C2E0A5C))((0x088D1294353E01ECCEE6DF2684C6D4AAE2480FDA01A2F266A4C5FE8951CD8A47 0x27CB57EC76D20BED4CC7F103F100F9F7CF9B4424628B1757A0F4021B74A02C83))((0x1E8789FE6E0DD2C52082DD2D7FE355959495887B610FDCB6832541B28704CC27 0x2E8D8BE1A2310F038F4A5268E8C64A6E55A15005E7107EE37BB9D15E89E909CA))((0x1F6E60DBF7DE3B3E5A59E9BD9C4171AA7FDDF16B194E1E434F626D75D40F7820 0x0BE990D34932486CF69F969FA8BED341BAA15507AE64FEAAD824D9A25AD113E2))))(z_comm((0x3F52C655B3012E6ADF98A218ACA54B37ED0087DDB75B388979593403B2D14AE4 0x24E9A4D8B1F85875678B298E996B382214F246F12B8B95841A960B80282F57E7)))(t_comm((0x02C21B166C057EDA296FD8BB1457AA3F6A8F50C0DCB7407610CBD3FAA6B0248C 0x1A109EB1E0EF40C7C0F707B6BA43B97CE7D9EABCAD1AFD81AF20AB3B1B282175)(0x300B20AF01AE2C4BCE5505A4A477E90E3B9F30A782D9403430E0A3EEDDBECD7C 0x3DC71645EC57A29A9E0BB51E3D834120151E897149723BE1EA45157B378ECD0D)(0x20AE37E2FCFC30A13B6991B7FBDE2B888667B2A929AC332C6F5FD216326EC136 0x28DFABCA604A50A4DD30310F45FEA1261F9C6A629EC88761450884443E97A4FE)(0x1EC85554E625C82CEF35FC453D404CFC24FBF0C8DE9D642AB9A86C160A2D57F8 0x0487789E13DB88005480C0A6A7E58096057B223CA21857D8B621CD330D514802)(0x151AB5BB69A903BEEB633D983E7E23165EF4AA1A60518836BFA5DC7D5A64CD9F 0x1A6AADD7AB6AC03D004E449EDA81E8F4F8F0B6BB73B00745FAA763E8C91123F5)(0x3A3783F2BC7BC6FA7DB73A9FF15FC4A62AFE3D854DAA33DBB3609BCDE67B8F78 0x2D61EA0F0FA6CE106A96452FEB2586BDDE531D34E5AC4D6189C16A1E6B83B701)(0x0DB662FA90B12C0EF17ACFDD229E702A622DA830173FA64BE1F4C88599D130B4 0x13F5EABAD74AD106D305872C5A1218BFDCCA4D1A505658CBA6543D4D629B6146)))))(openings((proof((lr(((0x3B1651A9CA9DDDFC53E3199C73260E9A79BCEBDBC16064C715DBEF5CA7DD74C1 0x1C5E6376B9B1E8925D01B57386387A3DB46C7A8D8F5799FD08A482E10265A870)(0x00FE97F075E6E0125C64A5F8E696BAA781F4F27085EF5E90844E5A3F5B407798 
0x0D5372FF88A48ABDA1110BD1DD4236B2856B3CF9D0B2AADEB2A19E18C54DB900))((0x3E6E8794BF42F76966567FC24E264641F717A64231AA1F099F10555EFE47F323 0x2F7BAFEBC813896AFC10FE97885DC8C9BA3AFE0F39D9F9D8AFA9EAC6E5159EA3)(0x24F6AFA3C372BCF61FD3639491FFC99FD3C6666848834BB3AA026A71C36CE31E 0x0226BBF4025A7DADC60C62541958748DBADB408EA7944274938CEF6E67C3C185))((0x3C29693E5EE6A878588120202415F3929C6D0EE6C6170994206E8930706DF51F 0x3612137909717A64D4264E7A33CB42A4EEDFF508697A0405C383F0BFA6D623EB)(0x27CB3E59C1CE4E9A828098F2DCE2562EEA1422656DB386D513FF453550334520 0x2ED6050EFF0D69D19A7492200A8E4988F6AB4F331AA3E227823CAEBF246E4695))((0x2052E31912F15235D8C4E471BB2C9AC412B1E14E4527CCA5099FA8926C7AF40D 0x029898DD25B0B36772F3468154337252692AAAA23C59C2022F72E84F0FE7959D)(0x30342353E1207099F249FC187DB2C96AE92EF0E120A9B3AFF0DE347F824BB052 0x052119F72487B489AB38485F1D853FBA9837F6C0D48EF580113F00A30A3981E9))((0x2761C0D26CF1CD930CE04A2817DED7AC8B95A0003B97F149E1642640F80F07DC 0x0F3E249B1DDD4067F931E9ACFCAF69A48DB97E69B8523CCACE49456B5A6C36F2)(0x065486C45FEA1C0FA58101B4080B5290685D7F0547556EC5080FA0C939DCE509 0x187479BCC218B72EB32086C9B4434F5765EDEA3012130D07B884B222DAC7D4AC))((0x10E7FE168E1D69B665A48C3CEAD07C4C2110FBFDAAD5B673D0C5137AD2D491B0 0x3C804A284C8207FB62A2EBAC76A6DD56D6DBFFAF3EB7EE4ACCDD1FDC237D6D9E)(0x2CF4C52993A4EC2938B934EEACB4D017C4F08A3996F33582513A0F14D87EAC21 0x1F070D91B17DB6F7E8E8400F3156B538396631DBEC11B8422CE5AA9E099E9A88))((0x0F290AF12C211CB5960ACEE1B8312943F92B1154456A5E52C3FFE838E73D286D 0x388D0536609B7D5F7868BACB4626F4C7DE49A208FF508B20D8397A23917982B4)(0x01146BA29953B46B5B09C377559759DE86C2A0231DBEB51CD4EAABA734DF89FA 0x1233BA736D1701EADB648C3D6A63D24863D6AD92A82559B997DBC709C4AA2C8C))((0x3BA89BBB11B74816DCA2AB98A0CCF1EE2DCBAC57ABF1A0985FB2A57D3F8E4DE5 0x31BC6DE5602C70B25A315D15CC8003BDB1CBB319DF81B51D21A12D9347F703E0)(0x3470901CDD2AD800D1536FC0A6A30CE3C76DCBA9181AA90A4C945FC4AA227A34 0x1BCFAF809FA6CD074B5BCB9B153D66FDF8F5D3937029537E18D4708ABA2549B2))((0x05C14DB85827E0C70141F86FADB1C6023399848006B432FCC2610809B38D8F3E 0x113B89CA869F5796323586D2B7F306A2260BE5010748F898F0C35084DA4A8CE3)(0x2A360434F43DABDBF37EF92E20DB45083E6CF28D64514A8B0057F28739057AD8 0x3AEBC8BB3C1175C987FB3E660C5A619B55CCCB32DC2F0319745615AFA12E5658))((0x100EC351A5160E55B913D92236DB86C797F7D1EDF0273D5E34DFDC10E5639D59 0x3F6D515A784D0ACDABE278071BF2BAF8D25730CD8DD46B175DBDD4E8006755E8)(0x21F00E3D03E15DD3002B0FCC3C3C88A36EBB899F29DE78E760C407E12F9B8F6A 0x15682277C5E39D649BAB9FE1E2D526FCE5B91F17240F96855C6CE8DAE456B19B))((0x1EF992BFD686D101E29B4080DD013F41D23461803A5919892F4C6EF3C7076962 0x3A9DBABDD814B6B5363FA5C2A4994C4C5467B3C57B78D5453D371080BBBE6D9A)(0x06AC0AFD96D9DAD32CDE340B6F2C3B7A4369AF7B354150ADABB5DDB6AA91CD5B 0x01312B47D786BA9AE213FD58F8B3FF55A570FD9186197DC267760FDB7FA49C5C))((0x371C60C79187239920B215F67901961D2FB8F8AF5D0AD748790C5B6AE4D59271 0x1151392CC6742BFE68D705D368DB5CA82CB36904D6A908014FB8A405AD618C39)(0x07AD48ADAF925E1B4B1FFAEE2EB2DA1A6C28F586B6DBCC454C536A0C6BF95869 0x3293D602645D9955B165A6099EFD639FD3B7B86ED645B1C420CE48A896EC4E98))((0x0889F47BD507C8D2AC48C7E08BD1E7774243E71664633E692397582F2F03102C 0x239F42BA4D7106E05891A3D65C826FFA050FB77C0516778D019E6F0F8C723E00)(0x0E9E21EAEE50DF241ACE5539332ACAE798855D98E603494F7905B0199B7CDCB5 0x26AC83A596A3221EBC2CB47C9B9B97FBCA39A8387BC8663EAA7CECE632B2CCEC))((0x133C6CB2EB44DC7E03BDEC135C7B23D5CE31A17904ED7F9D610AEB7EE3737B98 0x3F71B886B4799FAB114FCD3DC56692E37E7EAFA6B07563DFC7CE809DEE72111B)(0x29D708C99AD804A195ADFCFB7F86D7C9D1B43269E497C68D7FD01E67606C2D68 
0x1F1B455E92E8BB4D262EC07D8FB1D5871AB6A3EDE18C114C682A70A43425E2A7))((0x0C5B6042C709B962B6183B4E33833C2C29700BDA04F9B9266032C46DEDE27A2C 0x262AEF0F253206268A1EC0986D509053D3130946A854A03F220444D03DDD5B68)(0x1F7E99769C7C2FE438F02D6DF4347FD0A55A70145C666430E972529B7434F244 0x3EA8FA359F49ABCA21029AB76ECF529A177960438062A2861E2C5E2EB30955D6))))(z_1 0x37BD5F7AEACB8DACCF0F2912D11C88AA777C5FFAB569E442002D2002D812B336)(z_2 0x146D87039C31149BBB1684FB57DF792C254D00FDD8142718025DB1BA6E668651)(delta(0x3723FC11F5190ED697DD49327697CEBE267011E3C82F00791E53C33E3FEFD37E 0x321072DCCA11273EC8F41CF784A79864AC5F984C15D4378B2234B6AA2114AD4B))(sg(0x2F7BFB65DF6EABD7AC4AFF06A2E8539A23642411FD1AAD0813C2C42929F8CAC0 0x34135A7EB558F73834D29429C09C76F2DBCEAE801EC0DA28BB42AA2BACDA6DB4))))(evals(((w((0x39368E19D314E81FD139D2C16A226665571F8901516727C8A03BFDADD2C1F8EA)(0x059FAB7621F89E8D1B556ECF24F47FD77DC422CF93D947E581D434082F1EA831)(0x1FBAC092A1AD32D1B562E72194B9B3122BDFD7F061B0EF46229126F4990FAD6E)(0x3CBA9BD890B8AFA9B6547D7E416ACEAFB9AF382E9F8879C611AD9C384A3A74E7)(0x126D8BF4D74BCB8DA0D1B16AC1A2CB10FDEB621630A8623B1BA61A1BBEF5C133)(0x06F4E0466CC69CE2C8F9E7361703D1E2E30B1DB32C76C5EAC5D0BCE043D2C4BE)(0x0A5BF92B62B8FCEE6E3372EF6F1BE4F85139B0968F33113435B39424F0FCE735)(0x3EA82C109D8ACE3612E74B1B39C173FE61878B03F545C3F49E8F5F210B19D4EE)(0x3CEEEBFFCB67A97772187D18842B186F99CDF5BF31C57309B2278DE8BA590C77)(0x23AAD20ED5D08A0B7C6DBFF07CB92462613391B34B4631369C8F646BAC84DE40)(0x3404CA37721FA3EC6DA6139D1445774CF855562C7615D5EB3AC30E12173E5324)(0x1915B9C4B456D98105D39FF05C5CD7C662AA72AAC6BFA13E276709AA793AEEAD)(0x158CC3DB1696F62DA13DB3DB543D87CCC0765E0B21A6067AFA9724870928822D)(0x2DB8D731D205ED573CB02ECB38C9F83C8C27BA876C131B7168FB39D09AE28D03)(0x2ADEF4C3839FBB07FE482495DB0C1B6EFF889B07C4C799287A93B5A8A5E17995)))(z(0x16AFF9FB8AED63D1355081B1BA5A2462E1E4E163BB8720DB66BECEB2ABDF50FC))(s((0x131759F2F60454275D9EB74048AF6063B0F31C322A6EB5E0ED2B78BEFACA5B0D)(0x0A3653A78EA82652071F3BB2F3B8866E8EAD7FC4B6D2BD0B7C78536D91C8E82C)(0x208D1D73BC160293FC8EFA5662D914567473336FCA7A52442DFB62D3F2C16A74)(0x3BE5FB02BE99AE4C808F515D1510EDA7F04334827D2747E16668E63DCC75F0B9)(0x02717E919A080CAC6105A8DF7E0D002F1F1FB649015A3399F489529EF24EC21F)(0x317820B302015952CDBDDFDF8E5E5C433E57665D09CC340F799E443399ED80F9)))(generic_selector(0x1BBC4EE705C1A85610E35F37AF16EB4CCF1D376D9FF41EC258EE487D8B334AA9))(poseidon_selector(0x3C54202C396048CBC75B24CB9AE9309C85CCDD696AD6F73B68AD54876DDFE45C)))((w((0x3D09C0BC1B9B933A474FC911535833E444148ED86326E092FACA367C3A616432)(0x15259E16D5B939B400B5001AFB7959A170BDDF738DCAE49942F2ADD659A6C750)(0x1F44D757C9F85B819A520BA60CE6521E3882DBBE127A8213A96DCD5BCD39A75C)(0x3C2C37780457E10D72A8E1BCF455AE87AF756872D5A673C95FB466A82258F390)(0x0F35D208E7F9D18D60D2B293C8FC885F2C38CED43E5292D2BB3BDC11902D4A05)(0x3B8DA5385ADB93837A3047EAF88F597B87E3937087F310E295FCBEF029207F5F)(0x0301E8C8E83C0C36965FD5E04E80669731C77330FF5E07EFEC3BF9470B7890C6)(0x1A99BFCEDB10E83BEDBB0E165790E66EF531E2002ED4DEF75E62B6B4144C3143)(0x18BB8D73E3DECF7BBCE9599B9E5AC98602DF3CBE6DAC043473A3CCF3CA649E74)(0x275AB4129437B70D5F332105C020D6FDE4B5B4219603057FC84B890FED3E0EB7)(0x18657C4E070DC2578776302CA0425F894B4ADEFF96B43FF37D1B5DC8490EF8CA)(0x3CEB93C647214DC0DA723289DFEE43121AE0C28EDB0DD33C339C7764F6A89F79)(0x217FBB022BC0A6A3FB88F8F0979E1BEF953D925EABA4F7BB65A50CFF9C30E9F7)(0x13BBF0ECACDD41584C03E4AF65335F7A1A44A4E7FD4632374E0831A996EF48AC)(0x1497DDBD5279B48FC7923505FB155517D2004D02730A26C892608FEB84CCF37E)))(z(0x3CD4B8D574CC57DADC110478024BC62528A8B2C4BD6859AD32EFEAA08C25B7DC
))(s((0x3753C1B3C19DF9F83D1B60745586AFD0676EC3AA5DA9493FE3A16678B01C067C)(0x1875A200E971FBE9F6A5CA841EA0A46ABBB0B7A209CBA9DBD81A82BB0A22BD20)(0x00F7B023427A66612A27342470EBB84D81964BF8608ABFBB24C3393BD159C009)(0x2725A0D714490C7789D3DDDA3C182C19EB9A27ACFB4ABB30CF3109547C1563F7)(0x111B57A2D670CBE906800F48919476DB22489EA2E59AB39DA0A719F888A35E2B)(0x103A5FADBB09FB610F7708063523A1ED918CC320AA4406A0475407CEECEDC694)))(generic_selector(0x238F61AC265346F4DA1BBD3380873ACE77371D1C2703D3FB0CABD4DC3A5CC1DC))(poseidon_selector(0x255FDEFD6988A0B7D64F8308E8FC0F79F8AF94E55C756ADE42E69E5F25E6BE73)))))(ft_eval1 0x1C0DB0B62E72EFDC816BF91920BDFEE08D30C1E7DEDDB7F145745CFD7300E743))))))" + signature: null + } + } + ] + memo: "E4YM2vTHhWEg66xpj52JErHUBU4pZ1yageL4TVDDpTTSsv8mK6YaH" + } + } + ) +} +``` + +Account state after the above transaction is included in a block + +``` +query MyQuery { + account(publicKey: "B62qmQDtbNTymWXdZAcp4JHjfhmWmuqHjwc6BamUEvD8KhFpMui2K1Z") { + nonce + zkappState + } +} +``` + +Result of the query + +```json +{ + "data": { + "account": { + "nonce": "0", + "zkappState": [ + "1", + "2", + "3", + "4", + "5", + "6", + "7", + "8" + ] + } + } +} +``` +![Screenshot](res/account-after-state-update.png) + +#### 3. Update Account Permissions + +A zkapp transaction to update the account's permissions. + +```shell +$mina-zkapp-test-transaction update-permissions -help +Generate a zkApp transaction that updates the permissions of a zkApp account + + zkapp_test_transaction.exe update-permissions + +=== flags === + + --current-auth Proof|Signature|Either|None Current authorization in the + account to change permissions + --edit-state _ Proof|Signature|Either|None + --fee-payer-key KEYFILE Private key file for the fee payer + of the transaction (should already + be in the ledger) + --increment-nonce _ Proof|Signature|Either|None + --nonce NN Nonce of the fee payer account + --receive _ Proof|Signature|Either|None + --send _ Proof|Signature|Either|None + --set-delegate _ Proof|Signature|Either|None + --set-permissions _ Proof|Signature|Either|None + --set-sequence-state _ Proof|Signature|Either|None + --set-token-symbol _ Proof|Signature|Either|None + --set-verification-key _ Proof|Signature|Either|None + --set-voting-for _ Proof|Signature|Either|None + --set-zkapp-uri _ Proof|Signature|Either|None + --zkapp-account-key KEYFILE Private key file to create a new + zkApp account + [--debug] Debug mode, generates transaction + snark + [--fee FEE] Amount you are willing to pay to + process the transaction (default: + 1) (minimum: 0.003) + [--memo STRING] Memo accompanying the transaction + [-help] print this help text and exit + (alias: -?) 
+
+```
+
+For example, to change the permission required to edit permissions (`setPermissions`) from `Signature` to `Proof`:
+
+```shell
+$mina-zkapp-test-transaction update-permissions --fee-payer-key ..my-fee-payer --nonce 4 --zkapp-account-key my-zkapp-key --current-auth signature --edit-state Proof --receive None --set-permissions Proof --set-delegate Signature --set-verification-key Signature --set-zkapp-uri Signature --set-sequence-state Proof --set-token-symbol Signature --send Signature --increment-nonce Signature --set-voting-for Signature
+```
+
+```
+mutation MyMutation {
+  __typename
+  sendZkapp(
+    input: {
+      parties: {
+        feePayer: {
+          body: {
+            publicKey: "B62qpfgnUm7zVqi8MJHNB2m37rtgMNDbFNhC2DpMmmVpQt8x6gKv9Ww"
+            update: {
+              appState: [null, null, null, null, null, null, null, null]
+              delegate: null
+              verificationKey: null
+              permissions: null
+              zkappUri: null
+              tokenSymbol: null
+              timing: null
+              votingFor: null
+            }
+            balanceChange: "5000000000"
+            events: []
+            sequenceEvents: []
+            callData: "0"
+            callDepth: 0
+            protocolStatePrecondition: {
+              snarkedLedgerHash: null
+              timestamp: null
+              blockchainLength: null
+              minWindowDensity: null
+              totalCurrency: null
+              globalSlotSinceHardFork: null
+              globalSlotSinceGenesis: null
+              stakingEpochData: {
+                ledger: { hash: null, totalCurrency: null }
+                seed: null
+                startCheckpoint: null
+                lockCheckpoint: null
+                epochLength: null
+              }
+              nextEpochData: {
+                ledger: { hash: null, totalCurrency: null }
+                seed: null
+                startCheckpoint: null
+                lockCheckpoint: null
+                epochLength: null
+              }
+            }
+            accountPrecondition: "3"
+          }
+          authorization: "7mWyHxNKM1WG5syxbayM2xPLethzreaD5eijCXhabBfJMTnFo4LkhLXtKUq3YnouWduVmoSgd4buYyHh2XgRamUZVNuUjpWp"
+        }
+        otherParties: [
+          {
+            body: {
+              publicKey: "B62qmQDtbNTymWXdZAcp4JHjfhmWmuqHjwc6BamUEvD8KhFpMui2K1Z"
+              tokenId: "wSHV2S4qX9jFsLjQo8r1BsMLH2ZRKsZx6EJd1sbozGPieEC4Jf"
+              update: {
+                appState: [null, null, null, null, null, null, null, null]
+                delegate: null
+                verificationKey: null
+                permissions: {
+                  editState: "Proof"
+                  send: "Signature"
+                  receive: "None"
+                  setDelegate: "Signature"
+                  setPermissions: "Proof"
+                  setVerificationKey: "Signature"
+                  setZkappUri: "Signature"
+                  editSequenceState: "Proof"
+                  setTokenSymbol: "Signature"
+                  incrementNonce: "Signature"
+                  setVotingFor: "Signature"
+                }
+                zkappUri: null
+                tokenSymbol: null
+                timing: null
+                votingFor: null
+              }
+              balanceChange: { magnitude: "0", sgn: "Positive" }
+              incrementNonce: false
+              events: []
+              sequenceEvents: []
+              callData: "0"
+              callDepth: 0
+              protocolStatePrecondition: {
+                snarkedLedgerHash: null
+                timestamp: null
+                blockchainLength: null
+                minWindowDensity: null
+                totalCurrency: null
+                globalSlotSinceHardFork: null
+                globalSlotSinceGenesis: null
+                stakingEpochData: {
+                  ledger: { hash: null, totalCurrency: null }
+                  seed: null
+                  startCheckpoint: null
+                  lockCheckpoint: null
+                  epochLength: null
+                }
+                nextEpochData: {
+                  ledger: { hash: null, totalCurrency: null }
+                  seed: null
+                  startCheckpoint: null
+                  lockCheckpoint: null
+                  epochLength: null
+                }
+              }
+              accountPrecondition: {
+                balance: null
+                nonce: null
+                receiptChainHash: null
+                publicKey: null
+                delegate: null
+                state: [null, null, null, null, null, null, null, null]
+                sequenceState: null
+                provedState: null
+              }
+              useFullCommitment: true
+            }
+            authorization: {
+              proof: null
+              signature: "7mXL44M3nJbyHpbYoQ9ak2XUFS4g8THc63ejMR1Wyz3uxd1NY4bnDaGxuCSD4jnbXo9apnDeQgzrdUvHuSFz8uaMjkF5ogey"
+            }
+          }
+        ]
+        memo: "E4YM2vTHhWEg66xpj52JErHUBU4pZ1yageL4TVDDpTTSsv8mK6YaH"
+      }
+    }
+  )
+}
+```
+
+Account state after the above transaction is sent and included in a block:
+
+```
+query MyQuery {
+  account(publicKey: "B62qmQDtbNTymWXdZAcp4JHjfhmWmuqHjwc6BamUEvD8KhFpMui2K1Z") {
+    permissions {
+      editSequenceState
+      editState
+      incrementNonce
+      receive
+      send
+      setDelegate
+      setPermissions
+      setZkappUri
+      setTokenSymbol
+      setVerificationKey
+      setVotingFor
+    }
+  }
+}
+```
+
+Result of the query
+
+```json
+{
+  "data": {
+    "account": {
+      "permissions": {
+        "editSequenceState": "Proof",
+        "editState": "Proof",
+        "incrementNonce": "Signature",
+        "receive": "None",
+        "send": "Signature",
+        "setDelegate": "Signature",
+        "setPermissions": "Proof",
+        "setZkappUri": "Signature",
+        "setTokenSymbol": "Signature",
+        "setVerificationKey": "Signature",
+        "setVotingFor": "Signature"
+      }
+    }
+  }
+}
+```
+![Screenshot](res/account-after-setting-permissions.png)
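+
+As a quick sanity check that each of these transactions was applied, you can
+also watch the fee payer's account: every zkApp transaction that is included
+in a block increments the fee payer's nonce. A minimal query, reusing the fee
+payer account from the examples above (the operation name `FeePayerNonce` is
+illustrative):
+
+```
+query FeePayerNonce {
+  account(publicKey: "B62qpfgnUm7zVqi8MJHNB2m37rtgMNDbFNhC2DpMmmVpQt8x6gKv9Ww") {
+    nonce
+  }
+}
+```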
diff --git a/src/app/zkapp_test_transaction/res/account-after-deploy.png b/src/app/zkapp_test_transaction/res/account-after-deploy.png
new file mode 100644
index 00000000000..550d6a51078
Binary files /dev/null and b/src/app/zkapp_test_transaction/res/account-after-deploy.png differ
diff --git a/src/app/zkapp_test_transaction/res/account-after-setting-permissions.png b/src/app/zkapp_test_transaction/res/account-after-setting-permissions.png
new file mode 100644
index 00000000000..dc0f7b08ac1
Binary files /dev/null and b/src/app/zkapp_test_transaction/res/account-after-setting-permissions.png differ
diff --git a/src/app/zkapp_test_transaction/res/account-after-state-update.png b/src/app/zkapp_test_transaction/res/account-after-state-update.png
new file mode 100644
index 00000000000..ed629ae33e0
Binary files /dev/null and b/src/app/zkapp_test_transaction/res/account-after-state-update.png differ
diff --git a/src/app/zkapp_test_transaction/res/deploy-zkapp.png b/src/app/zkapp_test_transaction/res/deploy-zkapp.png
new file mode 100644
index 00000000000..bd9b8f73aef
Binary files /dev/null and b/src/app/zkapp_test_transaction/res/deploy-zkapp.png differ
diff --git a/src/app/zkapps_examples/test/optional_custom_gates/dune b/src/app/zkapps_examples/test/optional_custom_gates/dune
new file mode 100644
index 00000000000..ecf9f3905ac
--- /dev/null
+++ b/src/app/zkapps_examples/test/optional_custom_gates/dune
@@ -0,0 +1,43 @@
+(library
+ (name zkapp_optional_custom_gates_tests)
+ (libraries
+  ;; opam libraries
+  ppx_inline_test.config
+  async
+  async_kernel
+  async_unix
+  core
+  core_kernel
+  result
+  base
+  base.caml
+  sexplib0
+  integers
+  ;; local libraries
+  mina_base.import
+  transaction_snark
+  mina_ledger
+  mina_base
+  mina_generators
+  currency
+  pickles
+  pickles_types
+  mina_numbers
+  zkapp_command_builder
+  signature_lib
+  genesis_constants
+  sgn
+  transaction_snark_tests
+  test_util
+  mina_transaction_logic
+  zkapps_examples
+  pickles_optional_custom_gates_circuits
+  snark_keys_header
+  snark_params
+  data_hash_lib
+ )
+ (library_flags -linkall)
+ (inline_tests (flags -verbose -show-counts))
+ (preprocess
+  (pps ppx_snarky ppx_version ppx_jane ppx_mina))
+ (instrumentation (backend bisect_ppx)))
diff --git a/src/app/zkapps_examples/test/optional_custom_gates/zkapp_optional_custom_gates_tests.ml b/src/app/zkapps_examples/test/optional_custom_gates/zkapp_optional_custom_gates_tests.ml
new file mode 100644
index 00000000000..4524e5115e4
--- /dev/null
+++ b/src/app/zkapps_examples/test/optional_custom_gates/zkapp_optional_custom_gates_tests.ml
@@ -0,0 +1,118 @@
+open Core_kernel
+open Mina_base
+
+module Account_info () = struct
+  let keypair = Quickcheck.random_value Signature_lib.Keypair.gen
+
+  let
public_key = Signature_lib.Public_key.compress keypair.public_key + + let token_id = Token_id.default + + let account_id = Account_id.create public_key token_id +end + +module Circuits (Account_info : sig + val public_key : Signature_lib.Public_key.Compressed.t +end) = +struct + open Account_info + open Pickles.Impls.Step + open Pickles_types + + let constraint_constants = + { Snark_keys_header.Constraint_constants.sub_windows_per_window = 0 + ; ledger_depth = 0 + ; work_delay = 0 + ; block_window_duration_ms = 0 + ; transaction_capacity = Log_2 0 + ; pending_coinbase_depth = 0 + ; coinbase_amount = Unsigned.UInt64.of_int 0 + ; supercharged_coinbase_factor = 0 + ; account_creation_fee = Unsigned.UInt64.of_int 0 + ; fork = None + } + + let feature_flags = + { Plonk_types.Features.none_bool with + rot = true + ; xor = true + ; range_check0 = true + ; range_check1 = true + ; foreign_field_add = true + ; foreign_field_mul = true + } + + let tag, _cache_handle, proof, Pickles.Provers.[ prove ] = + Zkapps_examples.compile ~auxiliary_typ:Typ.unit + ~branches:(module Nat.N1) + ~max_proofs_verified:(module Nat.N0) + ~name:"custom gates" ~constraint_constants + ~choices:(fun ~self:_ -> + [ { identifier = "main" + ; prevs = [] + ; main = + Zkapps_examples.wrap_main + ~public_key: + (Signature_lib.Public_key.Compressed.var_of_t public_key) + (fun _account_update -> + Pickles_optional_custom_gates_circuits.main_body + ~feature_flags () ) + ; feature_flags + } + ] ) + () + + module Proof = (val proof) +end + +let%test_module "Zkapp with optional custom gates" = + ( module struct + let () = Backtrace.elide := false + + module Account_info = Account_info () + + module Circuits = Circuits (Account_info) + + let account_update = + lazy + (fst + (Async.Thread_safe.block_on_async_exn (fun () -> Circuits.prove ())) ) + + open Transaction_snark_tests.Util + + let initialize_ledger ledger = + let balance = + let open Currency.Balance in + let add_amount x y = add_amount y x in + zero + |> add_amount (Currency.Amount.of_nanomina_int_exn 500) + |> Option.value_exn + in + let account = Account.create Account_info.account_id balance in + let _, loc = + Ledger.get_or_create_account ledger Account_info.account_id account + |> Or_error.ok_exn + in + loc + + let%test_unit "Generate a zkapp using a combination of optional custom \ + gates" = + ignore (Lazy.force account_update : _ Zkapp_command.Call_forest.Tree.t) + + let%test_unit "Zkapp using a combination of optional custom gates verifies" + = + let account_update = Lazy.force account_update in + let account_updates = + [] + |> Zkapp_command.Call_forest.cons_tree account_update + |> Zkapp_command.Call_forest.cons + (Zkapps_examples.Deploy_account_update.full ~access:Either + Account_info.public_key Account_info.token_id + (Pickles.Side_loaded.Verification_key.of_compiled Circuits.tag) ) + in + test_zkapp_command account_updates ~fee_payer_pk:Account_info.public_key + ~signers: + [| (Account_info.public_key, Account_info.keypair.private_key) |] + ~initialize_ledger + ~finalize_ledger:(fun _ _ -> ()) + end ) diff --git a/src/dune-project b/src/dune-project index ce3a9a64a8b..95e9590bc10 100644 --- a/src/dune-project +++ b/src/dune-project @@ -38,6 +38,7 @@ (package (name delegation_compliance)) (package (name dhall_types)) (package (name direction)) +(package (name disk_caching_stats)) (package (name display_public_key)) (package (name downloader)) (package (name dummy_values)) @@ -63,12 +64,14 @@ (package (name graphql_wrapper)) (package (name hash_prefixes)) (package 
(name hash_prefix_states)) +(package (name hash_prefix_create)) (package (name heap_usage)) (package (name hex)) (package (name immutable_array)) (package (name inline_test_quiet_logs)) (package (name integers_stubs_js)) (package (name integration_test_cloud_engine)) +(package (name integration_test_local_engine)) (package (name integration_test_lib)) (package (name internal_tracing)) (package (name interpolator_lib)) @@ -150,13 +153,13 @@ (package (name pipe_lib)) (package (name pokolog)) (package (name ppx_annot)) -(package (name ppx_dhall_type)) (package (name ppx_mina)) (package (name ppx_register_event)) (package (name ppx_representatives)) (package (name ppx_to_enum)) (package (name ppx_util)) (package (name ppx_version)) +(package (name ppx_version.runtime)) (package (name precomputed_values)) (package (name promise)) (package (name proof_carrying_data)) diff --git a/src/internal_tracing.opam b/src/internal_tracing.opam new file mode 100644 index 00000000000..7be19e3d612 --- /dev/null +++ b/src/internal_tracing.opam @@ -0,0 +1,5 @@ +opam-version: "2.0" +version: "0.1" +build: [ + ["dune" "build" "--only" "src" "--root" "." "-j" jobs "@install"] +] diff --git a/src/lib/base58_check/base58_check.ml b/src/lib/base58_check/base58_check.ml index 14c3faecb0c..d09f28a689d 100644 --- a/src/lib/base58_check/base58_check.ml +++ b/src/lib/base58_check/base58_check.ml @@ -104,53 +104,3 @@ struct end module Version_bytes = Version_bytes - -let%test_module "base58check tests" = - ( module struct - module Base58_check = Make (struct - let description = "Base58check tests" - - let version_byte = '\x53' - end) - - open Base58_check - - let test_roundtrip payload = - let encoded = encode payload in - let payload' = decode_exn encoded in - String.equal payload payload' - - let%test "empty_string" = test_roundtrip "" - - let%test "nonempty_string" = - test_roundtrip "Somewhere, over the rainbow, way up high" - - let%test "longer_string" = - test_roundtrip - "Someday, I wish upon a star, wake up where the clouds are far behind \ - me, where trouble melts like lemon drops, High above the chimney top, \ - that's where you'll find me" - - let%test "invalid checksum" = - try - let encoded = encode "Bluer than velvet were her eyes" in - let bytes = Bytes.of_string encoded in - let len = Bytes.length bytes in - let last_ch = Bytes.get bytes (len - 1) in - (* change last byte to invalidate checksum *) - let new_last_ch = - if Char.equal last_ch '\xFF' then '\x00' - else Char.of_int_exn (Char.to_int last_ch + 1) - in - Bytes.set bytes (len - 1) new_last_ch ; - let encoded_bad_checksum = Bytes.to_string bytes in - let _payload = decode_exn encoded_bad_checksum in - false - with Invalid_base58_checksum _ -> true - - let%test "invalid length" = - try - let _payload = decode_exn "abcd" in - false - with Invalid_base58_check_length _ -> true - end ) diff --git a/src/lib/base58_check/tests/dune b/src/lib/base58_check/tests/dune new file mode 100644 index 00000000000..46019aaed4a --- /dev/null +++ b/src/lib/base58_check/tests/dune @@ -0,0 +1,3 @@ +(tests + (names test_base58_check) + (libraries core_kernel base58_check alcotest)) diff --git a/src/lib/base58_check/tests/test_base58_check.ml b/src/lib/base58_check/tests/test_base58_check.ml new file mode 100644 index 00000000000..735c9fa0e86 --- /dev/null +++ b/src/lib/base58_check/tests/test_base58_check.ml @@ -0,0 +1,75 @@ +module M = Base58_check.Make (struct + let description = "Base58check tests" + + let version_byte = '\x53' +end) + +open M + +let 
helper_test_roundtrip payload =
+  let encoded = encode payload in
+  let payload' = decode_exn encoded in
+  assert (String.equal payload payload')
+
+let test_roundtrip_empty_string () = helper_test_roundtrip ""
+
+let test_roundtrip_nonempty_string () =
+  helper_test_roundtrip "Somewhere, over the rainbow, way up high"
+
+let test_roundtrip_longer_string () =
+  helper_test_roundtrip
+    "Someday, I wish upon a star, wake up where the clouds are far behind me, \
+     where trouble melts like lemon drops, High above the chimney top, that's \
+     where you'll find me"
+
+let test_invalid_checksum () =
+  try
+    let encoded = encode "Bluer than velvet were her eyes" in
+    let bytes = Bytes.of_string encoded in
+    let len = Bytes.length bytes in
+    let last_ch = Bytes.get bytes (len - 1) in
+    (* change last byte to invalidate checksum *)
+    let new_last_ch =
+      if Char.equal last_ch '\xFF' then '\x00'
+      else Core_kernel.Char.of_int_exn (Core_kernel.Char.to_int last_ch + 1)
+    in
+    Bytes.set bytes (len - 1) new_last_ch ;
+    let encoded_bad_checksum = Bytes.to_string bytes in
+    let _payload = decode_exn encoded_bad_checksum in
+    assert false
+  with Base58_check.Invalid_base58_checksum _ -> assert true
+
+let test_invalid_length () =
+  try
+    let _payload = decode_exn "abcd" in
+    assert false
+  with Base58_check.Invalid_base58_check_length _ -> assert true
+
+let test_vectors () =
+  let vectors =
+    [ ("", "AR3b7Dr")
+    ; ("vectors", "2aML9fKacueS1p5W3")
+    ; ("test", "24cUQZMy5c7Mj")
+    ]
+  in
+  assert (
+    List.for_all
+      (fun (inp, exp_output) ->
+        let output = M.encode inp in
+        String.equal output exp_output )
+      vectors )
+
+let () =
+  let open Alcotest in
+  run "Base58_check"
+    [ ( "test_roundtrip"
+      , [ test_case "empty string" `Quick test_roundtrip_empty_string
+        ; test_case "non-empty string" `Quick test_roundtrip_nonempty_string
+        ; test_case "longer string" `Quick test_roundtrip_longer_string
+        ] )
+    ; ( "negative tests"
+      , [ test_case "invalid checksum" `Quick test_invalid_checksum
+        ; test_case "invalid length" `Quick test_invalid_length
+        ] )
+    ; ("test vectors", [ test_case "vectors" `Quick test_vectors ])
+    ]
diff --git a/src/lib/blockchain_snark/blockchain_snark_state.ml b/src/lib/blockchain_snark/blockchain_snark_state.ml
index 5b9ed212e46..24283660811 100644
--- a/src/lib/blockchain_snark/blockchain_snark_state.ml
+++ b/src/lib/blockchain_snark/blockchain_snark_state.ml
@@ -238,7 +238,7 @@ let%snarkydef_ step ~(logger : Logger.t)
     in
     (t, is_base_case)
   in
-  let%bind txn_snark_should_verify, success =
+  let%bind txn_snark_must_verify, success =
     let%bind new_pending_coinbase_hash, deleted_stack, no_coinbases_popped =
       let coinbase_receiver =
         Consensus.Data.Consensus_state.coinbase_receiver_var consensus_state
@@ -348,14 +348,14 @@ let%snarkydef_ step ~(logger : Logger.t)
     in
     (transaction_snark_should_verifiy, result)
   in
-  let txn_snark_should_verify =
+  let txn_snark_must_verify =
     match proof_level with
     | Check | None ->
         Boolean.false_
     | Full ->
-        txn_snark_should_verify
+        txn_snark_must_verify
   in
-  let prev_should_verify =
+  let prev_must_verify =
     match proof_level with
     | Check | None ->
         Boolean.false_
@@ -374,11 +374,11 @@ let%snarkydef_ step ~(logger : Logger.t)
     ( { Pickles.Inductive_rule.Previous_proof_statement.public_input =
           previous_blockchain_proof_input
       ; proof = previous_blockchain_proof
-      ; proof_must_verify = prev_should_verify
+      ; proof_must_verify = prev_must_verify
       }
     , { Pickles.Inductive_rule.Previous_proof_statement.public_input =
           txn_snark
       ; proof = txn_snark_proof
-      ; proof_must_verify = txn_snark_should_verify
+      ; proof_must_verify = txn_snark_must_verify
       } )
 
 module Statement = struct
@@ -487,6 +487,13 @@ end) : S = struct
         ~constraint_constants:
           (Genesis_constants.Constraint_constants.to_snark_keys_header
              constraint_constants )
+        ~commits:
+          { commits =
+              { mina = Mina_version.commit_id
+              ; marlin = Mina_version.marlin_commit_id
+              }
+          ; commit_date = Mina_version.commit_date
+          }
         ~choices:(fun ~self ->
           [ rule ~proof_level ~constraint_constants T.tag self ] )
diff --git a/src/lib/blockchain_snark/dune b/src/lib/blockchain_snark/dune
index 093d1f82501..d19b6c05c9d 100644
--- a/src/lib/blockchain_snark/dune
+++ b/src/lib/blockchain_snark/dune
@@ -36,6 +36,8 @@
   kimchi_backend.pasta.basic
   data_hash_lib
   ppx_version.runtime
+  snark_keys_header
+  mina_version
  )
 (inline_tests (flags -verbose -show-counts))
 (preprocess
diff --git a/src/lib/bootstrap_controller/README.md b/src/lib/bootstrap_controller/README.md
new file mode 100644
index 00000000000..ad0f3cf01f0
--- /dev/null
+++ b/src/lib/bootstrap_controller/README.md
@@ -0,0 +1,32 @@
+# Bootstrap Controller
+
+The bootstrap controller is the component responsible for constructing the
+root of the transition frontier when a node first comes online, or when it
+falls too far behind the canonical chain for catchup to recover.
+
+The bootstrap controller takes the following steps to initialize the root of
+the transition frontier:
+1) Download the `snarked_ledger` using the `Bootstrap.sync_ledger` function.
+Incoming block gossip is used to update the target `snarked_ledger`.
+2) Download the `scan_state` and `pending_coinbases`.
+3) Construct the `staged_ledger` from the `snarked_ledger`, `scan_state` and
+`pending_coinbases`.
+4) Download the relevant `local_state` via `Consensus.Hooks.sync_local_state`.
+5) Reset the persistent frontier and construct the new frontier. The new
+frontier initially contains only the root breadcrumb. Transitions collected
+during the bootstrap phase are used for catchup.
+
+If any step fails, the bootstrap controller loops back to step 1.
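+
+In pseudo-OCaml, the retry logic looks roughly like this (a sketch only;
+`run_cycle` stands for steps 1-5 above and is not the real function name):
+
+```ocaml
+let rec bootstrap () =
+  match run_cycle () with
+  | Ok frontier_and_collected_transitions ->
+      frontier_and_collected_transitions
+  | Error _ ->
+      (* any failure restarts from step 1 *)
+      bootstrap ()
+```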
diff --git a/src/lib/bootstrap_controller/bootstrap_controller.ml b/src/lib/bootstrap_controller/bootstrap_controller.ml
index 440430c3684..b9a06ca6d83 100644
--- a/src/lib/bootstrap_controller/bootstrap_controller.ml
+++ b/src/lib/bootstrap_controller/bootstrap_controller.ml
@@ -45,6 +45,7 @@ let opt_time_to_yojson = function
   | None ->
       `Null
 
+(** An auxiliary data structure for collecting various metrics for the bootstrap controller. *)
 type bootstrap_cycle_stats =
   { cycle_result : string
   ; sync_ledger_time : time
@@ -96,6 +97,7 @@ let should_sync ~root_sync_ledger t candidate_state =
   (not @@ done_syncing_root root_sync_ledger)
   && worth_getting_root t candidate_state
 
+(** Update [Synced_ledger]'s target, and update [best_seen_transition] and [current_root] accordingly. *)
 let start_sync_job_with_peer ~sender ~root_sync_ledger
     ({ context = (module Context); _ } as t) peer_best_tip peer_root =
   let open Context in
@@ -139,6 +141,11 @@ let start_sync_job_with_peer ~sender ~root_sync_ledger
   | `Repeat ->
       `Ignored
 
+(** For each transition, this function compares it against the existing one.
+    If the incoming transition is better, it downloads the merkle list from
+    that transition to its root and verifies it. If we get a better root
+    than the existing one, it resets the Sync_ledger's target by calling
+    [start_sync_job_with_peer]. *)
 let on_transition ({ context = (module Context); _ } as t) ~sender
     ~root_sync_ledger ~genesis_constants candidate_transition =
   let open Context in
@@ -173,6 +180,9 @@ let on_transition ({ context = (module Context); _ } as t) ~sender
   | Error e ->
       return (received_bad_proof t sender e |> Fn.const `Ignored) )
 
+(** A helper function that wraps the calls to Sync_ledger, iterates through
+    incoming transitions, adds them to the transition_cache and calls
+    [on_transition]. *)
 let sync_ledger ({ context = (module Context); _ } as t) ~preferred
     ~root_sync_ledger ~transition_graph ~sync_ledger_reader ~genesis_constants =
   let open Context in
@@ -227,9 +237,19 @@ let external_transition_compare ~context:(module Context : CONTEXT) =
         else 1 )
       ~f:(With_hash.map ~f:Mina_block.consensus_state)
 
-(* We conditionally ask other peers for their best tip. This is for testing
-   eager bootstrapping and the regular functionalities of bootstrapping in
-   isolation *)
+(** The entry point for the bootstrap controller. When bootstrap finishes,
+    it returns a transition frontier with the root breadcrumb and a list
+    of transitions collected during bootstrap.
+
+    The bootstrap controller takes the following steps to construct the
+    transition frontier:
+    1. Download the root snarked_ledger.
+    2. Download the scan state and pending coinbases.
+    3. Construct the staged ledger from the snarked ledger, scan state and
+       pending coinbases.
+    4. Synchronize the consensus local state if necessary.
+    5. Close the old frontier and reload a new one from disk.
+  *)
 let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network
     ~consensus_local_state ~transition_reader ~best_seen_transition
     ~persistent_root ~persistent_frontier ~initial_root_transition ~catchup_mode
@@ -289,6 +309,7 @@ let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network
         Transition_frontier.Persistent_root.Instance.snarked_ledger
           temp_persistent_root_instance
       in
+      (* step 1. download snarked_ledger *)
      let%bind sync_ledger_time, (hash, sender, expected_staged_ledger_hash) =
        time_deferred
          (let root_sync_ledger =
@@ -319,6 +340,7 @@ let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network
      Mina_metrics.(
        Gauge.set Bootstrap.num_of_root_snarked_ledger_retargeted
          (Float.of_int t.num_of_root_snarked_ledger_retargeted)) ;
+      (* step 2. Download scan state and pending coinbases. *)
      let%bind ( staged_ledger_data_download_time
               , staged_ledger_construction_time
               , staged_ledger_aux_result ) =
@@ -408,6 +430,8 @@ let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network
        | Some protocol_state ->
            Ok (With_hash.data protocol_state)
      in
+      (* step 3. Construct staged ledger from snarked ledger, scan state
+         and pending coinbases. *)
      (* Construct the staged ledger before constructing the transition
       * frontier in order to verify the scan state we received.
       * TODO: reorganize the code to avoid doing this twice (#3480) *)
@@ -504,7 +528,7 @@ let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network
          |> Mina_block.header |> Mina_block.Header.protocol_state
          |> Protocol_state.consensus_state
        in
-        (* Synchronize consensus local state if necessary *)
+        (* step 4. Synchronize consensus local state if necessary *)
        let%bind ( local_state_sync_time
                 , (local_state_sync_required, local_state_sync_result) ) =
          time_deferred
@@ -554,7 +578,7 @@ let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network
            in
            loop (this_cycle :: previous_cycles)
        | Ok () ->
-            (* Close the old frontier and reload a new on from disk. *)
+            (* step 5. Close the old frontier and reload a new one from disk. *)
            let new_root_data : Transition_frontier.Root_data.Limited.t =
              Transition_frontier.Root_data.Limited.create
                ~transition:(Mina_block.Validated.lift new_root)
diff --git a/src/lib/bootstrap_controller/bootstrap_controller.mli b/src/lib/bootstrap_controller/bootstrap_controller.mli
index 777a68cf0e1..481f8d58b24 100644
--- a/src/lib/bootstrap_controller/bootstrap_controller.mli
+++ b/src/lib/bootstrap_controller/bootstrap_controller.mli
@@ -14,6 +14,19 @@ end
 
 type Structured_log_events.t += Bootstrap_complete [@@deriving register_event]
 
+(** The entry point for the bootstrap controller. When bootstrap finishes,
+    it returns a transition frontier with the root breadcrumb and a list
+    of transitions collected during bootstrap.
+
+    The bootstrap controller takes the following steps to construct the
+    transition frontier:
+    1. Download the root snarked_ledger.
+    2. Download the scan state and pending coinbases.
+    3. Construct the staged ledger from the snarked ledger, scan state and
+       pending coinbases.
+    4. Synchronize the consensus local state if necessary.
+    5. Close the old frontier and reload a new one from disk.
+  *)
 val run :
     context:(module CONTEXT)
  -> trust_system:Trust_system.t
diff --git a/src/lib/cli_lib/commands.ml b/src/lib/cli_lib/commands.ml
index 34eb9b01129..8a67c102392 100644
--- a/src/lib/cli_lib/commands.ml
+++ b/src/lib/cli_lib/commands.ml
@@ -104,7 +104,7 @@ let validate_transaction =
  (* TODO upgrade to yojson 2.0.0 when possible to use seq_from_channel
   * instead of the deprecated stream interface *)
  let jsons = Yojson.Safe.stream_from_channel In_channel.stdin in
-  ( match[@alert "--deprecated"]
+  ( match[@alert "-deprecated"]
      Or_error.try_with (fun () ->
          Caml.Stream.iter
            (fun transaction_json ->
@@ -139,7 +139,7 @@ let validate_transaction =
        Format.printf "Some transactions failed to verify@." ;
        exit 1 )
      else
-        let[@alert "--deprecated"] first = Caml.Stream.peek jsons in
+        let[@alert "-deprecated"] first = Caml.Stream.peek jsons in
        match first with
        | None ->
            Format.printf "Could not parse any transactions@." ;
diff --git a/src/lib/consensus/proof_of_stake.ml b/src/lib/consensus/proof_of_stake.ml
index 2aa6a3a35d6..49afd89ffb1 100644
--- a/src/lib/consensus/proof_of_stake.ml
+++ b/src/lib/consensus/proof_of_stake.ml
@@ -842,9 +842,9 @@ module Make_str (A : Wire_types.Concrete) = struct
          | _ ->
              respond
                (Provide
-                   (Snarky_backendless.Request.Handler.run handlers
-                      [ "Ledger Handler"; "Pending Coinbase Handler" ]
-                      request ) )
+                   (Option.value_exn ~message:"unhandled request"
+                      (Snarky_backendless.Request.Handler.run handlers request) )
+                )
      end
 
    let check ~context:(module Context : CONTEXT)
@@ -2585,9 +2585,9 @@ module Make_str (A : Wire_types.Concrete) = struct
          | _ ->
              respond
                (Provide
-                   (Snarky_backendless.Request.Handler.run handlers
-                      [ "Ledger Handler"; "Pending Coinbase Handler" ]
-                      request ) )
+                   (Option.value_exn ~message:"unhandled request"
+                      (Snarky_backendless.Request.Handler.run handlers request) )
+                )
 
      let ledger_depth { ledger; _ } = ledger.depth
    end
diff --git a/src/lib/crypto/kimchi_backend/common/dlog_plonk_based_keypair.ml b/src/lib/crypto/kimchi_backend/common/dlog_plonk_based_keypair.ml
index f4fe273b8b5..84d79803e2c 100644
--- a/src/lib/crypto/kimchi_backend/common/dlog_plonk_based_keypair.ml
+++ b/src/lib/crypto/kimchi_backend/common/dlog_plonk_based_keypair.ml
@@ -53,13 +53,30 @@ module type Inputs_intf = sig
 
     val set_prev_challenges : t -> int -> unit
 
-    val finalize_and_get_gates : t -> Gate_vector.t
+    val finalize_and_get_gates :
+         t
+      -> Gate_vector.t
+         * Scalar_field.t Kimchi_types.lookup_table array
+         * Scalar_field.t Kimchi_types.runtime_table_cfg array
   end
 
   module Index : sig
     type t
 
-    val create : Gate_vector.t -> int -> int -> Urs.t -> t
+    (** [create
+          gates
+          nb_public
+          fixed_lookup_tables
+          runtime_tables_cfg
+          nb_prev_challenges srs] *)
+    val create :
+         Gate_vector.t
+      -> int
+      -> Scalar_field.t Kimchi_types.lookup_table array
+      -> Scalar_field.t Kimchi_types.runtime_table_cfg array
+      -> int
+      -> Urs.t
+      -> t
   end
 
   module Curve : sig
@@ -156,7 +173,9 @@ module Make (Inputs : Inputs_intf) = struct
     (set_urs_info, load)
 
   let create ~prev_challenges cs =
-    let gates = Inputs.Constraint_system.finalize_and_get_gates cs in
+    let gates, fixed_lookup_tables, runtime_table_cfgs =
+      Inputs.Constraint_system.finalize_and_get_gates cs
+    in
     let public_input_size =
       Inputs.Constraint_system.get_primary_input_size cs
     in
@@ -170,7 +189,8 @@ module Make (Inputs : Inputs_intf) = struct
       prev_challenges'
    in
    let index =
-      Inputs.Index.create gates public_input_size prev_challenges (load_urs ())
+      Inputs.Index.create gates public_input_size fixed_lookup_tables
+        runtime_table_cfgs prev_challenges (load_urs ())
    in
    { index; cs }
@@ -203,4 +223,51 @@ module Make (Inputs : Inputs_intf) = struct
    ; emul_comm = g t.evals.emul_comm
    ; endomul_scalar_comm = g t.evals.endomul_scalar_comm
    }
+
+  let full_vk_commitments (t : Inputs.Verifier_index.t) :
+      ( Inputs.Curve.Affine.t array
+      , Inputs.Curve.Affine.t array option )
+      Pickles_types.Plonk_verification_key_evals.Step.t =
+    let g c : Inputs.Curve.Affine.t array =
+      match Inputs.Poly_comm.of_backend_without_degree_bound c with
+      | `Without_degree_bound x ->
+          x
+      | `With_degree_bound _ ->
+          assert false
+    in
+    let lookup f =
+      let open Option.Let_syntax in
+      let%bind l = t.lookup_index in
+      f l >>| g
+    in
+    { sigma_comm =
+        Pickles_types.Vector.init Pickles_types.Plonk_types.Permuts.n
+          ~f:(fun i -> g t.evals.sigma_comm.(i))
+    ; coefficients_comm =
+        Pickles_types.Vector.init Pickles_types.Plonk_types.Columns.n
+          ~f:(fun i -> g t.evals.coefficients_comm.(i))
+    ; generic_comm
= g t.evals.generic_comm + ; psm_comm = g t.evals.psm_comm + ; complete_add_comm = g t.evals.complete_add_comm + ; mul_comm = g t.evals.mul_comm + ; emul_comm = g t.evals.emul_comm + ; endomul_scalar_comm = g t.evals.endomul_scalar_comm + ; xor_comm = Option.map ~f:g t.evals.xor_comm + ; range_check0_comm = Option.map ~f:g t.evals.range_check0_comm + ; range_check1_comm = Option.map ~f:g t.evals.range_check1_comm + ; foreign_field_add_comm = Option.map ~f:g t.evals.foreign_field_add_comm + ; foreign_field_mul_comm = Option.map ~f:g t.evals.foreign_field_mul_comm + ; rot_comm = Option.map ~f:g t.evals.rot_comm + ; lookup_table_comm = + Pickles_types.Vector.init + Pickles_types.Plonk_types.Lookup_sorted_minus_1.n ~f:(fun i -> + lookup (fun l -> Option.try_with (fun () -> l.lookup_table.(i))) ) + ; lookup_table_ids = lookup (fun l -> l.table_ids) + ; runtime_tables_selector = lookup (fun l -> l.runtime_tables_selector) + ; lookup_selector_lookup = lookup (fun l -> l.lookup_selectors.lookup) + ; lookup_selector_xor = lookup (fun l -> l.lookup_selectors.xor) + ; lookup_selector_range_check = + lookup (fun l -> l.lookup_selectors.range_check) + ; lookup_selector_ffmul = lookup (fun l -> l.lookup_selectors.ffmul) + } end diff --git a/src/lib/crypto/kimchi_backend/common/kimchi_backend_common.mli b/src/lib/crypto/kimchi_backend/common/kimchi_backend_common.mli index ed045783443..ffb3b49e495 100644 --- a/src/lib/crypto/kimchi_backend/common/kimchi_backend_common.mli +++ b/src/lib/crypto/kimchi_backend/common/kimchi_backend_common.mli @@ -11,6 +11,20 @@ module Plonk_constraint_system : sig val get_public_input_size : ('a, 'b) t -> int Core_kernel.Set_once.t + (** Return the size of all the fixed lookup tables concatenated, without the + built-in XOR and RangeCheck tables *) + val get_concatenated_fixed_lookup_table_size : ('a, 'b) t -> int + + (** Return the size of all the runtime lookup tables concatenated *) + val get_concatenated_runtime_lookup_table_size : ('a, 'b) t -> int + + (** Finalize the fixed lookup tables. The function can not be called twice *) + val finalize_fixed_lookup_tables : _ t -> unit + + (** Finalize the runtime lookup table configurations. The function can not be + called twice. 
*) + val finalize_runtime_lookup_tables : _ t -> unit + val get_rows_len : ('a, 'b) t -> int end diff --git a/src/lib/crypto/kimchi_backend/common/plonk_constraint_system.ml b/src/lib/crypto/kimchi_backend/common/plonk_constraint_system.ml index ad7141591e2..9c8cadca30f 100644 --- a/src/lib/crypto/kimchi_backend/common/plonk_constraint_system.ml +++ b/src/lib/crypto/kimchi_backend/common/plonk_constraint_system.ml @@ -306,6 +306,8 @@ module Plonk_constraint = struct ; bound_crumb7 : 'v ; (* Coefficients *) two_to_rot : 'f (* Rotation scalar 2^rot *) } + | AddFixedLookupTable of { id : int32; data : 'f array array } + | AddRuntimeTableCfg of { id : int32; first_column : 'f array } | Raw of { kind : Kimchi_gate_type.t; values : 'v array; coeffs : 'f array } [@@deriving sexp] @@ -619,6 +621,12 @@ module Plonk_constraint = struct ; bound_crumb7 = f bound_crumb7 ; (* Coefficients *) two_to_rot } + | AddFixedLookupTable { id; data } -> + (* TODO: see a possible better API - + https://github.com/MinaProtocol/mina/issues/13984 *) + AddFixedLookupTable { id; data } + | AddRuntimeTableCfg { id; first_column } -> + AddRuntimeTableCfg { id; first_column } | Raw { kind; values; coeffs } -> Raw { kind; values = Array.map ~f values; coeffs } @@ -700,6 +708,14 @@ type ('f, 'rust_gates) circuit = and a list of gates that corresponds to the circuit. *) +type 'f fixed_lookup_tables = + | Unfinalized_fixed_lookup_tables_rev of 'f Kimchi_types.lookup_table list + | Compiled_fixed_lookup_tables of 'f Kimchi_types.lookup_table array + +type 'f runtime_tables_cfg = + | Unfinalized_runtime_tables_cfg_rev of 'f Kimchi_types.runtime_table_cfg list + | Compiled_runtime_tables_cfg of 'f Kimchi_types.runtime_table_cfg array + (** The constraint system. *) type ('f, 'rust_gates) t = { (* Map of cells that share the same value (enforced by to the permutation). *) @@ -713,6 +729,13 @@ type ('f, 'rust_gates) t = The finalized tag contains the digest of the circuit. *) mutable gates : ('f, 'rust_gates) circuit + (* Witnesses values corresponding to each runtime lookups *) + ; mutable runtime_lookups_rev : (V.t * (V.t * V.t)) list + (* The user-provided lookup tables associated with this circuit. *) + ; mutable fixed_lookup_tables : 'f fixed_lookup_tables + (* The user-provided runtime table configurations associated with this + circuit. *) + ; mutable runtime_tables_cfg : 'f runtime_tables_cfg ; (* The row to use the next time we add a constraint. *) mutable next_row : int ; (* The size of the public input (which fills the first rows of our constraint system. 
*) @@ -750,6 +773,48 @@ let get_prev_challenges sys = sys.prev_challenges let set_prev_challenges sys challenges = Core_kernel.Set_once.set_exn sys.prev_challenges [%here] challenges +let get_concatenated_fixed_lookup_table_size sys = + match sys.fixed_lookup_tables with + | Unfinalized_fixed_lookup_tables_rev _ -> + failwith + "Cannot get the fixed lookup tables before finalizing the constraint \ + system" + | Compiled_fixed_lookup_tables flts -> + let get_table_size (flt : _ Kimchi_types.lookup_table) = + if Array.length flt.data = 0 then 0 + else Array.length (Array.get flt.data 0) + in + Array.fold_left (fun acc flt -> acc + get_table_size flt) 0 flts + +let get_concatenated_runtime_lookup_table_size sys = + match sys.runtime_tables_cfg with + | Unfinalized_runtime_tables_cfg_rev _ -> + failwith + "Cannot get the runtime table configurations before finalizing the \ + constraint system" + | Compiled_runtime_tables_cfg rt_cfgs -> + Array.fold_left + (fun acc (rt_cfg : _ Kimchi_types.runtime_table_cfg) -> + acc + Array.length rt_cfg.first_column ) + 0 rt_cfgs + +let finalize_fixed_lookup_tables sys = + match sys.fixed_lookup_tables with + | Unfinalized_fixed_lookup_tables_rev fixed_lt_rev -> + sys.fixed_lookup_tables <- + Compiled_fixed_lookup_tables + (Core_kernel.Array.of_list_rev fixed_lt_rev) + | Compiled_fixed_lookup_tables _ -> + failwith "Fixed lookup tables have already been finalized" + +let finalize_runtime_lookup_tables sys = + match sys.runtime_tables_cfg with + | Unfinalized_runtime_tables_cfg_rev rt_cfgs_rev -> + sys.runtime_tables_cfg <- + Compiled_runtime_tables_cfg (Core_kernel.Array.of_list_rev rt_cfgs_rev) + | Compiled_runtime_tables_cfg _ -> + failwith "Runtime table configurations have already been finalized" + (* TODO: shouldn't that Make create something bounded by a signature? As we know what a back end should be? Check where this is used *) (* TODO: glossary of terms in this file (terms, reducing, feeding) + module doc *) @@ -789,6 +854,17 @@ module Make val next_row : t -> int + val get_concatenated_fixed_lookup_table_size : t -> int + + val get_concatenated_runtime_lookup_table_size : t -> int + + (** Finalize the fixed lookup tables. The function can not be called twice *) + val finalize_fixed_lookup_tables : t -> unit + + (** Finalize the runtime lookup table configurations. The function can not be + called twice. *) + val finalize_runtime_lookup_tables : t -> unit + val add_constraint : ?label:string -> t @@ -797,11 +873,18 @@ module Make Snarky_backendless.Constraint.basic -> unit - val compute_witness : t -> (int -> Fp.t) -> Fp.t array array + val compute_witness : + t + -> (int -> Fp.t) + -> Fp.t array array * Fp.t Kimchi_types.runtime_table array val finalize : t -> unit - val finalize_and_get_gates : t -> Gates.t + val finalize_and_get_gates : + t + -> Gates.t + * Fp.t Kimchi_types.lookup_table array + * Fp.t Kimchi_types.runtime_table_cfg array val num_constraints : t -> int @@ -812,6 +895,17 @@ end = struct open Core_kernel open Pickles_types + (* Used by compute_witness to build the runtime tables from the Lookup + constraint *) + module MapRuntimeTable = struct + module T = struct + type t = int32 * Fp.t [@@deriving hash, sexp, compare] + end + + include T + include Core_kernel.Hashable.Make (T) + end + type nonrec t = (Fp.t, Gates.t) t (** Converts the set of permutations (equivalence_classes) to @@ -852,7 +946,7 @@ end = struct and a function that converts the indexed secret inputs to their concrete values. 
*) let compute_witness (sys : t) (external_values : int -> Fp.t) : - Fp.t array array = + Fp.t array array * Fp.t Kimchi_types.runtime_table array = let internal_values : Fp.t Internal_var.Table.t = Internal_var.Table.create () in @@ -899,8 +993,61 @@ end = struct let value = compute lc in res.(col_idx).(row_idx) <- value ; Hashtbl.set internal_values ~key:var ~data:value ) ) ; + + let map_runtime_tables = MapRuntimeTable.Table.create () in + let runtime_tables : Fp.t Kimchi_types.runtime_table array = + match sys.runtime_tables_cfg with + | Unfinalized_runtime_tables_cfg_rev _ -> + failwith + "Attempted to generate a witness for an unfinalized constraint \ + system" + | Compiled_runtime_tables_cfg cfgs -> + Array.mapi cfgs ~f:(fun rt_idx { Kimchi_types.id; first_column } -> + let data = + Array.mapi first_column ~f:(fun i v -> + ignore + (* `add` leaves the value unchanged if the index has been + already used. Therefore, it keeps the first value. + This handles the case that the first column has + duplicated index values. + *) + @@ MapRuntimeTable.Table.add map_runtime_tables ~key:(id, v) + ~data:(i, rt_idx) ; + (* default padding value for lookup *) + Fp.zero ) + in + let rt : Fp.t Kimchi_types.runtime_table = { id; data } in + rt ) + in + + (* Fill in the used entries of the runtime lookup tables. *) + List.iter (List.rev sys.runtime_lookups_rev) ~f:(fun (id, (idx, v)) -> + let compute_value x = compute ([ (Fp.one, x) ], None) in + let vid = compute_value id in + let vidx = compute_value idx in + let vv = compute_value v in + (* FIXME: we should have a int32 here. We are not sure the ID will be a + int32. We should enforce that. + See https://github.com/MinaProtocol/mina/issues/13955 + *) + let id_int32 = Int32.of_string @@ Fp.to_string vid in + (* Using find allows to handle fixed lookup tables + As the map has been built from the runtime table configurations, + except in the case that a runtime table and a fixed table shares the + same ID, the lookups in fixed lookup tables will return None. + See https://github.com/MinaProtocol/mina/issues/14016 + *) + let v = + MapRuntimeTable.Table.find map_runtime_tables (id_int32, vidx) + in + if Option.is_some v then + let i, rt_idx = Option.value_exn v in + let rt = runtime_tables.(rt_idx) in + (* Important note: we do not check if the value has been set before. + Therefore, it will always use the latest value *) + rt.data.(i) <- vv ) ; (* Return the witness. *) - res + (res, runtime_tables) let union_find sys v = Hashtbl.find_or_add sys.union_finds v ~default:(fun () -> @@ -919,6 +1066,9 @@ end = struct ; prev_challenges = Set_once.create () ; internal_vars = Internal_var.Table.create () ; gates = Unfinalized_rev [] (* Gates.create () *) + ; runtime_lookups_rev = [] + ; fixed_lookup_tables = Unfinalized_fixed_lookup_tables_rev [] + ; runtime_tables_cfg = Unfinalized_runtime_tables_cfg_rev [] ; rows_rev = [] ; next_row = 0 ; equivalence_classes = V.Table.create () @@ -954,6 +1104,16 @@ end = struct let next_row (sys : t) = sys.next_row + let get_concatenated_fixed_lookup_table_size (sys : t) = + get_concatenated_fixed_lookup_table_size sys + + let get_concatenated_runtime_lookup_table_size (sys : t) = + get_concatenated_runtime_lookup_table_size sys + + let finalize_fixed_lookup_tables = finalize_fixed_lookup_tables + + let finalize_runtime_lookup_tables = finalize_runtime_lookup_tables + (** Adds {row; col} to the system's wiring under a specific key. A key is an external or internal variable. 
The row must be given relative to the start of the circuit @@ -995,14 +1155,29 @@ end = struct *) let rec finalize_and_get_gates sys = match sys with - | { gates = Compiled (_, gates); _ } -> - gates + | { gates = Compiled (_, gates) + ; fixed_lookup_tables = Compiled_fixed_lookup_tables fixed_lookup_tables + ; runtime_tables_cfg = Compiled_runtime_tables_cfg runtime_tables_cfg + ; _ + } -> + (gates, fixed_lookup_tables, runtime_tables_cfg) + (* Finalizing lookup tables and runtime table cfgs first *) + | { fixed_lookup_tables = Unfinalized_fixed_lookup_tables_rev _; _ } -> + finalize_fixed_lookup_tables sys ; + finalize_and_get_gates sys + | { runtime_tables_cfg = Unfinalized_runtime_tables_cfg_rev _; _ } -> + finalize_runtime_lookup_tables sys ; + finalize_and_get_gates sys | { pending_generic_gate = Some (l, r, o, coeffs); _ } -> (* Finalize any pending generic constraint first. *) add_row sys [| l; r; o |] Generic coeffs ; sys.pending_generic_gate <- None ; finalize_and_get_gates sys - | { gates = Unfinalized_rev gates_rev; _ } -> + | { gates = Unfinalized_rev gates_rev + ; fixed_lookup_tables = Compiled_fixed_lookup_tables fixed_lookup_tables + ; runtime_tables_cfg = Compiled_runtime_tables_cfg runtime_tables_cfg + ; _ + } -> let rust_gates = Gates.create () in (* Create rows for public input. *) @@ -1075,15 +1250,24 @@ end = struct sys.gates <- Compiled (md5_digest, rust_gates) ; (* return the gates *) - rust_gates + (rust_gates, fixed_lookup_tables, runtime_tables_cfg) (** Calls [finalize_and_get_gates] and ignores the result. *) - let finalize t = ignore (finalize_and_get_gates t : Gates.t) + let finalize t = + ignore + ( finalize_and_get_gates t + : Gates.t + * Fp.t Kimchi_types.lookup_table array + * Fp.t Kimchi_types.runtime_table_cfg array ) - let num_constraints sys = finalize_and_get_gates sys |> Gates.len + let num_constraints sys = + let gates, _, _ = finalize_and_get_gates sys in + Gates.len gates let to_json (sys : t) : string = - let gates = finalize_and_get_gates sys in + (* TODO: add lookup tables and runtime table cfgs *) + (* https://github.com/MinaProtocol/mina/issues/13886 *) + let gates, _, _ = finalize_and_get_gates sys in let public_input_size = Set_once.get_exn sys.public_input_size [%here] in Gates.to_json public_input_size gates @@ -1188,8 +1372,8 @@ end = struct let reduce_lincom sys (x : Fp.t Snarky_backendless.Cvar.t) = let constant, terms = Fp.( - Snarky_backendless.Cvar.to_constant_and_terms ~add ~mul ~zero:(of_int 0) - ~equal ~one:(of_int 1)) + Snarky_backendless.Cvar.to_constant_and_terms ~add ~mul ~zero ~equal + ~one) x in let terms = accumulate_terms terms in @@ -1686,16 +1870,37 @@ end = struct (Fn.compose add_endoscale_scalar_round (Endoscale_scalar_round.map ~f:reduce_to_v) ) | Plonk_constraint.T (Lookup { w0; w1; w2; w3; w4; w5; w6 }) -> + (* table ID *) + let red_w0 = reduce_to_v w0 in + (* idx1 *) + let red_w1 = reduce_to_v w1 in + (* v1 *) + let red_w2 = reduce_to_v w2 in + (* idx2 *) + let red_w3 = reduce_to_v w3 in + (* v2 *) + let red_w4 = reduce_to_v w4 in + (* idx3 *) + let red_w5 = reduce_to_v w5 in + (* v3 *) + let red_w6 = reduce_to_v w6 in let vars = - [| Some (reduce_to_v w0) - ; Some (reduce_to_v w1) - ; Some (reduce_to_v w2) - ; Some (reduce_to_v w3) - ; Some (reduce_to_v w4) - ; Some (reduce_to_v w5) - ; Some (reduce_to_v w6) + [| Some red_w0 + ; Some red_w1 + ; Some red_w2 + ; Some red_w3 + ; Some red_w4 + ; Some red_w5 + ; Some red_w6 |] in + let lookup1 = (red_w0, (red_w1, red_w2)) in + let lookup2 = (red_w0, (red_w3, red_w4)) 
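+ (* the three pairs share the table id [red_w0]: a single Lookup gate performs three (index, value) queries into the same table *)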
in + let lookup3 = (red_w0, (red_w5, red_w6)) in + (* We record all three lookups. In the case the user uses the same + index multiple times, the last value will be used *) + sys.runtime_lookups_rev <- + lookup3 :: lookup2 :: lookup1 :: sys.runtime_lookups_rev ; add_row sys vars Lookup [||] | Plonk_constraint.T (RangeCheck0 @@ -2064,9 +2269,9 @@ end = struct //! | 5 | `bound_limb2` | `shifted_limb2` | `excess_limb2` | `word_limb2` | //! | 6 | `bound_limb3` | `shifted_limb3` | `excess_limb3` | `word_limb3` | //! | 7 | `bound_crumb0` | `shifted_crumb0` | `excess_crumb0` | `word_crumb0` | - //! | 8 | `bound_crumb1` | `shifted_crumb1` | `excess_crumb1` | `word_crumb1` | - //! | 9 | `bound_crumb2` | `shifted_crumb2` | `excess_crumb2` | `word_crumb2` | - //! | 10 | `bound_crumb3` | `shifted_crumb3` | `excess_crumb3` | `word_crumb3` | + //! | 8 | `bound_crumb1` | `shifted_crumb1` | `excess_crumb1` | `word_crumb1` | + //! | 9 | `bound_crumb2` | `shifted_crumb2` | `excess_crumb2` | `word_crumb2` | + //! | 10 | `bound_crumb3` | `shifted_crumb3` | `excess_crumb3` | `word_crumb3` | //! | 11 | `bound_crumb4` | `shifted_crumb4` | `excess_crumb4` | `word_crumb4` | //! | 12 | `bound_crumb5` | `shifted_crumb5` | `excess_crumb5` | `word_crumb5` | //! | 13 | `bound_crumb6` | `shifted_crumb6` | `excess_crumb6` | `word_crumb6` | @@ -2091,6 +2296,28 @@ end = struct |] in add_row sys vars_curr Rot64 [| two_to_rot |] + | Plonk_constraint.T (AddFixedLookupTable { id; data }) -> ( + match sys.fixed_lookup_tables with + | Unfinalized_fixed_lookup_tables_rev fixed_lookup_tables -> + let lt : Fp.t Kimchi_types.lookup_table list = + { id; data } :: fixed_lookup_tables + in + sys.fixed_lookup_tables <- Unfinalized_fixed_lookup_tables_rev lt + | Compiled_fixed_lookup_tables _ -> + failwith + "Trying to add a fixed lookup table when it has already been \ finalized" ) + | Plonk_constraint.T (AddRuntimeTableCfg { id; first_column }) -> ( + match sys.runtime_tables_cfg with + | Unfinalized_runtime_tables_cfg_rev runtime_tables_cfg -> + let rt_cfg : Fp.t Kimchi_types.runtime_table_cfg list = + { id; first_column } :: runtime_tables_cfg + in + sys.runtime_tables_cfg <- Unfinalized_runtime_tables_cfg_rev rt_cfg + | Compiled_runtime_tables_cfg _ -> + failwith + "Trying to add a runtime table configuration when it has \ already been finalized" ) | Plonk_constraint.T (Raw { kind; values; coeffs }) -> let values = Array.init 15 ~f:(fun i -> diff --git a/src/lib/crypto/kimchi_backend/common/plonk_dlog_oracles.ml b/src/lib/crypto/kimchi_backend/common/plonk_dlog_oracles.ml index 89011971fb8..1aa9b766d1c 100644 --- a/src/lib/crypto/kimchi_backend/common/plonk_dlog_oracles.ml +++ b/src/lib/crypto/kimchi_backend/common/plonk_dlog_oracles.ml @@ -11,20 +11,33 @@ module type Inputs_intf = sig module Proof : sig type t + type with_public_evals + module Challenge_polynomial : T0 module Backend : sig type t + + type with_public_evals end val to_backend : Challenge_polynomial.t list -> Field.t list -> t -> Backend.t + + val to_backend_with_public_evals : + Challenge_polynomial.t list + -> Field.t list + -> with_public_evals + -> Backend.with_public_evals end module Backend : sig type t = Field.t Kimchi_types.oracles val create : Verifier_index.t -> Proof.Backend.t -> t + + val create_with_public_evals : + Verifier_index.t -> Proof.Backend.with_public_evals -> t end end @@ -35,6 +48,11 @@ module Make (Inputs : Inputs_intf) = struct let pi = Proof.to_backend prev_challenge input pi in Backend.create vk pi + let create_with_public_evals vk 
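+ (* mirrors [create] above, but converts through the with_public_evals variants *)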
prev_challenge input + (pi : Proof.with_public_evals) = + let pi = Proof.to_backend_with_public_evals prev_challenge input pi in + Backend.create_with_public_evals vk pi + open Backend let scalar_challenge t = Scalar_challenge.create t diff --git a/src/lib/crypto/kimchi_backend/common/plonk_dlog_proof.ml b/src/lib/crypto/kimchi_backend/common/plonk_dlog_proof.ml index 0c86b236812..1bc9852812a 100644 --- a/src/lib/crypto/kimchi_backend/common/plonk_dlog_proof.ml +++ b/src/lib/crypto/kimchi_backend/common/plonk_dlog_proof.ml @@ -23,7 +23,7 @@ module type Stable_v1 = sig module Latest = V1 end - type t = Stable.V1.t [@@deriving sexp, compare, yojson] + type t = Stable.V1.t [@@deriving sexp, compare, yojson, hash, equal] end module type Inputs_intf = sig @@ -90,27 +90,31 @@ module type Inputs_intf = sig end module Backend : sig + type with_public_evals = + (Curve.Affine.Backend.t, Scalar_field.t) Kimchi_types.proof_with_public + type t = (Curve.Affine.Backend.t, Scalar_field.t) Kimchi_types.prover_proof val create : Index.t - -> Scalar_field.Vector.t - -> Scalar_field.Vector.t - -> Scalar_field.t array - -> Curve.Affine.Backend.t array - -> t + -> primary:Scalar_field.Vector.t + -> auxiliary:Scalar_field.Vector.t + -> prev_chals:Scalar_field.t array + -> prev_comms:Curve.Affine.Backend.t array + -> with_public_evals val create_async : Index.t - -> Scalar_field.Vector.t - -> Scalar_field.Vector.t - -> Scalar_field.t array - -> Curve.Affine.Backend.t array - -> t Promise.t + -> primary:Scalar_field.Vector.t + -> auxiliary:Scalar_field.Vector.t + -> prev_chals:Scalar_field.t array + -> prev_comms:Curve.Affine.Backend.t array + -> with_public_evals Promise.t - val verify : Verifier_index.t -> t -> bool + val verify : Verifier_index.t -> with_public_evals -> bool - val batch_verify : Verifier_index.t array -> t array -> bool Promise.t + val batch_verify : + Verifier_index.t array -> with_public_evals array -> bool Promise.t end end @@ -180,7 +184,7 @@ module Make (Inputs : Inputs_intf) = struct let map_creator c ~f ~messages ~openings = f (c ~messages ~openings) let create ~messages ~openings = - let open Pickles_types.Plonk_types.Proof in + let open Pickles_types.Plonk_types.Proof.Stable.Latest in { messages; openings } end @@ -197,14 +201,54 @@ module Make (Inputs : Inputs_intf) = struct end end] + module T = struct + type t = (G.Affine.t, Fq.t, Fq.t array) Pickles_types.Plonk_types.Proof.t + [@@deriving compare, sexp, yojson, hash, equal] + + let id = "plong_dlog_proof_" ^ Inputs.id + + type 'a creator = + messages:G.Affine.t Pickles_types.Plonk_types.Messages.t + -> openings: + (G.Affine.t, Fq.t, Fq.t array) Pickles_types.Plonk_types.Openings.t + -> 'a + + let map_creator c ~f ~messages ~openings = f (c ~messages ~openings) + + let create ~messages ~openings = + let open Pickles_types.Plonk_types.Proof in + { messages; openings } + end + + include T + include ( - Stable.Latest : + struct + include Allocation_functor.Make.Basic (T) + include Allocation_functor.Make.Partial.Sexp (T) + include Allocation_functor.Make.Partial.Yojson (T) + end : sig - type t [@@deriving compare, sexp, yojson, hash, equal, bin_io] - end - with type t := t ) - - [%%define_locally Stable.Latest.(create)] + include + Allocation_functor.Intf.Output.Basic_intf + with type t := t + and type 'a creator := 'a creator + + include + Allocation_functor.Intf.Output.Sexp_intf + with type t := t + and type 'a creator := 'a creator + + include + Allocation_functor.Intf.Output.Yojson_intf + with type t := t + and type 'a creator := 
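+ (* the creator shape, from [T] above: messages -> openings -> t *)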
'a creator + end ) + + type with_public_evals = + { proof : t + ; public_evals : (Scalar_field.t array * Scalar_field.t array) option + } let g t f = G.Affine.of_backend (f t) @@ -213,7 +257,8 @@ module Make (Inputs : Inputs_intf) = struct Array.iter arr ~f:(fun fe -> Fq.Vector.emplace_back vec fe) ; vec - (** Note that this function will panic if any of the points are points at infinity *) + (** Note that this function will panic if any of the points are points at + infinity *) let opening_proof_of_backend_exn (t : Opening_proof_backend.t) = let g (x : G.Affine.Backend.t) : G.Affine.t = G.Affine.of_backend x |> Pickles_types.Or_infinity.finite_exn @@ -287,13 +332,12 @@ module Make (Inputs : Inputs_intf) = struct ; foreign_field_mul_lookup_selector } + let evals_to_tuple ({ zeta; zeta_omega } : _ Kimchi_types.point_evaluations) = + (zeta, zeta_omega) + let of_backend (t : Backend.t) : t = let proof = opening_proof_of_backend_exn t.proof in let evals = - let evals_to_tuple - ({ zeta; zeta_omega } : _ Kimchi_types.point_evaluations) = - (zeta, zeta_omega) - in Plonk_types.Evals.map ~f:evals_to_tuple (eval_of_backend t.evals) in let wo x : Inputs.Curve.Affine.t array = @@ -314,13 +358,30 @@ module Make (Inputs : Inputs_intf) = struct ; lookup = Option.map t.commitments.lookup ~f:(fun l : _ Pickles_types.Plonk_types.Messages.Lookup.t -> - { sorted = Array.map ~f:wo l.sorted + { sorted = + Vector.init + Pickles_types.Plonk_types.Lookup_sorted_minus_1.n + ~f:(fun i -> wo l.sorted.(i)) + ; sorted_5th_column = + (* TODO: This is ugly and error-prone *) + Option.try_with (fun () -> + wo + l.sorted.(Nat.to_int + Pickles_types.Plonk_types + .Lookup_sorted_minus_1 + .n) ) ; aggreg = wo l.aggreg ; runtime = Option.map ~f:wo l.runtime } ) } ~openings:{ proof; evals; ft_eval1 = t.ft_eval1 } + let of_backend_with_public_evals (t : Backend.with_public_evals) : + with_public_evals = + { proof = of_backend t.proof + ; public_evals = Option.map ~f:evals_to_tuple t.public_evals + } + let eval_to_backend { Pickles_types.Plonk_types.Evals.w ; coefficients @@ -380,6 +441,9 @@ module Make (Inputs : Inputs_intf) = struct (v : t) = Array.init (V.length v) ~f:(V.get v) + let evals_of_tuple (zeta, zeta_omega) : _ Kimchi_types.point_evaluations = + { zeta; zeta_omega } + let to_backend' (chal_polys : Challenge_polynomial.t list) primary_input ({ messages = { w_comm; z_comm; t_comm; lookup } ; openings = @@ -392,16 +456,16 @@ module Make (Inputs : Inputs_intf) = struct let g x = G.Affine.to_backend (Pickles_types.Or_infinity.Finite x) in let pcwo t = Poly_comm.to_backend (`Without_degree_bound t) in let lr = Array.map lr ~f:(fun (x, y) -> (g x, g y)) in - let evals_of_tuple (zeta, zeta_omega) : _ Kimchi_types.point_evaluations = - { zeta; zeta_omega } - in { commitments = { w_comm = tuple15_of_vec (Pickles_types.Vector.map ~f:pcwo w_comm) ; z_comm = pcwo z_comm ; t_comm = pcwo t_comm ; lookup = Option.map lookup ~f:(fun t : _ Kimchi_types.lookup_commitments -> - { sorted = Array.map ~f:pcwo t.sorted + { sorted = + Array.map ~f:pcwo + (Array.append (Vector.to_array t.sorted) + (Option.to_array t.sorted_5th_column) ) ; aggreg = pcwo t.aggreg ; runtime = Option.map ~f:pcwo t.runtime } ) @@ -430,7 +494,18 @@ module Make (Inputs : Inputs_intf) = struct let to_backend chal_polys primary_input t = to_backend' chal_polys (List.to_array primary_input) t - let create ?message pk ~primary ~auxiliary = + let to_backend_with_public_evals' (chal_polys : Challenge_polynomial.t list) + primary_input ({ proof; public_evals } : 
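+ (* [public_evals] is an optional pair of evaluation arrays: one at zeta, one at zeta * omega *)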
with_public_evals) : + Backend.with_public_evals = + { proof = to_backend' chal_polys primary_input proof + ; public_evals = Option.map ~f:evals_of_tuple public_evals + } + + let to_backend_with_public_evals chal_polys primary_input t = + to_backend_with_public_evals' chal_polys (List.to_array primary_input) t + + (* Extract challenges and commitments from the (optional) message *) + let extract_challenges_and_commitments ~message = let chal_polys = match (message : message option) with Some s -> s | None -> [] in @@ -444,35 +519,31 @@ module Make (Inputs : Inputs_intf) = struct ~f:(fun { Challenge_polynomial.commitment; _ } -> G.Affine.to_backend (Finite commitment) ) in - let res = Backend.create pk primary auxiliary challenges commitments in - of_backend res + (challenges, commitments) + + let create ?message pk ~primary ~auxiliary = + let prev_chals, prev_comms = extract_challenges_and_commitments ~message in + let res = Backend.create pk ~primary ~auxiliary ~prev_chals ~prev_comms in + of_backend_with_public_evals res let create_async ?message pk ~primary ~auxiliary = - let chal_polys = - match (message : message option) with Some s -> s | None -> [] - in - let challenges = - List.map chal_polys ~f:(fun { Challenge_polynomial.challenges; _ } -> - challenges ) - |> Array.concat - in - let commitments = - Array.of_list_map chal_polys - ~f:(fun { Challenge_polynomial.commitment; _ } -> - G.Affine.to_backend (Finite commitment) ) - in + let prev_chals, prev_comms = extract_challenges_and_commitments ~message in let%map.Promise res = - Backend.create_async pk primary auxiliary challenges commitments + Backend.create_async pk ~primary ~auxiliary ~prev_chals ~prev_comms in - of_backend res + of_backend_with_public_evals res let batch_verify' (conv : 'a -> Fq.t array) - (ts : (Verifier_index.t * t * 'a * message option) list) = + (ts : (Verifier_index.t * with_public_evals * 'a * message option) list) = let logger = Internal_tracing_context_logger.get () in [%log internal] "Batch_verify_backend_convert_inputs" ; let vks_and_v = Array.of_list_map ts ~f:(fun (vk, t, xs, m) -> - let p = to_backend' (Option.value ~default:[] m) (conv xs) t in + let p = + to_backend_with_public_evals' + (Option.value ~default:[] m) + (conv xs) t + in (vk, p) ) in [%log internal] "Batch_verify_backend_convert_inputs_done" ; @@ -489,7 +560,7 @@ module Make (Inputs : Inputs_intf) = struct let verify ?message t vk xs : bool = Backend.verify vk - (to_backend' + (to_backend_with_public_evals' (Option.value ~default:[] message) (vec_to_array (module Scalar_field.Vector) xs) t ) diff --git a/src/lib/crypto/kimchi_backend/common/poly_comm.ml b/src/lib/crypto/kimchi_backend/common/poly_comm.ml index 326dbbafce6..c394040811f 100644 --- a/src/lib/crypto/kimchi_backend/common/poly_comm.ml +++ b/src/lib/crypto/kimchi_backend/common/poly_comm.ml @@ -128,7 +128,9 @@ module Make (Inputs : Inputs_intf) = struct `Without_degree_bound (Array.map unshifted ~f:(function | Infinity -> - assert false + failwith + "Pickles cannot handle point at infinity. 
Commitments must \ + be representable in affine coordinates" | Finite (x, y) -> (x, y) ) ) | _ -> diff --git a/src/lib/crypto/kimchi_backend/common/tests/dune b/src/lib/crypto/kimchi_backend/common/tests/dune new file mode 100644 index 00000000000..befeaadf26c --- /dev/null +++ b/src/lib/crypto/kimchi_backend/common/tests/dune @@ -0,0 +1,38 @@ +(tests + (names test_lookup_table_constraint_kind) + (libraries + ;; opam libraries + alcotest + bignum.bigint + core_kernel + base + digestif + ppx_inline_test.config + zarith + ;; local libraries + kimchi_bindings + kimchi_types + pasta_bindings + kimchi_backend.pasta + kimchi_backend.pasta.basic + kimchi_backend.gadgets_test_runner + kimchi_backend.pasta.constraint_system + bitstring_lib + snarky.intf + snarky.backendless + snarky_group_map + sponge + kimchi_backend + mina_version + base58_check + codable + random_oracle_input + snarky_log + group_map + snarky_curve + key_cache + snark_keys_header + tuple_lib + promise + kimchi_backend.common + ppx_version.runtime)) diff --git a/src/lib/crypto/kimchi_backend/common/tests/test_lookup_table_constraint_kind.ml b/src/lib/crypto/kimchi_backend/common/tests/test_lookup_table_constraint_kind.ml new file mode 100644 index 00000000000..19919d38c87 --- /dev/null +++ b/src/lib/crypto/kimchi_backend/common/tests/test_lookup_table_constraint_kind.ml @@ -0,0 +1,492 @@ +(** Testing + ------- + Component: Kimchi_backend_common + Subject: Testing computation of the witness and the tracking of fixed and + runtime lookup tables + Invocation: dune exec \ + src/lib/crypto/kimchi_backend/common/tests/test_lookup_table_constraint_kind.exe +*) + +(* Keeping the test low-level for learning purposes *) + +open Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint + +module Tick = Kimchi_backend.Pasta.Vesta_based_plonk +module Impl = Snarky_backendless.Snark.Run.Make (Tick) + +let add_constraint c = Impl.assert_ { basic = T c; annotation = None } + +(* Verify finalize_and_get_gates *) +let test_finalize_and_get_gates_with_lookup_tables () = + let cs = Tick.R1CS_constraint_system.create () in + let xor_table = + [| [| Tick.Field.zero; Tick.Field.zero; Tick.Field.zero |] + ; [| Tick.Field.zero; Tick.Field.one; Tick.Field.one |] + ; [| Tick.Field.one; Tick.Field.zero; Tick.Field.one |] + ; [| Tick.Field.one; Tick.Field.one; Tick.Field.one |] + |] + in + let and_table = + [| [| Tick.Field.zero; Tick.Field.zero; Tick.Field.zero |] + ; [| Tick.Field.zero; Tick.Field.one; Tick.Field.zero |] + ; [| Tick.Field.one; Tick.Field.zero; Tick.Field.zero |] + ; [| Tick.Field.one; Tick.Field.one; Tick.Field.one |] + |] + in + let () = + Tick.R1CS_constraint_system.( + add_constraint cs (T (AddFixedLookupTable { id = 1l; data = xor_table }))) + in + let () = + Tick.R1CS_constraint_system.( + add_constraint cs (T (AddFixedLookupTable { id = 2l; data = and_table }))) + in + let () = Tick.R1CS_constraint_system.set_primary_input_size cs 1 in + let _gates, lts, _rt = + Tick.R1CS_constraint_system.finalize_and_get_gates cs + in + assert (lts.(0).id = 1l) ; + assert (lts.(1).id = 2l) ; + assert (Array.length lts = 2) + +let test_finalize_and_get_gates_with_runtime_table_cfg () = + let cs = Tick.R1CS_constraint_system.create () in + + let indexed_runtime_table_cfg = Array.init 4 Tick.Field.of_int in + + let () = + Tick.R1CS_constraint_system.( + add_constraint cs + (T + (AddRuntimeTableCfg + { id = 1l; first_column = indexed_runtime_table_cfg } ) )) + in + let () = Tick.R1CS_constraint_system.set_primary_input_size cs 1 in + let _aux = 
Tick.R1CS_constraint_system.set_auxiliary_input_size cs 1 in + let _gates, _lt, rt = Tick.R1CS_constraint_system.finalize_and_get_gates cs in + assert (rt.(0).id = 1l) ; + assert (Array.length rt = 1) + +let test_compute_witness_with_lookup_to_the_same_idx_twice () = + (* See the comment in compute_witness when populating the runtime tables. The + function does not check that the runtime table has already been set at a + certain position, and it overwrites the previously set value *) + let table_id = 0 in + let table_size = 10 in + let first_column = Array.init table_size Tick.Field.of_int in + let repeated_idx = 0 in + let other_idx = 1 in + let fv2 = Tick.Field.random () in + let fv3 = Tick.Field.random () in + let external_values = + Tick.Field. + [| of_int table_id + ; of_int repeated_idx + ; random () + ; of_int repeated_idx + ; fv2 + ; of_int other_idx + ; fv3 + |] + in + let cs = + Impl.constraint_system ~input_typ:Impl.Typ.unit ~return_typ:Impl.Typ.unit + (fun () () -> + let vtable_id = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(0)) + in + let vidx1 = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(1)) + in + let vv1 = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(2)) + in + let vidx2 = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(3)) + in + let vv2 = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(4)) + in + let vidx3 = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(5)) + in + let vv3 = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(6)) + in + add_constraint + (AddRuntimeTableCfg { id = Int32.of_int table_id; first_column }) ; + add_constraint + (Lookup + { w0 = vtable_id + ; w1 = vidx1 + ; w2 = vv1 + ; w3 = vidx2 + ; w4 = vv2 + ; w5 = vidx3 + ; w6 = vv3 + } ) ) + in + let _ = Tick.R1CS_constraint_system.finalize cs in + let _witnesses, runtime_tables = + Tick.R1CS_constraint_system.compute_witness cs (Array.get external_values) + in + (* checking only one table has been created *) + assert (Array.length runtime_tables = 1) ; + let rt = runtime_tables.(0) in + (* Second value is chosen *) + assert (Tick.Field.equal rt.data.(repeated_idx) fv2) ; + assert (Tick.Field.equal rt.data.(other_idx) fv3) + +let test_compute_witness_returns_correctly_filled_runtime_tables_one_lookup () = + (* We have one table with ID 0, indexed from 0 to n, and we will fill with + some values using the constraint RuntimeLookup. 
+ We start with one lookup + *) + let n = 10 in + let first_column = Array.init n Tick.Field.of_int in + let table_id = 0 in + let idx = Random.int n in + let v = Tick.Field.random () in + let external_values = Tick.Field.[| of_int table_id; of_int idx; v |] in + let cs = + Impl.constraint_system ~input_typ:Impl.Typ.unit ~return_typ:Impl.Typ.unit + (fun () () -> + let vtable_id = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(0)) + in + let vidx = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(1)) + in + let vv = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(2)) + in + (* Config *) + add_constraint + (AddRuntimeTableCfg { id = Int32.of_int table_id; first_column }) ; + add_constraint + (Lookup + { w0 = vtable_id + ; w1 = vidx + ; w2 = vv + ; w3 = vidx + ; w4 = vv + ; w5 = vidx + ; w6 = vv + } ) ) + in + let _ = Tick.R1CS_constraint_system.finalize cs in + let _witnesses, runtime_tables = + Tick.R1CS_constraint_system.compute_witness cs (Array.get external_values) + in + (* checking only one table has been created *) + assert (Array.length runtime_tables = 1) ; + let rt = runtime_tables.(0) in + (* with the correct ID *) + assert (Int32.(equal rt.id (of_int table_id))) ; + let exp_rt = Array.init n (fun i -> if i = idx then v else Tick.Field.zero) in + assert (Array.for_all2 Tick.Field.equal rt.data exp_rt) + +let test_compute_witness_returns_correctly_filled_runtime_tables_multiple_lookup + () = + (* We have one table with ID 0, indexed from 0 to n, and we will fill with + some values using the constraint RuntimeLookup. + We start with one lookup + *) + let n = 10 in + let first_column = Array.init n Tick.Field.of_int in + let table_id = 0 in + let exp_rt_data = Array.init n (fun _ -> Tick.Field.zero) in + (* nb of lookups *) + let m = Random.int n in + let external_values = Array.init (1 + (m * 2)) (fun _ -> Tick.Field.zero) in + let cs = + Impl.constraint_system ~input_typ:Impl.Typ.unit ~return_typ:Impl.Typ.unit + (fun () () -> + let vtable_id = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(0)) + in + (* Config *) + add_constraint + (AddRuntimeTableCfg { id = Int32.of_int table_id; first_column }) ; + ignore + @@ List.init m (fun i -> + let j = (2 * i) + 1 in + let idx = Random.int n in + let v = Tick.Field.random () in + external_values.(j) <- Tick.Field.of_int idx ; + external_values.(j + 1) <- v ; + exp_rt_data.(idx) <- v ; + let vidx = + Impl.exists Impl.Field.typ ~compute:(fun () -> + external_values.(j) ) + in + let vv = + Impl.exists Impl.Field.typ ~compute:(fun () -> + external_values.(j + 1) ) + in + add_constraint + (Lookup + { w0 = vtable_id + ; w1 = vidx + ; w2 = vv + ; w3 = vidx + ; w4 = vv + ; w5 = vidx + ; w6 = vv + } ) ) ) + in + let _ = Tick.R1CS_constraint_system.finalize cs in + let _witnesses, runtime_tables = + Tick.R1CS_constraint_system.compute_witness cs (Array.get external_values) + in + (* checking only one table has been created *) + assert (Array.length runtime_tables = 1) ; + let rt = runtime_tables.(0) in + (* with the correct ID *) + assert (Int32.(equal rt.id (of_int table_id))) ; + assert (Array.for_all2 Tick.Field.equal rt.data exp_rt_data) + +(* Checking that lookups within a lookup table works correctly with the Lookup + constraint in the case of the fixed lookup table does not share its ID with a + runtime table *) +let test_compute_witness_with_fixed_lookup_table_and_runtime_table () = + let n = 10 in + (* Fixed table *) + let fixed_lt_id = 2 in + let indexes = Array.init n 
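+ (* index column 0..n-1; paired with the random-values column to form the fixed table's data below *)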
Tick.Field.of_int in + let fixed_lt_values = Array.init n (fun _ -> Tick.Field.random ()) in + let data = [| indexes; fixed_lt_values |] in + (* Lookup info for fixed lookup *) + let fixed_lookup_idx = 0 in + let fixed_lookup_v = fixed_lt_values.(fixed_lookup_idx) in + (* rt *) + let rt_cfg_id = 3 in + let first_column = Array.init n Tick.Field.of_int in + let rt_idx = 1 in + let rt_v = Tick.Field.random () in + let external_values = + [| Tick.Field.of_int fixed_lt_id + ; Tick.Field.of_int rt_cfg_id + ; Tick.Field.of_int fixed_lookup_idx + ; fixed_lookup_v + ; Tick.Field.of_int rt_idx + ; rt_v + |] + in + let cs = + Impl.constraint_system ~input_typ:Impl.Typ.unit ~return_typ:Impl.Typ.unit + (fun () () -> + (* Add the fixed lookup table to the cs *) + add_constraint + (AddFixedLookupTable { id = Int32.of_int fixed_lt_id; data }) ; + let vfixed_lt_id = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(0)) + in + + (* Runtime table cfg *) + let vrt_cfg_id = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(1)) + in + (* Config *) + add_constraint + (AddRuntimeTableCfg { id = Int32.of_int rt_cfg_id; first_column }) ; + (* Lookup into fixed lookup table *) + let vfixed_lookup_idx = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(2)) + in + let vfixed_lookup_v = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(3)) + in + add_constraint + (Lookup + { w0 = vfixed_lt_id + ; w1 = vfixed_lookup_idx + ; w2 = vfixed_lookup_v + ; w3 = vfixed_lookup_idx + ; w4 = vfixed_lookup_v + ; w5 = vfixed_lookup_idx + ; w6 = vfixed_lookup_v + } ) ; + (* Lookup into runtime table *) + let vrt_idx = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(4)) + in + let vrt_v = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(5)) + in + add_constraint + (Lookup + { w0 = vrt_cfg_id + ; w1 = vrt_idx + ; w2 = vrt_v + ; w3 = vrt_idx + ; w4 = vrt_v + ; w5 = vrt_idx + ; w6 = vrt_v + } ) ) + in + + let _ = Tick.R1CS_constraint_system.finalize cs in + let _witnesses, runtime_tables = + Tick.R1CS_constraint_system.compute_witness cs (Array.get external_values) + in + (* checking only one table has been created *) + assert (Array.length runtime_tables = 1) ; + let rt = runtime_tables.(0) in + (* with the correct ID *) + assert (Int32.(equal rt.id (of_int rt_cfg_id))) ; + assert (Tick.Field.equal rt.data.(rt_idx) rt_v) + +(* Checking that lookups within a lookup table works correctly with the Lookup + constraint in the case of the fixed lookup table does share its ID with a + runtime table. 
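+ Because the witness-side map is keyed by (table id, first-column value), the + runtime half below uses first-column entries n..2n-1, so its keys cannot + collide with the fixed half's 0..n-1.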
*) +let test_compute_witness_with_fixed_lookup_table_and_runtime_table_sharing_ids + () = + let n = 10 in + (* Fixed table *) + let fixed_lt_id = 2 in + let rt_cfg_id = fixed_lt_id in + let indexes = Array.init n Tick.Field.of_int in + let fixed_lt_values = Array.init n (fun _ -> Tick.Field.random ()) in + let data = [| indexes; fixed_lt_values |] in + (* Lookup into fixed lookup table *) + let fixed_lookup_idx = Random.int n in + let fixed_lookup_v = fixed_lt_values.(fixed_lookup_idx) in + let rt_idx = n + Random.int n in + let rt_v = Tick.Field.random () in + let external_values = + [| Tick.Field.of_int fixed_lt_id + ; Tick.Field.of_int rt_cfg_id + ; Tick.Field.of_int fixed_lookup_idx + ; fixed_lookup_v + ; Tick.Field.of_int rt_idx + ; rt_v + |] + in + (* Extend the lookup table *) + let first_column = Array.init n (fun i -> Tick.Field.of_int (n + i)) in + let cs = + Impl.constraint_system ~input_typ:Impl.Typ.unit ~return_typ:Impl.Typ.unit + (fun () () -> + (* Add the fixed lookup table to the cs *) + add_constraint + (AddFixedLookupTable { id = Int32.of_int fixed_lt_id; data }) ; + + let vfixed_lt_id = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(0)) + in + let vrt_cfg_id = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(1)) + in + (* Config *) + add_constraint + (AddRuntimeTableCfg { id = Int32.of_int rt_cfg_id; first_column }) ; + let vfixed_lookup_idx = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(2)) + in + let vfixed_lookup_v = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(3)) + in + add_constraint + (Lookup + { w0 = vfixed_lt_id + ; w1 = vfixed_lookup_idx + ; w2 = vfixed_lookup_v + ; w3 = vfixed_lookup_idx + ; w4 = vfixed_lookup_v + ; w5 = vfixed_lookup_idx + ; w6 = vfixed_lookup_v + } ) ; + (* Lookup into runtime table *) + let vrt_idx = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(4)) + in + let vrt_v = + Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(5)) + in + add_constraint + (Lookup + { w0 = vrt_cfg_id + ; w1 = vrt_idx + ; w2 = vrt_v + ; w3 = vrt_idx + ; w4 = vrt_v + ; w5 = vrt_idx + ; w6 = vrt_v + } ) ) + in + let _ = Tick.R1CS_constraint_system.finalize cs in + let _witnesses, runtime_tables = + Tick.R1CS_constraint_system.compute_witness cs (Array.get external_values) + in + (* checking only one table has been created *) + assert (Array.length runtime_tables = 1) ; + let rt = runtime_tables.(0) in + (* with the correct ID *) + assert (Int32.(equal rt.id (of_int rt_cfg_id))) ; + assert (Tick.Field.equal rt.data.(rt_idx - n) rt_v) + +let test_cannot_finalize_twice_the_fixed_lookup_tables () = + let module Tick = Kimchi_backend.Pasta.Vesta_based_plonk in + let size = 1 + Random.int 100 in + let indexes = Array.init size Tick.Field.of_int in + let values = Array.init size (fun _ -> Tick.Field.random ()) in + let cs = Tick.R1CS_constraint_system.create () in + let () = + Tick.R1CS_constraint_system.( + add_constraint cs + (T (AddFixedLookupTable { id = 1l; data = [| indexes; values |] }))) + in + let () = Tick.R1CS_constraint_system.finalize_fixed_lookup_tables cs in + Alcotest.check_raises "Finalize a second time the fixed lookup tables" + (Failure "Fixed lookup tables have already been finalized") (fun () -> + Tick.R1CS_constraint_system.finalize_fixed_lookup_tables cs ) + +let test_cannot_finalize_twice_the_runtime_table_cfgs () = + let module Tick = Kimchi_backend.Pasta.Vesta_based_plonk in + let size = 1 + Random.int 100 in + let first_column = Array.init 
size Tick.Field.of_int in + let cs = Tick.R1CS_constraint_system.create () in + let () = + Tick.R1CS_constraint_system.( + add_constraint cs (T (AddRuntimeTableCfg { id = 1l; first_column }))) + in + let () = Tick.R1CS_constraint_system.finalize_runtime_lookup_tables cs in + Alcotest.check_raises + "Runtime table configurations have already been finalized" + (Failure "Runtime table configurations have already been finalized") + (fun () -> Tick.R1CS_constraint_system.finalize_runtime_lookup_tables cs) + +let () = + let open Alcotest in + run "Test constraint construction" + [ ( "Lookup tables" + , [ test_case "Add one fixed table" `Quick + test_finalize_and_get_gates_with_lookup_tables + ; test_case "Add one runtime table cfg" `Quick + test_finalize_and_get_gates_with_runtime_table_cfg + ; test_case "Compute witness with one runtime table lookup" `Quick + test_compute_witness_returns_correctly_filled_runtime_tables_one_lookup + ; test_case "Compute witness with multiple runtime table lookup" `Quick + test_compute_witness_returns_correctly_filled_runtime_tables_multiple_lookup + ; test_case + "Compute witness with runtime lookup at same index with\n\ + \ different values" `Quick + test_compute_witness_with_lookup_to_the_same_idx_twice + ; test_case + "Compute witness with lookups within a runtime table and a fixed \ + lookup table, not sharing the same ID" + `Quick + test_compute_witness_with_fixed_lookup_table_and_runtime_table + ; test_case + "Compute witness with lookups within a runtime table and a fixed \ + lookup table, sharing the table ID" + `Quick + test_compute_witness_with_fixed_lookup_table_and_runtime_table_sharing_ids + ; test_case "Check that fixed lookup tables cannot be finalized twice" + `Quick test_cannot_finalize_twice_the_fixed_lookup_tables + ; test_case + "Check that runtime table configurations cannot be finalized twice" + `Quick test_cannot_finalize_twice_the_runtime_table_cfgs + ] ) + ] diff --git a/src/lib/crypto/kimchi_backend/gadgets/affine.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/affine.ml.disabled new file mode 100644 index 00000000000..ac548772a65 --- /dev/null +++ b/src/lib/crypto/kimchi_backend/gadgets/affine.ml.disabled @@ -0,0 +1,156 @@ +open Core_kernel +module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint +module Snark_intf = Snarky_backendless.Snark_intf + +(* Affine representation of an elliptic curve point over a foreign field *) + +let tests_enabled = true + +type bignum_point = Bignum_bigint.t * Bignum_bigint.t + +let two_to_4limb = Bignum_bigint.(Common.two_to_3limb * Common.two_to_limb) + +type 'field t = + 'field Foreign_field.Element.Standard.t + * 'field Foreign_field.Element.Standard.t + +let of_coordinates a = a + +let of_bignum_bigint_coordinates (type field) + (module Circuit : Snark_intf.Run with type field = field) + (point : bignum_point) : field t = + let x, y = point in + of_coordinates + ( Foreign_field.Element.Standard.of_bignum_bigint (module Circuit) x + , Foreign_field.Element.Standard.of_bignum_bigint (module Circuit) y ) + +let const_of_bignum_bigint_coordinates (type field) + (module Circuit : Snark_intf.Run with type field = field) + (point : bignum_point) : field t = + let x, y = point in + of_coordinates + ( Foreign_field.Element.Standard.const_of_bignum_bigint (module Circuit) x + , Foreign_field.Element.Standard.const_of_bignum_bigint (module Circuit) y + ) + +let to_coordinates a = a + +let to_string_as_prover (type field) + (module Circuit : Snark_intf.Run with type field = field) a : 
string = + let x, y = to_coordinates a in + sprintf "(%s, %s)" + (Foreign_field.Element.Standard.to_string_as_prover (module Circuit) x) + (Foreign_field.Element.Standard.to_string_as_prover (module Circuit) y) + +let x a = + let x_element, _ = to_coordinates a in + x_element + +let y a = + let _, y_element = to_coordinates a in + y_element + +let equal_as_prover (type field) + (module Circuit : Snark_intf.Run with type field = field) (left : field t) + (right : field t) : bool = + let left_x, left_y = to_coordinates left in + let right_x, right_y = to_coordinates right in + Foreign_field.Element.Standard.( + equal_as_prover (module Circuit) left_x right_x + && equal_as_prover (module Circuit) left_y right_y) + +(* Create constraints to assert equivalence between two affine points *) +let assert_equal (type field) + (module Circuit : Snark_intf.Run with type field = field) (left : field t) + (right : field t) : unit = + let left_x, left_y = to_coordinates left in + let right_x, right_y = to_coordinates right in + Foreign_field.Element.Standard.( + assert_equal (module Circuit) left_x right_x ; + assert_equal (module Circuit) left_y right_y) + +let check_here_const_of_bignum_bigint_coordinates (type field) + (module Circuit : Snark_intf.Run with type field = field) + (point : bignum_point) : field t = + let const_point = const_of_bignum_bigint_coordinates (module Circuit) point in + let var_point = of_bignum_bigint_coordinates (module Circuit) point in + assert_equal (module Circuit) const_point var_point ; + const_point + +let const_zero (type field) + (module Circuit : Snark_intf.Run with type field = field) : field t = + of_coordinates + Foreign_field.Element.Standard. + ( const_of_bignum_bigint (module Circuit) Bignum_bigint.zero + , const_of_bignum_bigint (module Circuit) Bignum_bigint.zero ) + +(* Uses 6 * 1.5 (Generics per Field) = 9 rows per Affine.if_ *) +let if_ (type field) (module Circuit : Snark_intf.Run with type field = field) + (b : Circuit.Boolean.var) ~(then_ : field t) ~(else_ : field t) : field t = + let then_x, then_y = to_coordinates then_ in + let else_x, else_y = to_coordinates else_ in + of_coordinates + Foreign_field.Element.Standard. + ( if_ (module Circuit) b ~then_:then_x ~else_:else_x + , if_ (module Circuit) b ~then_:then_y ~else_:else_y ) + +(****************) +(* Affine tests *) +(****************) + +let%test_unit "affine" = + if tests_enabled then + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. 
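+ An empty list of URS locations means no on-disk cache is consulted here.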
*) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + (* Check Affine methods *) + let _cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof (fun () -> + let pt_a = + of_bignum_bigint_coordinates + (module Runner.Impl) + ( Bignum_bigint.of_string + "15038058761817109681921033191530858996191372456511467769172810422323500124150" + , Bignum_bigint.of_string + "64223534476670136480328171927326822445460557333044467340973794755877726909525" + ) + in + Foreign_field.result_row (module Runner.Impl) @@ fst pt_a ; + Foreign_field.result_row (module Runner.Impl) @@ snd pt_a ; + let pt_b = + of_bignum_bigint_coordinates + (module Runner.Impl) + ( Bignum_bigint.of_string + "99660522603236469231535770150980484469424456619444894985600600952621144670700" + , Bignum_bigint.of_string + "8901505138963553768122761105087501646863888139548342861255965172357387323186" + ) + in + Foreign_field.result_row (module Runner.Impl) @@ fst pt_b ; + Foreign_field.result_row (module Runner.Impl) @@ snd pt_b ; + let bit = + Runner.Impl.(exists Boolean.typ_unchecked ~compute:(fun () -> true)) + in + + let pt_c = if_ (module Runner.Impl) bit ~then_:pt_a ~else_:pt_b in + Foreign_field.result_row (module Runner.Impl) (fst pt_c) ; + Foreign_field.result_row (module Runner.Impl) (snd pt_c) ; + + assert_equal (module Runner.Impl) pt_c pt_a ; + + let bit2 = + Runner.Impl.( + exists Boolean.typ_unchecked ~compute:(fun () -> false)) + in + + let pt_d = if_ (module Runner.Impl) bit2 ~then_:pt_a ~else_:pt_b in + Foreign_field.result_row (module Runner.Impl) (fst pt_d) ; + Foreign_field.result_row (module Runner.Impl) (snd pt_d) ; + + assert_equal (module Runner.Impl) pt_d pt_b ; + + () ) + in + () diff --git a/src/lib/crypto/kimchi_backend/gadgets/bitwise.ml b/src/lib/crypto/kimchi_backend/gadgets/bitwise.ml new file mode 100644 index 00000000000..84695a0634a --- /dev/null +++ b/src/lib/crypto/kimchi_backend/gadgets/bitwise.ml @@ -0,0 +1,918 @@ +open Core_kernel + +open Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint + +module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint + +let tests_enabled = true + +(* Auxiliary functions *) + +(* returns a field containing the all one word of length bits *) +let all_ones_field (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (length : int) : f = + Common.bignum_bigint_to_field (module Circuit) + @@ Bignum_bigint.(pow (of_int 2) (of_int length) - one) + +let fits_in_bits_as_prover (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (word : Circuit.Field.t) (length : int) = + let open Common in + assert ( + Bignum_bigint.( + field_to_bignum_bigint + (module Circuit) + (cvar_field_to_field_as_prover (module Circuit) word) + < pow (of_int 2) (of_int length)) ) + +(* ROT64 *) + +(* Side of rotation *) +type rot_mode = Left | Right + +(* Performs the 64bit rotation and returns rotated word, excess, and shifted *) +let rot_aux (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + ?(check64 = false) (word : Circuit.Field.t) (bits : int) (mode : rot_mode) : + Circuit.Field.t * Circuit.Field.t * Circuit.Field.t = + let open Circuit in + (* Check that the rotation bits is smaller than 64 *) + assert (bits < 64) ; + (* Check that the rotation bits is non-negative *) + assert (bits >= 0) ; + + (* Check that the input word has at most 64 bits *) + as_prover (fun () -> + fits_in_bits_as_prover (module Circuit) word 64 ; + () ) 
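+ (* The decomposition below satisfies word * 2^rot = excess * 2^64 + shifted and rotated = excess + shifted. 8-bit analogue for intuition: rotating 0xAB left by 4 gives 0xAB * 2^4 = 0xAB0 = 0x0A * 2^8 + 0xB0, and 0x0A + 0xB0 = 0xBA. *)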
; + + (* Compute actual length depending on whether the rotation mode is Left or Right *) + let rot_bits = match mode with Left -> bits | Right -> 64 - bits in + + (* Auxiliary Bignum_bigint values *) + let big_2_pow_64 = Bignum_bigint.(pow (of_int 2) (of_int 64)) in + let big_2_pow_rot = Bignum_bigint.(pow (of_int 2) (of_int rot_bits)) in + + (* Compute the rotated word *) + let rotated, excess, shifted, bound = + exists (Typ.array ~length:4 Field.typ) ~compute:(fun () -> + (* Assert that word is at most 64 bits*) + let word_big = + Common.( + field_to_bignum_bigint + (module Circuit) + (cvar_field_to_field_as_prover (module Circuit) word)) + in + assert (Bignum_bigint.(word_big < big_2_pow_64)) ; + + (* Obtain rotated output, excess, and shifted for the equation + word * 2^rot = excess * 2^64 + shifted *) + let excess_big, shifted_big = + Common.bignum_bigint_div_rem + Bignum_bigint.(word_big * big_2_pow_rot) + big_2_pow_64 + in + + (* Compute rotated value as + rotated = excess + shifted *) + let rotated_big = Bignum_bigint.(shifted_big + excess_big) in + + (* Compute bound that is the right input of FFAdd equation *) + let bound_big = + Bignum_bigint.(excess_big + big_2_pow_64 - big_2_pow_rot) + in + + (* Convert back to field *) + let shifted = + Common.bignum_bigint_to_field (module Circuit) shifted_big + in + let excess = + Common.bignum_bigint_to_field (module Circuit) excess_big + in + let rotated = + Common.bignum_bigint_to_field (module Circuit) rotated_big + in + let bound = Common.bignum_bigint_to_field (module Circuit) bound_big in + + [| rotated; excess; shifted; bound |] ) + |> Common.tuple4_of_array + in + + let of_bits = + Common.as_prover_cvar_field_bits_le_to_cvar_field (module Circuit) + in + + (* Current row *) + with_label "rot64_gate" (fun () -> + (* Set up Rot64 gate *) + assert_ + { annotation = Some __LOC__ + ; basic = + Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T + (Rot64 + { word + ; rotated + ; excess + ; bound_limb0 = of_bits bound 52 64 + ; bound_limb1 = of_bits bound 40 52 + ; bound_limb2 = of_bits bound 28 40 + ; bound_limb3 = of_bits bound 16 28 + ; bound_crumb0 = of_bits bound 14 16 + ; bound_crumb1 = of_bits bound 12 14 + ; bound_crumb2 = of_bits bound 10 12 + ; bound_crumb3 = of_bits bound 8 10 + ; bound_crumb4 = of_bits bound 6 8 + ; bound_crumb5 = of_bits bound 4 6 + ; bound_crumb6 = of_bits bound 2 4 + ; bound_crumb7 = of_bits bound 0 2 + ; two_to_rot = + Common.bignum_bigint_to_field + (module Circuit) + big_2_pow_rot + } ) + } ) ; + + (* Next row *) + Range_check.bits64 (module Circuit) shifted ; + + (* Following row *) + Range_check.bits64 (module Circuit) excess ; + + if check64 then Range_check.bits64 (module Circuit) word ; + + (rotated, excess, shifted) + +(* 64-bit Rotation of rot_bits to the `mode` side + * Inputs + * - check: whether to check the input word is at most 64 bits (default is false) + * - word of maximum 64 bits to be rotated + * - rot_bits: number of bits to be rotated + * - mode: Left or Right + * Output: rotated word + *) +let rot64 (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + ?(check64 : bool = false) (word : Circuit.Field.t) (rot_bits : int) + (mode : rot_mode) : Circuit.Field.t = + let rotated, _excess, _shifted = + rot_aux (module Circuit) ~check64 word rot_bits mode + in + + rotated + +(* 64-bit bitwise logical shift of bits to the left side + * Inputs + * - check64: whether to check the input word is at most 64 bits (default is false) + * - word of maximum 
64 bits to be shifted + * - bits: number of bits to be shifted + * Output: left shifted word (with bits 0s at the least significant positions) + *) +let lsl64 (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + ?(check64 : bool = false) (word : Circuit.Field.t) (bits : int) : + Circuit.Field.t = + let _rotated, _excess, shifted = + rot_aux (module Circuit) ~check64 word bits Left + in + + shifted + +(* 64-bit bitwise logical shift of bits to the right side + * Inputs + * - check64: whether to check the input word is at most 64 bits (default is false) + * - word of maximum 64 bits to be shifted + * - bits: number of bits to be shifted + * Output: right shifted word (with bits 0s at the most significant positions) +*) +let lsr64 (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + ?(check64 : bool = false) (word : Circuit.Field.t) (bits : int) : + Circuit.Field.t = + let _rotated, excess, _shifted = + rot_aux (module Circuit) ~check64 word bits Right + in + + excess + +(* XOR *) + +(* Boolean Xor of length bits + * input1 and input2 are the inputs to the Xor gate + * length is the number of bits to Xor + * len_xor is the number of bits of the lookup table (default is 4) + *) +let bxor (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + ?(len_xor = 4) (input1 : Circuit.Field.t) (input2 : Circuit.Field.t) + (length : int) : Circuit.Field.t = + (* Auxiliary function to compute the next variable for the chain of Xors *) + let as_prover_next_var (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (curr_var : Circuit.Field.t) (var0 : Circuit.Field.t) + (var1 : Circuit.Field.t) (var2 : Circuit.Field.t) (var3 : Circuit.Field.t) + (len_xor : int) : Circuit.Field.t = + let open Circuit in + let two_pow_len = + Common.bignum_bigint_to_field + (module Circuit) + Bignum_bigint.(pow (of_int 2) (of_int len_xor)) + in + let two_pow_2len = Field.Constant.(two_pow_len * two_pow_len) in + let two_pow_3len = Field.Constant.(two_pow_2len * two_pow_len) in + let two_pow_4len = Field.Constant.(two_pow_3len * two_pow_len) in + let next_var = + exists Field.typ ~compute:(fun () -> + let curr_field = + Common.cvar_field_to_field_as_prover (module Circuit) curr_var + in + let field0 = + Common.cvar_field_to_field_as_prover (module Circuit) var0 + in + let field1 = + Common.cvar_field_to_field_as_prover (module Circuit) var1 + in + let field2 = + Common.cvar_field_to_field_as_prover (module Circuit) var2 + in + let field3 = + Common.cvar_field_to_field_as_prover (module Circuit) var3 + in + Field.Constant.( + ( curr_field - field0 - (field1 * two_pow_len) + - (field2 * two_pow_2len) - (field3 * two_pow_3len) ) + / two_pow_4len) ) + in + next_var + in + + (* Recursively builds Xor + * input1 and input2 are the inputs to the Xor gate as bits + * output is the output of the Xor gate as bits + * length is the number of remaining bits to Xor + * len_xor is the number of bits of the lookup table (default is 4) + *) + let rec bxor_rec (in1 : Circuit.Field.t) (in2 : Circuit.Field.t) + (out : Circuit.Field.t) (length : int) (len_xor : int) = + let open Circuit in + (* If inputs are zero and length is zero, add the zero check *) + if length = 0 then ( + with_label "xor_zero_check" (fun () -> + assert_ + { annotation = Some __LOC__ + ; basic = + Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T + (Raw + { kind = Zero + ; values = [| in1; in2; out |] + ; coeffs = [||] + } ) + } ) ; + 
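+ (* at this point the three running accumulators must be exactly zero, which is what shows the original inputs fit in the padded bit length *)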
Field.Assert.equal Field.zero in1 ; + Field.Assert.equal Field.zero in2 ; + Field.Assert.equal Field.zero out ; + () ) + else + (* Define shorthand helper *) + let of_bits = + Common.as_prover_cvar_field_bits_le_to_cvar_field (module Circuit) + in + + (* Nibble offsets *) + let first = len_xor in + let second = first + len_xor in + let third = second + len_xor in + let fourth = third + len_xor in + + let in1_0 = of_bits in1 0 first in + let in1_1 = of_bits in1 first second in + let in1_2 = of_bits in1 second third in + let in1_3 = of_bits in1 third fourth in + let in2_0 = of_bits in2 0 first in + let in2_1 = of_bits in2 first second in + let in2_2 = of_bits in2 second third in + let in2_3 = of_bits in2 third fourth in + let out_0 = of_bits out 0 first in + let out_1 = of_bits out first second in + let out_2 = of_bits out second third in + let out_3 = of_bits out third fourth in + + (* If length is more than 0, add the Xor gate *) + with_label "xor_gate" (fun () -> + (* Set up Xor gate *) + assert_ + { annotation = Some __LOC__ + ; basic = + Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T + (Xor + { in1 + ; in2 + ; out + ; in1_0 + ; in1_1 + ; in1_2 + ; in1_3 + ; in2_0 + ; in2_1 + ; in2_2 + ; in2_3 + ; out_0 + ; out_1 + ; out_2 + ; out_3 + } ) + } ) ; + + let next_in1 = + as_prover_next_var (module Circuit) in1 in1_0 in1_1 in1_2 in1_3 len_xor + in + let next_in2 = + as_prover_next_var (module Circuit) in2 in2_0 in2_1 in2_2 in2_3 len_xor + in + let next_out = + as_prover_next_var (module Circuit) out out_0 out_1 out_2 out_3 len_xor + in + + (* Next length is 4*n less bits *) + let next_length = length - (4 * len_xor) in + + (* Recursively call xor on the next nibble *) + bxor_rec next_in1 next_in2 next_out next_length len_xor ; + () + in + + let open Circuit in + let open Common in + (* Check that the length is positive *) + assert (length > 0 && len_xor > 0) ; + (* Check that the length fits in the field *) + assert (length <= Field.size_in_bits) ; + + (* Initialize array of 255 bools all set to false *) + let input1_array = Array.create ~len:Field.size_in_bits false in + let input2_array = Array.create ~len:Field.size_in_bits false in + + (* Sanity checks about lengths of inputs using bignum *) + as_prover (fun () -> + (* Read inputs, Convert to field type *) + let input1_field = + cvar_field_to_field_as_prover (module Circuit) input1 + in + let input2_field = + cvar_field_to_field_as_prover (module Circuit) input2 + in + + (* Check real lengths are at most the desired length *) + fits_in_bits_as_prover (module Circuit) input1 length ; + fits_in_bits_as_prover (module Circuit) input2 length ; + + (* Convert inputs field elements to list of bits of length 255 *) + let input1_bits = Field.Constant.unpack @@ input1_field in + let input2_bits = Field.Constant.unpack @@ input2_field in + + (* Convert list of bits to arrays *) + let input1_bits_array = List.to_array @@ input1_bits in + let input2_bits_array = List.to_array @@ input2_bits in + + (* Iterate over 255 positions to update value of arrays *) + for i = 0 to Field.size_in_bits - 1 do + input1_array.(i) <- input1_bits_array.(i) ; + input2_array.(i) <- input2_bits_array.(i) + done ; + + () ) ; + + let output_xor = + exists Field.typ ~compute:(fun () -> + (* Sanity checks about lengths of inputs using bignum *) + (* Check real lengths are at most the desired length *) + fits_in_bits_as_prover (module Circuit) input1 length ; + fits_in_bits_as_prover (module Circuit) input2 length ; + + let input1_field = + 
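+ (* read the concrete field element the prover assigned to this cvar *)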
cvar_field_to_field_as_prover (module Circuit) input1 + in + let input2_field = + cvar_field_to_field_as_prover (module Circuit) input2 + in + + (* Convert inputs field elements to list of bits of length 255 *) + let input1_bits = Field.Constant.unpack @@ input1_field in + let input2_bits = Field.Constant.unpack @@ input2_field in + + (* Xor list of bits to obtain output of the xor *) + let output_bits = + List.map2_exn input1_bits input2_bits ~f:(fun b1 b2 -> + Bool.(not (equal b1 b2)) ) + in + + (* Convert list of output bits to field element *) + Field.Constant.project output_bits ) + in + + (* Obtain pad length until the length is a multiple of 4*n for n-bit length lookup table *) + let pad_length = + if length mod (4 * len_xor) <> 0 then + length + (4 * len_xor) - (length mod (4 * len_xor)) + else length + in + + (* Recursively build Xor gadget *) + bxor_rec input1 input2 output_xor pad_length len_xor ; + + (* Convert back to field *) + output_xor + +(* Boolean Xor of 16 bits + * This is a special case of Xor for 16 bits for Xor lookup table of 4 bits of inputs. + * Receives two input words to Xor together, of maximum 16 bits each. + * Returns the Xor of the two words. + *) +let bxor16 (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (input1 : Circuit.Field.t) (input2 : Circuit.Field.t) : Circuit.Field.t = + bxor (module Circuit) input1 input2 16 ~len_xor:4 + +(* Boolean Xor of 64 bits + * This is a special case of Xor for 64 bits for Xor lookup table of 4 bits of inputs. + * Receives two input words to Xor together, of maximum 64 bits each. + * Returns the Xor of the two words. + *) +let bxor64 (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (input1 : Circuit.Field.t) (input2 : Circuit.Field.t) : Circuit.Field.t = + bxor (module Circuit) input1 input2 64 ~len_xor:4 + +(* AND *) + +(* Boolean And of length bits + * input1 and input2 are the two inputs to AND + * length is the number of bits to AND + * len_xor is the number of bits of the inputs of the Xor lookup table (default is 4) + *) +let band (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + ?(len_xor = 4) (input1 : Circuit.Field.t) (input2 : Circuit.Field.t) + (length : int) : Circuit.Field.t = + let open Circuit in + (* Recursively build And gadget with leading Xors and a final Generic gate *) + (* It will also check the correct lengths of the inputs, no need to do it again *) + let xor_output = bxor (module Circuit) input1 input2 length ~len_xor in + + let and_output = + exists Field.typ ~compute:(fun () -> + Common.cvar_field_bits_combine_as_prover + (module Circuit) + input1 input2 + (fun b1 b2 -> b1 && b2) ) + in + + (* Compute sum of a + b and constrain in the circuit *) + let sum = Generic.add (module Circuit) input1 input2 in + let neg_one = Field.Constant.(negate one) in + let neg_two = Field.Constant.(neg_one + neg_one) in + + (* Constrain AND as 2 * and = sum - xor *) + with_label "and_equation" (fun () -> + assert_ + { annotation = Some __LOC__ + ; basic = + Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T + (Basic + { l = (Field.Constant.one, sum) + ; r = (neg_one, xor_output) + ; o = (neg_two, and_output) + ; m = Field.Constant.zero + ; c = Field.Constant.zero + } ) + } ) ; + + and_output + +(* Boolean And of 64 bits + * This is a special case of And for 64 bits for Xor lookup table of 4 bits of inputs. + * Receives two input words to And together, of maximum 64 bits each. 
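+ * Internally, AND is derived from XOR via 2*(a AND b) = (a + b) - (a XOR b); + * e.g. a = 5, b = 3: (5 + 3 - (5 XOR 3)) / 2 = (8 - 6) / 2 = 1 = a AND b.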
+ * Returns the And of the two words. + *) +let band64 (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (input1 : Circuit.Field.t) (input2 : Circuit.Field.t) : Circuit.Field.t = + band (module Circuit) input1 input2 64 + +(* NOT *) + +(* Boolean Not of length bits for checked length (uses Xor gadgets inside to constrain the length) + * - input of word to negate + * - length of word to negate + * - len_xor is the length of the Xor lookup table to use beneath (default 4) + * Note that the length needs to be less than the bit length of the field. + *) +let bnot_checked (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + ?(len_xor = 4) (input : Circuit.Field.t) (length : int) : Circuit.Field.t = + let open Circuit in + (* Check it is not 255 or else 2^255-1 will not fit in Pallas *) + assert (length < Circuit.Field.size_in_bits) ; + + let all_ones_f = all_ones_field (module Circuit) length in + let all_ones_var = exists Field.typ ~compute:(fun () -> all_ones_f) in + + (* Negating is equivalent to XORing with all one word *) + let out_not = bxor (module Circuit) input all_ones_var length ~len_xor in + + (* Doing this afterwards or else it can break chainability with Xor16's and Zero *) + Field.Assert.equal (Field.constant all_ones_f) all_ones_var ; + + out_not + +(* Negates a word of 64 bits with checked length of 64 bits. + * This means that the bound in length is constrained in the circuit. *) +let bnot64_checked (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (input : Circuit.Field.t) : Circuit.Field.t = + bnot_checked (module Circuit) input 64 + +(* Boolean Not of length bits for unchecked length (uses Generic subtractions inside) + * - input of word to negate + * - length of word to negate + * (Note that this can negate two words per row, but its inputs need to be a copy of another + * variable with a correct length in order to make sure that the length is correct) + *) +let bnot_unchecked (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (input : Circuit.Field.t) (length : int) : Circuit.Field.t = + let open Circuit in + (* Check it is not 255 or else 2^255-1 will not fit in Pallas *) + assert (length < Circuit.Field.size_in_bits) ; + assert (length > 0) ; + + (* Check that the input word has at most length bits. + In the checked version this is done in the Xor *) + as_prover (fun () -> + fits_in_bits_as_prover (module Circuit) input length ; + () ) ; + + let all_ones_f = all_ones_field (module Circuit) length in + let all_ones_var = exists Field.typ ~compute:(fun () -> all_ones_f) in + Field.Assert.equal all_ones_var (Field.constant all_ones_f) ; + + (* Negating is equivalent to subtracting with all one word *) + (* [2^len - 1] - input = not (input) *) + Generic.sub (module Circuit) all_ones_var input + +(* Negates a word of 64 bits, but its length goes unconstrained in the circuit + (unless it is copied from a checked length value) *) +let bnot64_unchecked (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (input : Circuit.Field.t) : Circuit.Field.t = + bnot_unchecked (module Circuit) input 64 + +(**************) +(* UNIT TESTS *) +(**************) + +let%test_unit "bitwise rotation gadget" = + if tests_enabled then ( + let (* Import the gadget test runner *) + open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. 
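+ Several cases below pass ~cs to reuse a constraint system returned by an + earlier case, proving the same circuit against different witnesses.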
*)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test ROT gadget
+ * Input operands and expected output: word len mode rotated
+ * Returns unit if constraints are satisfied, error otherwise.
+ *)
+ let test_rot ?cs word length mode result =
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Set up snarky variables for inputs and output *)
+ let word =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_string word )
+ in
+ let result =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_string result )
+ in
+ (* Use the rot gate gadget *)
+ let output_rot = rot64 (module Runner.Impl) word length mode in
+ Field.Assert.equal output_rot result
+ (* Pad with a "dummy" constraint b/c Kimchi requires at least 2 *) )
+ in
+ cs
+ in
+ (* Positive tests *)
+ let _cs = test_rot "0" 0 Left "0" in
+ let _cs = test_rot "0" 32 Right "0" in
+ let _cs = test_rot "1" 1 Left "2" in
+ let _cs = test_rot "1" 63 Left "9223372036854775808" in
+ let cs = test_rot "256" 4 Right "16" in
+ (* 0x5A5A5A5A5A5A5A5A becomes 0xA5A5A5A5A5A5A5A5 both when rotated 4 bits Left or Right *)
+ let _cs =
+ test_rot ~cs "6510615555426900570" 4 Right "11936128518282651045"
+ in
+ let _cs = test_rot "6510615555426900570" 4 Left "11936128518282651045" in
+ let cs = test_rot "1234567890" 32 Right "5302428712241725440" in
+ let _cs = test_rot ~cs "2651214356120862720" 32 Right "617283945" in
+ let _cs = test_rot ~cs "1153202983878524928" 32 Right "268500993" in
+
+ (* Negative tests *)
+ assert (Common.is_error (fun () -> test_rot "0" 1 Left "1")) ;
+ assert (Common.is_error (fun () -> test_rot "1" 64 Left "1")) ;
+ assert (Common.is_error (fun () -> test_rot ~cs "0" 0 Left "0")) ) ;
+ ()
+
+let%test_unit "bitwise shift gadgets" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test LSL and LSR gadgets
+ * Input operands and expected output: word len mode shifted
+ * Returns unit if constraints are satisfied, error otherwise.
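+ * Editorial aside (not in the original comment): for words of at most 64
+ * bits these gadgets follow the usual logical shift semantics, i.e.
+ * lsl64 x b = (x * 2^b) mod 2^64 and lsr64 x b = floor (x / 2^b);
+ * e.g. 256 >> 4 = 16 and 1 << 1 = 2, as exercised by the tests below.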
+ *)
+ let test_shift ?cs word length mode result =
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Set up snarky variables for inputs and output *)
+ let word =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_string word )
+ in
+ let result =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_string result )
+ in
+ (* Use the shift gadget *)
+ let output_shift =
+ match mode with
+ | Left ->
+ lsl64 (module Runner.Impl) word length
+ | Right ->
+ lsr64 (module Runner.Impl) word length
+ in
+ Field.Assert.equal output_shift result )
+ in
+ cs
+ in
+ (* Positive tests *)
+ let cs1l = test_shift "0" 1 Left "0" in
+ let cs1r = test_shift "0" 1 Right "0" in
+ let _cs = test_shift ~cs:cs1l "1" 1 Left "2" in
+ let _cs = test_shift ~cs:cs1r "1" 1 Right "0" in
+ let _cs = test_shift "256" 4 Right "16" in
+ let _cs = test_shift "256" 20 Right "0" in
+ let _cs = test_shift "6510615555426900570" 16 Right "99344109427290" in
+ (* All 1's word *)
+ let cs_allones =
+ test_shift "18446744073709551615" 15 Left "18446744073709518848"
+ in
+ (* Random value 0xADCC7E30EDCAC126: shifted 32 Right gives 0xADCC7E30, shifted 32 Left gives 0xEDCAC12600000000 *)
+ let _cs = test_shift "12523523412423524646" 32 Right "2915860016" in
+ let _cs =
+ test_shift "12523523412423524646" 32 Left "17134720101237391360"
+ in
+
+ (* Negative tests *)
+ assert (Common.is_error (fun () -> test_shift "0" 1 Left "1")) ;
+ assert (Common.is_error (fun () -> test_shift "1" 64 Left "1")) ;
+ assert (Common.is_error (fun () -> test_shift ~cs:cs_allones "0" 0 Left "0"))
+ ) ;
+ ()
+
+let%test_unit "bitwise xor gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test XOR gadget
+ * Input operands and expected output: left_input xor right_input
+ * Returns true if constraints are satisfied, false otherwise.
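+ * Worked example (an illustrative aside), taken from the positive tests below:
+ * 0xa8ca = 1010 1000 1100 1010
+ * 0xddd5 = 1101 1101 1101 0101
+ * xor = 0111 0101 0001 1111 = 0x751f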
+ *)
+ let test_xor ?cs left_input right_input output_xor length =
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Set up snarky variables for inputs and output *)
+ let left_input =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) left_input )
+ in
+ let right_input =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) right_input )
+ in
+ let output_xor =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) output_xor )
+ in
+ (* Use the xor gate gadget *)
+ let result =
+ bxor (module Runner.Impl) left_input right_input length
+ in
+
+ (* Check that the result is equal to the expected output *)
+ Field.Assert.equal output_xor result )
+ in
+ cs
+ in
+
+ (* Positive tests *)
+ let cs16 = test_xor "1" "0" "1" 16 in
+ let _cs = test_xor ~cs:cs16 "0" "1" "1" 16 in
+ let _cs = test_xor ~cs:cs16 "2" "1" "3" 16 in
+ let _cs = test_xor ~cs:cs16 "a8ca" "ddd5" "751f" 16 in
+ let _cs = test_xor ~cs:cs16 "0" "0" "0" 8 in
+ let _cs = test_xor ~cs:cs16 "0" "0" "0" 1 in
+ let _cs = test_xor ~cs:cs16 "1" "0" "1" 1 in
+ let _cs = test_xor ~cs:cs16 "0" "0" "0" 4 in
+ let _cs = test_xor ~cs:cs16 "1" "1" "0" 4 in
+ let cs32 = test_xor "bb5c6" "edded" "5682b" 20 in
+ let cs64 =
+ test_xor "5a5a5a5a5a5a5a5a" "a5a5a5a5a5a5a5a5" "ffffffffffffffff" 64
+ in
+ let _cs =
+ test_xor ~cs:cs64 "f1f1f1f1f1f1f1f1" "0f0f0f0f0f0f0f0f" "fefefefefefefefe"
+ 64
+ in
+ let _cs =
+ test_xor ~cs:cs64 "cad1f05900fcad2f" "deadbeef010301db" "147c4eb601ffacf4"
+ 64
+ in
+
+ (* Negative tests *)
+ assert (
+ Common.is_error (fun () ->
+ (* Reusing right CS with bad witness *)
+ test_xor ~cs:cs32 "ed1ed1" "ed1ed1" "010101" 20 ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ (* Reusing wrong CS with right witness *)
+ test_xor ~cs:cs32 "1" "1" "0" 16 ) ) ;
+
+ assert (Common.is_error (fun () -> test_xor ~cs:cs16 "1" "0" "1" 0)) ;
+ assert (Common.is_error (fun () -> test_xor ~cs:cs16 "1" "0" "0" 1)) ;
+ assert (Common.is_error (fun () -> test_xor ~cs:cs16 "1111" "2222" "0" 16)) ;
+ assert (Common.is_error (fun () -> test_xor "0" "0" "0" 256)) ;
+ assert (Common.is_error (fun () -> test_xor "0" "0" "0" (-4))) ;
+ assert (
+ Common.is_error (fun () -> test_xor ~cs:cs32 "bb5c6" "edded" "ed1ed1" 20) )
+ ) ;
+ ()
+
+let%test_unit "bitwise and gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+ (* Helper to test AND gadget
+ * Input operands and expected output: left_input and right_input = output
+ * Returns true if constraints are satisfied, false otherwise.
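+ * Editorial aside: band constrains the identity 2 * (a AND b) = (a + b) - (a XOR b).
+ * Worked example with the vectors below: a = 0xa8ca, b = 0xddd5 gives
+ * a + b = 0x1869f, a XOR b = 0x751f, a AND b = 0x88c0, and indeed
+ * 2 * 0x88c0 = 0x11180 = 0x1869f - 0x751f.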
+ *)
+ let test_and ?cs left_input right_input output_and length =
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Set up snarky variables for inputs and outputs *)
+ let left_input =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) left_input )
+ in
+ let right_input =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) right_input )
+ in
+ let output_and =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) output_and )
+ in
+ (* Use the and gate gadget *)
+ let result =
+ band (module Runner.Impl) left_input right_input length
+ in
+ Field.Assert.equal output_and result )
+ in
+ cs
+ in
+
+ (* Positive tests *)
+ let cs = test_and "0" "0" "0" 16 in
+ let _cs = test_and ~cs "457" "8ae" "6" 16 in
+ let _cs = test_and ~cs "a8ca" "ddd5" "88c0" 16 in
+ let _cs = test_and "0" "0" "0" 8 in
+ let cs = test_and "1" "1" "1" 1 in
+ let _cs = test_and ~cs "1" "0" "0" 1 in
+ let _cs = test_and ~cs "0" "1" "0" 1 in
+ let _cs = test_and ~cs "0" "0" "0" 1 in
+ let _cs = test_and "f" "f" "f" 4 in
+ let _cs = test_and "bb5c6" "edded" "a95c4" 20 in
+ let cs = test_and "5a5a5a5a5a5a5a5a" "a5a5a5a5a5a5a5a5" "0" 64 in
+ let cs =
+ test_and ~cs "385e243cb60654fd" "010fde9342c0d700" "e041002005400" 64
+ in
+ (* Negative tests *)
+ assert (
+ Common.is_error (fun () ->
+ (* Reusing right CS with wrong witness *) test_and ~cs "1" "1" "0" 20 ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ (* Reusing wrong CS with right witness *) test_and ~cs "1" "1" "1" 1 ) ) ;
+ assert (Common.is_error (fun () -> test_and "1" "1" "0" 1)) ;
+ assert (Common.is_error (fun () -> test_and "ff" "ff" "ff" 7)) ;
+ assert (Common.is_error (fun () -> test_and "1" "1" "1" (-1))) ) ;
+ ()
+
+let%test_unit "bitwise not gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+ (* Helper to test NOT gadget with both checked and unchecked length procedures
+ * Input, expected output, and desired length: not(input) = output
+ * Returns true if constraints are satisfied, false otherwise.
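+ * Editorial aside: over n bits, not(x) = (2^n - 1) - x, i.e. an XOR with
+ * the all-ones word; e.g. for n = 16, not(0xa8ca) = 0xffff - 0xa8ca = 0x5735,
+ * matching the positive tests below.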
+ *) + let test_not ?cs input output length = + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Set up snarky variables for input and output *) + let input = + exists Field.typ ~compute:(fun () -> + Common.field_of_hex (module Runner.Impl) input ) + in + + let output = + exists Field.typ ~compute:(fun () -> + Common.field_of_hex (module Runner.Impl) output ) + in + + (* Use the not gate gadget *) + let result_checked = + bnot_checked (module Runner.Impl) input length + in + let result_unchecked = + bnot_unchecked (module Runner.Impl) input length + in + Field.Assert.equal output result_checked ; + Field.Assert.equal output result_unchecked ) + in + cs + in + + (* Positive tests *) + let _cs = test_not "0" "1" 1 in + let _cs = test_not "0" "f" 4 in + let _cs = test_not "0" "ff" 8 in + let _cs = test_not "0" "7ff" 11 in + let cs16 = test_not "0" "ffff" 16 in + let _cs = test_not ~cs:cs16 "a8ca" "5735" 16 in + let _cs = test_not "bb5c6" "44a39" 20 in + let cs64 = test_not "a5a5a5a5a5a5a5a5" "5a5a5a5a5a5a5a5a" 64 in + let _cs = test_not ~cs:cs64 "5a5a5a5a5a5a5a5a" "a5a5a5a5a5a5a5a5" 64 in + let _cs = test_not ~cs:cs64 "7b3f28d7496d75f0" "84c0d728b6928a0f" 64 in + let _cs = test_not ~cs:cs64 "ffffffffffffffff" "0" 64 in + let _cs = test_not ~cs:cs64 "00000fffffffffff" "fffff00000000000" 64 in + let _cs = test_not ~cs:cs64 "fffffffffffff000" "fff" 64 in + let _cs = test_not ~cs:cs64 "0" "ffffffffffffffff" 64 in + let _cs = test_not ~cs:cs64 "0" "ffffffffffffffff" 64 in + let _cs = + test_not + "3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF" "0" + 254 + in + + (* Negative tests *) + assert ( + Common.is_error (fun () -> + (* Reusing right CS with bad witness *) + test_not ~cs:cs64 "0" "ff" 64 ) ) ; + assert ( + Common.is_error (fun () -> + (* Reusing wrong CS with right witness *) + test_not ~cs:cs16 "1" "0" 1 ) ) ; + assert (Common.is_error (fun () -> test_not "0" "0" 1)) ; + assert (Common.is_error (fun () -> test_not "ff" "0" 4)) ; + assert ( + Common.is_error (fun () -> + test_not + "7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF" + "0" 255 ) ) ) ; + () diff --git a/src/lib/crypto/kimchi_backend/gadgets/bitwise.mli b/src/lib/crypto/kimchi_backend/gadgets/bitwise.mli new file mode 100644 index 00000000000..2df3a65dedf --- /dev/null +++ b/src/lib/crypto/kimchi_backend/gadgets/bitwise.mli @@ -0,0 +1,143 @@ +(* Side of rotation *) +type rot_mode = Left | Right + +(** 64-bit rotation of rot_bits to the `mode` side + * @param check64 whether to check the input word is at most 64 bits (default is false) + * @param word word of maximum 64 bits to be rotated + * @param bits number of bits to be rotated + * @param mode Left or Right + * Returns rotated word + *) +val rot64 : + (module Snarky_backendless.Snark_intf.Run with type field = 'f) + -> ?check64:bool (* false *) + -> 'f Snarky_backendless.Cvar.t + -> int + -> rot_mode + -> 'f Snarky_backendless.Cvar.t + +(** 64-bit bitwise logical shift left of bits to the `mode` side + * Inputs + * @param check64 whether to check the input word is at most 64 bits (default is false) + * @param word word of maximum 64 bits to be shifted + * @param bits number of bits to be shifted + * Output: left shifted word (with bits 0s at the least significant positions) + *) +val lsl64 : + (module Snarky_backendless.Snark_intf.Run with type field = 'f) + -> ?check64:bool (* false *) + -> 'f Snarky_backendless.Cvar.t + -> int + -> 'f Snarky_backendless.Cvar.t + +(** 64-bit 
bitwise logical shift of bits to the right side
+ * Inputs
+ * @param check64 whether to check the input word is at most 64 bits (default is false)
+ * @param word word of maximum 64 bits to be shifted
+ * @param bits number of bits to be shifted
+ * Output: right shifted word (with bits 0s at the most significant positions)
+ *)
+val lsr64 :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?check64:bool (* false *)
+ -> 'f Snarky_backendless.Cvar.t
+ -> int
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean Xor of length bits
+ * input1 and input2 are the inputs to the Xor gate
+ * length is the number of bits to Xor
+ * len_xor is the number of bits of the lookup table (default is 4)
+ *)
+val bxor :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?len_xor:int
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+ -> int
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean Xor of 16 bits
+ * This is a special case of Xor for 16 bits for Xor lookup table of 4 bits of inputs.
+ * Receives two input words to Xor together, of maximum 16 bits each.
+ * Returns the Xor of the two words.
+*)
+val bxor16 :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean Xor of 64 bits
+ * This is a special case of Xor for 64 bits for Xor lookup table of 4 bits of inputs.
+ * Receives two input words to Xor together, of maximum 64 bits each.
+ * Returns the Xor of the two words.
+*)
+val bxor64 :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean And of length bits
+ * input1 and input2 are the two inputs to AND
+ * length is the number of bits to AND
+ * len_xor is the number of bits of the inputs of the Xor lookup table (default is 4)
+*)
+val band :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?len_xor:int
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+ -> int
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean And of 64 bits
+ * This is a special case of And for 64 bits for Xor lookup table of 4 bits of inputs.
+ * Receives two input words to And together, of maximum 64 bits each.
+ * Returns the And of the two words.
+ *)
+val band64 :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean Not of length bits for checked length (uses Xor gadgets inside to constrain the length)
+ * - input of word to negate
+ * - length of word to negate
+ * - len_xor is the length of the Xor lookup table to use beneath (default 4)
+ * Note that the length needs to be less than the bit length of the field.
+ *)
+val bnot_checked :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?len_xor:int
+ -> 'f Snarky_backendless.Cvar.t
+ -> int
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Negates a word of 64 bits with checked length of 64 bits.
+ * This means that the bound in length is constrained in the circuit.
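+ * Illustrative equation (editorial aside): for a 64-bit word x this
+ * computes 0xFFFFFFFFFFFFFFFF - x, i.e. (2^64 - 1) - x.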
*)
+val bnot64_checked :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean Not of length bits for unchecked length (uses Generic subtractions inside)
+ * - input of word to negate
+ * - length of word to negate
+ * (Note that this can negate two words per row, but its inputs need to be a copy of another
+ variable with a correct length in order to make sure that the length is correct)
+ * Note that the length needs to be less than the bit length of the field.
+ *)
+val bnot_unchecked :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t
+ -> int
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Negates a word of 64 bits, but its length goes unconstrained in the circuit
+ (unless it is copied from a checked length value) *)
+val bnot64_unchecked :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
diff --git a/src/lib/crypto/kimchi_backend/gadgets/common.ml b/src/lib/crypto/kimchi_backend/gadgets/common.ml
new file mode 100644
index 00000000000..d1ec54db093
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/common.ml
@@ -0,0 +1,473 @@
+(* Common gadget helpers *)
+
+open Core_kernel
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+
+let tests_enabled = true
+
+let tuple3_of_array array =
+ match array with [| a1; a2; a3 |] -> (a1, a2, a3) | _ -> assert false
+
+let tuple4_of_array array =
+ match array with
+ | [| a1; a2; a3; a4 |] ->
+ (a1, a2, a3, a4)
+ | _ ->
+ assert false
+
+(* Foreign field element limb size *)
+let limb_bits = 88
+
+(* Foreign field element limb size 2^L where L=88 *)
+let two_to_limb = Bignum_bigint.(pow (of_int 2) (of_int limb_bits))
+
+(* 2^3L *)
+let two_to_3limb = Bignum_bigint.(pow two_to_limb (of_int 3))
+
+(* Length of Bignum_bigint.t in bits *)
+let bignum_bigint_bit_length (bigint : Bignum_bigint.t) : int =
+ if Bignum_bigint.(equal bigint zero) then 1
+ else Z.log2 (Bignum_bigint.to_zarith_bigint bigint) + 1
+
+(* Conventions used in this interface
+ * 1. Functions prefixed with "as_prover_" only happen during proving
+ * and not during circuit creation
+ * * These functions are called twice (once during creation of
+ * the circuit and once during proving). Inside the definition
+ * of these functions, whatever resides within the exists is not executed
+ * during circuit creation, though there could be some
+ * code outside the exists (such as error checking code) that is
+ * run during the creation of the circuit.
+ * * The value returned by exists depends on what mode it is called in
+ * * In circuit generation mode it allocates a cvar without any backing memory
+ * * In proof generation mode it allocates a cvar with backing memory to store
+ * the values associated with the cvar. The prover can then access these
+ * with As_prover.read.
+ * 2. Functions suffixed with "_as_prover" can only be called outside
+ * the circuit. Specifically, this means within an exists, within
+ * an as_prover, or in an "as_prover_" prefixed function.
+ *)
+
+(* Convert cvar field element (i.e. Field.t) to field *)
+let cvar_field_to_field_as_prover (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (field_element : Circuit.Field.t) : f =
+ Circuit.As_prover.read Circuit.Field.typ field_element
+
+(* Convert cvar bool element (i.e.
Boolean.t) to field *) +let cvar_bool_to_bool_as_prover (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (b : Circuit.Boolean.var) : bool = + Circuit.As_prover.read Circuit.Boolean.typ b + +(* Combines bits of two cvars with a given boolean function and returns the resulting field element *) +let cvar_field_bits_combine_as_prover (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (input1 : Circuit.Field.t) (input2 : Circuit.Field.t) + (bfun : bool -> bool -> bool) : f = + let open Circuit in + let list1 = + Field.Constant.unpack + @@ cvar_field_to_field_as_prover (module Circuit) + @@ input1 + in + let list2 = + Field.Constant.unpack + @@ cvar_field_to_field_as_prover (module Circuit) + @@ input2 + in + Field.Constant.project @@ List.map2_exn list1 list2 ~f:bfun + +(* field_bits_le_to_field - Create a field element from contiguous bits of another + * + * Inputs: + * field_element: source field element + * start: zero-indexed starting bit offset + * stop: zero-indexed stopping bit index (or -1 to denote the last bit) + * + * Output: + * New field element created from bits [start, stop) of field_element input, + * placed into the lowest possible bit position, like so + * + * start stop + * \ / + * [......xxx.....] field_element + * [xxx...........] output + * lsb msb *) +let field_bits_le_to_field (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (field_element : f) (start : int) (stop : int) : f = + let open Circuit in + (* Check range is valid *) + if stop <> -1 && stop <= start then + invalid_arg "stop offset must be greater than start offset" ; + + (* Create field element *) + let bits = Field.Constant.unpack field_element in + if stop > List.length bits then + invalid_arg "stop must be less than bit-length" ; + + let stop = if stop = -1 then List.length bits else stop in + (* Convert bits range (boolean list) to field element *) + Field.Constant.project @@ List.slice bits start stop + +(* Create cvar field element from contiguous bits of another + See field_bits_le_to_field for more information *) +let as_prover_cvar_field_bits_le_to_cvar_field (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (field_element : Circuit.Field.t) (start : int) (stop : int) : + Circuit.Field.t = + let open Circuit in + (* Check range is valid - for exception handling we need to repeat this check + * so it happens outside exists *) + if stop <> -1 && stop <= start then + invalid_arg "stop offset must be greater than start offset" ; + exists Field.typ ~compute:(fun () -> + field_bits_le_to_field + (module Circuit) + (cvar_field_to_field_as_prover (module Circuit) field_element) + start stop ) + +(* Create field element from base10 string *) +let field_of_base10 (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (base10 : string) = + let open Circuit in + Field.Constant.of_string base10 + +(* Create cvar field element from base10 string *) +let as_prover_cvar_field_of_base10 (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (base10 : string) = + let open Circuit in + exists Field.typ ~compute:(fun () -> field_of_base10 (module Circuit) base10) + +(* Convert field element to bigint *) +let field_to_bignum_bigint (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (field_element : f) : Bignum_bigint.t = + (* Bigint doesn't have bigint operators defined for it, so we 
must use Bignum_bigint *)
+ Circuit.Bigint.(to_bignum_bigint (of_field field_element))
+
+(* Convert bigint to field element *)
+let bignum_bigint_to_field (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (bigint : Bignum_bigint.t) : f =
+ Circuit.Bigint.(to_field (of_bignum_bigint bigint))
+
+(* Returns (quotient, remainder) such that numerator = quotient * denominator + remainder
+ * where remainder \in [0, denominator) *)
+let bignum_bigint_div_rem (numerator : Bignum_bigint.t)
+ (denominator : Bignum_bigint.t) : Bignum_bigint.t * Bignum_bigint.t =
+ let quotient = Bignum_bigint.(numerator / denominator) in
+ let remainder = Bignum_bigint.(numerator - (denominator * quotient)) in
+ (quotient, remainder)
+
+(* Bignum_bigint to bytes *)
+let bignum_bigint_unpack_bytes (bignum : Bignum_bigint.t) : string =
+ Z.to_bits @@ Bignum_bigint.to_zarith_bigint bignum
+
+(* Bignum_bigint to bool list *)
+let bignum_bigint_unpack ?(remove_trailing = false) (bignum : Bignum_bigint.t) :
+ bool list =
+ (* Helper to remove trailing false values *)
+ let remove_trailing_false_values (lst : bool list) =
+ let rev = List.rev lst in
+ let rec remove_leading_false_rec lst =
+ match lst with
+ | [] ->
+ []
+ | hd :: tl ->
+ if hd then hd :: tl else remove_leading_false_rec tl
+ in
+ List.rev @@ remove_leading_false_rec rev
+ in
+
+ (* Convert Bignum_bigint to bitstring *)
+ let bytestr = bignum_bigint_unpack_bytes bignum in
+ (* Convert bytestring to list of bool *)
+ let bits =
+ List.init
+ (8 * String.length bytestr)
+ ~f:(fun i ->
+ let c = Char.to_int bytestr.[i / 8] in
+ let j = i mod 8 in
+ if Int.((c lsr j) land 1 = 1) then true else false )
+ in
+ if remove_trailing then remove_trailing_false_values bits else bits
+
+let bignum_bigint_unpack_as (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(remove_trailing = false) (bignum : Bignum_bigint.t)
+ (typ : (Circuit.Boolean.var, bool) Circuit.Typ.t) : Circuit.Boolean.var list
+ =
+ let open Circuit in
+ exists
+ (Typ.list ~length:(bignum_bigint_bit_length bignum) typ)
+ ~compute:(fun () -> bignum_bigint_unpack ~remove_trailing bignum)
+
+let bignum_bigint_unpack_as_vars (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(remove_trailing = false) (bignum : Bignum_bigint.t) :
+ Circuit.Boolean.var list =
+ bignum_bigint_unpack_as
+ (module Circuit)
+ ~remove_trailing bignum Circuit.Boolean.typ
+
+let bignum_bigint_unpack_as_unchecked_vars (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(remove_trailing = false) (bignum : Bignum_bigint.t) :
+ Circuit.Boolean.var list =
+ bignum_bigint_unpack_as
+ (module Circuit)
+ ~remove_trailing bignum Circuit.Boolean.typ_unchecked
+
+(* Bignum_bigint to constants Boolean.var list (without creating boolean constraints) *)
+let bignum_bigint_unpack_as_unchecked_consts (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(remove_trailing = false) (bignum : Bignum_bigint.t) :
+ Circuit.Boolean.var list =
+ let open Circuit in
+ List.map
+ (bignum_bigint_unpack ~remove_trailing bignum)
+ ~f:Boolean.var_of_value
+
+(* Bignum_bigint to hex *)
+let bignum_bigint_to_hex (bignum : Bignum_bigint.t) : string =
+ Z.format "%x" @@ Bignum_bigint.to_zarith_bigint bignum
+
+(* Create Bignum_bigint.t from binary string *)
+let bignum_bigint_of_bin (bin : string) : Bignum_bigint.t =
+ Bignum_bigint.of_zarith_bigint @@ Z.of_bits bin
+
+(* 
Bignum_bigint.t of hex *) +let bignum_bigint_of_hex (hex : string) : Bignum_bigint.t = + Bignum_bigint.of_zarith_bigint @@ Z.of_string_base 16 hex + +(* Convert cvar field element (i.e. Field.t) to Bignum_bigint.t *) +let cvar_field_to_bignum_bigint_as_prover (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (field_element : Circuit.Field.t) : Bignum_bigint.t = + let open Circuit in + field_to_bignum_bigint (module Circuit) + @@ As_prover.read Field.typ field_element + +(* Compute modular square root using Tonelli-Shanks algorithm + * See https://en.wikipedia.org/wiki/Tonelli%E2%80%93Shanks_algorithm + *) +let bignum_bigint_sqrt_mod (x : Bignum_bigint.t) (modulus : Bignum_bigint.t) : + Bignum_bigint.t = + let open Z in + let x = Bignum_bigint.to_zarith_bigint x in + let modulus = Bignum_bigint.to_zarith_bigint modulus in + + (* Useful helpers and shorthands *) + let two = of_int 2 in + let mod_minus_1 = pred modulus in + let pow_mod base exp = powm base exp modulus in + + (* Euler's criterion *) + let legendre x = pow_mod x (mod_minus_1 / two) in + + if not (equal (legendre x) one) then + (* t = 0: x is quadratic residue iff x^{(modulus - 1)/2} == 1 *) + Bignum_bigint.zero + else + (* Solve: modulus - 1 = Q * 2^S for S *) + let s = of_int @@ trailing_zeros mod_minus_1 in + if equal s one then + (* Q = (modulus - 1)/2 and r = x^{(Q + 1)/2} *) + Bignum_bigint.of_zarith_bigint + @@ pow_mod x (((mod_minus_1 / two) + one) / two) + else + (* Solve: modulus - 1 = Q * 2^S for Q by shifting away zeros *) + let q = mod_minus_1 asr to_int s in + + (* Search for z in Z/pZ which is a quadratic non-residue *) + let z = + let rec find_non_square z = + if equal (legendre z) mod_minus_1 then z + else find_non_square @@ (z + one) + in + find_non_square two + in + + (* Solving loop *) + let rec loop m c t r = + if equal t one then r + else + (* Use repeated squaring to find the least 0 < i < M s.t. 
t^{2^i} = 1 *) + let rec find_least_i n i = + if equal n one || geq i m then i + else find_least_i (n * n mod modulus) (i + one) + in + let i = find_least_i t zero in + (* i = m can only happen in the first iteration, and implies + that t is a *primitive* root of unity and therefore not a square + (t is a root of unity by construction, t = n^Q) + *) + if equal i m then zero + else + (* b <- c^{2^{M - i - 1}} *) + let b = pow_mod c (pow_mod two (m - i - one)) in + (* M <- i *) + let m = i in + (* c <- b^2 *) + let c = b * b mod modulus in + (* t <- tb^2 *) + let t = t * c mod modulus in + (* R <- Rb *) + let r = r * b mod modulus in + + (* Recurse *) + loop m c t r + in + + (* M <- S *) + let m = s in + (* c <- Z^Q *) + let c = pow_mod z q in + (* R <- n^{(Q + 1)/2} *) + let r = pow_mod x ((q + one) / two) in + (* t <- x^Q *) + let t = pow_mod x q in + + Bignum_bigint.of_zarith_bigint @@ loop m c t r + +(* Compute square root of Bignum_bigint value x *) +let bignum_bigint_sqrt (x : Bignum_bigint.t) : Bignum_bigint.t = + Bignum_bigint.of_zarith_bigint @@ Z.sqrt @@ Bignum_bigint.to_zarith_bigint x + +(* Compute the inverse of Bignum_bigint value x with modulus *) +let bignum_bigint_inverse (x : Bignum_bigint.t) (modulus : Bignum_bigint.t) : + Bignum_bigint.t = + let x = Bignum_bigint.to_zarith_bigint x in + let modulus = Bignum_bigint.to_zarith_bigint modulus in + Bignum_bigint.of_zarith_bigint @@ Z.invert x modulus + +(* Field to hex *) +let field_to_hex (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (field_element : f) : string = + bignum_bigint_to_hex @@ field_to_bignum_bigint (module Circuit) field_element + +(* Field of hex *) +let field_of_hex (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (hex : string) : f = + bignum_bigint_to_field (module Circuit) @@ bignum_bigint_of_hex hex + +(* List of field elements for each byte of hexadecimal input*) +let field_bytes_of_hex (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (hex : string) : f list = + let chars = String.to_list hex in + let list_pairs = List.groupi chars ~break:(fun i _ _ -> i mod 2 = 0) in + let list_bytes = + List.map list_pairs ~f:(fun byte -> + let hex_i = String.of_char_list byte in + field_of_hex (module Circuit) hex_i ) + in + list_bytes + +(* List of field elements of at most 1 byte to a Bignum_bigint *) +let cvar_field_bytes_to_bignum_bigint_as_prover (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (bytestring : Circuit.Field.t list) : Bignum_bigint.t = + List.fold bytestring ~init:Bignum_bigint.zero ~f:(fun acc x -> + Bignum_bigint.( + (acc * of_int 2) + + cvar_field_to_bignum_bigint_as_prover (module Circuit) x) ) + +(* Negative test helper *) +let is_error (func : unit -> _) = Result.is_error (Or_error.try_with func) + +(* Two to the power of n as a field element *) +let two_pow (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (n : int) = + bignum_bigint_to_field + (module Circuit) + Bignum_bigint.(pow (of_int 2) (of_int n)) + +(*********) +(* Tests *) +(*********) + +let%test_unit "helper field_bits_le_to_field" = + ( if tests_enabled then + let (* Import the gadget test runner *) + open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. 
*)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ let _cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof (fun () ->
+ let open Runner.Impl in
+ let of_bits =
+ as_prover_cvar_field_bits_le_to_cvar_field (module Runner.Impl)
+ in
+ let of_base10 = as_prover_cvar_field_of_base10 (module Runner.Impl) in
+
+ (* Test value *)
+ let field_element =
+ of_base10
+ "25138500177533925254565157548260087092526215225485178888176592492127995051965"
+ in
+
+ (* Test extracting all bits as field element *)
+ Field.Assert.equal (of_bits field_element 0 (-1)) field_element ;
+
+ (* Test extracting 1st bit as field element *)
+ Field.Assert.equal (of_bits field_element 0 1) (of_base10 "1") ;
+
+ (* Test extracting last bit as field element *)
+ Field.Assert.equal (of_bits field_element 254 255) (of_base10 "0") ;
+
+ (* Test extracting first 12 bits as field element *)
+ Field.Assert.equal (of_bits field_element 0 12) (of_base10 "4029") ;
+
+ (* Test extracting third 16 bits as field element *)
+ Field.Assert.equal (of_bits field_element 32 48) (of_base10 "15384") ;
+
+ (* Test extracting 1st 4 bits as field element *)
+ Field.Assert.equal (of_bits field_element 0 4) (of_base10 "13") ;
+
+ (* Test extracting 5th 4 bits as field element *)
+ Field.Assert.equal (of_bits field_element 20 24) (of_base10 "1") ;
+
+ (* Test extracting first 88 bits as field element *)
+ Field.Assert.equal
+ (of_bits field_element 0 88)
+ (of_base10 "155123280218940970272309181") ;
+
+ (* Test extracting second 88 bits as field element *)
+ Field.Assert.equal
+ (of_bits field_element 88 176)
+ (of_base10 "293068737190883252403551981") ;
+
+ (* Test extracting last crumb as field element *)
+ Field.Assert.equal (of_bits field_element 254 255) (of_base10 "0") ;
+
+ (* Test extracting 2nd to last crumb as field element *)
+ Field.Assert.equal (of_bits field_element 252 254) (of_base10 "3") ;
+
+ (* Test extracting 3rd to last crumb as field element *)
+ Field.Assert.equal (of_bits field_element 250 252) (of_base10 "1") ;
+
+ (* Assert little-endian order *)
+ Field.Assert.equal
+ (of_bits (of_base10 "18446744073709551616" (* 2^64 *)) 64 65)
+ (of_base10 "1") ;
+
+ (* Test invalid range is denied *)
+ assert (is_error (fun () -> of_bits field_element 2 2)) ;
+ assert (is_error (fun () -> of_bits field_element 2 1)) ;
+
+ (* Padding: dummy constraint b/c Kimchi requires at least 2 *)
+ Boolean.Assert.is_true (Field.equal field_element field_element) )
+ in
+ () ) ;
+ ()
diff --git a/src/lib/crypto/kimchi_backend/gadgets/curve_params.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/curve_params.ml.disabled
new file mode 100644
index 00000000000..b482f6e3b48
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/curve_params.ml.disabled
@@ -0,0 +1,210 @@
+(* Elliptic curve public constants *)
+
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+module Snark_intf = Snarky_backendless.Snark_intf
+
+type 'typ ia_points = { acc : 'typ; neg_acc : 'typ }
+
+(* Out of circuit representation of Elliptic curve *)
+type t =
+ { modulus : Bignum_bigint.t (* Elliptic curve base field modulus *)
+ ; order : Bignum_bigint.t (* Elliptic curve group order *)
+ ; a : Bignum_bigint.t (* Elliptic curve a parameter *)
+ ; b : Bignum_bigint.t (* Elliptic curve b parameter *)
+ ; gen : Affine.bignum_point (* Elliptic curve generator point *)
+ ; mutable ia : Affine.bignum_point ia_points
+ (* Initial accumulator point (and its negation) *)
+ }
+
+let ia_of_points (type typ) (acc : typ * typ) (neg_acc : typ * typ) : 
(typ * typ) ia_points =
+ { acc; neg_acc }
+
+let ia_of_strings ((acc_x, acc_y) : string * string)
+ ((neg_acc_x, neg_acc_y) : string * string) =
+ { acc = (Bignum_bigint.of_string acc_x, Bignum_bigint.of_string acc_y)
+ ; neg_acc =
+ (Bignum_bigint.of_string neg_acc_x, Bignum_bigint.of_string neg_acc_y)
+ }
+
+let ia_to_circuit_constants (type field)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = field)
+ (ia : Affine.bignum_point ia_points) : field Affine.t ia_points =
+ { acc = Affine.of_bignum_bigint_coordinates (module Circuit) ia.acc
+ ; neg_acc = Affine.of_bignum_bigint_coordinates (module Circuit) ia.neg_acc
+ }
+
+(* Default, empty curve parameters *)
+let default =
+ { modulus = Bignum_bigint.zero
+ ; order = Bignum_bigint.zero
+ ; a = Bignum_bigint.zero
+ ; b = Bignum_bigint.zero
+ ; gen = (Bignum_bigint.zero, Bignum_bigint.one)
+ ; ia =
+ { acc = (Bignum_bigint.zero, Bignum_bigint.zero)
+ ; neg_acc = (Bignum_bigint.zero, Bignum_bigint.zero)
+ }
+ }
+
+(* In circuit representation of Elliptic curve (public constants) *)
+module InCircuit = struct
+ type parent_t = t
+
+ type 'field t =
+ { bignum : parent_t
+ ; modulus : 'field Foreign_field.standard_limbs
+ ; order : 'field Foreign_field.standard_limbs
+ ; order_bit_length : int
+ ; order_bit_length_const : 'field Snarky_backendless.Cvar.t
+ ; order_minus_one : 'field Foreign_field.Element.Standard.t
+ ; order_minus_one_bits :
+ 'field Snarky_backendless.Cvar.t Snark_intf.Boolean0.t list
+ ; a : 'field Foreign_field.Element.Standard.t
+ ; b : 'field Foreign_field.Element.Standard.t
+ ; gen : 'field Affine.t
+ ; doubles : 'field Affine.t array
+ ; ia : 'field Affine.t ia_points
+ }
+end
+
+let compute_slope_bignum (curve : t) (left : Affine.bignum_point)
+ (right : Affine.bignum_point) : Bignum_bigint.t =
+ let left_x, left_y = left in
+ let right_x, right_y = right in
+
+ let open Bignum_bigint in
+ if equal left_x right_x && equal left_y right_y then
+ (* Compute slope using 1st derivative of sqrt(x^3 + a * x + b)
+ * s' = (3 * Px^2 + a) / (2 * Py)
+ *)
+ let numerator =
+ let point_x_squared = pow left_x (of_int 2) % curve.modulus in
+ let point_3x_squared = of_int 3 * point_x_squared % curve.modulus in
+
+ (point_3x_squared + curve.a) % curve.modulus
+ in
+ let denominator = of_int 2 * left_y % curve.modulus in
+
+ (* Compute inverse of denominator *)
+ let denominator_inv =
+ Common.bignum_bigint_inverse denominator curve.modulus
+ in
+ numerator * denominator_inv % curve.modulus
+ else
+ (* Computes s = (Ry - Ly)/(Rx - Lx) *)
+ let delta_y = (right_y - left_y) % curve.modulus in
+ let delta_x = (right_x - left_x) % curve.modulus in
+
+ (* Compute delta_x inverse *)
+ let delta_x_inv = Common.bignum_bigint_inverse delta_x curve.modulus in
+
+ delta_y * delta_x_inv % curve.modulus
+
+let double_bignum_point (curve : t) ?slope (point : Affine.bignum_point) :
+ Affine.bignum_point =
+ let open Bignum_bigint in
+ let slope =
+ match slope with
+ | Some slope ->
+ slope
+ | None ->
+ compute_slope_bignum curve point point
+ in
+ let slope_squared = (pow slope @@ of_int 2) % curve.modulus in
+
+ let point_x, point_y = point in
+
+ (* Compute result's x-coordinate: x = s^2 - 2 * Px *)
+ let result_x =
+ let point_x2 = of_int 2 * point_x % curve.modulus in
+ (slope_squared - point_x2) % curve.modulus
+ in
+
+ (* Compute result's y-coordinate: y = s * (Px - x) - Py *)
+ let result_y =
+ let x_diff = (point_x - result_x) % curve.modulus in
+ let x_diff_s = slope * x_diff % curve.modulus in
+ (x_diff_s - point_y)
% curve.modulus + in + + (result_x, result_y) + +let to_circuit_constants (type field) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = field) + ?(use_precomputed_gen_doubles = true) (curve : t) : field InCircuit.t = + let open Circuit in + (* Need to know native field size before we can check if it fits *) + Foreign_field.check_modulus_bignum_bigint (module Circuit) curve.modulus ; + Foreign_field.check_modulus_bignum_bigint (module Circuit) curve.order ; + let order_bit_length = Common.bignum_bigint_bit_length curve.order in + let order_minus_one = + Bignum_bigint.(if curve.order > zero then curve.order - one else zero) + in + InCircuit. + { bignum = curve + ; modulus = + Foreign_field.bignum_bigint_to_field_const_standard_limbs + (module Circuit) + curve.modulus + ; order = + Foreign_field.bignum_bigint_to_field_const_standard_limbs + (module Circuit) + curve.order + ; order_bit_length + ; order_bit_length_const = + (let const_len = Field.(constant @@ Constant.of_int order_bit_length) in + let var_len = + exists Field.typ ~compute:(fun () -> + Circuit.Field.Constant.of_int order_bit_length ) + in + Field.Assert.equal const_len var_len ; + const_len ) + ; order_minus_one = + Foreign_field.Element.Standard.check_here_const_of_bignum_bigint + (module Circuit) + order_minus_one + ; order_minus_one_bits = + Common.bignum_bigint_unpack_as_unchecked_consts + (module Circuit) + order_minus_one + ; a = + Foreign_field.Element.Standard.check_here_const_of_bignum_bigint + (module Circuit) + curve.a + ; b = + Foreign_field.Element.Standard.check_here_const_of_bignum_bigint + (module Circuit) + curve.b + ; gen = + Affine.check_here_const_of_bignum_bigint_coordinates + (module Circuit) + curve.gen + ; doubles = + ( if use_precomputed_gen_doubles then ( + (* Precompute 2^i * curve.gen, 0 <= i < curve.order_bit_length *) + let doubles = + Array.init order_bit_length (fun _i -> + Affine.const_zero (module Circuit) ) + in + let point = ref curve.gen in + for i = 0 to order_bit_length - 1 do + point := double_bignum_point curve !point ; + doubles.(i) <- + Affine.check_here_const_of_bignum_bigint_coordinates + (module Circuit) + !point + done ; + doubles ) + else [||] ) + ; ia = + { acc = + Affine.check_here_const_of_bignum_bigint_coordinates + (module Circuit) + curve.ia.acc + ; neg_acc = + Affine.check_here_const_of_bignum_bigint_coordinates + (module Circuit) + curve.ia.neg_acc + } + } diff --git a/src/lib/crypto/kimchi_backend/gadgets/dune b/src/lib/crypto/kimchi_backend/gadgets/dune index a8864d6dfa2..92d6d4bfcee 100644 --- a/src/lib/crypto/kimchi_backend/gadgets/dune +++ b/src/lib/crypto/kimchi_backend/gadgets/dune @@ -6,11 +6,14 @@ (preprocess (pps ppx_version ppx_jane)) (libraries ;; opam libraries + bignum.bigint core_kernel + digestif ppx_inline_test.config + zarith ;; local libraries kimchi_backend.common kimchi_backend.pasta kimchi_gadgets_test_runner - snarky.backendless -)) + mina_stdlib + snarky.backendless)) diff --git a/src/lib/crypto/kimchi_backend/gadgets/ec_group.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/ec_group.ml.disabled new file mode 100644 index 00000000000..a1ec37040a1 --- /dev/null +++ b/src/lib/crypto/kimchi_backend/gadgets/ec_group.ml.disabled @@ -0,0 +1,4005 @@ +open Core_kernel +module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint +module Snark_intf = Snarky_backendless.Snark_intf + +let basic_tests_enabled = true + +let scalar_mul_tests_enabled = true + +(* Array to tuple helper *) +let tuple9_of_array array = + match 
array with
+ | [| a1; a2; a3; a4; a5; a6; a7; a8; a9 |] ->
+ (a1, a2, a3, a4, a5, a6, a7, a8, a9)
+ | _ ->
+ assert false
+
+(* Helper to check if point is on the elliptic curve: y^2 = x^3 + a * x + b *)
+let is_on_curve_bignum_point (curve : Curve_params.t)
+ (point : Affine.bignum_point) : bool =
+ let x, y = point in
+ Bignum_bigint.(
+ zero
+ = (pow y (of_int 2) - (pow x (of_int 3) + (curve.a * x) + curve.b))
+ % curve.modulus)
+
+(* Gadget for (partial) elliptic curve group addition over foreign field
+ *
+ * Given input points L and R, constrains that
+ * s = (Ry - Ly)/(Rx - Lx) mod f
+ * x = s^2 - Lx - Rx mod f
+ * y = s * (Rx - x) - Ry mod f
+ *
+ * where f is the foreign field modulus.
+ * See p. 348 of "Introduction to Modern Cryptography" by Katz and Lindell
+ *
+ * Preconditions and limitations:
+ * L != R
+ * Lx != Rx (no invertibility)
+ * L and R are not O (the point at infinity)
+ *
+ * External checks: (not counting inputs and output)
+ * Bound checks: 6
+ * Multi-range-checks: 3
+ * Compact-range-checks: 3
+ * Total range-checks: 12
+ *
+ * Rows: (not counting inputs/outputs and constants)
+ * Group addition: 13
+ * Bound additions: 12
+ * Multi-range-checks: 48
+ * Total: 73
+ *
+ * Supported group axioms:
+ * Closure
+ * Associativity
+ *
+ * Note: We elide the Identity property because it is costly in circuit
+ * and we don't need it for our application. By doing this we also
+ * lose Invertibility, which we also don't need for our goals.
+ *)
+let add (type f) (module Circuit : Snark_intf.Run with type field = f)
+ (external_checks : f Foreign_field.External_checks.t)
+ (curve : f Curve_params.InCircuit.t) (left_input : f Affine.t)
+ (right_input : f Affine.t) : f Affine.t =
+ let open Circuit in
+ (* TODO: Remove sanity checks if this API is not public facing *)
+ as_prover (fun () ->
+ (* Sanity check that two points are not equal *)
+ assert (
+ not (Affine.equal_as_prover (module Circuit) left_input right_input) ) ;
+ (* Sanity check that both points are not infinity *)
+ assert (
+ not
+ (Affine.equal_as_prover
+ (module Circuit)
+ left_input
+ (Affine.const_zero (module Circuit)) ) ) ;
+ assert (
+ not
+ (Affine.equal_as_prover
+ (module Circuit)
+ right_input
+ (Affine.const_zero (module Circuit)) ) ) ) ;
+
+ (* Unpack coordinates *)
+ let left_x, left_y = Affine.to_coordinates left_input in
+ let right_x, right_y = Affine.to_coordinates right_input in
+
+ (* TODO: Remove sanity checks if this API is not public facing *)
+ (* Sanity check that x-coordinates are not equal (i.e.
we don't support Invertibility) *)
+ as_prover (fun () ->
+ assert (
+ not
+ (Foreign_field.Element.Standard.equal_as_prover
+ (module Circuit)
+ left_x right_x ) ) ) ;
+
+ (* Compute witness values *)
+ let ( slope0
+ , slope1
+ , slope2
+ , result_x0
+ , result_x1
+ , result_x2
+ , result_y0
+ , result_y1
+ , result_y2 ) =
+ exists (Typ.array ~length:9 Field.typ) ~compute:(fun () ->
+ let left_x =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ left_x
+ in
+ let left_y =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ left_y
+ in
+ let right_x =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ right_x
+ in
+ let right_y =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ right_y
+ in
+
+ (* Compute slope and slope squared *)
+ let slope =
+ Curve_params.compute_slope_bignum curve.bignum (left_x, left_y)
+ (right_x, right_y)
+ in
+
+ let slope_squared =
+ Bignum_bigint.((pow slope @@ of_int 2) % curve.bignum.modulus)
+ in
+
+ (* Compute result's x-coordinate: x = s^2 - Lx - Rx *)
+ let result_x =
+ Bignum_bigint.(
+ let slope_squared_x =
+ (slope_squared - left_x) % curve.bignum.modulus
+ in
+ (slope_squared_x - right_x) % curve.bignum.modulus)
+ in
+
+ (* Compute result's y-coordinate: y = s * (Rx - x) - Ry *)
+ let result_y =
+ Bignum_bigint.(
+ let x_diff = (right_x - result_x) % curve.bignum.modulus in
+ let x_diff_s = slope * x_diff % curve.bignum.modulus in
+ (x_diff_s - right_y) % curve.bignum.modulus)
+ in
+
+ (* Convert from Bignums to field elements *)
+ let slope0, slope1, slope2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ slope
+ in
+ let result_x0, result_x1, result_x2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ result_x
+ in
+ let result_y0, result_y1, result_y2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ result_y
+ in
+
+ (* Return and convert back to Cvars *)
+ [| slope0
+ ; slope1
+ ; slope2
+ ; result_x0
+ ; result_x1
+ ; result_x2
+ ; result_y0
+ ; result_y1
+ ; result_y2
+ |] )
+ |> tuple9_of_array
+ in
+
+ (* Convert slope and result into foreign field elements *)
+ let slope =
+ Foreign_field.Element.Standard.of_limbs (slope0, slope1, slope2)
+ in
+ let result_x =
+ Foreign_field.Element.Standard.of_limbs (result_x0, result_x1, result_x2)
+ in
+ let result_y =
+ Foreign_field.Element.Standard.of_limbs (result_y0, result_y1, result_y2)
+ in
+
+ (* C1: Constrain computation of slope squared *)
+ let slope_squared =
+ (* s * s = s^2 *)
+ Foreign_field.mul (module Circuit) external_checks slope slope curve.modulus
+ in
+ (* Bounds 1: Left input (slope) bound check below.
+ * Right input (slope) equal to left input (already checked)
+ * Result (s^2) bound check already tracked by Foreign_field.mul.
+ *)
+ Foreign_field.External_checks.append_bound_check external_checks
+ (slope0, slope1, slope2) ;
+
+ (*
+ * Constrain result x-coordinate computation: x = s^2 - Lx - Rx with length 2 chain
+ *)
+
+ (* C2: Constrain s^2 - x = sΔx *)
+ let slope_squared_minus_x =
+ Foreign_field.sub
+ (module Circuit)
+ ~full:false slope_squared result_x curve.modulus
+ in
+
+ (* Bounds 2: Left input (s^2) bound check covered by (Bounds 1).
+ * Right input (x) bound check value is gadget output (checked by caller).
+ * Result is chained (no bound check required).
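+ * (Editorial aside: "chained" means the intermediate result feeds the next
+ * addition/subtraction row directly, so no separate bound check is emitted
+ * for it here.)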
+ *) + + (* C3: Constrain sΔx - Lx = Rx *) + let expected_right_x = + Foreign_field.sub + (module Circuit) + ~full:false slope_squared_minus_x left_x curve.modulus + in + + (* Bounds 3: Left input (sΔx) is chained (no bound check required). + * Right input (Lx) is gadget input (checked by caller). + * Result is (Rx) gadget input (checked by caller) + *) + + (* Copy expected_right_x to right_x *) + Foreign_field.Element.Standard.assert_equal + (module Circuit) + expected_right_x right_x ; + + (* Continue the chain to length 4 by computing (Rx - x) * s (used later) *) + + (* C4: Constrain Rx - x = RxΔ *) + let right_delta = + Foreign_field.sub + (module Circuit) + ~full:false expected_right_x result_x curve.modulus + in + (* Bounds 4: Left input (Rx) is chained (no bound check required). + * Right input (x) is gadget output (checked by caller). + * Addition chain result (right_delta) bound check added below. + *) + Foreign_field.External_checks.append_bound_check external_checks + @@ Foreign_field.Element.Standard.to_limbs right_delta ; + + (* C5: RxΔ * s = RxΔs *) + let right_delta_s = + Foreign_field.mul + (module Circuit) + external_checks right_delta slope curve.modulus + in + + (* Bounds 5: Left input (right_delta) already covered by (Bounds 4) + * Right input (slope) already covered by (Bounds 1). + * Result bound check already tracked by Foreign_field.mul. + *) + + (* + * Constrain slope computation: s = (Ry - Ly)/(Rx - Lx) + * with (Rx - Lx) * s + Ly = Ry + *) + + (* C6: Rx - Lx = Δx *) + let delta_x = + Foreign_field.sub (module Circuit) ~full:false right_x left_x curve.modulus + in + (* Bounds 6: Inputs (Rx and Lx) are gadget inputs (checked by caller). + * Addition chain result (delta_x) bound check below. + *) + Foreign_field.External_checks.append_bound_check external_checks + @@ Foreign_field.Element.Standard.to_limbs delta_x ; + + (* C7: Δx * s = Δxs *) + let delta_x_s = + Foreign_field.mul + (module Circuit) + external_checks delta_x slope curve.modulus + in + + (* Bounds 7: Left input (delta_x) already covered by (Bounds 6) + * Right input (slope) already covered by (Bounds 1). + * Result bound check tracked by Foreign_field.mul. + *) + + (* + * Finish constraining slope in new chain (above mul ended chain) + *) + + (* C8: Δxs + Ly = Ry *) + let expected_right_y = + Foreign_field.add + (module Circuit) + ~full:false delta_x_s left_y curve.modulus + in + + (* Bounds 8: Left input (delta_x_s) check is tracked by (Bounds 7). + * Right input bound check value is gadget input (checked by caller). + * Result is chained (no check required) + *) + + (* Copy expected_right_y to right_y *) + Foreign_field.Element.Standard.assert_equal + (module Circuit) + expected_right_y right_y ; + + (* + * Constrain result y-coordinate computation: y = (Rx - x) * s - Ry + * with Ry + y = (Rx - x) * s + *) + + (* C9: Ry + y = RxΔs *) + let expected_right_delta_s = + Foreign_field.add ~full:false + (module Circuit) + expected_right_y result_y curve.modulus + in + (* Result row *) + Foreign_field.result_row + (module Circuit) + ~label:"Ec_group.add_expected_right_delta_s" expected_right_delta_s ; + (* Bounds 9: Left input (Ry) check is chained (no check required). + * Right input (y) check value is gadget output (checked by caller). + * Addition chain result (expected_right_delta_s) check already covered by (Bounds 5). 
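+ * Summary (editorial aside): constraints C1-C9 together enforce the affine
+ * addition formulas from the header comment, namely
+ * s * (Rx - Lx) = Ry - Ly, x = s^2 - Lx - Rx and y = s * (Rx - x) - Ry,
+ * all modulo the foreign field modulus f.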
+ *)
+ (* Copy expected_right_delta_s to right_delta_s *)
+ Foreign_field.Element.Standard.assert_equal
+ (module Circuit)
+ expected_right_delta_s right_delta_s ;
+
+ (* Return result point *)
+ Affine.of_coordinates (result_x, result_y)
+
+(* Gadget for (partial) elliptic curve group doubling over foreign field
+ *
+ * Given input point P, constrains that
+ * s' = (3 * Px^2 + a) / (2 * Py) mod f
+ * x = s'^2 - 2 * Px mod f
+ * y = s' * (Px - x) - Py mod f
+ *
+ * where f is the foreign field modulus.
+ * See p. 348 of "Introduction to Modern Cryptography" by Katz and Lindell
+ *
+ * Preconditions and limitations:
+ * P is not O (the point at infinity)
+ *
+ * External checks: (not counting inputs and output)
+ * Bound checks: 8 (+1 when a != 0)
+ * Multi-range-checks: 4
+ * Compact-range-checks: 4
+ * Total range-checks: 16
+ *
+ * Rows: (not counting inputs/outputs and constants)
+ * Group double: 16 (+2 when a != 0)
+ * Bound additions: 16
+ * Multi-range-checks: 64
+ * Total: 96
+ *
+ * Note: See group addition notes (above) about group properties supported by this implementation
+ *)
+let double (type f) (module Circuit : Snark_intf.Run with type field = f)
+ (external_checks : f Foreign_field.External_checks.t)
+ (curve : f Curve_params.InCircuit.t) (point : f Affine.t) : f Affine.t =
+ let open Circuit in
+ (* TODO: Remove sanity checks if this API is not public facing *)
+ as_prover (fun () ->
+ (* Sanity check that point is not infinity *)
+ assert (
+ not
+ (Affine.equal_as_prover
+ (module Circuit)
+ point
+ (Affine.const_zero (module Circuit)) ) ) ) ;
+
+ (* Unpack coordinates *)
+ let point_x, point_y = Affine.to_coordinates point in
+
+ (* Compute witness values *)
+ let ( slope0
+ , slope1
+ , slope2
+ , result_x0
+ , result_x1
+ , result_x2
+ , result_y0
+ , result_y1
+ , result_y2 ) =
+ exists (Typ.array ~length:9 Field.typ) ~compute:(fun () ->
+ let point =
+ ( Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ point_x
+ , Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ point_y )
+ in
+
+ (* Compute slope *)
+ let slope =
+ Curve_params.compute_slope_bignum curve.bignum point point
+ in
+
+ (* Compute result point *)
+ let result_x, result_y =
+ Curve_params.double_bignum_point curve.bignum ~slope point
+ in
+
+ (* Convert from Bignums to field elements *)
+ let slope0, slope1, slope2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ slope
+ in
+ let result_x0, result_x1, result_x2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ result_x
+ in
+ let result_y0, result_y1, result_y2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ result_y
+ in
+
+ (* Return and convert back to Cvars *)
+ [| slope0
+ ; slope1
+ ; slope2
+ ; result_x0
+ ; result_x1
+ ; result_x2
+ ; result_y0
+ ; result_y1
+ ; result_y2
+ |] )
+ |> tuple9_of_array
+ in
+
+ (* Convert slope and result into foreign field elements *)
+ let slope =
+ Foreign_field.Element.Standard.of_limbs (slope0, slope1, slope2)
+ in
+ let result_x =
+ Foreign_field.Element.Standard.of_limbs (result_x0, result_x1, result_x2)
+ in
+ let result_y =
+ Foreign_field.Element.Standard.of_limbs (result_y0, result_y1, result_y2)
+ in
+
+ (* C1: Constrain computation of slope squared *)
+ let slope_squared =
+ (* s * s = s^2 *)
+ Foreign_field.mul (module Circuit) external_checks slope slope curve.modulus
+ in
+ (* Bounds 1: Left input (slope) checked below.
+ * Right input (slope) is equal to left input (no check required). + * Result (slope_squared) check already tracked by Foreign_field.mul. + *) + Foreign_field.External_checks.append_bound_check external_checks + (slope0, slope1, slope2) ; + + (* C2: Constrain result x-coordinate computation: x = s^2 - 2 * Px with length 2 chain + * with s^2 - x = 2 * Px + *) + let point_x2 = + (* s^2 - x = 2Px *) + Foreign_field.sub + (module Circuit) + ~full:false slope_squared result_x curve.modulus + in + + (* Bounds 2: Left input (s^2) check covered by (Bounds 1). + * Right input (x) check value is gadget output (checked by caller). + * Result (2Px) chained (no check required). + *) + + (* C3: 2Px - Px = Px *) + let expected_point_x = + Foreign_field.sub + (module Circuit) + ~full:false point_x2 point_x curve.modulus + in + (* Bounds 3: Left input (2Px) is chained (no check required). + * Right input (Px) is gadget input (checked by caller). + * Result (Px) chained (no check required). + *) + (* Copy expected_point_x to point_x *) + Foreign_field.Element.Standard.assert_equal + (module Circuit) + expected_point_x point_x ; + + (* + * Continue the chain to length 4 by computing (Px - x) * s (used later) + *) + + (* C4: Px - x = Δx *) + let delta_x = + Foreign_field.sub + (module Circuit) + ~full:false expected_point_x result_x curve.modulus + in + (* Bounds 4: Left input (Px) is chained (no check required). + * Right input (x) check value is gadget output (checked by caller). + * Addition chain result (delta_x) bound check added below. + *) + Foreign_field.External_checks.append_bound_check external_checks + @@ Foreign_field.Element.Standard.to_limbs delta_x ; + + (* C5: Δx * s = Δxs *) + let delta_xs = + Foreign_field.mul + (module Circuit) + external_checks delta_x slope curve.modulus + in + + (* Bounds 5: Left input (delta_x) check already covered by (Bounds 4). + * Right input (slope) already covered by (Bounds 1). + * Result (delta_xs) bound check already tracked by Foreign_field.mul. + *) + + (* + * Constrain rest of y = s' * (Px - x) - Py and part of slope computation + * s = (3 * Px^2 + a)/(2 * Py) in length 3 chain + *) + + (* C6: Δxs - y = Py *) + let expected_point_y = + Foreign_field.sub + (module Circuit) + ~full:false delta_xs result_y curve.modulus + in + (* Bounds 6: Left input (delta_xs) checked by (Bound 5). + * Right input is gadget output (checked by caller). + * Addition result (Py) is chained (no check required). + *) + (* Copy expected_point_y to point_y *) + Foreign_field.Element.Standard.assert_equal + (module Circuit) + expected_point_y point_y ; + + (* C7: Py + Py = 2Py *) + let point_y2 = + Foreign_field.add (module Circuit) ~full:false point_y point_y curve.modulus + in + + (* Bounds 7: Left input (Py) is gadget input (checked by caller). + * Right input (Py) is gadget input (checked by caller). + * Addition result (2Py) chained (no check required). + *) + + (* C8: 2Py * s = 2Pys *) + let point_y2s = + Foreign_field.mul + (module Circuit) + external_checks point_y2 slope curve.modulus + in + (* Bounds 8: Left input (point_y2) bound check added below. + * Right input (slope) already checked by (Bound 1). + * Result (2Pys) bound check already tracked by Foreign_field.mul. 
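+ * Summary (editorial aside): constraints C1-C8 enforce x = s^2 - 2 * Px,
+ * y = s * (Px - x) - Py and 2 * Py * s = 3 * Px^2 (+ a, handled by the
+ * constraints below), matching the doubling formulas in the header comment.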
+   *)
+  Foreign_field.External_checks.append_bound_check external_checks
+  @@ Foreign_field.Element.Standard.to_limbs point_y2 ;
+
+  (*
+   * Constrain the rest of the slope computation s = (3 * Px^2 + a)/(2 * Py)
+   *)
+
+  (* C9: 2Px + Px = 3Px *)
+  let point_x3 =
+    Foreign_field.add
+      (module Circuit)
+      ~full:false point_x2 point_x curve.modulus
+  in
+  (* Bounds 9: Left input (point_x2) bound check added below.
+   *           Right input (Px) is gadget input (checked by caller).
+   *           Result (3Px) is chained (no check required).
+   *)
+  Foreign_field.External_checks.append_bound_check external_checks
+  @@ Foreign_field.Element.Standard.to_limbs point_x2 ;
+
+  (* Check if the elliptic curve a parameter requires more constraints
+   * to be added in order to add the final a term (e.g. 3Px^2 + a where a != 0).
+   *)
+  ( if Bignum_bigint.(curve.bignum.a = zero) then (
+      (* C10a: 3Px * Px = 3Px^2 *)
+      let point_x3_squared =
+        Foreign_field.mul
+          (module Circuit)
+          external_checks ~bound_check_result:false point_x3 point_x
+          curve.modulus
+      in
+
+      (* Bounds 10a: Left input (point_x3) bound check added below.
+       *             Right input (Px) is gadget input (checked by caller).
+       *             Result (3Px^2) bound check already covered by (Bounds 8) since
+       *             point_x3_squared is equal to point_y2s.
+       *)
+
+      (* Add point_x3 bound check (Bounds 10a) *)
+      Foreign_field.External_checks.append_bound_check external_checks
+      @@ Foreign_field.Element.Standard.to_limbs point_x3 ;
+
+      (* Copy point_x3_squared to point_y2s *)
+      Foreign_field.Element.Standard.assert_equal
+        (module Circuit)
+        point_x3_squared point_y2s )
+    else
+      (* C10b: 3Px * Px = 3Px^2 *)
+      let point_x3_squared =
+        Foreign_field.mul
+          (module Circuit)
+          external_checks point_x3 point_x curve.modulus
+      in
+
+      (* Bounds 10b: Left input (point_x3) bound check added below.
+       *             Right input (Px) is gadget input (checked by caller).
+       *             Result (3Px^2) bound check already covered by Foreign_field.mul.
+       *)
+
+      (* Add point_x3 bound check (Bounds 10b) *)
+      Foreign_field.External_checks.append_bound_check external_checks
+      @@ Foreign_field.Element.Standard.to_limbs point_x3 ;
+
+      (* Add curve constant a and constrain the rest of the slope computation
+       * with s = (3 * Px^2 + a)/(2 * Py)
+       *)
+
+      (* C11: 3Px^2 + a = 3Px^2a *)
+      let point_x3_squared_plus_a =
+        Foreign_field.add
+          (module Circuit)
+          ~full:false point_x3_squared curve.a curve.modulus
+      in
+      (* Bounds 11: Left input (point_x3_squared) already tracked by (Bounds 10b).
+       *            Right input (curve.a) is public constant.
+       *            Result (3Px^2a) bound check already covered by (Bounds 8) since
+       *            point_x3_squared_plus_a = point_y2s.
+       *)
+      (* Result row *)
+      Foreign_field.result_row
+        (module Circuit)
+        ~label:"Ec_group.double_point_x3_squared_plus_a" point_x3_squared_plus_a ;
+
+      (* Copy point_x3_squared_plus_a to point_y2s *)
+      Foreign_field.Element.Standard.assert_equal
+        (module Circuit)
+        point_x3_squared_plus_a point_y2s ) ;
+
+  (* Return result point *)
+  Affine.of_coordinates (result_x, result_y)
+
+(* Gadget for elliptic curve group negation
+ *
+ *   Note: this gadget does not create a Zero row for the negated result.
+ *   If it is not already present in the witness, the caller is responsible
+ *   for placing the negated result somewhere (e.g. in a Zero row or elsewhere).
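+ *
+ *   Sketch of the underlying arithmetic (illustrative only, not part of
+ *   this gadget): in affine coordinates negation is -(x, y) = (x, f - y),
+ *   since y + (f - y) = 0 mod f. For example, over f = 13 the negation of
+ *   (2, 3) is (2, 10).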
+ *)
+let negate (type f) (module Circuit : Snark_intf.Run with type field = f)
+    (curve : f Curve_params.InCircuit.t) (point : f Affine.t) : f Affine.t =
+  let x, y = Affine.to_coordinates point in
+  (* Zero constant foreign field element *)
+  let zero =
+    Foreign_field.Element.Standard.of_bignum_bigint
+      (module Circuit)
+      Bignum_bigint.zero
+  in
+  (* C1: Constrain computation of the negated point *)
+  let neg_y =
+    (* neg_y = 0 - y *)
+    Foreign_field.sub (module Circuit) ~full:false zero y curve.modulus
+  in
+
+  (* Bounds 1: Left input is public constant
+   *           Right input parameter (checked by caller)
+   *           Result bound is part of output (checked by caller)
+   *)
+  Affine.of_coordinates (x, neg_y)
+
+(* Select initial EC scalar mul accumulator value ia using a trustless,
+ * nothing-up-my-sleeve deterministic algorithm
+ *
+ *   Simple hash-to-curve algorithm
+ *
+ *   Trustlessly select an elliptic curve point for which no one knows the discrete logarithm!
+ *)
+let compute_ia_points ?(point : Affine.bignum_point option)
+    (curve : Curve_params.t) : Affine.bignum_point Curve_params.ia_points =
+  (* Hash generator point to get candidate x-coordinate *)
+  let open Digestif.SHA256 in
+  let ctx = init () in
+
+  let start_point =
+    match point with Some point -> point | None -> curve.gen
+  in
+
+  assert (is_on_curve_bignum_point curve start_point) ;
+
+  (* Hash to a (possibly valid) elliptic curve point *)
+  let hash_to_curve_point ctx (point : Affine.bignum_point ref) =
+    (* Hash curve point *)
+    let x, y = !point in
+    let ctx = feed_string ctx @@ Common.bignum_bigint_unpack_bytes x in
+    let ctx = feed_string ctx @@ Common.bignum_bigint_unpack_bytes y in
+    let bytes = get ctx |> to_raw_string in
+
+    (* Initialize x-coordinate from hash output *)
+    let x = Bignum_bigint.(Common.bignum_bigint_of_bin bytes % curve.modulus) in
+
+    (* Compute y-coordinate: y = sqrt(x^3 + a * x + b) *)
+    let x3 = Bignum_bigint.(pow x (of_int 3) % curve.modulus) in
+    let ax = Bignum_bigint.(curve.a * x % curve.modulus) in
+    let x3ax = Bignum_bigint.((x3 + ax) % curve.modulus) in
+    let y2 = Bignum_bigint.((x3ax + curve.b) % curve.modulus) in
+    let y = Common.bignum_bigint_sqrt_mod y2 curve.modulus in
+
+    (* Sanity check *)
+    ( if Bignum_bigint.(not (equal y zero)) then
+        let y2_computed = Bignum_bigint.(y * y % curve.modulus) in
+        assert (Bignum_bigint.(y2_computed = y2)) ) ;
+
+    (* Return possibly valid curve point *)
+    (x, y)
+  in
+
+  (* Deterministically search for a valid curve point *)
+  let candidate_point = ref (hash_to_curve_point ctx (ref start_point)) in
+
+  while not (is_on_curve_bignum_point curve !candidate_point) do
+    candidate_point := hash_to_curve_point ctx candidate_point
+  done ;
+
+  (* We have a valid curve point! *)
+  let point = !candidate_point in
+
+  (* Compute negated point (i.e. the other y-root) *)
+  let neg_point =
+    let x, y = point in
+    let neg_y = Bignum_bigint.(neg y % curve.modulus) in
+    (x, neg_y)
+  in
+
+  Curve_params.ia_of_points point neg_point
+
+(* Gadget to constrain that a point is on the elliptic curve specified by
+ *
+ *     y^2 = x^3 + ax + b mod p
+ *
+ *   where a, b are the curve parameters and p is the base field modulus (curve.modulus)
+ *
+ *   External checks: (not counting inputs and output)
+ *     Bound checks:         3 (+1 when a != 0 and +1 when b != 0)
+ *     Multi-range-checks:   3
+ *     Compact-range-checks: 3
+ *     Total range-checks:   9
+ *
+ *   Rows: (not counting inputs/outputs and constants)
+ *     Curve check:        8 (+1 when a != 0 and +2 when b != 0)
+ *     Bound additions:    6
+ *     Multi-range-checks: 36
+ *     Total:              50
+ *
+ *   Constants:
+ *     Curve constants:        10 (for 256-bit curve; one-time cost per circuit)
+ *     Pre-computing doubles: 767 (for 256-bit curve; one-time cost per circuit)
+ *)
+let is_on_curve (type f) (module Circuit : Snark_intf.Run with type field = f)
+    (external_checks : f Foreign_field.External_checks.t)
+    (curve : f Curve_params.InCircuit.t) (point : f Affine.t) =
+  let x, y = Affine.to_coordinates point in
+
+  (* C1: x^2 = x * x *)
+  let x_squared =
+    Foreign_field.mul (module Circuit) external_checks x x curve.modulus
+  in
+
+  (* Bounds 1: Left and right inputs are gadget input (checked by caller).
+   *           Result bound check already tracked by Foreign_field.mul
+   *)
+
+  (* C2: Optionally constrain addition of curve parameter a *)
+  let x_squared_a =
+    if not Bignum_bigint.(curve.bignum.a = zero) then (
+      (* x^2 + a *)
+      let x_squared_a =
+        Foreign_field.add
+          (module Circuit)
+          ~full:false x_squared curve.a curve.modulus
+      in
+      (* Bounds 2: Left input already checked by (Bounds 1)
+       *           Right input public parameter (no check necessary)
+       *           Result bound check below
+       *)
+      (* Add x_squared_a bound check *)
+      Foreign_field.External_checks.append_bound_check external_checks
+      @@ Foreign_field.Element.Standard.to_limbs x_squared_a ;
+      x_squared_a )
+    else x_squared
+  in
+
+  (* C3: x^3 + ax = (x^2 + a) * x *)
+  let x_cubed_ax =
+    Foreign_field.mul
+      (module Circuit)
+      external_checks x_squared_a x curve.modulus
+  in
+
+  (* Bounds 3: Left input already checked by (Bounds 2) or (Bounds 1)
+   *           Right input is gadget input (checked by caller).
+   *           Result bound check already tracked by Foreign_field.mul
+   *)
+
+  (* C4: Optionally constrain addition of curve parameter b *)
+  let x_cubed_ax_b =
+    if not Bignum_bigint.(curve.bignum.b = zero) then (
+      (* (x^2 + a) * x + b *)
+      let x_cubed_ax_b =
+        Foreign_field.add
+          (module Circuit)
+          ~full:false x_cubed_ax curve.b curve.modulus
+      in
+      (* Result row *)
+      Foreign_field.result_row
+        (module Circuit)
+        ~label:"Ec_group.is_on_curve_x_cubed_ax_b" x_cubed_ax_b ;
+
+      (* Bounds 4: Left input already checked by (Bounds 3)
+       *           Right input public parameter (no check necessary)
+       *           Result bound check below
+       *)
+
+      (* Add x_cubed_ax_b bound check *)
+      Foreign_field.External_checks.append_bound_check external_checks
+      @@ Foreign_field.Element.Standard.to_limbs x_cubed_ax_b ;
+
+      x_cubed_ax_b )
+    else x_cubed_ax
+  in
+
+  (* C5: y^2 = y * y *)
+  let y_squared =
+    Foreign_field.mul (module Circuit) external_checks y y curve.modulus
+  in
+
+  (* Bounds 5: Left and right inputs are gadget input (checked by caller)
+   *           Result bound check already tracked by Foreign_field.mul
+   *)
+
+  (* Copy y_squared to x_cubed_ax_b *)
+  Foreign_field.Element.Standard.assert_equal
+    (module Circuit)
+    y_squared x_cubed_ax_b ;
+  ()
+
+(* Gadget to constrain that the initial accumulator (ia) point is on the elliptic curve and the computation of its negation.
+ *   Note: The value of the ia itself is a deterministically generated public constant (this computation is not checked),
+ *   so using this gadget is only required in some situations.
+ *)
+let check_ia (type f) (module Circuit : Snark_intf.Run with type field = f)
+    (external_checks : f Foreign_field.External_checks.t)
+    (curve : f Curve_params.InCircuit.t) (ia : f Affine.t Curve_params.ia_points)
+    =
+  (* C1: Check that initial accumulator point is on curve *)
+  is_on_curve (module Circuit) external_checks curve ia.acc ;
+
+  (* C2: Constrain computation of the negated initial accumulator point *)
+  let neg_init_acc = negate (module Circuit) curve ia.acc in
+  (* Result row *)
+  Foreign_field.result_row
+    (module Circuit)
+    ~label:"Ec_group.check_ia_neg_init_y"
+  @@ Affine.y neg_init_acc ;
+
+  (* Bounds 1: Input is public constant
+   *           Result is part of input (checked by caller)
+   *)
+
+  (* C3: Copy computed_neg_init_acc to ia.neg_acc *)
+  Affine.assert_equal (module Circuit) neg_init_acc ia.neg_acc ;
+
+  (* P is on curve <=> -P is on curve, thus we do not need to check
+   * ia.neg_acc is on curve *)
+  ()
+
+(* Gadget for elliptic curve group scalar multiplication over foreign field
+ *
+ *   Given input point P and scalar field element s, computes and constrains that
+ *
+ *     Q = s0 * P + s1 * 2 * P + ... + sz * 2^z * P
+ *
+ *   where s0, s1, ..., sz are the bits of the binary expansion of s, (+) is
+ *   group addition and the terms P, 2 * P, ..., 2^z * P are obtained with group doubling.
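+ *
+ *   For example (illustrative), the scalar s = 11 has little-endian bits
+ *   (s0, s1, s2, s3) = (1, 1, 0, 1), so Q = P + 2 * P + 8 * P = 11 * P.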
+ *
+ *   Inputs:
+ *      external_checks := Context to track required external checks
+ *      curve           := Elliptic curve parameters
+ *      scalar          := Boolean list of scalar bits
+ *      point           := Affine point to scale
+ *
+ *   Preconditions and limitations:
+ *      P is not O (the point at infinity)
+ *      P's coordinates are bounds checked
+ *      P is on the curve
+ *      s is not zero
+ *      ia point is randomly selected and constrained to be on the curve
+ *      ia negated point computation is constrained
+ *      ia coordinates are bounds checked
+ *
+ *   External checks: (per crumb, not counting inputs and output)
+ *     Bound checks:         42 (+1 when a != 0)
+ *     Multi-range-checks:   17
+ *     Compact-range-checks: 17
+ *     Total range-checks:   76
+ *
+ *   Rows: (per crumb, not counting inputs/outputs and constants)
+ *     Scalar multiplication: ~84 (+2 when a != 0)
+ *     Bound additions:        84
+ *     Multi-range-checks:    308
+ *     Total:                 476
+ *
+ *   Constants:
+ *     Curve constants:        10 (for 256-bit curve; one-time cost per circuit)
+ *     Pre-computing doubles: 767 (for 256-bit curve; one-time cost per circuit)
+ *)
+let scalar_mul (type f) (module Circuit : Snark_intf.Run with type field = f)
+    (external_checks : f Foreign_field.External_checks.t)
+    (curve : f Curve_params.InCircuit.t) ?(doubles : f Affine.t array option)
+    (scalar : Circuit.Boolean.var list) (point : f Affine.t) : f Affine.t =
+  (* Double-and-add algorithm
+   *   Only used for signature verification, so the simple algorithm suffices.
+   *
+   *     A = O; B = P
+   *     for i in 0..z
+   *         if si == 1
+   *             A = group_add(A, B)
+   *         B = group_double(B)
+   *     return A
+   *
+   *   Optimization:
+   *
+   *     To avoid expensive in-circuit conditional checks for the point at infinity,
+   *     we employ a randomized strategy that avoids adding the identity element
+   *     or the same point to itself.  The strategy works as follows.
+   *
+   *     Since the prover knows the points that it will add and double during
+   *     scaling, the prover could select an initial accumulator point I such that
+   *     the double-and-add algorithm never adds the identity element, same point
+   *     or negated point to itself whilst scaling.
+   *
+   *     The algorithm above is modified to initialize the accumulator to I and
+   *     then (group) subtract I after scaling to compute the final result point.
+   *
+   *     A = I; B = P
+   *     for i in 0..z
+   *         if si == 1
+   *             A = group_add(A, B)
+   *         B = group_double(B)
+   *     return A + -I
+   *
+   *     The prover MUST additionally constrain that
+   *       1) point I is on the curve
+   *       2) I' = -I
+   *
+   *   Simplification:
+   *
+   *     Uniformly and randomly select the initial accumulator point I, instead
+   *     of using the complicated deterministic process.
+   *
+   *     For a z-bit scalar, there are z unique B points.  Each point also has its
+   *     negative, which we cannot add to itself.  Therefore, in total there are
+   *     2z points that we do not want to select as our initial point nor compute
+   *     as an intermediate A point during scaling.  The probability we select or
+   *     compute one of these points is approx 2z^2/n, where n is the order of the
+   *     elliptic curve group.
+   *
+   *     The probability of selecting a bad point is negligible for our applications
+   *     where z is very small (e.g. 256) and n is very large (e.g. 2^256).  Thus,
+   *     we can simply randomly select the initial accumulator I and the
+   *     double-and-add algorithm will succeed with overwhelming probability.
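+   *
+   *     As a worked instance of this bound (illustrative): for z = 256 and
+   *     n ~ 2^256, the failure probability is approx 2 * 256^2 / 2^256 =
+   *     2^17 / 2^256 = 2^-239, which is negligible.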
+   *)
+  let acc, _base =
+    List.foldi scalar ~init:(curve.ia.acc, point) (* (acc, base) *)
+      ~f:(fun i (acc, base) bit ->
+        (* Add: sum = acc + base *)
+        let sum = add (module Circuit) external_checks curve acc base in
+        (* Bounds 1:
+         *   Left input is previous result, so already checked.
+         *   Right input is checked by previous doubling check.
+         *   Initial acc and base are gadget inputs (checked by caller).
+         *   Result bounds check below.
+         *)
+        Foreign_field.External_checks.append_bound_check external_checks
+        @@ Foreign_field.Element.Standard.to_limbs @@ Affine.x sum ;
+        Foreign_field.External_checks.append_bound_check external_checks
+        @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y sum ;
+
+        (* Group double: double_base = base + base *)
+        let double_base =
+          match doubles with
+          | None ->
+              let double_base =
+                double (module Circuit) external_checks curve base
+              in
+              (* Bounds 2:
+               *   Input is previous result, so already checked.
+               *   Initial base is gadget input (checked by caller).
+               *   Result bounds check below.
+               *)
+              Foreign_field.External_checks.append_bound_check external_checks
+              @@ Foreign_field.Element.Standard.to_limbs @@ Affine.x double_base ;
+              Foreign_field.External_checks.append_bound_check external_checks
+              @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y double_base ;
+              double_base
+          | Some doubles ->
+              (* When the base point is public (e.g. the secp256k1 generator) we can
+               * improve performance by passing the doubles in as precomputed
+               * public parameters *)
+              doubles.(i)
+        in
+
+        (* Group add conditionally *)
+        let acc = Affine.if_ (module Circuit) bit ~then_:sum ~else_:acc in
+
+        (acc, double_base) )
+  in
+
+  (* Subtract the initial accumulator point from the accumulator to obtain the final result *)
+  add (module Circuit) external_checks curve acc curve.ia.neg_acc
+
+(* Gadget to check a point is in the subgroup
+ *     nP = O
+ *   where n is the elliptic curve group order and O is the point at infinity
+ *)
+let check_subgroup (type f)
+    (module Circuit : Snark_intf.Run with type field = f)
+    (external_checks : f Foreign_field.External_checks.t)
+    (curve : f Curve_params.InCircuit.t) ?(doubles : f Affine.t array option)
+    (point : f Affine.t) =
+  (* Subgroup check: nP = O
+   *   We don't support the identity element, so instead we check
+   *     ((n - 1) + 1)P = O
+   *     (n - 1)P = -P
+   *)
+
+  (* C1: Compute (n - 1)P *)
+  let n_minus_one_point =
+    scalar_mul
+      (module Circuit)
+      external_checks curve ?doubles curve.order_minus_one_bits point
+  in
+  (* Bounds 1: Left input is public constant (no bounds check required)
+   *           Right input is gadget input (checked by caller)
+   *           Result bound check below
+   *)
+  Foreign_field.External_checks.append_bound_check external_checks
+  @@ Foreign_field.Element.Standard.to_limbs @@ Affine.x n_minus_one_point ;
+  Foreign_field.External_checks.append_bound_check external_checks
+  @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y n_minus_one_point ;
+
+  (* C2: Compute -P *)
+  let minus_point = negate (module Circuit) curve point in
+  (* Result row *)
+  Foreign_field.result_row (module Circuit) ~label:"minus_point_y"
+  @@ Affine.y minus_point ;
+  (* Bounds 2: Input is gadget input (checked by caller)
+   *           Result bound check below
+   *)
+  Foreign_field.External_checks.append_bound_check external_checks
+  @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y minus_point ;
+
+  (* C3: Assert (n - 1)P = -P *)
+  Affine.assert_equal (module Circuit) n_minus_one_point minus_point
+
+(***************)
+(* Group tests *)
+(***************)
+
+let%test_unit "Ec_group.add" =
+  if basic_tests_enabled then
( + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test group add *) + let test_add ?cs (curve : Curve_params.t) (left_input : Affine.bignum_point) + (right_input : Affine.bignum_point) + (expected_result : Affine.bignum_point) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants (module Runner.Impl) curve + in + let left_input = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + left_input + in + let right_input = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + right_input + in + let expected_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + expected_result + in + + (* Create external checks context for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let unused_external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* L + R = S *) + let result = + add + (module Runner.Impl) + unused_external_checks curve left_input right_input + in + + (* Check for expected quantity of external checks *) + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.bounds 6 ) ; + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges + 3 ) ; + assert ( + Mina_stdlib.List.Length.equal + unused_external_checks.compact_multi_ranges 3 ) ; + + (* Check output matches expected result *) + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + result expected_result ) ) ; + () ) + in + + cs + in + + (* Tests for random points *) + let fake_curve5 = + Curve_params.{ default with modulus = Bignum_bigint.of_int 5 } + in + let _cs = + test_add fake_curve5 + (Bignum_bigint.of_int 4, Bignum_bigint.one) (* left_input *) + (Bignum_bigint.of_int 0, Bignum_bigint.of_int 3) (* right_input *) + (Bignum_bigint.of_int 0, Bignum_bigint.of_int 2) + (* expected result *) + in + let _cs = + test_add fake_curve5 + (Bignum_bigint.of_int 2, Bignum_bigint.of_int 3) (* left_input *) + (Bignum_bigint.of_int 1, Bignum_bigint.of_int 0) (* right_input *) + (Bignum_bigint.of_int 1, Bignum_bigint.of_int 0) + (* expected result *) + in + + (* Constraint system reuse tests *) + let fake_curve13 = + Curve_params.{ default with modulus = Bignum_bigint.of_int 13 } + in + let cs = + test_add fake_curve13 + (Bignum_bigint.of_int 3, Bignum_bigint.of_int 8) (* left_input *) + (Bignum_bigint.of_int 5, Bignum_bigint.of_int 11) (* right_input *) + (Bignum_bigint.of_int 4, Bignum_bigint.of_int 10) + (* expected result *) + in + let _cs = + test_add ~cs fake_curve13 + (Bignum_bigint.of_int 10, Bignum_bigint.of_int 4) (* left_input *) + (Bignum_bigint.of_int 12, Bignum_bigint.of_int 7) (* right_input *) + (Bignum_bigint.of_int 3, Bignum_bigint.of_int 0) + (* expected result *) + in + let _cs = + test_add ~cs fake_curve13 + (Bignum_bigint.of_int 8, Bignum_bigint.of_int 6) (* left_input *) + (Bignum_bigint.of_int 2, Bignum_bigint.of_int 1) (* right_input *) + (Bignum_bigint.of_int 12, Bignum_bigint.of_int 8) + (* expected result *) + in + + (* Negative tests *) + let fake_curve9 = + Curve_params.{ default with modulus = Bignum_bigint.of_int 9 } + in + assert ( + Common.is_error (fun () -> + (* Wrong constraint system (changed modulus) *) + test_add ~cs fake_curve9 + 
(Bignum_bigint.of_int 8, Bignum_bigint.of_int 6) (* left_input *) + (Bignum_bigint.of_int 2, Bignum_bigint.of_int 1) (* right_input *) + (Bignum_bigint.of_int 12, Bignum_bigint.of_int 8) + (* expected result *) ) ) ; + assert ( + Common.is_error (fun () -> + (* Wrong answer (right modulus) *) + test_add ~cs fake_curve13 + (Bignum_bigint.of_int 8, Bignum_bigint.of_int 6) (* left_input *) + (Bignum_bigint.of_int 2, Bignum_bigint.of_int 1) (* right_input *) + (Bignum_bigint.of_int 12, Bignum_bigint.of_int 9) + (* expected result *) ) ) ; + + (* Tests with secp256k1 curve points *) + let random_point1 = + ( Bignum_bigint.of_string + "11498799051185379176527662983290644419148625795866197242742376646044820710107" + , Bignum_bigint.of_string + "87365548140897354715632623292744880448736648603030553868546115582681395400362" + ) + in + let expected_result1 = + ( Bignum_bigint.of_string + "29271032301589161601163082898984274448470999636237808164579416118817375265766" + , Bignum_bigint.of_string + "70576057075545750224511488165986665682391544714639291167940534165970533739040" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params Secp256k1.params.gen) ; + assert (is_on_curve_bignum_point Secp256k1.params random_point1) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_result1) ; + + let _cs = + test_add Secp256k1.params random_point1 Secp256k1.params.gen + expected_result1 + in + + let random_point2 = + ( Bignum_bigint.of_string + "112776793647017636286801498409683698782792816810143189200772003475655331235512" + , Bignum_bigint.of_string + "37154006933110560524528936279434506593302537023736551486562363002969014272200" + ) + in + let expected_result2 = + ( Bignum_bigint.of_string + "80919512080552099332189419005806362073658070117780992417768444957631350640350" + , Bignum_bigint.of_string + "4839884697531819803579082430572588557482298603278351225895977263486959680227" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params random_point2) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_result2) ; + + let _cs = + test_add Secp256k1.params expected_result1 (* left_input *) + random_point2 (* right_input *) + expected_result2 + (* expected result *) + in + + let random_point3 = + ( Bignum_bigint.of_string + "36425953153418322223243576029807183106978427220826420108023201968296177476778" + , Bignum_bigint.of_string + "24007339127999344540320969916238304309192480878642453507169699691156248304362" + ) + in + let random_point4 = + ( Bignum_bigint.of_string + "21639969699195480792170626687481368104641445608975892798617312168630290254356" + , Bignum_bigint.of_string + "30444719434143548339668041811488444063562085329168372025420048436035175999301" + ) + in + let expected_result3 = + ( Bignum_bigint.of_string + "113188224115387667795245114738521133409188389625511152470086031332181459812059" + , Bignum_bigint.of_string + "82989616646064102138003387261138741187755389122561858439662322580504431694519" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params random_point3) ; + assert (is_on_curve_bignum_point Secp256k1.params random_point4) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_result3) ; + + let _cs = + test_add Secp256k1.params random_point3 (* left_input *) + random_point4 (* right_input *) + expected_result3 + (* expected result *) + in + + (* Constraint system reuse tests *) + let pt1 = + ( Bignum_bigint.of_string + "75669526378790147634671888414445173066514756807031971924620136884638031442759" + , Bignum_bigint.of_string + 
"21417425897684876536576718477824646351185804513111016365368704154638046645765" + ) + in + let pt2 = + ( Bignum_bigint.of_string + "14155322613096941824503892607495280579903778637099750589312382650686697414735" + , Bignum_bigint.of_string + "6513771125762614571725090849784101711151222857564970563886992272283710338112" + ) + in + let expected_pt = + ( Bignum_bigint.of_string + "11234404138675683238798732023399338183955476104311735089175934636931978267582" + , Bignum_bigint.of_string + "2483077095355421104741807026372550508534866555013063406887316930008225336894" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params pt1) ; + assert (is_on_curve_bignum_point Secp256k1.params pt2) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_pt) ; + + let cs = test_add Secp256k1.params pt1 pt2 expected_pt in + + let pt1 = + ( Bignum_bigint.of_string + "97313026812541560473771297589757921196424145769025529099070592800256734650744" + , Bignum_bigint.of_string + "38700860102018844310665941222140210385381782344695476706452234109902874948789" + ) + in + let pt2 = + ( Bignum_bigint.of_string + "82416105962835331584090450180444085592428397648594295814088133554696721893017" + , Bignum_bigint.of_string + "72361514636959418409520767179749571220723219394228755075988292395103362307597" + ) + in + let expected_pt = + ( Bignum_bigint.of_string + "63066162743654726673830060769616154872212462240062945169518526070045923596428" + , Bignum_bigint.of_string + "54808797958010370431464079583774910620962703868682659560981623451275441505706" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params pt1) ; + assert (is_on_curve_bignum_point Secp256k1.params pt2) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_pt) ; + + let _cs = test_add ~cs Secp256k1.params pt1 pt2 expected_pt in + + let expected2 = + ( Bignum_bigint.of_string + "23989387498834566531803335539224216637656125335573670100510541031866883369583" + , Bignum_bigint.of_string + "8780199033752628541949962988447578555155504633890539264032735153636423550500" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params expected2) ; + + let _cs = test_add ~cs Secp256k1.params expected_pt pt1 expected2 in + + (* Negative tests *) + assert ( + Common.is_error (fun () -> + (* Wrong constraint system (changed modulus) *) + test_add ~cs fake_curve9 expected_pt pt1 expected2 ) ) ; + + assert ( + Common.is_error (fun () -> + (* Wrong result *) + test_add ~cs Secp256k1.params expected_pt pt1 expected_pt ) ) ; + + (* Test with some real Ethereum curve points *) + + (* Curve point from pubkey of sender of 1st Ethereum transcation + * https://etherscan.io/tx/0x5c504ed432cb51138bcf09aa5e8a410dd4a1e204ef84bfed1be16dfba1b22060 + *) + let first_eth_tx_pubkey = + ( Bignum_bigint.of_string + "25074680562105920500390488848505179172301959433246133200656053822731415560379" + , Bignum_bigint.of_string + "40207352835024964935479287038185466710938760823387493786206830664631160762596" + ) + in + (* Vb pubkey curve point + * https://etherscan.io/address/0xab5801a7d398351b8be11c439e05c5b3259aec9b + *) + let vitalik_eth_pubkey = + ( Bignum_bigint.of_string + "49781623198970027997721070672560275063607048368575198229673025608762959476014" + , Bignum_bigint.of_string + "44999051047832679156664607491606359183507784636787036192076848057884504239143" + ) + in + let expected_result = + ( Bignum_bigint.of_string + "5673019186984644139884227978304592898127494693953507135947623290000290975721" + , Bignum_bigint.of_string + 
"63149760798259320533576297417560108418144118481056410815317549443093209180466" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params first_eth_tx_pubkey) ; + assert (is_on_curve_bignum_point Secp256k1.params vitalik_eth_pubkey) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_result) ; + + let _cs = + test_add ~cs Secp256k1.params first_eth_tx_pubkey vitalik_eth_pubkey + expected_result + in + + () ) + +let%test_unit "Ec_group.add_chained" = + if basic_tests_enabled then ( + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test chained group add *) + let test_add_chained ?cs ?(chain_left = true) (curve : Curve_params.t) + (left_input : Affine.bignum_point) (right_input : Affine.bignum_point) + (input2 : Affine.bignum_point) (expected_result : Affine.bignum_point) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants (module Runner.Impl) curve + in + let left_input = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + left_input + in + let right_input = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + right_input + in + let input2 = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) input2 + in + let expected_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + expected_result + in + + (* Create external checks context for tracking extra constraints + * that are required for soundness (unused in this test) *) + let unused_external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* L + R = S *) + let result1 = + add + (module Runner.Impl) + unused_external_checks curve left_input right_input + in + + let result2 = + if chain_left then + (* S + T = U *) + (* Chain result to left input *) + add + (module Runner.Impl) + unused_external_checks curve result1 input2 + else + (* Chain result to right input *) + (* T + S = U *) + add + (module Runner.Impl) + unused_external_checks curve input2 result1 + in + + (* Check for expected quantity of external checks *) + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.bounds 12 ) ; + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges + 6 ) ; + assert ( + Mina_stdlib.List.Length.equal + unused_external_checks.compact_multi_ranges 6 ) ; + + (* Check output matches expected result *) + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + result2 expected_result ) ) ; + () ) + in + + cs + in + + (* Group add chaining test *) + let pt1 = + ( Bignum_bigint.of_string + "22078445491128279362564324454450148838521766213873448035670368771866784776689" + , Bignum_bigint.of_string + "59164395213226911607629035235242369632135709209315776938135875644072412604417" + ) + in + let pt2 = + ( Bignum_bigint.of_string + "43363091675487122074415344565583111028231348930161176231597524718735106294021" + , Bignum_bigint.of_string + "111622036424234525038201689158418296167019583124308154759441266557529051647503" + ) + in + let pt3 = + ( Bignum_bigint.of_string + "27095120504150867682043281371962577090258298278269412698577541627879567814209" + , Bignum_bigint.of_string + "43319029043781297382854244012410471023426320563005937780035785457494374919933" + ) + in + let expected = + ( Bignum_bigint.of_string + 
"94445004776077869359279503733865512156009118507534561304362934747962270973982" + , Bignum_bigint.of_string + "5771544338553827547535594828872899427364500537732448576560233867747655654290" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params pt1) ; + assert (is_on_curve_bignum_point Secp256k1.params pt2) ; + assert (is_on_curve_bignum_point Secp256k1.params pt3) ; + assert (is_on_curve_bignum_point Secp256k1.params expected) ; + + (* Correct wiring for left chaining + * Result r1 = pt1 + pt2 and left operand of r2 = r1 + pt3 + * + * ,--------------------------------------------, + * x0: `-> (2, 3) -> (4, 3) -> (20, 3) -> (16, 3) ->` + * r1x0 r1x0 Lx0 Lx0 + * + * ,--------------------------------------------, + * x1: `-> (2, 4) -> (16, 4) -> (20, 4) -> (4, 4) ->` + * r1x1 Lx1 Lx1 r1x1 + * + * ,--------------------------------------------, + * x2: `-> (2, 5) -> (20, 5) -> (4, 5) -> (16, 5) ->` + * r1x2 Lx2 r1x2 Lx2 + * + * ,------------------------, + * y0: `-> (11, 3) -> (23, 3) ->` + * r1y0 Ly0 + * + * ,------------------------, + * y1: `-> (11, 4) -> (23, 4) ->` + * r1y1 Ly1 + * + * ,------------------------, + * y2: `-> (11, 5) -> (23, 5) ->` + * r1y2 Ly2 + *) + let _cs = test_add_chained Secp256k1.params pt1 pt2 pt3 expected in + + (* Correct wiring for right chaining + * Result r1 = pt1 + pt2 and right operand of r2 = pt3 + r1 + * + * ,-------------------------------------------, + * x0: `-> (2, 3) -> (17, 0) -> (4, 3) -> (20, 0) / + * r1x0 Rx0 r1x0 Rx0 + * + * ,-------------------------------------------, + * x1: `-> (2, 4) -> (17, 1) -> (20, 1) -> (4, 4) / + * r1x1 Rx1 Rx1 r1x1 + * + * ,-------------------------------------------, + * x2: `-> (2, 5) -> (4, 5) -> (17, 2) -> (20, 2) / + * r1x2 r1x2 Rx2 Rx2 + * + * ,------------------------, + * y0: `-> (11, 3) -> (24, 0) ->` + * r1y0 Ry0 + * + * ,------------------------, + * y1: `-> (11, 4) -> (24, 1) ->` + * r1y1 Ry1 + * + * ,------------------------, + * y2: `-> (11, 5) -> (24, 2) ->` + * r1y2 Ry2 + *) + let _cs = + test_add_chained ~chain_left:false Secp256k1.params pt1 (* left_input *) + pt2 (* right_input *) + pt3 (* input2 *) + expected + (* expected result *) + in + () ) + +let%test_unit "Ec_group.add_full" = + if basic_tests_enabled then ( + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. 
*)
+    let () =
+      try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+    in
+
+    (* Test full group add (with bounds checks) *)
+    let test_add_full ?cs (curve : Curve_params.t)
+        (left_input : Affine.bignum_point) (right_input : Affine.bignum_point)
+        (expected_result : Affine.bignum_point) =
+      (* Generate and verify proof *)
+      let cs, _proof_keypair, _proof =
+        Runner.generate_and_verify_proof ?cs (fun () ->
+            let open Runner.Impl in
+            (* Prepare test public inputs *)
+            let curve =
+              Curve_params.to_circuit_constants (module Runner.Impl) curve
+            in
+            let left_input =
+              Affine.of_bignum_bigint_coordinates
+                (module Runner.Impl)
+                left_input
+            in
+            let right_input =
+              Affine.of_bignum_bigint_coordinates
+                (module Runner.Impl)
+                right_input
+            in
+            let expected_result =
+              Affine.of_bignum_bigint_coordinates
+                (module Runner.Impl)
+                expected_result
+            in
+
+            (* Create external checks context for tracking extra constraints
+               that are required for soundness *)
+            let external_checks =
+              Foreign_field.External_checks.create (module Runner.Impl)
+            in
+
+            (* L + R = S *)
+            let result =
+              add
+                (module Runner.Impl)
+                external_checks curve left_input right_input
+            in
+
+            (* Add left_input to external checks *)
+            Foreign_field.(
+              External_checks.append_bound_check external_checks
+              @@ Element.Standard.to_limbs @@ Affine.x left_input) ;
+            Foreign_field.(
+              External_checks.append_bound_check external_checks
+              @@ Element.Standard.to_limbs @@ Affine.y left_input) ;
+
+            (* Add right_input to external checks *)
+            Foreign_field.(
+              External_checks.append_bound_check external_checks
+              @@ Element.Standard.to_limbs @@ Affine.x right_input) ;
+            Foreign_field.(
+              External_checks.append_bound_check external_checks
+              @@ Element.Standard.to_limbs @@ Affine.y right_input) ;
+
+            (* Add result to external checks *)
+            Foreign_field.(
+              External_checks.append_bound_check external_checks
+              @@ Element.Standard.to_limbs @@ Affine.x result) ;
+            Foreign_field.(
+              External_checks.append_bound_check external_checks
+              @@ Element.Standard.to_limbs @@ Affine.y result) ;
+
+            (* Check output matches expected result *)
+            as_prover (fun () ->
+                assert (
+                  Affine.equal_as_prover
+                    (module Runner.Impl)
+                    result expected_result ) ) ;
+
+            (*
+             * Perform external checks
+             *)
+            assert (Mina_stdlib.List.Length.equal external_checks.bounds 12) ;
+            assert (Mina_stdlib.List.Length.equal external_checks.multi_ranges 3) ;
+            assert (
+              Mina_stdlib.List.Length.equal external_checks.compact_multi_ranges
+                3 ) ;
+            (* Add gates for bound checks, multi-range-checks and compact-multi-range-checks *)
+            Foreign_field.constrain_external_checks
+              (module Runner.Impl)
+              external_checks curve.modulus ;
+
+            () )
+      in
+      cs
+    in
+
+    (* Full tests *)
+    let pt1 =
+      ( Bignum_bigint.of_string
+          "108106717441068942935036481412556424456551432537879152449804306833272168535105"
+      , Bignum_bigint.of_string
+          "76460339884983741488305111710326981694475523676336423409829095132008854584808"
+      )
+    in
+    let pt2 =
+      ( Bignum_bigint.of_string
+          "6918332104414828558125020939363051148342349799951368824506926403525772818971"
+      , Bignum_bigint.of_string
+          "112511987857588994657806651103271803396616867673371823390960630078201657435176"
+      )
+    in
+    let expected =
+      ( Bignum_bigint.of_string
+          "87351883076573600335277375022118065102135008483181597654369109297980597321941"
+      , Bignum_bigint.of_string
+          "42323967499650833993389664859011147254281400152806022789809987122536303627261"
+      )
+    in
+
+    assert (is_on_curve_bignum_point Secp256k1.params pt1) ;
+    assert
(is_on_curve_bignum_point Secp256k1.params pt2) ; + assert (is_on_curve_bignum_point Secp256k1.params expected) ; + + let _cs = + test_add_full Secp256k1.params pt1 (* left_input *) + pt2 (* right_input *) + expected + (* expected result *) + in + + () ) + +let%test_unit "Ec_group.double" = + if basic_tests_enabled then ( + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test group double *) + let test_double ?cs (curve : Curve_params.t) (point : Affine.bignum_point) + (expected_result : Affine.bignum_point) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants (module Runner.Impl) curve + in + let point = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) point + in + let expected_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + expected_result + in + + (* Create external checks context for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let unused_external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* P + P = D *) + let result = + double (module Runner.Impl) unused_external_checks curve point + in + + (* Check for expected quantity of external checks *) + if Bignum_bigint.(curve.bignum.a = zero) then + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.bounds 8 ) + else + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.bounds 9 ) ; + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges + 4 ) ; + assert ( + Mina_stdlib.List.Length.equal + unused_external_checks.compact_multi_ranges 4 ) ; + + (* Check output matches expected result *) + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + result expected_result ) ) ; + () ) + in + + cs + in + + (* Test with elliptic curve y^2 = x^3 + 2 * x + 5 mod 13 *) + let _cs = + let fake_curve1 = + Curve_params. + { default with + modulus = Bignum_bigint.of_int 13 + ; a = Bignum_bigint.of_int 2 + ; b = Bignum_bigint.of_int 5 + } + in + let point = (Bignum_bigint.of_int 2, Bignum_bigint.of_int 2) in + let expected_result = (Bignum_bigint.of_int 5, Bignum_bigint.of_int 7) in + assert (is_on_curve_bignum_point fake_curve1 point) ; + assert (is_on_curve_bignum_point fake_curve1 expected_result) ; + test_double fake_curve1 point expected_result + in + + (* Test with elliptic curve y^2 = x^3 + 5 mod 13 *) + let _cs = + let fake_curve2 = + Curve_params. + { default with + modulus = Bignum_bigint.of_int 13 + ; b = Bignum_bigint.of_int 5 + } + in + let point = (Bignum_bigint.of_int 4, Bignum_bigint.of_int 2) in + let expected_result = (Bignum_bigint.of_int 6, Bignum_bigint.of_int 0) in + assert (is_on_curve_bignum_point fake_curve2 point) ; + assert (is_on_curve_bignum_point fake_curve2 expected_result) ; + test_double fake_curve2 point expected_result + in + + (* Test with elliptic curve y^2 = x^3 + 7 mod 13 *) + let fake_curve0 = + Curve_params. 
+ { default with + modulus = Bignum_bigint.of_int 13 + ; b = Bignum_bigint.of_int 7 + } + in + let cs0 = + let point = (Bignum_bigint.of_int 7, Bignum_bigint.of_int 8) in + let expected_result = (Bignum_bigint.of_int 8, Bignum_bigint.of_int 8) in + assert (is_on_curve_bignum_point fake_curve0 point) ; + assert (is_on_curve_bignum_point fake_curve0 expected_result) ; + let cs = test_double fake_curve0 point expected_result in + let _cs = test_double fake_curve0 point expected_result in + cs + in + + (* Test with elliptic curve y^2 = x^3 + 17 * x mod 7879 *) + let fake_curve17 = + Curve_params. + { default with + modulus = Bignum_bigint.of_int 7879 + ; a = Bignum_bigint.of_int 17 + } + in + let cs17 = + let point = (Bignum_bigint.of_int 7331, Bignum_bigint.of_int 888) in + let expected_result = + (Bignum_bigint.of_int 2754, Bignum_bigint.of_int 3623) + in + assert (is_on_curve_bignum_point fake_curve17 point) ; + assert (is_on_curve_bignum_point fake_curve17 expected_result) ; + test_double fake_curve17 point expected_result + in + + (* Constraint system reuse tests *) + let _cs = + let point = (Bignum_bigint.of_int 8, Bignum_bigint.of_int 8) in + let expected_result = (Bignum_bigint.of_int 11, Bignum_bigint.of_int 8) in + assert (is_on_curve_bignum_point fake_curve0 point) ; + assert (is_on_curve_bignum_point fake_curve0 expected_result) ; + test_double ~cs:cs0 fake_curve0 point expected_result + in + + let _cs = + let point = (Bignum_bigint.of_int 1729, Bignum_bigint.of_int 4830) in + let expected_result = + (Bignum_bigint.of_int 6020, Bignum_bigint.of_int 5832) + in + assert (is_on_curve_bignum_point fake_curve17 point) ; + assert (is_on_curve_bignum_point fake_curve17 expected_result) ; + let _cs = test_double ~cs:cs17 fake_curve17 point expected_result in + + (* Negative test *) + assert ( + Common.is_error (fun () -> + (* Wrong constraint system *) + test_double ~cs:cs0 fake_curve17 point expected_result ) ) ; + _cs + in + + (* Tests with secp256k1 curve points *) + let point = + ( Bignum_bigint.of_string + "107002484780363838095534061209472738804517997328105554367794569298664989358181" + , Bignum_bigint.of_string + "92879551684948148252506282887871578114014191438980334462241462418477012406178" + ) + in + let expected_result = + ( Bignum_bigint.of_string + "74712964529040634650603708923084871318006229334056222485473734005356559517441" + , Bignum_bigint.of_string + "115267803285637743262834568062293432343366237647730050692079006689357117890542" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params point) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_result) ; + + let _cs = test_double Secp256k1.params point expected_result in + + let expected_result = + ( Bignum_bigint.of_string + "89565891926547004231252920425935692360644145829622209833684329913297188986597" + , Bignum_bigint.of_string + "12158399299693830322967808612713398636155367887041628176798871954788371653930" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params Secp256k1.params.gen) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_result) ; + + let _cs = + test_double Secp256k1.params Secp256k1.params.gen expected_result + in + + let point = + ( Bignum_bigint.of_string + "72340565915695963948758748585975158634181237057659908187426872555266933736285" + , Bignum_bigint.of_string + "26612022505003328753510360357395054342310218908477055087761596777225815854353" + ) + in + let expected_result = + ( Bignum_bigint.of_string + 
"108904232316543774780790055701972437888102004393747607639914151522482739421637" + , Bignum_bigint.of_string + "12361022197403188621809379658301822420116828257004558379520642349031207949605" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params point) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_result) ; + + let _cs = test_double Secp256k1.params point expected_result in + + let point = + ( Bignum_bigint.of_string + "108904232316543774780790055701972437888102004393747607639914151522482739421637" + , Bignum_bigint.of_string + "12361022197403188621809379658301822420116828257004558379520642349031207949605" + ) + in + let expected_result = + ( Bignum_bigint.of_string + "6412514063090203022225668498768852033918664033020116827066881895897922497918" + , Bignum_bigint.of_string + "46730676600197705465960490527225757352559615957463874893868944815778370642915" + ) + in + assert (is_on_curve_bignum_point Secp256k1.params point) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_result) ; + + let cs = test_double Secp256k1.params point expected_result in + + (* CS reuse again*) + let point = + ( Bignum_bigint.of_string + "3994127195658013268703905225007935609302368792888634855477505418126918261961" + , Bignum_bigint.of_string + "25535899907968670181603106060653290873698485840006655398881908734054954693109" + ) + in + let expected_result = + ( Bignum_bigint.of_string + "85505889528097925687832670439248941652336655858213625210338216314923495678594" + , Bignum_bigint.of_string + "49191910521103183437466384378802260055879125327516949990516385020354020159575" + ) + in + assert (is_on_curve_bignum_point Secp256k1.params point) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_result) ; + + let _cs = test_double ~cs Secp256k1.params point expected_result in + + (* Negative tests *) + assert ( + Common.is_error (fun () -> + (* Wrong constraint system *) + test_double ~cs:cs0 Secp256k1.params point expected_result ) ) ; + + assert ( + Common.is_error (fun () -> + (* Wrong answer *) + let wrong_result = + ( Bignum_bigint.of_string + "6412514063090203022225668498768852033918664033020116827066881895897922497918" + , Bignum_bigint.of_string + "46730676600197705465960490527225757352559615957463874893868944815778370642914" + ) + in + test_double Secp256k1.params point wrong_result ) ) ; + + () ) + +let%test_unit "Ec_group.double_chained" = + if basic_tests_enabled then ( + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. 
*) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test group double chaining *) + let test_double_chained ?cs (curve : Curve_params.t) + (point : Affine.bignum_point) (expected_result : Affine.bignum_point) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants (module Runner.Impl) curve + in + let point = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) point + in + let expected_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + expected_result + in + + (* Create external checks context for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let unused_external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + let result = + double (module Runner.Impl) unused_external_checks curve point + in + let result = + double (module Runner.Impl) unused_external_checks curve result + in + + (* Check for expected quantity of external checks *) + if Bignum_bigint.(curve.bignum.a = zero) then + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.bounds 16 ) + else + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.bounds 18 ) ; + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges + 8 ) ; + assert ( + Mina_stdlib.List.Length.equal + unused_external_checks.compact_multi_ranges 8 ) ; + + (* Check output matches expected result *) + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + result expected_result ) ) ; + () ) + in + + cs + in + + let _cs = + let fake_curve0 = + Curve_params. + { default with + modulus = Bignum_bigint.of_int 7879 + ; a = Bignum_bigint.of_int 17 + } + in + let point = (Bignum_bigint.of_int 1729, Bignum_bigint.of_int 4830) in + let expected_result = + (Bignum_bigint.of_int 355, Bignum_bigint.of_int 3132) + in + assert (is_on_curve_bignum_point fake_curve0 point) ; + assert (is_on_curve_bignum_point fake_curve0 expected_result) ; + test_double_chained fake_curve0 point expected_result + in + + let point = + ( Bignum_bigint.of_string + "42044065574201065781794313442437176970676726666507255383911343977315911214824" + , Bignum_bigint.of_string + "31965905005059593108764147692698952070443290622957461138987132030153087962524" + ) + in + let expected_result = + ( Bignum_bigint.of_string + "25296422933760701668354080561191268087967569090553018544803607419093394376171" + , Bignum_bigint.of_string + "8046470730121032635013615006105175410553103561598164661406103935504325838485" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params point) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_result) ; + + let _cs = test_double_chained Secp256k1.params point expected_result in + () ) + +let%test_unit "Ec_group.double_full" = + if basic_tests_enabled then ( + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. 
*) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test group double (full circuit with external checks) *) + let test_double_full ?cs (curve : Curve_params.t) + (point : Affine.bignum_point) (expected_result : Affine.bignum_point) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants (module Runner.Impl) curve + in + let point = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) point + in + let expected_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + expected_result + in + + (* Create external checks context for tracking extra constraints + that are required for soundness *) + let external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* P + P = D *) + let result = + double (module Runner.Impl) external_checks curve point + in + + (* Add input point to external checks *) + Foreign_field.( + External_checks.append_bound_check external_checks + @@ Element.Standard.to_limbs @@ Affine.x point) ; + Foreign_field.( + External_checks.append_bound_check external_checks + @@ Element.Standard.to_limbs @@ Affine.y point) ; + + (* Add result to external checks *) + Foreign_field.( + External_checks.append_bound_check external_checks + @@ Element.Standard.to_limbs @@ Affine.x result) ; + Foreign_field.( + External_checks.append_bound_check external_checks + @@ Element.Standard.to_limbs @@ Affine.y result) ; + + (* Check output matches expected result *) + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + result expected_result ) ) ; + + (* + * Perform external checks + *) + + (* Sanity checks *) + if Bignum_bigint.(curve.bignum.a = zero) then + assert (Mina_stdlib.List.Length.equal external_checks.bounds 12) + else assert (Mina_stdlib.List.Length.equal external_checks.bounds 13) ; + assert (Mina_stdlib.List.Length.equal external_checks.multi_ranges 4) ; + assert ( + Mina_stdlib.List.Length.equal external_checks.compact_multi_ranges + 4 ) ; + + (* Add gates for bound checks, multi-range-checks and compact-multi-range-checks *) + Foreign_field.constrain_external_checks + (module Runner.Impl) + external_checks curve.modulus ; + + () ) + in + + cs + in + + let point = + ( Bignum_bigint.of_string + "422320656143453469357911138554881092132771509739438645920469442837105323580" + , Bignum_bigint.of_string + "99573693339481125202377937570343422789783140695684047090890158240546390265715" + ) + in + let expected_result = + ( Bignum_bigint.of_string + "111592986473580724183094323045895279290564238712238558254671818420787861656338" + , Bignum_bigint.of_string + "21999887286188040786039896471521925680577344653927821650184541049020329991940" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params point) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_result) ; + + let _cs = test_double_full Secp256k1.params point expected_result in + + let point = + ( Bignum_bigint.of_string + "35572202113406269203741773940276421270986156279943921117631530910348880407195" + , Bignum_bigint.of_string + "77949858788528057664678921426007070786227653051729292366956150514299227362888" + ) + in + let expected_result = + ( Bignum_bigint.of_string + "77054343462981168852324254689119448477035493875004605555517034503407691682302" + , Bignum_bigint.of_string + 
"71816304404296379298724767646016383731405297016881176644824032740912066853658" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params point) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_result) ; + + let _cs = test_double_full Secp256k1.params point expected_result in + + () ) + +let%test_unit "Ec_group.ops_mixed" = + if basic_tests_enabled then ( + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test mix of group operations (e.g. things are wired correctly *) + let test_group_ops_mixed ?cs (curve : Curve_params.t) + (left_input : Affine.bignum_point) (right_input : Affine.bignum_point) + (expected_result : Affine.bignum_point) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants (module Runner.Impl) curve + in + let left_input = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + left_input + in + let right_input = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + right_input + in + let expected_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + expected_result + in + + (* Create external checks context for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let unused_external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* R + L = S *) + let sum = + add + (module Runner.Impl) + unused_external_checks curve left_input right_input + in + + (* S + S = D *) + let double = + double (module Runner.Impl) unused_external_checks curve sum + in + + (* Check for expected quantity of external checks *) + if Bignum_bigint.(curve.bignum.a = zero) then + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.bounds 14 ) + else + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.bounds 15 ) ; + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges + 7 ) ; + assert ( + Mina_stdlib.List.Length.equal + unused_external_checks.compact_multi_ranges 7 ) ; + + (* Check output matches expected result *) + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + double expected_result ) ) ; + () ) + in + + cs + in + + let _cs = + let fake_curve = + Curve_params. 
+ { default with + modulus = Bignum_bigint.of_int 7879 + ; a = Bignum_bigint.of_int 17 + } + in + let point1 = (Bignum_bigint.of_int 1729, Bignum_bigint.of_int 4830) in + let point2 = (Bignum_bigint.of_int 993, Bignum_bigint.of_int 622) in + let expected_result = + (Bignum_bigint.of_int 6762, Bignum_bigint.of_int 4635) + in + assert (is_on_curve_bignum_point fake_curve point1) ; + assert (is_on_curve_bignum_point fake_curve point2) ; + assert (is_on_curve_bignum_point fake_curve expected_result) ; + + test_group_ops_mixed fake_curve point1 point2 expected_result + in + + let point1 = + ( Bignum_bigint.of_string + "37404488720929062958906788322651728322575666040491554170565829193307192693651" + , Bignum_bigint.of_string + "9656313713772632982161856264262799630428732532087082991934556488549329780427" + ) + in + let point2 = + ( Bignum_bigint.of_string + "31293985021118266786561893156019691372812643656725598796588178883202613100468" + , Bignum_bigint.of_string + "62519749065576060946018142578164411421793328932510041279923944104940749401503" + ) + in + let expected_result = + ( Bignum_bigint.of_string + "43046886127279816590953923378970473409794361644471707353489087385548452456295" + , Bignum_bigint.of_string + "67554760054687646408788973635096250584575090419180209042279187069048864087921" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params point1) ; + assert (is_on_curve_bignum_point Secp256k1.params point2) ; + assert (is_on_curve_bignum_point Secp256k1.params expected_result) ; + + let _cs = + test_group_ops_mixed Secp256k1.params point1 point2 expected_result + in + () ) + +let%test_unit "Ec_group.properties" = + if basic_tests_enabled then ( + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test group properties *) + let test_group_properties ?cs (curve : Curve_params.t) + (point_a : Affine.bignum_point) (point_b : Affine.bignum_point) + (point_c : Affine.bignum_point) + (expected_commutative_result : Affine.bignum_point) + (expected_associative_result : Affine.bignum_point) + (expected_distributive_result : Affine.bignum_point) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants (module Runner.Impl) curve + in + let point_a = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) point_a + in + let point_b = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) point_b + in + let point_c = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) point_c + in + let expected_commutative_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + expected_commutative_result + in + let expected_associative_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + expected_associative_result + in + let expected_distributive_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + expected_distributive_result + in + + (* Create external checks context for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let unused_external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* + * Commutative property tests + * + * A + B = B + A + *) + let a_plus_b = + (* A + B *) + add + (module Runner.Impl) + unused_external_checks curve point_a point_b + in + + let b_plus_a = + 
(* B + A *) + add + (module Runner.Impl) + unused_external_checks curve point_b point_a + in + + (* TODO: add equality wiring *) + (* Assert A + B = B + A *) + as_prover (fun () -> + assert ( + Affine.equal_as_prover (module Runner.Impl) a_plus_b b_plus_a ) ) ; + + (* Assert A + B = expected_commutative_result *) + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + a_plus_b expected_commutative_result ) ) ; + + (* + * Associativity property tests + * + * (A + B) + C = A + (B + C) + *) + let b_plus_c = + (* B + C *) + add + (module Runner.Impl) + unused_external_checks curve point_b point_c + in + + let a_plus_b_plus_c = + (* (A + B) + C *) + add + (module Runner.Impl) + unused_external_checks curve a_plus_b point_c + in + + let b_plus_c_plus_a = + (* A + (B + C) *) + add + (module Runner.Impl) + unused_external_checks curve point_a b_plus_c + in + + (* Assert (A + B) + C = A + (B + C) *) + Affine.assert_equal + (module Runner.Impl) + a_plus_b_plus_c b_plus_c_plus_a ; + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + a_plus_b_plus_c b_plus_c_plus_a ) ) ; + + (* Assert (A + B) + C = expected_associative_result *) + Affine.assert_equal + (module Runner.Impl) + a_plus_b_plus_c expected_associative_result ; + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + a_plus_b_plus_c expected_associative_result ) ) ; + + (* + * Distributive property tests + * + * 2 * (A + B) = 2 * A + 2 * B + *) + let double_of_sum = + (* 2 * (A + B) *) + double (module Runner.Impl) unused_external_checks curve a_plus_b + in + + let double_a = + (* 2 * A *) + double (module Runner.Impl) unused_external_checks curve point_a + in + + let double_b = + (* 2 * B *) + double (module Runner.Impl) unused_external_checks curve point_b + in + + let sum_of_doubles = + (* 2 * A + 2 * B *) + add + (module Runner.Impl) + unused_external_checks curve double_a double_b + in + + (* Assert 2 * (A + B) = 2 * A + 2 * B *) + Affine.assert_equal + (module Runner.Impl) + double_of_sum sum_of_doubles ; + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + double_of_sum sum_of_doubles ) ) ; + + (* Assert 2 * (A + B) = expected_distributive_result *) + Affine.assert_equal + (module Runner.Impl) + double_of_sum expected_distributive_result ; + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + double_of_sum expected_distributive_result ) ) ; + () ) + in + + cs + in + + (* Test with secp256k1 curve *) + let point_a = + ( Bignum_bigint.of_string + "104139740379639537914620141697889522643195068624996157573145175343741564772195" + , Bignum_bigint.of_string + "24686993868898088086788882517246409097753788695591891584026176923146938009248" + ) + in + let point_b = + ( Bignum_bigint.of_string + "36743784007303620043843440776745227903854397846775577839885696093428264537689" + , Bignum_bigint.of_string + "37572687997781202307536515813734773072395389211771147301250986255900442183367" + ) + in + let point_c = + ( Bignum_bigint.of_string + "49696436312078070273833592624394555921078337653960324106519507173094660966846" + , Bignum_bigint.of_string + "8233980127281521579593600770666525234073102501648621450313070670075221490597" + ) + in + let expected_commutative_result = + (* A + B *) + ( Bignum_bigint.of_string + "82115184826944281192212047494549730220285137025844635077989275753462094545317" + , Bignum_bigint.of_string + "65806312870411158102677100909644698935674071740730856487954465264167266803940" + ) + in + 
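(* Note: the expected results below are fixed test vectors; the assertions + that follow only sanity-check that each point lies on the curve. *) + 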
let expected_associative_result = + (* A + B + C *) + ( Bignum_bigint.of_string + "32754193298666340516904674847278729692077935996237244820399615298932008086168" + , Bignum_bigint.of_string + "98091569220567533408383096211571578494419313923145170353903484742714309353581" + ) + in + (* 2* (A + B) *) + let expected_distributive_result = + ( Bignum_bigint.of_string + "92833221040863134022467437260311951512477869225271942781021131905899386232859" + , Bignum_bigint.of_string + "88875130971526456079808346479572776785614636860343295137331156710761285100759" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params point_a) ; + assert (is_on_curve_bignum_point Secp256k1.params point_b) ; + assert (is_on_curve_bignum_point Secp256k1.params point_c) ; + assert ( + is_on_curve_bignum_point Secp256k1.params expected_commutative_result ) ; + assert ( + is_on_curve_bignum_point Secp256k1.params expected_associative_result ) ; + assert ( + is_on_curve_bignum_point Secp256k1.params expected_distributive_result ) ; + + let _cs = + test_group_properties Secp256k1.params point_a point_b point_c + expected_commutative_result expected_associative_result + expected_distributive_result + in + + (* + * Test with NIST P-224 curve + * y^2 = x^3 -3 * x + 18958286285566608000408668544493926415504680968679321075787234672564 + *) + let p224_curve = + Curve_params. + { default with + modulus = + Bignum_bigint.of_string + "0xffffffffffffffffffffffffffffffff000000000000000000000001" + ; a = + (* - 3 *) + Bignum_bigint.of_string + "0xfffffffffffffffffffffffffffffffefffffffffffffffffffffffe" + (* Note: p224 a_param < vesta_modulus *) + ; b = + (* 18958286285566608000408668544493926415504680968679321075787234672564 *) + Bignum_bigint.of_string + "0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4" + } + in + + let point_a = + ( Bignum_bigint.of_string + "20564182195513988720077877094445678909500371329094056390559170498601" + , Bignum_bigint.of_string + "2677931089606376366731934050370502738338362171950142296573730478996" + ) + in + let point_b = + ( Bignum_bigint.of_string + "15331822097908430690332647239357533892026967275700588538504771910797" + , Bignum_bigint.of_string + "4049755097518382314285232898392449281690500011901831745754040069555" + ) + in + let point_c = + ( Bignum_bigint.of_string + "25082387259758106010480779115787834869202362152205819097823199674591" + , Bignum_bigint.of_string + "5836788343546154757468239805956174785568118741436223437725908467573" + ) + in + let expected_commutative_result = + (* A + B *) + ( Bignum_bigint.of_string + "7995206472745921825893910722935139765985673196416788824369950333191" + , Bignum_bigint.of_string + "8265737252928447574971649463676620963677557474048291412774437728538" + ) + in + let expected_associative_result = + (* A + B + C *) + ( Bignum_bigint.of_string + "3257699169520051230744895047894307554057883749899622226174209882724" + , Bignum_bigint.of_string + "7231957109409135332430424812410043083405298563323557216003172539215" + ) + in + (* 2 * (A + B) *) + let expected_distributive_result = + ( Bignum_bigint.of_string + "12648120179660537445264809843313333879121180184951710403373354501995" + , Bignum_bigint.of_string + "130351274476047354152272911484022089680853927680837325730785745821" + ) + in + assert (is_on_curve_bignum_point p224_curve point_a) ; + assert (is_on_curve_bignum_point p224_curve point_b) ; + assert (is_on_curve_bignum_point p224_curve point_c) ; + assert (is_on_curve_bignum_point p224_curve expected_commutative_result) ; + assert 
(is_on_curve_bignum_point p224_curve expected_associative_result) ; + assert (is_on_curve_bignum_point p224_curve expected_distributive_result) ; + + let _cs = + test_group_properties p224_curve point_a point_b point_c + expected_commutative_result expected_associative_result + expected_distributive_result + in + + (* + * Test with bn254 curve + * y^2 = x^3 + 0 * x + 2 + *) + let bn254_curve = + Curve_params. + { default with + modulus = + Bignum_bigint.of_string + "16798108731015832284940804142231733909889187121439069848933715426072753864723" + ; a = Bignum_bigint.of_int 0 + ; b = Bignum_bigint.of_int 2 + } + in + + let point_a = + ( Bignum_bigint.of_string + "7489139758950854827551487063927077939563321761044181276420624792983052878185" + , Bignum_bigint.of_string + "2141496180075348025061594016907544139242551437114964865155737156269728330559" + ) + in + let point_b = + ( Bignum_bigint.of_string + "9956514278304933003335636627606783773825106169180128855351756770342193930117" + , Bignum_bigint.of_string + "1762095167736644705377345502398082775379271270251951679097189107067141702434" + ) + in + let point_c = + ( Bignum_bigint.of_string + "15979993511612396332695593711346186397534040520881664680241489873512193259980" + , Bignum_bigint.of_string + "10163302455117602785156120251106605625181898385895334763785764107729313787391" + ) + in + let expected_commutative_result = + (* A + B *) + ( Bignum_bigint.of_string + "13759678784866515747881317697821131633872329198354290325517257690138811932261" + , Bignum_bigint.of_string + "4040037229868341675068324615541961445935091050207890024311587166409180676332" + ) + in + let expected_associative_result = + (* A + B + C *) + ( Bignum_bigint.of_string + "16098676871974911854784905872738346730775870232298829667865365025475731380192" + , Bignum_bigint.of_string + "12574401007382321193248731381385712204251317924015127170657534965607164101869" + ) + in + (* 2 * (A + B) *) + let expected_distributive_result = + ( Bignum_bigint.of_string + "9395314037281443688092936149000099903064729021023078772338895863158377429106" + , Bignum_bigint.of_string + "14218226539011623427628171089944499674924086623747284955166459983416867234215" + ) + in + assert (is_on_curve_bignum_point bn254_curve point_a) ; + assert (is_on_curve_bignum_point bn254_curve point_b) ; + assert (is_on_curve_bignum_point bn254_curve point_c) ; + assert (is_on_curve_bignum_point bn254_curve expected_commutative_result) ; + assert (is_on_curve_bignum_point bn254_curve expected_associative_result) ; + assert (is_on_curve_bignum_point bn254_curve expected_distributive_result) ; + + let _cs = + test_group_properties bn254_curve point_a point_b point_c + expected_commutative_result expected_associative_result + expected_distributive_result + in + + (* + * Test with (Pasta) Pallas curve (on Vesta native) + * y^2 = x^3 + 5 + *) + let pallas_curve = + Curve_params. 
+ { default with + modulus = + Bignum_bigint.of_string + "28948022309329048855892746252171976963363056481941560715954676764349967630337" + ; a = Bignum_bigint.of_int 0 + ; b = Bignum_bigint.of_int 5 + } + in + + let point_a = + ( Bignum_bigint.of_string + "3687554385661875988153708668118568350801595287403286241588941623974773451174" + , Bignum_bigint.of_string + "4125300560830971348224390975663473429075828688503632065713036496032796088150" + ) + in + let point_b = + ( Bignum_bigint.of_string + "13150688393980970390008393861087383374732464068960495642594966124646063172404" + , Bignum_bigint.of_string + "2084472543720136255281934655991399553143524556330848293815942786297013884533" + ) + in + let point_c = + ( Bignum_bigint.of_string + "26740989696982304482414554371640280045791606641637898228291292575942109454805" + , Bignum_bigint.of_string + "14906024627800344780747375705291059367428823794643427263104879621768813059138" + ) + in + let expected_commutative_result = + (* A + B *) + ( Bignum_bigint.of_string + "11878681988771676869370724830611253729756170947285460876552168044614948225457" + , Bignum_bigint.of_string + "14497133356854845193720136968564933709713968802446650329644811738138289288792" + ) + in + let expected_associative_result = + (* A + B + C *) + ( Bignum_bigint.of_string + "8988194870545558903676114324437227470798902472195505563098874771184576333284" + , Bignum_bigint.of_string + "2715074574400479059415686517976976756653616385004805753779147804207672517454" + ) + in + (* 2 * (A + B) *) + let expected_distributive_result = + ( Bignum_bigint.of_string + "5858337972845412034234591451268195730728808894992644330419904703508222498795" + , Bignum_bigint.of_string + "7758708768756582293117808728373210197717986974150537098853332332749930840785" + ) + in + assert (is_on_curve_bignum_point pallas_curve point_a) ; + assert (is_on_curve_bignum_point pallas_curve point_b) ; + assert (is_on_curve_bignum_point pallas_curve point_c) ; + assert (is_on_curve_bignum_point pallas_curve expected_commutative_result) ; + assert (is_on_curve_bignum_point pallas_curve expected_associative_result) ; + assert (is_on_curve_bignum_point pallas_curve expected_distributive_result) ; + + let _cs = + test_group_properties pallas_curve point_a point_b point_c + expected_commutative_result expected_associative_result + expected_distributive_result + in + + () ) + +(*******************************) +(* Scalar multiplication tests *) +(*******************************) + +let%test_unit "Ec_group.is_on_curve" = + if scalar_mul_tests_enabled then ( + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. 
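Failures are ignored here (note the try ... with _ -> () below), so initialization is best-effort. 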
*) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test is_on_curve *) + let test_is_on_curve ?cs (curve : Curve_params.t) + (point : Affine.bignum_point) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants + (module Runner.Impl) + curve ~use_precomputed_gen_doubles:false + in + let point = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) point + in + + (* Create external checks context for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let unused_external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* Check point is on elliptic curve *) + is_on_curve (module Runner.Impl) unused_external_checks curve point ; + + (* Check for expected quantity of external checks *) + let bound_checks_count = ref 3 in + if not Bignum_bigint.(curve.bignum.a = zero) then + bound_checks_count := !bound_checks_count + 1 ; + if not Bignum_bigint.(curve.bignum.b = zero) then + bound_checks_count := !bound_checks_count + 1 ; + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.bounds + !bound_checks_count ) ; + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges + 3 ) ; + assert ( + Mina_stdlib.List.Length.equal + unused_external_checks.compact_multi_ranges 3 ) ; + () ) + in + + cs + in + + (* Positive tests *) + let _cs = test_is_on_curve Secp256k1.params Secp256k1.params.gen in + + let good_pt = + ( Bignum_bigint.of_string + "18950551679048287927361677965259288422489066940346827203675447914841748996155" + , Bignum_bigint.of_string + "47337572658241658062145739798014345835092764795141449413289521900680935648400" + ) + in + let _cs = test_is_on_curve Secp256k1.params good_pt in + let neg_good_pt = + let x, y = good_pt in + (x, Bignum_bigint.((zero - y) % Secp256k1.params.modulus)) + in + let _cs = test_is_on_curve Secp256k1.params neg_good_pt in + + (* Test with y^2 = x^3 -3 * x + 18958286285566608000408668544493926415504680968679321075787234672564 *) + let curve_p224 = + Curve_params. + { default with + modulus = + Bignum_bigint.of_string + "0xffffffffffffffffffffffffffffffff000000000000000000000001" + (* ; order = Bignum_bigint.one *) + ; a = + Bignum_bigint.of_string + "0xfffffffffffffffffffffffffffffffefffffffffffffffffffffffe" + ; b = + Bignum_bigint.of_string + "18958286285566608000408668544493926415504680968679321075787234672564" + } + in + + let point = + ( Bignum_bigint.of_string + "20564182195513988720077877094445678909500371329094056390559170498601" + , Bignum_bigint.of_string + "2677931089606376366731934050370502738338362171950142296573730478996" + ) + in + assert (is_on_curve_bignum_point curve_p224 point) ; + let _cs = test_is_on_curve curve_p224 point in + + (* Test with elliptic curve y^2 = x^3 + 17 * x mod 7879 *) + let curve_c1 = + Curve_params. 
+ { default with + modulus = Bignum_bigint.of_int 7879 + ; a = Bignum_bigint.of_int 17 + } + in + let _cs = + let point = (Bignum_bigint.of_int 7331, Bignum_bigint.of_int 888) in + assert (is_on_curve_bignum_point curve_c1 point) ; + test_is_on_curve curve_c1 point + in + + (* Negative tests *) + assert ( + Common.is_error (fun () -> + let bad_pt = + ( Bignum_bigint.of_string + "67973637023329354644729732876692436096994797487488454090437075702698953132769" + , Bignum_bigint.of_string + "208096131279561713744990959402407452508030289249215221172372441421932322041350" + ) + in + test_is_on_curve Secp256k1.params bad_pt ) ) ; + + assert ( + Common.is_error (fun () -> + let bad_pt = (Bignum_bigint.zero, Bignum_bigint.one) in + test_is_on_curve Secp256k1.params bad_pt ) ) ; + assert ( + Common.is_error (fun () -> + let bad_pt = (Bignum_bigint.one, Bignum_bigint.one) in + test_is_on_curve curve_p224 bad_pt ) ) ; + assert ( + Common.is_error (fun () -> + let bad_pt = (Bignum_bigint.of_int 2, Bignum_bigint.of_int 77) in + test_is_on_curve curve_c1 bad_pt ) ) ; + () ) + +let%test_unit "Ec_group.check_ia" = + if scalar_mul_tests_enabled then ( + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test check_ia *) + let test_check_ia ?cs (curve : Curve_params.t) + (ia : Affine.bignum_point Curve_params.ia_points) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants (module Runner.Impl) curve + in + let ia = + Curve_params.ia_to_circuit_constants (module Runner.Impl) ia + in + + (* Create external checks context for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let unused_external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* Check initial accumulator values *) + check_ia (module Runner.Impl) unused_external_checks curve ia ; + + (* Check for expected quantity of external checks *) + let bounds_checks_count = ref 3 in + if not Bignum_bigint.(curve.bignum.a = zero) then + bounds_checks_count := !bounds_checks_count + 1 ; + if not Bignum_bigint.(curve.bignum.b = zero) then + bounds_checks_count := !bounds_checks_count + 1 ; + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.bounds + !bounds_checks_count ) ; + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges + 3 ) ; + assert ( + Mina_stdlib.List.Length.equal + unused_external_checks.compact_multi_ranges 3 ) ; + () ) + in + + cs + in + + (* + * Positive tests + *) + + (* Check secp256k1 initial accumulator (ia) points are correctly computed *) + let ia = compute_ia_points Secp256k1.params in + assert (Stdlib.(ia = Secp256k1.params.ia)) ; + assert ( + Bignum_bigint.( + equal (fst ia.acc) (fst Secp256k1.params.ia.acc) + && equal (snd ia.acc) (snd Secp256k1.params.ia.acc) + && equal (fst ia.neg_acc) (fst Secp256k1.params.ia.neg_acc) + && equal (snd ia.neg_acc) (snd Secp256k1.params.ia.neg_acc)) ) ; + + (* Check secp256k1 ia *) + let _cs = test_check_ia Secp256k1.params Secp256k1.params.ia in + + (* Check computation and constraining of another ia *) + let some_pt = + ( Bignum_bigint.of_string + "67973637023329354644729732876692436096994797487488454090437075702698953132769" + , Bignum_bigint.of_string + 
"108096131279561713744990959402407452508030289249215221172372441421932322041359" + ) + in + let ia = compute_ia_points Secp256k1.params ~point:some_pt in + assert ( + Bignum_bigint.( + equal (fst ia.acc) + (Bignum_bigint.of_string + "77808213848094917079255757522755861813805484598820680171349097575367307923684" )) ) ; + assert ( + Bignum_bigint.( + equal (snd ia.acc) + (Bignum_bigint.of_string + "53863434441850287308371409267019602514253829996603354269738630468061457326859" )) ) ; + assert ( + Bignum_bigint.( + equal (fst ia.neg_acc) + (Bignum_bigint.of_string + "77808213848094917079255757522755861813805484598820680171349097575367307923684" )) ) ; + assert ( + Bignum_bigint.( + equal (snd ia.neg_acc) + (Bignum_bigint.of_string + "61928654795465908115199575741668305339016154669037209769718953539847377344804" )) ) ; + let cs = test_check_ia Secp256k1.params ia in + + (* Constraint system reuse *) + let some_pt2 = + ( Bignum_bigint.of_string + "33321203307284859285457570648264200146777100201560799373305582914511875834316" + , Bignum_bigint.of_string + "7129423920069223884043324693587298420542722670070397102650821528843979421489" + ) + in + let another_ia2 = compute_ia_points Secp256k1.params ~point:some_pt2 in + let _cs = test_check_ia ~cs Secp256k1.params another_ia2 in + + (* + * Negative tests + *) + assert ( + Common.is_error (fun () -> + (* Bad negated ia *) + let neg_init_acc = Secp256k1.params.ia.neg_acc in + let bad_neg = + (fst neg_init_acc, Bignum_bigint.(snd neg_init_acc + one)) + in + let bad_ia = + Curve_params.ia_of_points Secp256k1.params.ia.acc bad_neg + in + test_check_ia Secp256k1.params bad_ia ) ) ; + + assert ( + Common.is_error (fun () -> + (* init_acc is not on curve, but negative is good *) + let bad_pt = + ( Bignum_bigint.of_string + "73748207725492941843355928046090697797026070566443284126849221438943867210748" + , Bignum_bigint.of_string + "71805440039692371678177852429904809925653495989672587996663750265844216498843" + ) + in + assert (not (is_on_curve_bignum_point Secp256k1.params bad_pt)) ; + let neg_bad_pt = + let x, y = bad_pt in + (x, Bignum_bigint.((zero - y) % Secp256k1.params.modulus)) + in + let bad_ia = Curve_params.ia_of_points bad_pt neg_bad_pt in + test_check_ia Secp256k1.params bad_ia ) ) ; + () ) + +let%test_unit "Ec_group.scalar_mul" = + if scalar_mul_tests_enabled then + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. 
*) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test elliptic curve scalar multiplication *) + let test_scalar_mul ?cs (curve : Curve_params.t) (scalar : Bignum_bigint.t) + (point : Affine.bignum_point) (expected_result : Affine.bignum_point) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants (module Runner.Impl) curve + in + let scalar_bits = + Common.bignum_bigint_unpack_as_unchecked_vars + (module Runner.Impl) + ~remove_trailing:true scalar + in + let point = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) point + in + let expected_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + expected_result + in + + (* Create external checks context for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let unused_external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* Q = sP *) + let result = + scalar_mul + (module Runner.Impl) + unused_external_checks curve scalar_bits point + in + + (* Check for expected quantity of external checks *) + + (* Check output matches expected result *) + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + result expected_result ) ) ; + () ) + in + + cs + in + + (* + * EC scalar multiplication tests + *) + + (* Multiply by 1 *) + let scalar = Bignum_bigint.of_int 1 in + let point = + ( Bignum_bigint.of_string + "67973637023329354644729732876692436096994797487488454090437075702698953132769" + , Bignum_bigint.of_string + "108096131279561713744990959402407452508030289249215221172372441421932322041359" + ) + in + let _cs = test_scalar_mul Secp256k1.params scalar point point in + + (* Multiply by 3 *) + let scalar = Bignum_bigint.of_int 3 in + let expected_result = + ( Bignum_bigint.of_string + "157187898623115017197196263696044455473966365375620096488909462468556488992" + , Bignum_bigint.of_string + "8815915990003770986701969284580631365087521759318521999314517238992555623924" + ) + in + let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in + + let scalar = Bignum_bigint.of_int 5 in + let expected_result = + ( Bignum_bigint.of_string + "51167536897757234729699532493775077246692685149885509345450034909880529264629" + , Bignum_bigint.of_string + "44029933166959533883508578962900776387952087967919619281016528212534310213626" + ) + in + let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in + + let scalar = Bignum_bigint.of_int 6 in + let expected_result = + ( Bignum_bigint.of_string + "37941877700581055232085743160302884615963229784754572200220248617732513837044" + , Bignum_bigint.of_string + "103619381845871132282285745641400810486981078987965768860988615362483475376768" + ) + in + let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in + + let scalar = Bignum_bigint.of_int 7 in + let expected_result = + ( Bignum_bigint.of_string + "98789585776319197684463328274590329296514884375780947918152956981890869725107" + , Bignum_bigint.of_string + "53439843286771287571705008292825119475125031375071120429905353259479677320421" + ) + in + let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in + + (* Multiply by 391 (9-bits) *) + let scalar = Bignum_bigint.of_int 391 in + let point = + ( Bignum_bigint.of_string + 
"54895644447597143434988379138583445778456903839185254067441861567562618370751" + , Bignum_bigint.of_string + "104240867874630534073764110268869655023740253909668464291682942589488282068874" + ) + in + let expected_result = + ( Bignum_bigint.of_string + "92358528850649079329920393962087666882076668287684124835881344341719861256355" + , Bignum_bigint.of_string + "27671880807027823848003850001152132266698242755975705342674616617508656063465" + ) + in + let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in + + (* Multiply by 56081 (16-bits) = 0b1000 1000 1101 1011 *) + let scalar = Bignum_bigint.of_int 56081 in + let point = + ( Bignum_bigint.of_string + "49950185608981313523985721024498375953313579282523275566585584189656370223502" + , Bignum_bigint.of_string + "63146279987886420302806526994276928563454160280333237123111833753346399349172" + ) + in + let expected_result = + ( Bignum_bigint.of_string + "108851670764886172021315090022738025632501895048831561535857748171372817371035" + , Bignum_bigint.of_string + "39836887958851910836029687008284321008437801650048469660046898576758470452396" + ) + in + let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in + + (* Multiply by full-size secp256k1 scalar (256-bits) *) + let scalar = + Bignum_bigint.of_string + "99539640504241691246180604816121958450675059637016987953058113537095650715171" + in + let point = + ( Bignum_bigint.of_string + "68328903637429126750778604407754814031272668830649072423942370967409226150426" + , Bignum_bigint.of_string + "115181214446139478209347980655067703553667234783111668132659797097404834370543" + ) + in + let expected_result = + ( Bignum_bigint.of_string + "39225021357252528375135552880830100632566425214595783585248505195330577648905" + , Bignum_bigint.of_string + "29440534631649867975583896121458013539074827830686556074829823458426851891598" + ) + in + let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in + + (* Multiply by another full-size secp256k1 scalar (256-bits) *) + let scalar = + Bignum_bigint.of_string + "35756276706511369289499344520446188493221382068841792677286014237073874389678" + in + let point = + ( Bignum_bigint.of_string + "43525911664736252471195991194779124044474905699728523733063794335880455509831" + , Bignum_bigint.of_string + "55128733880722898542773180558916537797992134106308528712389282845794719232809" + ) + in + let expected_result = + ( Bignum_bigint.of_string + "92989598011225532261029933411922200506770253480509168102582704300806548851952" + , Bignum_bigint.of_string + "91632035281581329897770791332253791028537996389304501325297573948973121537913" + ) + in + let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in + + (* Compute secp256k1 pub key from secret key *) + let scalar = + Bignum_bigint.of_string + "88112557240431687619949876834386306142823675858092281192015740375511510392207" + in + let expected_pubkey = + ( Bignum_bigint.of_string + "50567548908598322015490923046917426159132337313161362096244889522774999144344" + , Bignum_bigint.of_string + "35561449820918632865961375836489131575522128704654117756369029278244987778295" + ) + in + let cs = + test_scalar_mul Secp256k1.params scalar Secp256k1.params.gen + expected_pubkey + in + (* Constraint system reuse *) + let scalar = + Bignum_bigint.of_string + "93102346685989503200550820820601664115283772668359982393657391253613200462560" + in + let expected_pt = + ( Bignum_bigint.of_string + 
"115384145918035657737810677734903949889161796282962842129612290299404313800919" + , Bignum_bigint.of_string + "86432196125585910060501672565270170370528330974696895998365685616223611168261" + ) + in + let _cs = + test_scalar_mul ~cs Secp256k1.params scalar Secp256k1.params.gen + expected_pt + in + () + +let%test_unit "Ec_group.scalar_mul_properties" = + if scalar_mul_tests_enabled then ( + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test elliptic curve scalar multiplication properties *) + let test_scalar_mul_properties ?cs (curve : Curve_params.t) + (a_scalar : Bignum_bigint.t) (b_scalar : Bignum_bigint.t) + (point : Affine.bignum_point) (a_expected_result : Affine.bignum_point) + (b_expected_result : Affine.bignum_point) + (a_plus_b_expected : Affine.bignum_point) + (a_times_b_expected : Affine.bignum_point) + (negation_expected : Affine.bignum_point) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants (module Runner.Impl) curve + in + let a_scalar_bits = + Common.bignum_bigint_unpack_as_unchecked_vars + (module Runner.Impl) + ~remove_trailing:true a_scalar + in + let b_scalar_bits = + Common.bignum_bigint_unpack_as_unchecked_vars + (module Runner.Impl) + ~remove_trailing:true b_scalar + in + let c_scalar_bits = + let c_scalar = + Bignum_bigint.((a_scalar + b_scalar) % curve.bignum.order) + in + Common.bignum_bigint_unpack_as_unchecked_vars + (module Runner.Impl) + ~remove_trailing:true c_scalar + in + let point = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) point + in + let a_expected_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + a_expected_result + in + let b_expected_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + b_expected_result + in + let a_plus_b_expected = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + a_plus_b_expected + in + let a_times_b_expected = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + a_times_b_expected + in + let negation_expected = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + negation_expected + in + + (* Create external checks context for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let unused_external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* + * Check distributive property with adding scalars: aP + bP = (a + b)P + *) + + (* A = aP *) + let a_result = + scalar_mul + (module Runner.Impl) + unused_external_checks curve a_scalar_bits point + in + + (* B = bP *) + let b_result = + scalar_mul + (module Runner.Impl) + unused_external_checks curve b_scalar_bits point + in + + (* C = (a + b)P *) + let a_plus_b_result = + scalar_mul + (module Runner.Impl) + unused_external_checks curve c_scalar_bits point + in + + (* A + B *) + let a_result_plus_b_result = + add + (module Runner.Impl) + unused_external_checks curve a_result b_result + in + + (* Assert aP = expected A *) + Affine.assert_equal (module Runner.Impl) a_result a_expected_result ; + (* Assert bP = expected B *) + Affine.assert_equal (module Runner.Impl) b_result b_expected_result ; + (* Assert (a + b)P = expected *) + Affine.assert_equal + (module Runner.Impl) + a_plus_b_result a_plus_b_expected ; + (* Assert A + B = (a + b)P = cP *) + 
Affine.assert_equal + (module Runner.Impl) + a_result_plus_b_result a_plus_b_result ; + + (* + * Check that scalar multiplications compose and commute: [a]bP = [b]aP = [a*b]P + *) + + (* [a]bP *) + let a_b_result = + scalar_mul + (module Runner.Impl) + unused_external_checks curve a_scalar_bits b_result + in + + (* [b]aP *) + let b_a_result = + scalar_mul + (module Runner.Impl) + unused_external_checks curve b_scalar_bits a_result + in + + (* Compute witness for a * b in the scalar field (native bignum arithmetic) *) + let ab_scalar_bits = + let ab_scalar = + Bignum_bigint.(a_scalar * b_scalar % curve.bignum.order) + in + Common.bignum_bigint_unpack_as_unchecked_vars + (module Runner.Impl) + ~remove_trailing:true ab_scalar + in + + (* (a * b)P *) + let ab_result = + scalar_mul + (module Runner.Impl) + unused_external_checks curve ab_scalar_bits point + in + + (* Assert [a]bP = [b]aP *) + Affine.assert_equal (module Runner.Impl) a_b_result b_a_result ; + (* Assert [b]aP = (a * b)P *) + Affine.assert_equal (module Runner.Impl) b_a_result ab_result ; + (* Assert (a * b)P = expected *) + Affine.assert_equal + (module Runner.Impl) + ab_result a_times_b_expected ; + + (* + * Check that scalar multiplication commutes with negation: [-a]P = -(aP) + *) + + (* Compute -a_scalar witness *) + let minus_a_scalar_bits = + let minus_a_scalar = + Bignum_bigint.(-a_scalar % curve.bignum.order) + in + Common.bignum_bigint_unpack_as_unchecked_vars + (module Runner.Impl) + ~remove_trailing:true minus_a_scalar + in + + (* [-a]P *) + let minus_a_result = + scalar_mul + (module Runner.Impl) + unused_external_checks curve minus_a_scalar_bits point + in + + (* -(aP) *) + let negated_a_result = negate (module Runner.Impl) curve a_result in + (* Result row: the negated y-coordinate must be written to a row so that assert_equal can reference it *) + Foreign_field.result_row + (module Runner.Impl) + ~label:"negation_property_check" + @@ Affine.y negated_a_result ; + + (* Assert [-a]P = -(aP) *) + Affine.assert_equal + (module Runner.Impl) + minus_a_result negated_a_result ; + (* Assert -(aP) = expected *) + Affine.assert_equal + (module Runner.Impl) + negated_a_result negation_expected ; + + () ) + in + + cs + in + + (* + * EC scalar multiplication properties tests + *) + + (* Tests with generator *) + let a_scalar = + Bignum_bigint.of_string + "79401928295407367700174300280555320402843131478792245979539416476579739380993" + in + (* aG *) + let a_expected = + ( Bignum_bigint.of_string + "17125835931983334217694156357722716412757965999176597307946554943053675538785" + , Bignum_bigint.of_string + "46388026915780724534166509048612278793220290073988306084942872130687658791661" + ) + in + let b_scalar = + Bignum_bigint.of_string + "89091288558408807474211262098870527285408764120538440460973310880924228023627" + in + (* bG *) + let b_expected = + ( Bignum_bigint.of_string + "79327061200655101960260174492040176163202074463842535225851740487556039447898" + , Bignum_bigint.of_string + "17719907321698144940791372349744661269763063699265755816142522447977929876765" + ) + in + (* (a + b)G *) + let a_plus_b_expected = + ( Bignum_bigint.of_string + "81040990384669475923010997008987195868838198748766130146528604954229008315134" + , Bignum_bigint.of_string + "34561268318835956667566052477444512933985042899902969559255322703897774718063" + ) + in + (* (a * b)G *) + let a_times_b_expected = + ( Bignum_bigint.of_string + "81456477659851325370442471400511783773782655276230587738882014172211964156628" + , Bignum_bigint.of_string + 
"95026373302104994624825470484745116441888023752189438912144935562310761663097" + ) + in + (* [-a]G *) + let negation_expected = + ( Bignum_bigint.of_string + "17125835931983334217694156357722716412757965999176597307946554943053675538785" + , Bignum_bigint.of_string + "69404062321535470889404475960075629060049694591652257954514711877221175880002" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params a_expected) ; + assert (is_on_curve_bignum_point Secp256k1.params b_expected) ; + assert (is_on_curve_bignum_point Secp256k1.params a_plus_b_expected) ; + assert (is_on_curve_bignum_point Secp256k1.params a_times_b_expected) ; + assert (is_on_curve_bignum_point Secp256k1.params negation_expected) ; + + let _cs = + test_scalar_mul_properties Secp256k1.params a_scalar b_scalar + Secp256k1.params.gen a_expected b_expected a_plus_b_expected + a_times_b_expected negation_expected + in + + (* Tests with another curve point *) + let point = + ( Bignum_bigint.of_string + "33774054739397672981116348681092907963399779523481500939771509974082662984990" + , Bignum_bigint.of_string + "60414776605185041994402340927179985824709402511452021592188768672640080416757" + ) + in + let a_scalar = + Bignum_bigint.of_string + "101698197574283114939368343806106834988902354006673798485060078476846328099457" + in + (* aP *) + let a_expected = + ( Bignum_bigint.of_string + "75195284589272297831705973079897644085806639251981864022525558637369799002975" + , Bignum_bigint.of_string + "21318219854954928210493202207122232794689530644716510309784081397689563830643" + ) + in + let b_scalar = + Bignum_bigint.of_string + "29906750163917842454712060592346612426879165698013462577595179415632189050569" + in + (* bP *) + let b_expected = + ( Bignum_bigint.of_string + "31338730031552911193929716320599408654845663804319033450328019997834721773857" + , Bignum_bigint.of_string + "19509931248131549366806268091016515808560677012657535095393179462073374184004" + ) + in + (* (a + b)P *) + let a_plus_b_expected = + ( Bignum_bigint.of_string + "3785015531479612950834562670482118046158085046729801327010146109899305257240" + , Bignum_bigint.of_string + "67252551234352942899384104854542424500400416990163373189382133933498016564076" + ) + in + (* (a * b)P *) + let a_times_b_expected = + ( Bignum_bigint.of_string + "104796198157638974641325627725056289938393733264860209068332598339943619687138" + , Bignum_bigint.of_string + "62474612839119693016992187953610680368302121786246432257338185158014628586401" + ) + in + (* [-a]P *) + let negation_expected = + ( Bignum_bigint.of_string + "75195284589272297831705973079897644085806639251981864022525558637369799002975" + , Bignum_bigint.of_string + "94473869382361267213077782801565675058580454020924053729673502610219270841020" + ) + in + + assert (is_on_curve_bignum_point Secp256k1.params point) ; + assert (is_on_curve_bignum_point Secp256k1.params a_expected) ; + assert (is_on_curve_bignum_point Secp256k1.params b_expected) ; + assert (is_on_curve_bignum_point Secp256k1.params a_plus_b_expected) ; + assert (is_on_curve_bignum_point Secp256k1.params a_times_b_expected) ; + assert (is_on_curve_bignum_point Secp256k1.params negation_expected) ; + + let _cs = + test_scalar_mul_properties Secp256k1.params a_scalar b_scalar point + a_expected b_expected a_plus_b_expected a_times_b_expected + negation_expected + in + () ) + +let%test_unit "Ec_group.scalar_mul_tiny" = + if scalar_mul_tests_enabled then + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. 
*) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test elliptic curve scalar multiplication with tiny scalar *) + let test_scalar_mul_tiny ?cs (curve : Curve_params.t) + (scalar : Bignum_bigint.t) (point : Affine.bignum_point) + (expected_result : Affine.bignum_point) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants + (module Runner.Impl) + curve ~use_precomputed_gen_doubles:false + in + let scalar_bits = + Common.bignum_bigint_unpack_as_unchecked_vars + (module Runner.Impl) + ~remove_trailing:true scalar + in + let point = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) point + in + let expected_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + expected_result + in + + (* Create external checks context for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let unused_external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* Q = sP *) + let result = + scalar_mul + (module Runner.Impl) + unused_external_checks curve scalar_bits point + in + + (* Check for expected quantity of external checks *) + if Bignum_bigint.(curve.bignum.a = zero) then + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.bounds 42 ) + else + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.bounds 43 ) ; + assert ( + Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges + 17 ) ; + assert ( + Mina_stdlib.List.Length.equal + unused_external_checks.compact_multi_ranges 17 ) ; + + (* Check output matches expected result *) + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + result expected_result ) ) ; + () ) + in + + cs + in + + (* + * EC scalar multiplication tests + *) + + (* Multiply by 2 *) + let scalar = Bignum_bigint.of_int 2 in + let expected_result = + ( Bignum_bigint.of_string + "89565891926547004231252920425935692360644145829622209833684329913297188986597" + , Bignum_bigint.of_string + "12158399299693830322967808612713398636155367887041628176798871954788371653930" + ) + in + let _cs = + test_scalar_mul_tiny Secp256k1.params scalar Secp256k1.params.gen + expected_result + in + + () + +let%test_unit "Ec_group.scalar_mul_tiny_full" = + if scalar_mul_tests_enabled then + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. 
*) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test elliptic curve scalar multiplication with tiny scalar (fully constrained) *) + let test_scalar_mul_tiny_full ?cs (curve : Curve_params.t) + (scalar : Bignum_bigint.t) (point : Affine.bignum_point) + (expected_result : Affine.bignum_point) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants + (module Runner.Impl) + curve ~use_precomputed_gen_doubles:false + in + let scalar_bits = + Common.bignum_bigint_unpack_as_unchecked_vars + (module Runner.Impl) + ~remove_trailing:true scalar + in + let point = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) point + in + let expected_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + expected_result + in + + (* Create external checks context for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* Q = sP *) + let result = + scalar_mul + (module Runner.Impl) + external_checks curve scalar_bits point + in + + (* + * Perform external checks + *) + + (* Sanity checks *) + if Bignum_bigint.(curve.bignum.a = zero) then + assert (Mina_stdlib.List.Length.equal external_checks.bounds 42) + else assert (Mina_stdlib.List.Length.equal external_checks.bounds 43) ; + assert ( + Mina_stdlib.List.Length.equal external_checks.multi_ranges 17 ) ; + assert ( + Mina_stdlib.List.Length.equal external_checks.compact_multi_ranges + 17 ) ; + + (* Add gates for bound checks, multi-range-checks and compact-multi-range-checks *) + Foreign_field.constrain_external_checks + (module Runner.Impl) + external_checks curve.modulus ; + + (* Check output matches expected result *) + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + result expected_result ) ) ; + () ) + in + + cs + in + + (* + * EC scalar multiplication full tiny test + *) + + (* Multiply by 2 *) + let scalar = Bignum_bigint.of_int 2 in + let expected_result = + ( Bignum_bigint.of_string + "89565891926547004231252920425935692360644145829622209833684329913297188986597" + , Bignum_bigint.of_string + "12158399299693830322967808612713398636155367887041628176798871954788371653930" + ) + in + let _cs = + test_scalar_mul_tiny_full Secp256k1.params scalar Secp256k1.params.gen + expected_result + in + + () + +let%test_unit "Ec_group.scalar_mul_full" = + if scalar_mul_tests_enabled then + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. 
*) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Test elliptic curve scalar multiplication with scalar (fully constrained) + * Rows without external checks: 9,239 + * Rows with external checks: 51,284 + *) + let test_scalar_mul_full ?cs (curve : Curve_params.t) + (scalar : Bignum_bigint.t) (point : Affine.bignum_point) + (expected_result : Affine.bignum_point) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test public inputs *) + let curve = + Curve_params.to_circuit_constants + (module Runner.Impl) + curve ~use_precomputed_gen_doubles:false + in + let scalar_bits = + Common.bignum_bigint_unpack_as_unchecked_vars + (module Runner.Impl) + ~remove_trailing:true scalar + in + let point = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) point + in + let expected_result = + Affine.of_bignum_bigint_coordinates + (module Runner.Impl) + expected_result + in + + (* Create external checks context for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let external_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* Q = sP *) + let result = + scalar_mul + (module Runner.Impl) + external_checks curve scalar_bits point + in + + (* Perform external checks *) + Foreign_field.constrain_external_checks + (module Runner.Impl) + external_checks curve.modulus ; + + (* Check output matches expected result *) + as_prover (fun () -> + assert ( + Affine.equal_as_prover + (module Runner.Impl) + result expected_result ) ) ; + () ) + in + + cs + in + + (* + * EC scalar multiplication full test + *) + let scalar = + Bignum_bigint.of_string + "86328453031879654597075713189149610219798626760146420625950995482836591878435" + in + let expected_result = + ( Bignum_bigint.of_string + "34471291466947522722859799187843146224770255220707476910295898769840639813138" + , Bignum_bigint.of_string + "93602351553749687946251059563423164683238306171680072584629082513591162129572" + ) + in + let _cs = + test_scalar_mul_full Secp256k1.params scalar Secp256k1.params.gen + expected_result + in + + () diff --git a/src/lib/crypto/kimchi_backend/gadgets/ecdsa.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/ecdsa.ml.disabled new file mode 100644 index 00000000000..576db6bda7c --- /dev/null +++ b/src/lib/crypto/kimchi_backend/gadgets/ecdsa.ml.disabled @@ -0,0 +1,1131 @@ +open Core_kernel +module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint +module Snark_intf = Snarky_backendless.Snark_intf + +let tests_enabled = true + +(* Array to tuple helper *) +let tuple6_of_array array = + match array with + | [| a1; a2; a3; a4; a5; a6 |] -> + (a1, a2, a3, a4, a5, a6) + | _ -> + assert false + +(* Gadget to assert signature scalars r,s \in Fn + * Must be used when r and s are not public parameters + * + * Scalar field external checks: + * Bound checks: 6 + * Multi-range-checks: 2 + * Compact-range-checks: 2 + * Total range-checks: 10 + * + * Rows: (per crumb, not counting inputs/outputs and constants) + * Check: 4 + * Bound additions: 12 + * Multi-range-checks: 40 + * Total: 56 + *) +let signature_scalar_check (type f) + (module Circuit : Snark_intf.Run with type field = f) + (scalar_checks : f Foreign_field.External_checks.t) + (curve : f Curve_params.InCircuit.t) + (signature : + f Foreign_field.Element.Standard.t * f Foreign_field.Element.Standard.t ) + = + let open Circuit in + (* 
Signature r and s *) + let r, s = signature in + + (* Compute witness values r^-1 and s^-1 needed for the non-zero checks *) + let r_inv0, r_inv1, r_inv2, s_inv0, s_inv1, s_inv2 = + exists (Typ.array ~length:6 Field.typ) ~compute:(fun () -> + let curve_order = + Foreign_field.field_const_standard_limbs_to_bignum_bigint + (module Circuit) + curve.order + in + + let r = + Foreign_field.Element.Standard.to_bignum_bigint_as_prover + (module Circuit) + r + in + + let s = + Foreign_field.Element.Standard.to_bignum_bigint_as_prover + (module Circuit) + s + in + + (* Compute r^-1 *) + let r_inv = Common.bignum_bigint_inverse r curve_order in + + (* Compute s^-1 *) + let s_inv = Common.bignum_bigint_inverse s curve_order in + + (* Convert from Bignums to field elements *) + let r_inv0, r_inv1, r_inv2 = + Foreign_field.bignum_bigint_to_field_const_standard_limbs + (module Circuit) + r_inv + in + let s_inv0, s_inv1, s_inv2 = + Foreign_field.bignum_bigint_to_field_const_standard_limbs + (module Circuit) + s_inv + in + + (* Return and convert back to Cvars *) + [| r_inv0; r_inv1; r_inv2; s_inv0; s_inv1; s_inv2 |] ) + |> tuple6_of_array + in + let r_inv = + Foreign_field.Element.Standard.of_limbs (r_inv0, r_inv1, r_inv2) + in + let s_inv = + Foreign_field.Element.Standard.of_limbs (s_inv0, s_inv1, s_inv2) + in + + let one = Foreign_field.Element.Standard.one (module Circuit) in + + (* C1: Constrain that r != 0 *) + let computed_one = + Foreign_field.mul (module Circuit) scalar_checks r r_inv curve.order + in + (* Bounds 1: Left input r is bound checked below + * Right input r_inv is bound checked below + * Result bound check is covered by scalar_checks + *) + Foreign_field.External_checks.append_bound_check scalar_checks + @@ Foreign_field.Element.Standard.to_limbs r ; + Foreign_field.External_checks.append_bound_check scalar_checks + @@ Foreign_field.Element.Standard.to_limbs r_inv ; + (* Assert r * r^-1 = 1 *) + Foreign_field.Element.Standard.assert_equal (module Circuit) computed_one one ; + + (* C2: Constrain that s != 0 *) + let computed_one = + Foreign_field.mul (module Circuit) scalar_checks s s_inv curve.order + in + (* Bounds 2: Left input s is bound checked below + * Right input s_inv is bound checked below + * Result bound check is covered by scalar_checks + *) + Foreign_field.External_checks.append_bound_check scalar_checks + @@ Foreign_field.Element.Standard.to_limbs s ; + Foreign_field.External_checks.append_bound_check scalar_checks + @@ Foreign_field.Element.Standard.to_limbs s_inv ; + (* Assert s * s^-1 = 1 *) + Foreign_field.Element.Standard.assert_equal (module Circuit) computed_one one + +(* C3: Assert r \in [0, n) + * Already covered by bound check on r (Bounds 1) + *) +(* C4: Assert s \in [0, n) + * Already covered by bound check on s (Bounds 2) + *) + +(* Gadget for constraining ECDSA signature verification in zero-knowledge + * + * Inputs: + * base_checks := Context to track required base field external checks + * scalar_checks := Context to track required scalar field external checks + * curve := Elliptic curve parameters + * pubkey := Public key of signer + * doubles := Optional powers of 2^i * pubkey, 0 <= i < n where n is curve.order_bit_length + * signature := ECDSA signature (r, s) s.t. r, s \in [1, n) + * msg_hash := Message hash s.t. msg_hash \in Fn
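 + * + * Illustrative call shape (a sketch only, not part of the original interface docs; + * the setup of the external checks contexts mirrors the tests in this file, and the + * accumulated checks must still be constrained afterwards): + * + *   let base_checks = Foreign_field.External_checks.create (module Circuit) in + *   let scalar_checks = Foreign_field.External_checks.create (module Circuit) in + *   verify (module Circuit) base_checks scalar_checks curve pubkey (r, s) msg_hash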
 + * + * Preconditions: + * pubkey is on the curve and not O (use Ec_group.is_on_curve gadget) + * pubkey is in the subgroup (nP = O) (use Ec_group.check_subgroup gadget) + * pubkey is bounds checked (use multi-range-check gadgets) + * r, s \in [1, n) (use signature_scalar_check gadget) + * msg_hash \in Fn (use bytes_to_foreign_field_element gadget) + * + * Public parameters: + * gen is the correct elliptic curve group generator point + * a, b are correct elliptic curve parameters + * curve order is the correct elliptic curve group order + * curve modulus is the correct elliptic curve base field modulus + * ia point is publicly, deterministically and randomly selected (nothing-up-my-sleeve) + * ia is on the curve + * ia negated point computation is correct + * ia coordinates are valid + * + * Base field external checks: (per crumb, not counting inputs and output) + * Bound checks: 100 (+2 when a != 0 and +1 when b != 0) + * Multi-range-checks: 40 + * Compact-range-checks: 40 + * Total range-checks: 180 + * + * Scalar field external checks: (per crumb, not counting inputs and output) + * Bound checks: 5 + * Multi-range-checks: 3 + * Compact-range-checks: 3 + * Total range-checks: 11 + * + * Rows: (per crumb, not counting inputs/outputs and constants) + * Verify: ~205 (+5 when a != 0 and +2 when b != 0) + * Bound additions: 210 + * Multi-range-checks: 764 + * Total: 1179 + * + * Constants: + * Curve constants: 10 (for 256-bit curve; one-time cost per circuit) + * Pre-computing doubles: 767 (for 256-bit curve; one-time cost per circuit) + * + *) +let verify (type f) (module Circuit : Snark_intf.Run with type field = f) + (base_checks : f Foreign_field.External_checks.t) + (scalar_checks : f Foreign_field.External_checks.t) + (curve : f Curve_params.InCircuit.t) (pubkey : f Affine.t) + ?(use_precomputed_gen_doubles = true) ?(scalar_mul_bit_length = 0) + ?(doubles : f Affine.t array option) + (signature : + f Foreign_field.Element.Standard.t * f Foreign_field.Element.Standard.t ) + (msg_hash : f Foreign_field.Element.Standard.t) = + let open Circuit in + (* Signature r and s *) + let r, s = signature in + + (* Compute witness values u1 and u2 *) + let u1_0, u1_1, u1_2, u2_0, u2_1, u2_2 = + exists (Typ.array ~length:6 Field.typ) ~compute:(fun () -> + let r = + Foreign_field.Element.Standard.to_bignum_bigint_as_prover + (module Circuit) + r + in + + let s = + Foreign_field.Element.Standard.to_bignum_bigint_as_prover + (module Circuit) + s + in + + let msg_hash = + Foreign_field.Element.Standard.to_bignum_bigint_as_prover + (module Circuit) + msg_hash + in + + (* Compute s^-1 *) + let s_inv = Common.bignum_bigint_inverse s curve.bignum.order in + + (* Compute u1 = z * s^-1 *) + let u1 = Bignum_bigint.(msg_hash * s_inv % curve.bignum.order) in + + (* Compute u2 = r * s^-1 *) + let u2 = Bignum_bigint.(r * s_inv % curve.bignum.order) in + + (* Convert from Bignums to field elements *) + let u1_0, u1_1, u1_2 = + Foreign_field.bignum_bigint_to_field_const_standard_limbs + (module Circuit) + u1 + in + let u2_0, u2_1, u2_2 = + Foreign_field.bignum_bigint_to_field_const_standard_limbs + (module Circuit) + u2 + in + + (* Return and convert back to Cvars *) + [| u1_0; u1_1; u1_2; u2_0; u2_1; u2_2 |] ) + |> tuple6_of_array + in + let u1 = Foreign_field.Element.Standard.of_limbs (u1_0, u1_1, u1_2) in + let u2 = Foreign_field.Element.Standard.of_limbs (u2_0, u2_1, u2_2) in + + (* C1: Constrain s * u1 = z *) + let msg_hash_computed = + Foreign_field.mul + (module Circuit) + scalar_checks 
~bound_check_result:false s u1 curve.order + in + (* Bounds 1: Left input s is gadget input (checked externally) + * Right input u1 checked below + * Result is gadget input (already checked externally). + *) + Foreign_field.External_checks.append_bound_check scalar_checks + @@ Foreign_field.Element.Standard.to_limbs u1 ; + + (* Assert s * u1 = z *) + Foreign_field.Element.Standard.assert_equal + (module Circuit) + msg_hash_computed msg_hash ; + + (* C2: Constrain s * u2 = r *) + let r_computed = + Foreign_field.mul + (module Circuit) + scalar_checks ~bound_check_result:false s u2 curve.order + in + + (* Bounds 2: Left input s is gadget input (checked externally) + * Right input u2 checked below + * Result is gadget input (already checked externally). + *) + Foreign_field.External_checks.append_bound_check scalar_checks + @@ Foreign_field.Element.Standard.to_limbs u2 ; + + (* Assert s * u2 = r *) + Foreign_field.Element.Standard.assert_equal (module Circuit) r_computed r ; + + (* + * Compute R = u1G + u2P + *) + + (* Set optional alternative scalar_mul_bit_length *) + let scalar_bit_length = + if scalar_mul_bit_length > 0 then scalar_mul_bit_length + else curve.order_bit_length + in + + (* C3: Decompose u1 into bits *) + let u1_bits = + Foreign_field.Element.Standard.unpack + (module Circuit) + u1 ~length:scalar_bit_length + in + + (* C4: Decompose u2 into bits *) + let u2_bits = + Foreign_field.Element.Standard.unpack + (module Circuit) + u2 ~length:scalar_bit_length + in + + (* C5: Constrain scalar multiplication u1G *) + let curve_doubles = + if use_precomputed_gen_doubles then Some curve.doubles else None + in + let u1_point = + Ec_group.scalar_mul + (module Circuit) + base_checks curve ?doubles:curve_doubles u1_bits curve.gen + in + + (* Bounds 5: Generator is gadget input (public parameter) + * Initial accumulator is gadget input (checked externally or public parameter) + * Result bound check for u1_point below. + *) + Foreign_field.External_checks.append_bound_check base_checks + @@ Foreign_field.Element.Standard.to_limbs @@ Affine.x u1_point ; + Foreign_field.External_checks.append_bound_check base_checks + @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y u1_point ; + + (* C6: Constrain scalar multiplication u2P *) + let u2_point = + Ec_group.scalar_mul + (module Circuit) + base_checks curve ?doubles u2_bits pubkey + in + + (* Bounds 6: Pubkey is gadget input (checked externally) + * Initial accumulator is gadget input (checked externally or public parameter) + * Result bound check for u2_point below. 
+ *) + Foreign_field.External_checks.append_bound_check base_checks + @@ Foreign_field.Element.Standard.to_limbs @@ Affine.x u2_point ; + Foreign_field.External_checks.append_bound_check base_checks + @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y u2_point ; + + (* C7: R = u1G + u2P *) + let result = + Ec_group.add (module Circuit) base_checks curve u1_point u2_point + in + + (* Bounds 7: Left and right inputs checked by (Bounds 5) and (Bounds 6) + * Result bound is bound checked below + *) + Foreign_field.External_checks.append_bound_check base_checks + @@ Foreign_field.Element.Standard.to_limbs @@ Affine.x result ; + Foreign_field.External_checks.append_bound_check base_checks + @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y result ; + + (* Constrain that r = Rx (mod n), where n is the scalar field modulus + * + * Note: The scalar field modulus (curve.order) may be greater or smaller than + * the base field modulus (curve.modulus) + * + * curve.order > curve.modulus => Rx = 0 * n + Rx + * + * curve.order < curve.modulus => Rx = q * n + Rx' + * + * Thus, to check for congruence we need to compute the modular reduction of Rx and + * assert that it equals r. + * + * Since we may want to target applications where the scalar field is much smaller + * than the base field, we cannot make any assumptions about the ratio between + * these moduli, so we will constrain Rx = q * n + Rx' using the foreign field + * multiplication gadget, rather than just constraining Rx + 0 with our foreign + * field addition gadget. + * + * As we are reducing Rx modulo n, we are performing foreign field arithmetic modulo n. + * However, the multiplicand n above is not a valid foreign field element in [0, n - 1]. + * To be safe we must constrain Rx = q * (n - 1) + q + Rx' modulo n. 
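+   *
+   * Toy example of the constrained identity (illustrative numbers only, not
+   * real curve values): with n = 7 and Rx = 23 the witness is q = 3 and
+   * Rx' = 2, and indeed q * (n - 1) + q + Rx' = 3 * 6 + 3 + 2 = 23 = Rx.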
+ *)
+
+  (* Compute witness values q and Rx' *)
+  let quotient0, quotient1, quotient2, x_prime0, x_prime1, x_prime2 =
+    exists (Typ.array ~length:6 Field.typ) ~compute:(fun () ->
+        let x =
+          Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+            (module Circuit)
+            (Affine.x result)
+        in
+
+        (* Compute quotient q and remainder Rx' of Rx = q * n + Rx' *)
+        let quotient, x_prime =
+          Common.bignum_bigint_div_rem x curve.bignum.order
+        in
+
+        (* Convert from Bignums to field elements *)
+        let quotient0, quotient1, quotient2 =
+          Foreign_field.bignum_bigint_to_field_const_standard_limbs
+            (module Circuit)
+            quotient
+        in
+        let x_prime0, x_prime1, x_prime2 =
+          Foreign_field.bignum_bigint_to_field_const_standard_limbs
+            (module Circuit)
+            x_prime
+        in
+
+        (* Return and convert back to Cvars *)
+        [| quotient0; quotient1; quotient2; x_prime0; x_prime1; x_prime2 |] )
+    |> tuple6_of_array
+  in
+
+  (* C8: Constrain q * (n - 1) *)
+  let quotient =
+    Foreign_field.Element.Standard.of_limbs (quotient0, quotient1, quotient2)
+  in
+  let quotient_product =
+    Foreign_field.mul
+      (module Circuit)
+      scalar_checks quotient curve.order_minus_one curve.order
+  in
+
+  (* Bounds 8: Left input q is bound checked below
+   *          Right input (n - 1) is a public parameter so not checked
+   *          Result bound check is already covered by scalar_checks
+   *)
+  Foreign_field.External_checks.append_bound_check scalar_checks
+  @@ Foreign_field.Element.Standard.to_limbs quotient ;
+
+  (* C9: Compute qn = q * (n - 1) + q *)
+  let quotient_times_n =
+    Foreign_field.add
+      (module Circuit)
+      ~full:false quotient_product quotient curve.order
+  in
+
+  (* Bounds 9: Left input q * (n - 1) is covered by (Bounds 8)
+   *          Right input q is covered by (Bounds 8)
+   *          Result is chained into subsequent addition (no check necessary)
+   *)
+
+  (* C10: Compute Rx = qn + Rx' *)
+  let x_prime =
+    Foreign_field.Element.Standard.of_limbs (x_prime0, x_prime1, x_prime2)
+  in
+  let computed_x =
+    Foreign_field.add
+      (module Circuit)
+      ~full:false quotient_times_n x_prime curve.order
+  in
+  (* Addition chain final result row *)
+  Foreign_field.result_row
+    (module Circuit)
+    ~label:"Ecdsa.verify_computed_x" computed_x ;
+
+  (* Bounds 10: Left input qn is chained input, so not checked
+   *           Right input x_prime bounds checked below
+   *           Result already bound checked by (Bounds 7)
+   *)
+  Foreign_field.External_checks.append_bound_check scalar_checks
+  @@ Foreign_field.Element.Standard.to_limbs x_prime ;
+
+  (* C11: Check qn + Rx' = Rx *)
+  Foreign_field.Element.Standard.assert_equal
+    (module Circuit)
+    computed_x (Affine.x result) ;
+
+  (* C12: Check that r = Rx' *)
+  Foreign_field.Element.Standard.assert_equal (module Circuit) r x_prime ;
+
+  (* C13: Check result is on curve (also implies result is not infinity) *)
+  Ec_group.is_on_curve (module Circuit) base_checks curve result ;
+
+  (* Bounds 13: Input already bound checked by (Bounds 7) *)
+  ()
+
+(***************)
+(* ECDSA tests *)
+(***************)
+
+let%test_unit "Ecdsa.verify" =
+  if tests_enabled then (
+    let open Kimchi_gadgets_test_runner in
+    (* Initialize the SRS cache. *)
+    let () =
+      try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+    in
+
+    (* Let's test proving ECDSA signature verification in ZK!
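+     * As a reminder, the relation being verified is the standard ECDSA check:
+     * u1 = z * s^-1 mod n, u2 = r * s^-1 mod n, R = u1 * G + u2 * P, and the
+     * signature is valid iff Rx = r mod n.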
*) + let test_verify ?cs ?(use_precomputed_gen_doubles = true) + ?(scalar_mul_bit_length = 0) (curve : Curve_params.t) + (pubkey : Affine.bignum_point) + (signature : Bignum_bigint.t * Bignum_bigint.t) + (msg_hash : Bignum_bigint.t) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + (* Prepare test inputs *) + let curve = + Curve_params.to_circuit_constants + (module Runner.Impl) + curve ~use_precomputed_gen_doubles + in + let pubkey = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) pubkey + in + let signature = + ( Foreign_field.Element.Standard.of_bignum_bigint + (module Runner.Impl) + (fst signature) + , Foreign_field.Element.Standard.of_bignum_bigint + (module Runner.Impl) + (snd signature) ) + in + let msg_hash = + Foreign_field.Element.Standard.of_bignum_bigint + (module Runner.Impl) + msg_hash + in + + (* Create external checks contexts for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let unused_base_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + let unused_scalar_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* Subgroup check for pubkey *) + Ec_group.check_subgroup + (module Runner.Impl) + unused_base_checks curve pubkey ; + + (* Check r, s \in [1, n) *) + signature_scalar_check + (module Runner.Impl) + unused_scalar_checks curve signature ; + + (* Verify ECDSA signature *) + verify + (module Runner.Impl) + ~use_precomputed_gen_doubles ~scalar_mul_bit_length + unused_base_checks unused_scalar_checks curve pubkey signature + msg_hash ; + + () ) + in + + cs + in + + (* Test 1: ECDSA verify test with real Ethereum mainnet signature + * Tx: https://etherscan.io/tx/0x0d26b1539304a214a6517b529a027f987cd52e70afd8fdc4244569a93121f144 + * + * Raw tx: 0xf86580850df8475800830186a094353535353535353535353535353535353535353564801ba082de9950cc5aac0dca7210cb4b77320ac9e844717d39b1781e9d941d920a1206a01da497b3c134f50b2fce514d66e20c5e43f9615f097395a5527041d14860a52f + * Msg hash: 0x3e91cd8bd233b3df4e4762b329e2922381da770df1b31276ec77d0557be7fcef + * Raw pubkey: 0x046e0f66759bb520b026a9c7d61c82e8354025f2703696dcdac679b2f7945a352e637c8f71379941fa22f15a9fae9cb725ae337b16f216f5acdeefbd52a0882c27 + * Raw signature: 0x82de9950cc5aac0dca7210cb4b77320ac9e844717d39b1781e9d941d920a12061da497b3c134f50b2fce514d66e20c5e43f9615f097395a5527041d14860a52f1b + * r := 0x82de9950cc5aac0dca7210cb4b77320ac9e844717d39b1781e9d941d920a1206 + * s := 0x1da497b3c134f50b2fce514d66e20c5e43f9615f097395a5527041d14860a52f + * v := 27 + *) + let eth_pubkey = + ( Bignum_bigint.of_string + "49781623198970027997721070672560275063607048368575198229673025608762959476014" + , Bignum_bigint.of_string + "44999051047832679156664607491606359183507784636787036192076848057884504239143" + ) + in + let eth_signature = + ( (* r *) + Bignum_bigint.of_string + "59193968509713231970845573191808992654796038550727015999103892005508493218310" + , (* s *) + Bignum_bigint.of_string + "13407882537414256709292360527926092843766608354464979273376653245977131525423" + ) + in + let tx_msg_hash = + Bignum_bigint.of_string + "0x3e91cd8bd233b3df4e4762b329e2922381da770df1b31276ec77d0557be7fcef" + in + + assert (Ec_group.is_on_curve_bignum_point Secp256k1.params eth_pubkey) ; + + let _cs = + test_verify Secp256k1.params ~use_precomputed_gen_doubles:true eth_pubkey + eth_signature tx_msg_hash + in + + (* Negative test *) + assert ( + Common.is_error (fun () -> + (* Bad hash *) 
+ let bad_tx_msg_hash = + Bignum_bigint.of_string + "0x3e91cd8bd233b3df4e4762b329e2922381da770df1b31276ec77d0557be7fcee" + in + test_verify Secp256k1.params eth_pubkey eth_signature bad_tx_msg_hash ) ) ; + + (* Test 2: ECDSA verify test with another real Ethereum mainnet signature + * Tx: https://etherscan.io/tx/0x9cec14aadb06b59b2646333f47efe0ee7f21fed48d93806023b8eb205aa3b161 + * + * Raw tx: 0x02f9019c018201338405f5e100850cad3895d8830108949440a50cf069e992aa4536211b23f286ef88752187880b1a2bc2ec500000b90124322bba210000000000000000000000008a001303158670e284950565164933372807cd4800000000000000000000000012d220fbda92a9c8f281ea02871afa70dfde81e90000000000000000000000000000000000000000000000000afd4ea3d29472400000000000000000000000000000000000000000461c9bb5bb1c3429b25544e3f4b7bb67d63f9b432df61df28a9897e26284b370adcd7b558fa286babb0efdeb000000000000000000000000000000000000000000000000001cdd1f19bb8dc0000000000000000000000000000000000000000000000000000000006475ed380000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a8f2573c080a0893bc3facf19becba979e31d37ed1b222faab09b8c554a17072f6fbfc1e5658fa01119ef751f0fc3c1ec4d1eeb9db64c9f416ce1aa3267d7b98d8426ab35f0c422 + * Msg hash: 0xf7c5983cdb051f68aa84444c4b8ecfdbf60548fe3f5f3f2d19cc5d3c096f0b5b + * Raw pubkey: 0x04ad53a68c2120f9a81288b1377adbe7477b7cec1b9b5ff57d5e331ee7f9e6c2372f997b48cf3faa91023f77754ef63ec49dcd5a61b681b53cda894616c28422c0 + * Raw signature: 0x893bc3facf19becba979e31d37ed1b222faab09b8c554a17072f6fbfc1e5658f1119ef751f0fc3c1ec4d1eeb9db64c9f416ce1aa3267d7b98d8426ab35f0c4221c + * r := 0x893bc3facf19becba979e31d37ed1b222faab09b8c554a17072f6fbfc1e5658f + * s := 0x1119ef751f0fc3c1ec4d1eeb9db64c9f416ce1aa3267d7b98d8426ab35f0c422 + * v := 0 + *) + let eth_pubkey = + Ethereum.pubkey_hex_to_point + "0x04ad53a68c2120f9a81288b1377adbe7477b7cec1b9b5ff57d5e331ee7f9e6c2372f997b48cf3faa91023f77754ef63ec49dcd5a61b681b53cda894616c28422c0" + in + + let eth_signature = + ( (* r *) + Bignum_bigint.of_string + "0x893bc3facf19becba979e31d37ed1b222faab09b8c554a17072f6fbfc1e5658f" + , (* s *) + Bignum_bigint.of_string + "0x1119ef751f0fc3c1ec4d1eeb9db64c9f416ce1aa3267d7b98d8426ab35f0c422" + ) + in + let tx_msg_hash = + Bignum_bigint.of_string + "0xf7c5983cdb051f68aa84444c4b8ecfdbf60548fe3f5f3f2d19cc5d3c096f0b5b" + in + + assert (Ec_group.is_on_curve_bignum_point Secp256k1.params eth_pubkey) ; + + let _cs = + test_verify Secp256k1.params eth_pubkey eth_signature tx_msg_hash + in + + (* Test 3: ECDSA verify test with yet another real Ethereum mainnet signature + * Tx: https://etherscan.io/tx/0x4eb2087dc31dda8fc1bd8680624cd2ae0c1ed0d880de1daefb6fddac208d08fb + * + * Raw tx: 0x02f90114011c8405f5e100850d90b9d72982f4a8948a3749936e723325c6b645a0901470cd9e790b9480b8a8b88d4fde00000000000000000000000085210d346e2baa59a486dd19cf9d18f1325d9ffc00000000000000000000000039f083386e75120d2c6c152900219849dbdaa7e60000000000000000000000000000000000000000000000000000000000000b7100000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000360c6ebec080a0a8c5ae8e178c29a3de4a70ef0d22cbb29a8a0013cfa81fea66885556573debe1a031532f9be326029161a4b7bedb80ea4d20b1293cbefb51cc570e72e6aa4ef4d1 + * Msg hash: 0xccdea6d5fce0363b9fbc2cf9a14087fc67c79fbdf55b25789ee2d51dcd82dbc1 + * Raw pubkey: 0x042b7a248bf6fa2acc079d4f451c68c56a40ef81aeaf6a89c10ed6d692f7a6fdea0c05f95d601c3ab4f75d9253d356ab7af4d7d2ac250e0832581d08f1e224a976 + * Raw signature: 
0xa8c5ae8e178c29a3de4a70ef0d22cbb29a8a0013cfa81fea66885556573debe131532f9be326029161a4b7bedb80ea4d20b1293cbefb51cc570e72e6aa4ef4d11c
+     * r := 0xa8c5ae8e178c29a3de4a70ef0d22cbb29a8a0013cfa81fea66885556573debe1
+     * s := 0x31532f9be326029161a4b7bedb80ea4d20b1293cbefb51cc570e72e6aa4ef4d1
+     * v := 0
+     *)
+    let eth_pubkey =
+      Ethereum.pubkey_hex_to_point
+        "0x042b7a248bf6fa2acc079d4f451c68c56a40ef81aeaf6a89c10ed6d692f7a6fdea0c05f95d601c3ab4f75d9253d356ab7af4d7d2ac250e0832581d08f1e224a976"
+    in
+
+    let eth_signature =
+      ( (* r *)
+        Bignum_bigint.of_string
+          "0xa8c5ae8e178c29a3de4a70ef0d22cbb29a8a0013cfa81fea66885556573debe1"
+      , (* s *)
+        Bignum_bigint.of_string
+          "0x31532f9be326029161a4b7bedb80ea4d20b1293cbefb51cc570e72e6aa4ef4d1"
+      )
+    in
+    let tx_msg_hash =
+      Bignum_bigint.of_string
+        "0xccdea6d5fce0363b9fbc2cf9a14087fc67c79fbdf55b25789ee2d51dcd82dbc1"
+    in
+
+    assert (Ec_group.is_on_curve_bignum_point Secp256k1.params eth_pubkey) ;
+
+    let cs =
+      test_verify Secp256k1.params eth_pubkey eth_signature tx_msg_hash
+    in
+
+    assert (
+      Common.is_error (fun () ->
+          (* Bad signature *)
+          let bad_eth_signature =
+            ( (* r *)
+              Bignum_bigint.of_string
+                "0xc8c5ae8e178c29a3de4a70ef0d22cbb29a8a0013cfa81fea66885556573debe1"
+            , (* s *)
+              Bignum_bigint.of_string
+                "0x31532f9be326029161a4b7bedb80ea4d20b1293cbefb51cc570e72e6aa4ef4d1"
+            )
+          in
+          test_verify Secp256k1.params eth_pubkey bad_eth_signature tx_msg_hash ) ) ;
+
+    (* Test 4: Constraint system reuse
+     * Tx: https://etherscan.io/tx/0xfc7d65547eb5192c2f35b7e190b4792a9ebf79876f164ead32288e9fe2b7e4f3
+     *
+     * Raw tx: 0x02f8730113843b9aca00851405ffdc00825b0494a9d1e08c7793af67e9d92fe308d5697fb81d3e4388299ce7c69d7b9c1780c001a06d5a635efe29deca27e52e96dd2d4056cff1a4b51f88d363f1c3802a26cd67a0a07c34d16c2831ee6265d6d2a55cee6e3273f41480424686d44fe709ce7cfd1567
+     * Msg hash: 0x62c771b337f1a0070dddb863b953017aa12918fc37f338419f7664fda443ce93
+     * Raw pubkey: 0x041d4911ee95f0858df65b942fe88cd54d6c06f73fc9e716db1e153d9994b16930e0284e96e308ef77f1d588aa446237111ab370eeab84059a08980e7e7ab0c467
+     * Raw signature: 0x6d5a635efe29deca27e52e96dd2d4056cff1a4b51f88d363f1c3802a26cd67a07c34d16c2831ee6265d6d2a55cee6e3273f41480424686d44fe709ce7cfd15671b
+     * r := 0x6d5a635efe29deca27e52e96dd2d4056cff1a4b51f88d363f1c3802a26cd67a0
+     * s := 0x7c34d16c2831ee6265d6d2a55cee6e3273f41480424686d44fe709ce7cfd1567
+     * v := 1
+     *)
+    let eth_pubkey =
+      Ethereum.pubkey_hex_to_point
+        "0x041d4911ee95f0858df65b942fe88cd54d6c06f73fc9e716db1e153d9994b16930e0284e96e308ef77f1d588aa446237111ab370eeab84059a08980e7e7ab0c467"
+    in
+
+    let eth_signature =
+      ( (* r *)
+        Bignum_bigint.of_string
+          "0x6d5a635efe29deca27e52e96dd2d4056cff1a4b51f88d363f1c3802a26cd67a0"
+      , (* s *)
+        Bignum_bigint.of_string
+          "0x7c34d16c2831ee6265d6d2a55cee6e3273f41480424686d44fe709ce7cfd1567"
+      )
+    in
+    let tx_msg_hash =
+      Bignum_bigint.of_string
+        "0x62c771b337f1a0070dddb863b953017aa12918fc37f338419f7664fda443ce93"
+    in
+
+    assert (Ec_group.is_on_curve_bignum_point Secp256k1.params eth_pubkey) ;
+
+    let _cs =
+      test_verify ~cs Secp256k1.params eth_pubkey eth_signature tx_msg_hash
+    in
+
+    (* Test without using precomputed curve doubles *)
+    let _cs =
+      test_verify ~use_precomputed_gen_doubles:false Secp256k1.params eth_pubkey
+        eth_signature tx_msg_hash
+    in
+
+    () )
+
+let%test_unit "Ecdsa.verify_light" =
+  if tests_enabled then (
+    let open Kimchi_gadgets_test_runner in
+    (* Initialize the SRS cache.
*)
+    let () =
+      try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+    in
+
+    (* Light ECDSA verify circuit for manual checks *)
+    let test_verify_light ?cs ?(use_precomputed_gen_doubles = true)
+        ?(scalar_mul_bit_length = 0) (curve : Curve_params.t)
+        (pubkey : Affine.bignum_point)
+        (signature : Bignum_bigint.t * Bignum_bigint.t)
+        (msg_hash : Bignum_bigint.t) =
+      (* Generate and verify proof *)
+      let cs, _proof_keypair, _proof =
+        Runner.generate_and_verify_proof ?cs (fun () ->
+            (* Prepare test inputs *)
+            let curve =
+              Curve_params.to_circuit_constants
+                (module Runner.Impl)
+                curve ~use_precomputed_gen_doubles
+            in
+            let pubkey =
+              Affine.of_bignum_bigint_coordinates (module Runner.Impl) pubkey
+            in
+            Foreign_field.result_row (module Runner.Impl) (fst pubkey) ;
+            Foreign_field.result_row (module Runner.Impl) (snd pubkey) ;
+            let signature =
+              ( Foreign_field.Element.Standard.of_bignum_bigint
+                  (module Runner.Impl)
+                  (fst signature)
+              , Foreign_field.Element.Standard.of_bignum_bigint
+                  (module Runner.Impl)
+                  (snd signature) )
+            in
+            Foreign_field.result_row (module Runner.Impl) (fst signature) ;
+            Foreign_field.result_row (module Runner.Impl) (snd signature) ;
+            let msg_hash =
+              Foreign_field.Element.Standard.of_bignum_bigint
+                (module Runner.Impl)
+                msg_hash
+            in
+            Foreign_field.result_row (module Runner.Impl) msg_hash ;
+
+            (* Create external checks contexts for tracking extra constraints
+               that are required for soundness (unused in this simple test) *)
+            let unused_base_checks =
+              Foreign_field.External_checks.create (module Runner.Impl)
+            in
+            let unused_scalar_checks =
+              Foreign_field.External_checks.create (module Runner.Impl)
+            in
+
+            (* Omit pubkey subgroup check *)
+
+            (* Omit checking r, s \in [1, n) *)
+
+            (* Verify ECDSA signature *)
+            verify
+              (module Runner.Impl)
+              ~use_precomputed_gen_doubles ~scalar_mul_bit_length
+              unused_base_checks unused_scalar_checks curve pubkey signature
+              msg_hash ;
+
+            (* The base field external check counts depend on the curve and scalar size.
+             * We elide checking them here because we want this test function to be
+             * usable with different curves, scalars and other parameters.
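+             *
+             * (The scalar field counts asserted below match the "Scalar field
+             * external checks" documented at the top of verify: 5 bound checks,
+             * 3 multi-range-checks and 3 compact-range-checks.)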
+ *) + + (* Check scalar field external check counts *) + assert (Mina_stdlib.List.Length.equal unused_scalar_checks.bounds 5) ; + assert ( + Mina_stdlib.List.Length.equal unused_scalar_checks.multi_ranges 3 ) ; + assert ( + Mina_stdlib.List.Length.equal + unused_scalar_checks.compact_multi_ranges 3 ) ; + () ) + in + + cs + in + + (* Tiny secp256k1 signature test: results in 2-bit u1 and u2 scalars + * Extracted with k = 1 -> secret key = 57896044618658097711785492504343953926418782139537452191302581570759080747168 *) + let pubkey = + ( Bignum_bigint.of_string + "86918276961810349294276103416548851884759982251107" + , Bignum_bigint.of_string + "28597260016173315074988046521176122746119865902901063272803125467328307387891" + ) + in + let signature = + ( (* r = Gx *) + Bignum_bigint.of_string + "55066263022277343669578718895168534326250603453777594175500187360389116729240" + , (* s = r/2 *) + Bignum_bigint.of_string + "27533131511138671834789359447584267163125301726888797087750093680194558364620" + ) + in + let msg_hash = + (* z = 2s *) + Bignum_bigint.of_string + "55066263022277343669578718895168534326250603453777594175500187360389116729240" + in + + assert (Ec_group.is_on_curve_bignum_point Secp256k1.params pubkey) ; + + let _cs = + test_verify_light Secp256k1.params ~scalar_mul_bit_length:2 pubkey + signature msg_hash + in + let _cs = + test_verify_light Secp256k1.params ~use_precomputed_gen_doubles:false + ~scalar_mul_bit_length:2 pubkey signature msg_hash + in + + () ) + +let%test_unit "Ecdsa.secp256k1_verify_tiny_full" = + if tests_enabled then ( + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Tiny full circuit for ecdsa on secp256k1 manual checks. 
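+     * Why 2-bit scalars are possible: the inputs below satisfy r = Gx, s = r/2
+     * and z = 2s, so u1 = z * s^-1 = 2 and u2 = r * s^-1 = 2 (mod n).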
+ * Note: pubkey, signature and msg_hash need to be specially crafted to produce 2-bit scalars + *) + let secp256k1_verify_tiny_full ?cs ?(use_precomputed_gen_doubles = true) + (pubkey : Affine.bignum_point) + (signature : Bignum_bigint.t * Bignum_bigint.t) + (msg_hash : Bignum_bigint.t) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + (* Prepare test inputs *) + let curve = + Curve_params.to_circuit_constants + (module Runner.Impl) + Secp256k1.params ~use_precomputed_gen_doubles + in + let pubkey = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) pubkey + in + Foreign_field.result_row (module Runner.Impl) (fst pubkey) ; + Foreign_field.result_row (module Runner.Impl) (snd pubkey) ; + let signature = + ( Foreign_field.Element.Standard.of_bignum_bigint + (module Runner.Impl) + (fst signature) + , Foreign_field.Element.Standard.of_bignum_bigint + (module Runner.Impl) + (snd signature) ) + in + Foreign_field.result_row (module Runner.Impl) (fst signature) ; + Foreign_field.result_row (module Runner.Impl) (snd signature) ; + let msg_hash = + Foreign_field.Element.Standard.of_bignum_bigint + (module Runner.Impl) + msg_hash + in + Foreign_field.result_row (module Runner.Impl) msg_hash ; + + (* Create external checks contexts for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let base_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + let scalar_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* Omit pubkey subgroup check *) + + (* Omit checking r, s \in [1, n) *) + + (* Verify ECDSA signature *) + verify + (module Runner.Impl) + ~use_precomputed_gen_doubles ~scalar_mul_bit_length:2 base_checks + scalar_checks curve pubkey signature msg_hash ; + + (* + * Perform base field external checks + *) + + (* Sanity check *) + let base_bound_checks_count = ref (42 + 2 + 42 + 2 + 6 + 2 + 3) in + if not Bignum_bigint.(curve.bignum.a = zero) then + base_bound_checks_count := !base_bound_checks_count + 2 ; + if not Bignum_bigint.(curve.bignum.b = zero) then + base_bound_checks_count := !base_bound_checks_count + 1 ; + assert ( + Mina_stdlib.List.Length.equal base_checks.bounds + !base_bound_checks_count ) ; + assert (Mina_stdlib.List.Length.equal base_checks.multi_ranges 40) ; + assert ( + Mina_stdlib.List.Length.equal base_checks.compact_multi_ranges 40 ) ; + + (* Add gates for bound checks, multi-range-checks and compact-multi-range-checks *) + Foreign_field.constrain_external_checks + (module Runner.Impl) + base_checks curve.modulus ; + + (* + * Perform scalar field external checks + *) + + (* Sanity checks *) + assert (Mina_stdlib.List.Length.equal scalar_checks.bounds 5) ; + assert (Mina_stdlib.List.Length.equal scalar_checks.multi_ranges 3) ; + assert ( + Mina_stdlib.List.Length.equal scalar_checks.compact_multi_ranges 3 ) ; + + (* Add gates for bound checks, multi-range-checks and compact-multi-range-checks *) + Foreign_field.constrain_external_checks + (module Runner.Impl) + scalar_checks curve.order ; + + () ) + in + + cs + in + + (* Tiny secp256k1 signature test: results in 2-bit u1 and u2 scalars + * Extracted with k = 1 -> secret key = 57896044618658097711785492504343953926418782139537452191302581570759080747168 *) + let pubkey = + (* secret key d = (s - z)/r *) + ( Bignum_bigint.of_string + "86918276961810349294276103416548851884759982251107" + , Bignum_bigint.of_string + 
"28597260016173315074988046521176122746119865902901063272803125467328307387891" + ) + in + let signature = + ( (* r = Gx *) + Bignum_bigint.of_string + "55066263022277343669578718895168534326250603453777594175500187360389116729240" + , (* s = r/2 *) + Bignum_bigint.of_string + "27533131511138671834789359447584267163125301726888797087750093680194558364620" + ) + in + let msg_hash = + (* z = 2s *) + Bignum_bigint.of_string + "55066263022277343669578718895168534326250603453777594175500187360389116729240" + in + + assert (Ec_group.is_on_curve_bignum_point Secp256k1.params pubkey) ; + + let _cs = + secp256k1_verify_tiny_full ~use_precomputed_gen_doubles:false pubkey + signature msg_hash + in + + () ) + +let%test_unit "Ecdsa.verify_full_no_subgroup_check" = + if tests_enabled then ( + let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Prove ECDSA signature verification in ZK (no subgroup check)! *) + let test_verify_full_no_subgroup_check ?cs + ?(use_precomputed_gen_doubles = true) ?(scalar_mul_bit_length = 0) + (curve : Curve_params.t) (pubkey : Affine.bignum_point) + (signature : Bignum_bigint.t * Bignum_bigint.t) + (msg_hash : Bignum_bigint.t) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + (* Prepare test inputs *) + let curve = + Curve_params.to_circuit_constants + (module Runner.Impl) + curve ~use_precomputed_gen_doubles + in + let pubkey = + Affine.of_bignum_bigint_coordinates (module Runner.Impl) pubkey + in + let signature = + ( Foreign_field.Element.Standard.of_bignum_bigint + (module Runner.Impl) + (fst signature) + , Foreign_field.Element.Standard.of_bignum_bigint + (module Runner.Impl) + (snd signature) ) + in + let msg_hash = + Foreign_field.Element.Standard.of_bignum_bigint + (module Runner.Impl) + msg_hash + in + + (* Create external checks contexts for tracking extra constraints + that are required for soundness *) + let base_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + let scalar_checks = + Foreign_field.External_checks.create (module Runner.Impl) + in + + (* Subgroup check for pubkey is too expensive for test without chunking *) + + (* Check r, s \in [1, n) *) + signature_scalar_check + (module Runner.Impl) + scalar_checks curve signature ; + + (* Verify ECDSA signature *) + verify + (module Runner.Impl) + ~use_precomputed_gen_doubles ~scalar_mul_bit_length base_checks + scalar_checks curve pubkey signature msg_hash ; + + (* + * Perform base field external checks + *) + Foreign_field.constrain_external_checks + (module Runner.Impl) + base_checks curve.modulus ; + + (* + * Perform scalar field external checks + *) + Foreign_field.constrain_external_checks + (module Runner.Impl) + scalar_checks curve.order ; + + () ) + in + + cs + in + + (* Test 1: No chunking (big test that doesn't require chunkning) + * Uses precomputed generator doubles. 
+     * Extracted s,d such that the u1 and u2 scalars are equal to m = 95117056129877063566687163501128961107874747202063760588013341337 (216 bits) *)
+    let pubkey =
+      (* secret key d = (s - z)/r *)
+      ( Bignum_bigint.of_string
+          "28335432349034412295843546619549969371276098848890005110917167585721026348383"
+      , Bignum_bigint.of_string
+          "40779711449769771629236800666139862371172776689379727569918249313574127557987"
+      )
+    in
+    let signature =
+      ( (* r = Gx *)
+        Bignum_bigint.of_string
+          "55066263022277343669578718895168534326250603453777594175500187360389116729240"
+      , (* s = r/m *)
+        Bignum_bigint.of_string
+          "92890023769187417206640608811117482540691917151111621018323984641303111040093"
+      )
+    in
+    let msg_hash =
+      (* z = ms *)
+      Bignum_bigint.of_string
+        "55066263022277343669578718895168534326250603453777594175500187360389116729240"
+    in
+
+    assert (Ec_group.is_on_curve_bignum_point Secp256k1.params pubkey) ;
+
+    let _cs =
+      test_verify_full_no_subgroup_check Secp256k1.params
+        ~scalar_mul_bit_length:216 pubkey signature msg_hash
+    in
+
+    (* Test 2: No chunking (big test that doesn't require chunking)
+     * Extracted s,d such that the u1 and u2 scalars are equal to m = 177225723614878382952356121702918977654 (128 bits) *)
+    let pubkey =
+      (* secret key d = (s - z)/r *)
+      ( Bignum_bigint.of_string
+          "6559447345535823731364817861985473100513487071640065635466595453031721007862"
+      , Bignum_bigint.of_string
+          "74970879557849263394678708702512922877596422437120940411392434995042287566169"
+      )
+    in
+    let signature =
+      ( (* r = Gx *)
+        Bignum_bigint.of_string
+          "55066263022277343669578718895168534326250603453777594175500187360389116729240"
+      , (* s = r/m *)
+        Bignum_bigint.of_string
+          "66524399747416926971392827702286928407253072170352243437129959464602950571595"
+      )
+    in
+    let msg_hash =
+      (* z = ms *)
+      Bignum_bigint.of_string
+        "55066263022277343669578718895168534326250603453777594175500187360389116729240"
+    in
+
+    let _cs =
+      test_verify_full_no_subgroup_check Secp256k1.params
+        ~use_precomputed_gen_doubles:false ~scalar_mul_bit_length:128 pubkey
+        signature msg_hash
+    in
+
+    () )
diff --git a/src/lib/crypto/kimchi_backend/gadgets/ethereum.ml b/src/lib/crypto/kimchi_backend/gadgets/ethereum.ml
new file mode 100644
index 00000000000..0dac6edfefe
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/ethereum.ml
@@ -0,0 +1,7 @@
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+
+let pubkey_hex_to_point (hex : string) : Bignum_bigint.t * Bignum_bigint.t =
+  assert (132 = String.length hex) ;
+  let x_hex = "0x" ^ String.sub hex 4 64 in
+  let y_hex = "0x" ^ String.sub hex 68 64 in
+  (Bignum_bigint.of_string x_hex, Bignum_bigint.of_string y_hex)
diff --git a/src/lib/crypto/kimchi_backend/gadgets/foreign_field.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/foreign_field.ml.disabled
new file mode 100644
index 00000000000..560084198cb
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/foreign_field.ml.disabled
@@ -0,0 +1,2224 @@
+open Core_kernel
+
+open Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint
+
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+module Snark_intf = Snarky_backendless.Snark_intf
+
+let tests_enabled = true
+
+let tuple5_of_array array =
+  match array with
+  | [| a1; a2; a3; a4; a5 |] ->
+      (a1, a2, a3, a4, a5)
+  | _ ->
+      assert false
+
+let tuple15_of_array array =
+  match array with
+  | [| a1; a2; a3; a4; a5; a6; a7; a8; a9; a10; a11; a12; a13; a14; a15 |] ->
+      (a1, a2, a3, a4, a5, a6, a7, a8, a9, a10,
a11, a12, a13, a14, a15) + | _ -> + assert false + +(* 2^2L *) +let two_to_2limb = Bignum_bigint.(pow Common.two_to_limb (of_int 2)) + +let two_to_limb_field (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) = + Common.(bignum_bigint_to_field (module Circuit) two_to_limb) + +let two_to_2limb_field (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) = + Common.(bignum_bigint_to_field (module Circuit) two_to_2limb) + +(* Binary modulus *) +let binary_modulus = Common.two_to_3limb + +(* Maximum foreign field modulus for multiplication m = sqrt(2^t * n), see RFC for more details + * For simplicity and efficiency we use the approximation m = floor(sqrt(2^t * n)) + * * Distinct from this approximation is the maximum prime foreign field modulus + * for both Pallas and Vesta given our CRT scheme: + * 926336713898529563388567880069503262826888842373627227613104999999999999999607 *) +let max_foreign_field_modulus (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) : + Bignum_bigint.t = + (* m = floor(sqrt(2^t * n)) *) + let product = + (* We need Zarith for sqrt *) + Bignum_bigint.to_zarith_bigint + @@ Bignum_bigint.(binary_modulus * Circuit.Field.size) + (* Zarith.sqrt truncates (rounds down to int) ~ floor *) + in + Bignum_bigint.of_zarith_bigint @@ Z.sqrt product + +(* Type of operation *) +type op_mode = Add | Sub + +(* Foreign field modulus is abstract on two parameters + * - Field type + * - Limbs structure + * + * There are 2 specific limb structures required + * - Standard mode : 3 limbs of L-bits each + * - Compact mode : 2 limbs where the lowest is 2L bits and the highest is L bits + *) + +type 'field standard_limbs = 'field * 'field * 'field + +type 'field compact_limbs = 'field * 'field + +(* Convert Bignum_bigint.t to Bignum_bigint standard_limbs *) +let bignum_bigint_to_standard_limbs (bigint : Bignum_bigint.t) : + Bignum_bigint.t standard_limbs = + let l12, l0 = Common.(bignum_bigint_div_rem bigint two_to_limb) in + let l2, l1 = Common.(bignum_bigint_div_rem l12 two_to_limb) in + (l0, l1, l2) + +(* Convert Bignum_bigint.t to field standard_limbs *) +let bignum_bigint_to_field_const_standard_limbs (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (bigint : Bignum_bigint.t) : f standard_limbs = + let l0, l1, l2 = bignum_bigint_to_standard_limbs bigint in + ( Common.bignum_bigint_to_field (module Circuit) l0 + , Common.bignum_bigint_to_field (module Circuit) l1 + , Common.bignum_bigint_to_field (module Circuit) l2 ) + +(* Convert Bignum_bigint.t to Bignum_bigint compact_limbs *) +let bignum_bigint_to_compact_limbs (bigint : Bignum_bigint.t) : + Bignum_bigint.t compact_limbs = + let l2, l01 = Common.bignum_bigint_div_rem bigint two_to_2limb in + (l01, l2) + +(* Convert Bignum_bigint.t to field compact_limbs *) +let bignum_bigint_to_field_const_compact_limbs (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (bigint : Bignum_bigint.t) : f compact_limbs = + let l01, l2 = bignum_bigint_to_compact_limbs bigint in + ( Common.bignum_bigint_to_field (module Circuit) l01 + , Common.bignum_bigint_to_field (module Circuit) l2 ) + +(* Convert field standard_limbs to Bignum_bigint.t standard_limbs *) +let field_const_standard_limbs_to_bignum_bigint_standard_limbs (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (field_limbs : f standard_limbs) : Bignum_bigint.t standard_limbs = + let l0, l1, l2 = 
field_limbs in
+  ( Common.field_to_bignum_bigint (module Circuit) l0
+  , Common.field_to_bignum_bigint (module Circuit) l1
+  , Common.field_to_bignum_bigint (module Circuit) l2 )
+
+(* Convert field standard_limbs to Bignum_bigint.t *)
+let field_const_standard_limbs_to_bignum_bigint (type f)
+    (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+    (field_limbs : f standard_limbs) : Bignum_bigint.t =
+  let l0, l1, l2 =
+    field_const_standard_limbs_to_bignum_bigint_standard_limbs
+      (module Circuit)
+      field_limbs
+  in
+  Bignum_bigint.(l0 + (Common.two_to_limb * l1) + (two_to_2limb * l2))
+
+(* Foreign field element interface *)
+(* TODO: It would be better if this were created with a functor that
+ * takes as arguments the native field and the foreign field modulus.
+ * Then when creating foreign field elements it could check that
+ * they are valid (less than the foreign field modulus). We'd need a
+ * mode to override this last check for bound additions.
+ *)
+module type Element_intf = sig
+  type 'field t
+
+  type 'a limbs_type
+
+  module Cvar = Snarky_backendless.Cvar
+
+  (* Create foreign field element from Cvar limbs *)
+  val of_limbs : 'field Cvar.t limbs_type -> 'field t
+
+  (* Create foreign field element from field limbs *)
+  val of_field_limbs :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field limbs_type
+    -> 'field t
+
+  (* Create foreign field element from Bignum_bigint.t *)
+  val of_bignum_bigint :
+       (module Snark_intf.Run with type field = 'field)
+    -> Bignum_bigint.t
+    -> 'field t
+
+  (* Create constant foreign field element from Bignum_bigint.t *)
+  val const_of_bignum_bigint :
+       (module Snark_intf.Run with type field = 'field)
+    -> Bignum_bigint.t
+    -> 'field t
+
+  (* Convert foreign field element into Cvar limbs *)
+  val to_limbs : 'field t -> 'field Cvar.t limbs_type
+
+  (* Map foreign field element's Cvar limbs into some other limbs with the mapping function func *)
+  val map : 'field t -> ('field Cvar.t -> 'g) -> 'g limbs_type
+
+  (* One constant *)
+  val one : (module Snark_intf.Run with type field = 'field) -> 'field t
+
+  (* Convert foreign field element into field limbs *)
+  val to_field_limbs_as_prover :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field t
+    -> 'field limbs_type
+
+  (* Convert foreign field element into Bignum_bigint.t limbs *)
+  val to_bignum_bigint_limbs_as_prover :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field t
+    -> Bignum_bigint.t limbs_type
+
+  (* Convert foreign field element into a Bignum_bigint.t *)
+  val to_bignum_bigint_as_prover :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field t
+    -> Bignum_bigint.t
+
+  (* Convert foreign field element to string *)
+  val to_string_as_prover :
+    (module Snark_intf.Run with type field = 'field) -> 'field t -> string
+
+  (* Constrain zero check computation with boolean output *)
+  val is_zero :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field t
+    -> 'field Cvar.t Snark_intf.Boolean0.t
+
+  (* Compare if two foreign field elements are equal *)
+  val equal_as_prover :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field t
+    -> 'field t
+    -> bool
+
+  (* Add copy constraints that two foreign field elements are equal *)
+  val assert_equal :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field t
+    -> 'field t
+    -> unit
+
+  (* Create and constrain foreign field element from Bignum_bigint.t *)
+  val check_here_const_of_bignum_bigint :
+       (module Snark_intf.Run with type field = 'field)
+    ->
Bignum_bigint.t + -> 'field t + + (* Add conditional constraints to select foreign field element *) + val if_ : + (module Snark_intf.Run with type field = 'field) + -> 'field Cvar.t Snark_intf.Boolean0.t + -> then_:'field t + -> else_:'field t + -> 'field t + + (* Decompose and constrain foreign field element into list of boolean cvars *) + val unpack : + (module Snark_intf.Run with type field = 'field) + -> 'field t + -> length:int + -> 'field Cvar.t Snark_intf.Boolean0.t list +end + +(* Foreign field element structures *) +module Element : sig + (* Foreign field element (standard limbs) *) + module Standard : sig + include Element_intf with type 'a limbs_type = 'a standard_limbs + + (* Check that the foreign element is smaller than a given field modulus *) + val fits_as_prover : + (module Snark_intf.Run with type field = 'field) + -> 'field t + -> 'field standard_limbs + -> bool + end + + (* Foreign field element (compact limbs) *) + module Compact : Element_intf with type 'a limbs_type = 'a compact_limbs +end = struct + (* Standard limbs foreign field element *) + module Standard = struct + module Cvar = Snarky_backendless.Cvar + + type 'field limbs_type = 'field standard_limbs + + type 'field t = 'field Cvar.t standard_limbs + + let of_limbs x = x + + let of_field_limbs (type field) + (module Circuit : Snark_intf.Run with type field = field) + (x : field limbs_type) : field t = + let open Circuit in + let x = + exists (Typ.array ~length:3 Field.typ) ~compute:(fun () -> + let x0, x1, x2 = x in + [| x0; x1; x2 |] ) + |> Common.tuple3_of_array + in + of_limbs x + + let of_bignum_bigint (type field) + (module Circuit : Snark_intf.Run with type field = field) x : field t = + let open Circuit in + let l12, l0 = Common.(bignum_bigint_div_rem x two_to_limb) in + let l2, l1 = Common.(bignum_bigint_div_rem l12 two_to_limb) in + let limb_vars = + exists (Typ.array ~length:3 Field.typ) ~compute:(fun () -> + [| Common.bignum_bigint_to_field (module Circuit) l0 + ; Common.bignum_bigint_to_field (module Circuit) l1 + ; Common.bignum_bigint_to_field (module Circuit) l2 + |] ) + in + of_limbs (limb_vars.(0), limb_vars.(1), limb_vars.(2)) + + let const_of_bignum_bigint (type field) + (module Circuit : Snark_intf.Run with type field = field) x : field t = + let open Circuit in + let l12, l0 = Common.(bignum_bigint_div_rem x two_to_limb) in + let l2, l1 = Common.(bignum_bigint_div_rem l12 two_to_limb) in + of_limbs + Field. 
+ ( constant @@ Common.bignum_bigint_to_field (module Circuit) l0 + , constant @@ Common.bignum_bigint_to_field (module Circuit) l1 + , constant @@ Common.bignum_bigint_to_field (module Circuit) l2 ) + + let to_limbs x = x + + let map (x : 'field t) (func : 'field Cvar.t -> 'g) : 'g limbs_type = + let l0, l1, l2 = to_limbs x in + (func l0, func l1, func l2) + + let to_field_limbs_as_prover (type field) + (module Circuit : Snark_intf.Run with type field = field) (x : field t) + : field limbs_type = + map x (Common.cvar_field_to_field_as_prover (module Circuit)) + + let to_bignum_bigint_limbs_as_prover (type field) + (module Circuit : Snark_intf.Run with type field = field) (x : field t) + : Bignum_bigint.t limbs_type = + map x (Common.cvar_field_to_bignum_bigint_as_prover (module Circuit)) + + let one (type field) + (module Circuit : Snark_intf.Run with type field = field) : field t = + of_bignum_bigint (module Circuit) Bignum_bigint.one + + let to_bignum_bigint_as_prover (type field) + (module Circuit : Snark_intf.Run with type field = field) (x : field t) + : Bignum_bigint.t = + let l0, l1, l2 = to_bignum_bigint_limbs_as_prover (module Circuit) x in + Bignum_bigint.(l0 + (Common.two_to_limb * l1) + (two_to_2limb * l2)) + + let to_string_as_prover (type field) + (module Circuit : Snark_intf.Run with type field = field) a : string = + sprintf "%s" @@ Bignum_bigint.to_string + @@ to_bignum_bigint_as_prover (module Circuit) a + + let is_zero (type field) + (module Circuit : Snark_intf.Run with type field = field) (x : field t) + : Circuit.Boolean.var = + let open Circuit in + let x0, x1, x2 = to_limbs x in + let x0_is_zero = Field.(equal x0 zero) in + let x1_is_zero = Field.(equal x1 zero) in + let x2_is_zero = Field.(equal x2 zero) in + Boolean.(x0_is_zero && x1_is_zero && x2_is_zero) + + let equal_as_prover (type field) + (module Circuit : Snark_intf.Run with type field = field) + (left : field t) (right : field t) : bool = + let open Circuit in + let left0, left1, left2 = + to_field_limbs_as_prover (module Circuit) left + in + let right0, right1, right2 = + to_field_limbs_as_prover (module Circuit) right + in + Field.Constant.( + equal left0 right0 && equal left1 right1 && equal left2 right2) + + let assert_equal (type field) + (module Circuit : Snark_intf.Run with type field = field) + (left : field t) (right : field t) : unit = + let open Circuit in + let left0, left1, left2 = to_limbs left in + let right0, right1, right2 = to_limbs right in + Field.Assert.equal left0 right0 ; + Field.Assert.equal left1 right1 ; + Field.Assert.equal left2 right2 + + let check_here_const_of_bignum_bigint (type field) + (module Circuit : Snark_intf.Run with type field = field) x : field t = + let const_x = const_of_bignum_bigint (module Circuit) x in + let var_x = of_bignum_bigint (module Circuit) x in + assert_equal (module Circuit) const_x var_x ; + const_x + + let fits_as_prover (type field) + (module Circuit : Snark_intf.Run with type field = field) (x : field t) + (modulus : field standard_limbs) : bool = + let modulus = + field_const_standard_limbs_to_bignum_bigint (module Circuit) modulus + in + Bignum_bigint.(to_bignum_bigint_as_prover (module Circuit) x < modulus) + + let if_ (type field) + (module Circuit : Snark_intf.Run with type field = field) + (b : Circuit.Boolean.var) ~(then_ : field t) ~(else_ : field t) : + field t = + let open Circuit in + let then0, then1, then2 = to_limbs then_ in + let else0, else1, else2 = to_limbs else_ in + of_limbs + ( Field.if_ b ~then_:then0 ~else_:else0 + , 
Field.if_ b ~then_:then1 ~else_:else1 + , Field.if_ b ~then_:then2 ~else_:else2 ) + + let unpack (type field) + (module Circuit : Snark_intf.Run with type field = field) (x : field t) + ~(length : int) : Circuit.Boolean.var list = + let open Circuit in + (* TODO: Performance improvement, we could use this trick from Halo paper + * https://github.com/MinaProtocol/mina/blob/43e2994b64b9d3e99055d644ac6279d39c22ced5/src/lib/pickles/scalar_challenge.ml#L12 + *) + let l0, l1, l2 = to_limbs x in + fst + @@ List.fold [ l0; l1; l2 ] ~init:([], length) + ~f:(fun (lst, length) limb -> + let bits_to_copy = min length Common.limb_bits in + ( lst @ Field.unpack limb ~length:bits_to_copy + , length - bits_to_copy ) ) + end + + (* Compact limbs foreign field element *) + module Compact = struct + module Cvar = Snarky_backendless.Cvar + + type 'field limbs_type = 'field compact_limbs + + type 'field t = 'field Cvar.t compact_limbs + + let of_limbs x = x + + let of_field_limbs (type field) + (module Circuit : Snark_intf.Run with type field = field) + (x : field limbs_type) : field t = + let open Circuit in + let x = + exists Typ.(Field.typ * Field.typ) ~compute:(fun () -> (fst x, snd x)) + in + of_limbs x + + let of_bignum_bigint (type field) + (module Circuit : Snark_intf.Run with type field = field) x : field t = + let open Circuit in + let l2, l01 = Common.(bignum_bigint_div_rem x two_to_2limb) in + + let limb_vars = + exists (Typ.array ~length:2 Field.typ) ~compute:(fun () -> + [| Common.bignum_bigint_to_field (module Circuit) l01 + ; Common.bignum_bigint_to_field (module Circuit) l2 + |] ) + in + of_limbs (limb_vars.(0), limb_vars.(1)) + + let to_limbs x = x + + let const_of_bignum_bigint (type field) + (module Circuit : Snark_intf.Run with type field = field) x : field t = + let open Circuit in + let l2, l01 = Common.(bignum_bigint_div_rem x two_to_2limb) in + of_limbs + Field. 
+ ( constant @@ Common.bignum_bigint_to_field (module Circuit) l01 + , constant @@ Common.bignum_bigint_to_field (module Circuit) l2 ) + + let map (x : 'field t) (func : 'field Cvar.t -> 'g) : 'g limbs_type = + let l0, l1 = to_limbs x in + (func l0, func l1) + + let one (type field) + (module Circuit : Snark_intf.Run with type field = field) : field t = + of_bignum_bigint (module Circuit) Bignum_bigint.one + + let to_field_limbs_as_prover (type field) + (module Circuit : Snark_intf.Run with type field = field) (x : field t) + : field limbs_type = + map x (Common.cvar_field_to_field_as_prover (module Circuit)) + + let to_bignum_bigint_limbs_as_prover (type field) + (module Circuit : Snark_intf.Run with type field = field) (x : field t) + : Bignum_bigint.t limbs_type = + map x (Common.cvar_field_to_bignum_bigint_as_prover (module Circuit)) + + let to_bignum_bigint_as_prover (type field) + (module Circuit : Snark_intf.Run with type field = field) (x : field t) + = + let l01, l2 = to_bignum_bigint_limbs_as_prover (module Circuit) x in + Bignum_bigint.(l01 + (two_to_2limb * l2)) + + let to_string_as_prover (type field) + (module Circuit : Snark_intf.Run with type field = field) a : string = + sprintf "%s" @@ Bignum_bigint.to_string + @@ to_bignum_bigint_as_prover (module Circuit) a + + let is_zero (type field) + (module Circuit : Snark_intf.Run with type field = field) (x : field t) + : Circuit.Boolean.var = + let open Circuit in + let x01, x2 = to_limbs x in + let x01_is_zero = Field.(equal x01 zero) in + let x2_is_zero = Field.(equal x2 zero) in + Boolean.(x01_is_zero && x2_is_zero) + + let equal_as_prover (type field) + (module Circuit : Snark_intf.Run with type field = field) + (left : field t) (right : field t) : bool = + let open Circuit in + let left01, left2 = to_field_limbs_as_prover (module Circuit) left in + let right01, right2 = to_field_limbs_as_prover (module Circuit) right in + Field.Constant.(equal left01 right01 && equal left2 right2) + + let assert_equal (type field) + (module Circuit : Snark_intf.Run with type field = field) + (left : field t) (right : field t) : unit = + let open Circuit in + let left01, left2 = to_limbs left in + let right01, right2 = to_limbs right in + Field.Assert.equal left01 right01 ; + Field.Assert.equal left2 right2 + + let check_here_const_of_bignum_bigint (type field) + (module Circuit : Snark_intf.Run with type field = field) x : field t = + let const_x = const_of_bignum_bigint (module Circuit) x in + let var_x = of_bignum_bigint (module Circuit) x in + assert_equal (module Circuit) const_x var_x ; + const_x + + let if_ (type field) + (module Circuit : Snark_intf.Run with type field = field) + (b : Circuit.Boolean.var) ~(then_ : field t) ~(else_ : field t) : + field t = + let open Circuit in + let then01, then2 = to_limbs then_ in + let else01, else2 = to_limbs else_ in + of_limbs + ( Field.if_ b ~then_:then01 ~else_:else01 + , Field.if_ b ~then_:then2 ~else_:else2 ) + + let unpack (type field) + (module Circuit : Snark_intf.Run with type field = field) (x : field t) + ~(length : int) : Circuit.Boolean.var list = + (* TODO: Performance improvement, we could use this trick from Halo paper + * https://github.com/MinaProtocol/mina/blob/43e2994b64b9d3e99055d644ac6279d39c22ced5/src/lib/pickles/scalar_challenge.ml#L12 + *) + let open Circuit in + let l01, l2 = to_limbs x in + fst + @@ List.foldi [ l01; l2 ] ~init:([], length) + ~f:(fun i (lst, length) limb -> + let bits_to_copy = min length ((2 - i) * Common.limb_bits) in + ( lst @ Field.unpack limb 
~length:bits_to_copy
+            , length - bits_to_copy ) )
+  end
+end
+
+(* Structure for tracking external checks that must be made
+ * (using other gadgets) in order to achieve soundness for a
+ * given multiplication *)
+module External_checks = struct
+  module Cvar = Snarky_backendless.Cvar
+
+  type 'field t =
+    { mutable multi_ranges : 'field Cvar.t standard_limbs list
+    ; mutable compact_multi_ranges : 'field Cvar.t compact_limbs list
+    ; mutable bounds : 'field Cvar.t standard_limbs list
+    }
+
+  let create (type field)
+      (module Circuit : Snark_intf.Run with type field = field) : field t =
+    { multi_ranges = []; compact_multi_ranges = []; bounds = [] }
+
+  (* Track a multi-range-check *)
+  (* TODO: improve names of these from append_ to add_, push_ or insert_ *)
+  let append_multi_range_check (external_checks : 'field t)
+      (x : 'field Cvar.t standard_limbs) =
+    external_checks.multi_ranges <- x :: external_checks.multi_ranges
+
+  (* Track a compact-multi-range-check *)
+  let append_compact_multi_range_check (external_checks : 'field t)
+      (x : 'field Cvar.t compact_limbs) =
+    external_checks.compact_multi_ranges <-
+      x :: external_checks.compact_multi_ranges
+
+  (* Track a bound check (i.e. valid_element check) *)
+  let append_bound_check (external_checks : 'field t)
+      (x : 'field Cvar.t standard_limbs) =
+    external_checks.bounds <- x :: external_checks.bounds
+end
+
+(* Common auxiliary functions for foreign field gadgets *)
+
+(* Check that the foreign modulus is less than the maximum allowed *)
+let check_modulus_bignum_bigint (type f)
+    (module Circuit : Snark_intf.Run with type field = f)
+    (foreign_field_modulus : Bignum_bigint.t) =
+  (* Note that the maximum foreign field modulus possible for addition is much
+   * larger than that supported by multiplication.
+   *
+   * Specifically, since the 88-bit limbs are embedded in a native field element
+   * of ~255 bits and foreign field addition increases the number of bits
+   * logarithmically, for addition we can actually support a maximum field modulus
+   * of 2^264 - 1 (i.e. binary_modulus - 1) for circuits up to length ~ 2^79 - 1,
+   * which is far larger than the maximum circuit size supported by Kimchi.
+   *
+   * However, for compatibility with multiplication operations, we must use the
+   * same maximum as foreign field multiplication.
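+   *
+   * (For scale: with the ~255-bit Pasta native fields and t = 264, the
+   * multiplication maximum floor(sqrt(2^264 * n)) is roughly 2^259, which
+   * is comfortably above 256-bit moduli such as secp256k1's.)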
+ *)
+  assert (
+    Bignum_bigint.(
+      foreign_field_modulus < max_foreign_field_modulus (module Circuit)) )
+
+(* Check that the foreign modulus is less than the maximum allowed *)
+let check_modulus (type f) (module Circuit : Snark_intf.Run with type field = f)
+    (foreign_field_modulus : f standard_limbs) =
+  let foreign_field_modulus =
+    field_const_standard_limbs_to_bignum_bigint
+      (module Circuit)
+      foreign_field_modulus
+  in
+
+  check_modulus_bignum_bigint (module Circuit) foreign_field_modulus
+
+(* Represents two limbs as a single field element with twice as many bits *)
+let as_prover_compact_limb (type f)
+    (module Circuit : Snark_intf.Run with type field = f) (lo : f) (hi : f) : f
+    =
+  Circuit.Field.Constant.(lo + (hi * two_to_limb_field (module Circuit)))
+
+(* FOREIGN FIELD ADDITION GADGET *)
+
+(* Internal computation for foreign field addition *)
+let sum_setup (type f) (module Circuit : Snark_intf.Run with type field = f)
+    (left_input : f Element.Standard.t) (right_input : f Element.Standard.t)
+    (operation : op_mode) (foreign_field_modulus : f standard_limbs) :
+    f Element.Standard.t * f * Circuit.Field.t =
+  let open Circuit in
+  (* Decompose modulus into limbs *)
+  let foreign_field_modulus0, foreign_field_modulus1, foreign_field_modulus2 =
+    foreign_field_modulus
+  in
+  (* Decompose left input into limbs *)
+  let left_input0, left_input1, left_input2 =
+    Element.Standard.to_limbs left_input
+  in
+  (* Decompose right input into limbs. If final check, right_input2 will contain 2^limb *)
+  let right_input0, right_input1, right_input2 =
+    Element.Standard.to_limbs right_input
+  in
+
+  (* Addition or subtraction *)
+  let sign =
+    match operation with
+    | Sub ->
+        Field.Constant.(negate one)
+    | Add ->
+        Field.Constant.one
+  in
+
+  (* Given the left and right inputs to an addition or subtraction, and a modulus, this computes
+   * all the values needed for the witness layout.
That is, it returns an [FFAddValues] instance:
+   * - the result of the addition/subtraction as a ForeignElement
+   * - the sign of the operation
+   * - the overflow flag
+   * - the carry value *)
+  let result0, result1, result2, field_overflow, carry =
+    exists (Typ.array ~length:5 Field.typ) ~compute:(fun () ->
+        (* Compute bigint version of the inputs *)
+        let modulus =
+          field_const_standard_limbs_to_bignum_bigint
+            (module Circuit)
+            foreign_field_modulus
+        in
+        let left =
+          Element.Standard.to_bignum_bigint_as_prover
+            (module Circuit)
+            left_input
+        in
+        let right =
+          Element.Standard.to_bignum_bigint_as_prover
+            (module Circuit)
+            right_input
+        in
+
+        (* Compute values for the ffadd *)
+
+        (* Overflow if addition and greater than modulus or
+         * underflow if subtraction and less than zero
+         *)
+        let has_overflow =
+          match operation with
+          | Sub ->
+              Bignum_bigint.(left < right)
+          | Add ->
+              Bignum_bigint.(left + right >= modulus)
+        in
+
+        (* 0 for no overflow
+         * -1 for underflow
+         * +1 for overflow
+         *)
+        let field_overflow =
+          if has_overflow then sign else Field.Constant.zero
+        in
+
+        (* Compute the result
+         *   result = left + sign * right - field_overflow * modulus
+         * TODO: unfortunately, we cannot do this in one line if we keep these types, because one
+         * cannot combine field elements and biguints in the same operation automatically
+         *)
+        let is_sub = match operation with Sub -> true | Add -> false in
+        let result =
+          Element.Standard.of_bignum_bigint (module Circuit)
+          @@ Bignum_bigint.(
+               if is_sub then
+                 if not has_overflow then (* normal subtraction *)
+                   left - right
+                 else (* underflow *)
+                   modulus + left - right
+               else if not has_overflow then (* normal addition *)
+                 left + right
+               else (* overflow *)
+                 left + right - modulus)
+        in
+
+        (* c = [ (a1 * 2^88 + a0) + s * (b1 * 2^88 + b0) - q * (f1 * 2^88 + f0) - (r1 * 2^88 + r0) ] / 2^176
+         * <=>
+         * c = r2 - a2 - s*b2 + q*f2 *)
+        let left_input0, left_input1, left_input2 =
+          Element.Standard.to_field_limbs_as_prover (module Circuit) left_input
+        in
+        let right_input0, right_input1, right_input2 =
+          Element.Standard.to_field_limbs_as_prover (module Circuit) right_input
+        in
+        let result0, result1, result2 =
+          Element.Standard.to_field_limbs_as_prover (module Circuit) result
+        in
+
+        (* Compute the carry value *)
+        let carry_bot =
+          Field.Constant.(
+            ( as_prover_compact_limb (module Circuit) left_input0 left_input1
+            + as_prover_compact_limb (module Circuit) right_input0 right_input1
+              * sign
+            - as_prover_compact_limb
+                (module Circuit)
+                foreign_field_modulus0 foreign_field_modulus1
+              * field_overflow
+            - as_prover_compact_limb (module Circuit) result0 result1 )
+            / two_to_2limb_field (module Circuit))
+        in
+
+        let carry_top =
+          Field.Constant.(
+            result2 - left_input2 - (sign * right_input2)
+            + (field_overflow * foreign_field_modulus2))
+        in
+
+        (* Check that both ways of computing the carry value are equal *)
+        assert (Field.Constant.equal carry_top carry_bot) ;
+
+        (* Return the ffadd values *)
+        [| result0; result1; result2; field_overflow; carry_bot |] )
+    |> tuple5_of_array
+  in
+
+  (* Create the gate *)
+  with_label "ffadd_gate" (fun () ->
+      (* Set up FFAdd gate *)
+      assert_
+        { annotation = Some __LOC__
+        ; basic =
+            Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+              (ForeignFieldAdd
+                 { left_input_lo = left_input0
+                 ; left_input_mi = left_input1
+                 ; left_input_hi = left_input2
+                 ; right_input_lo = right_input0
+                 ; right_input_mi = right_input1
+                 ; right_input_hi = right_input2
+                 ; field_overflow
+                 ; carry
; foreign_field_modulus0 + ; foreign_field_modulus1 + ; foreign_field_modulus2 + ; sign + } ) + } ) ; + + (* Return the result *) + (Element.Standard.of_limbs (result0, result1, result2), sign, field_overflow) + +(* Gadget for creating an addition or subtraction result row (Zero gate with result) *) +let result_row (type f) (module Circuit : Snark_intf.Run with type field = f) + ?(label = "result_zero_row") (result : f Element.Standard.t) = + let open Circuit in + let result0, result1, result2 = Element.Standard.to_limbs result in + with_label label (fun () -> + assert_ + { annotation = Some __LOC__ + ; basic = + Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T + (Raw + { kind = Zero + ; values = [| result0; result1; result2 |] + ; coeffs = [||] + } ) + } ) + +(* Gadget to check the supplied value is a valid foreign field element for the + * supplied foreign field modulus + * + * This gadget checks in the circuit that a value is less than the foreign field modulus. + * Part of this involves computing a bound value that is both added to external_checks + * and also returned. The caller may use either one, depending on the situation. + * + * Inputs: + * external_checks := Context to track required external checks + * value := the value to check + * foreign_field_modulus := the modulus of the foreign field + * + * Outputs: + * Inserts the gates (described below) into the circuit + * Adds bound value to be multi-range-checked to external_checks + * Returns bound value + * + * Effects to the circuit: + * - 1 FFAdd gate + * - 1 Zero gate + *) +let valid_element (type f) (module Circuit : Snark_intf.Run with type field = f) + (external_checks : f External_checks.t) (value : f Element.Standard.t) + (foreign_field_modulus : f standard_limbs) : f Element.Standard.t = + let open Circuit in + (* Compute the value for the right input of the addition as 2^264 *) + let offset0 = Field.zero in + let offset1 = Field.zero in + let offset2 = + exists Field.typ ~compute:(fun () -> two_to_limb_field (module Circuit)) + in + (*let offset2 = Field.(mul one two_to_88) in*) + (* Checks that these cvars have constant values are added as generics *) + let offset = Element.Standard.of_limbs (offset0, offset1, offset2) in + + (* Check that the value fits in the foreign field *) + as_prover (fun () -> + assert ( + Element.Standard.fits_as_prover + (module Circuit) + value foreign_field_modulus ) ; + () ) ; + + (* Create FFAdd gate to compute the bound value (i.e. 
part of valid_element check) *) + let bound, sign, ovf = + sum_setup (module Circuit) value offset Add foreign_field_modulus + in + (* Result row *) + result_row (module Circuit) ~label:"final_add_zero_gate" bound ; + + (* Sanity check *) + as_prover (fun () -> + (* Check that the correct expected values were obtained *) + let ovf = Common.cvar_field_to_field_as_prover (module Circuit) ovf in + assert (Field.Constant.(equal sign one)) ; + assert (Field.Constant.(equal ovf one)) ) ; + + (* Set up copy constraint for the overflow check *) + Field.Assert.equal ovf Field.one ; + + (* Check that the highest limb of the right input is 2^88 *) + let two_to_88 = two_to_limb_field (module Circuit) in + Field.Assert.equal (Field.constant two_to_88) offset2 ; + + (* Add external check for multi range check *) + External_checks.append_multi_range_check external_checks + @@ Element.Standard.to_limbs bound ; + + (* Return the bound value *) + bound + +(* Gadget to constrain external checks using supplied modulus *) +let constrain_external_checks (type field) + (module Circuit : Snark_intf.Run with type field = field) + (external_checks : field External_checks.t) (modulus : field standard_limbs) + = + (* 1) Add gates for external bound additions. + * Note: internally this also adds multi-range-checks for the + * computed bound to external_checks.multi_ranges, which + * are then constrained in (2) + *) + List.iter external_checks.bounds ~f:(fun value -> + let _bound = + valid_element + (module Circuit) + external_checks + (Element.Standard.of_limbs value) + modulus + in + () ) ; + + (* 2) Add gates for external multi-range-checks *) + List.iter external_checks.multi_ranges ~f:(fun multi_range -> + let v0, v1, v2 = multi_range in + Range_check.multi (module Circuit) v0 v1 v2 ; + () ) ; + + (* 3) Add gates for external compact-multi-range-checks *) + List.iter external_checks.compact_multi_ranges ~f:(fun compact_multi_range -> + let v01, v2 = compact_multi_range in + Range_check.compact_multi (module Circuit) v01 v2 ; + () ) + +(* FOREIGN FIELD ADDITION CHAIN GADGET *) + +(** Gadget for a chain of foreign field sums (additions or subtractions) + * + * Inputs: + * inputs := All the inputs to the chain of sums + * operations := List of operation modes Add or Sub indicating whether the + * corresponding addition is a subtraction + * foreign_field_modulus := The modulus of the foreign field (all the same) + * + * Outputs: + * Inserts the gates (described below) into the circuit + * Returns the final result of the chain of sums + * + * For n+1 inputs, the gadget creates n foreign field addition gates, followed by a final + * foreign field addition gate for the bound check (i.e. valid_element check). For this, + * an additional multi-range check must also be performed. + * By default, the range check takes place right after the final Raw row.
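+ * + * A hedged usage sketch (illustrative names; x, y and z are Element.Standard.t values and f_limbs is the foreign field modulus in standard limbs): + * + * (* computes (x + y - z) mod f *) + * let result = sum_chain (module Circuit) [ x; y; z ] [ Add; Sub ] f_limbs in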
+ *) +let sum_chain (type f) (module Circuit : Snark_intf.Run with type field = f) + (inputs : f Element.Standard.t list) (operations : op_mode list) + (foreign_field_modulus : f standard_limbs) : f Element.Standard.t = + let open Circuit in + (* Check foreign field modulus < max allowed *) + check_modulus (module Circuit) foreign_field_modulus ; + (* Check that the number of inputs is correct *) + let n = List.length operations in + assert (List.length inputs = n + 1) ; + + (* Initialize first left input and check it fits in the foreign mod *) + let left = [| List.hd_exn inputs |] in + as_prover (fun () -> + assert ( + Element.Standard.fits_as_prover + (module Circuit) + left.(0) foreign_field_modulus ) ; + () ) ; + + (* For all n additions, compute its values and create gates *) + for i = 0 to n - 1 do + let op = List.nth_exn operations i in + let right = List.nth_exn inputs (i + 1) in + (* Make sure that inputs are smaller than the foreign modulus *) + as_prover (fun () -> + assert ( + Element.Standard.fits_as_prover + (module Circuit) + right foreign_field_modulus ) ; + () ) ; + + (* Create the foreign field addition row *) + let result, _sign, _ovf = + sum_setup (module Circuit) left.(0) right op foreign_field_modulus + in + + (* Update left input for next iteration *) + left.(0) <- result ; () + done ; + + (* Add the final gate for the bound *) + (* result + (2^264 - f) = bound *) + let result = left.(0) in + let unused_external_checks = External_checks.create (module Circuit) in + let bound = + valid_element + (module Circuit) + unused_external_checks result foreign_field_modulus + in + let bound0, bound1, bound2 = Element.Standard.to_limbs bound in + + (* Include Multi range check for the bound right after *) + Range_check.multi (module Circuit) bound0 bound1 bound2 ; + + (* Return result *) + result + +(* FOREIGN FIELD ADDITION SINGLE GADGET *) + +(* Definition of a gadget for a single foreign field addition + * + * Inputs: + * full := Flag for whether to perform a full addition with valid_element check + * on the result (default true) or just a single FFAdd row (false) + * left_input := 3 limbs foreign field element + * right_input := 3 limbs foreign field element + * foreign_field_modulus := The modulus of the foreign field + * + * Outputs: + * Inserts the gates (described below) into the circuit + * Returns the result of the addition as a 3 limbs element + * + * In default mode: + * It adds a FFAdd gate, + * followed by a Zero gate, + * a FFAdd gate for the bound check, + * a Zero gate after this bound check, + * and a Multi Range Check gadget. + * + * In false mode: + * It adds a FFAdd gate. 
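+ * + * A hedged usage sketch (illustrative names): + * + * (* full addition with bound check on the result *) + * let sum = add (module Circuit) left right f_limbs in + * (* single FFAdd row only; the caller must bound-check the result *) + * let sum' = add (module Circuit) ~full:false left right f_limbs in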
+ *) +let add (type f) (module Circuit : Snark_intf.Run with type field = f) + ?(full = true) (left_input : f Element.Standard.t) + (right_input : f Element.Standard.t) + (foreign_field_modulus : f standard_limbs) : f Element.Standard.t = + match full with + | true -> + sum_chain + (module Circuit) + [ left_input; right_input ] + [ Add ] foreign_field_modulus + | false -> + let result, _sign, _ovf = + sum_setup + (module Circuit) + left_input right_input Add foreign_field_modulus + in + result + +(* Definition of a gadget for a single foreign field subtraction + * + * Inputs: + * full := Flag for whether to perform a full subtraction with valid_element check + * on the result (default true) or just a single FFAdd row (false) + * left_input := 3 limbs foreign field element + * right_input := 3 limbs foreign field element + * foreign_field_modulus := The modulus of the foreign field + * + * Outputs: + * Inserts the gates (described below) into the circuit + * Returns the result of the subtraction as a 3 limbs element + * + * In default mode: + * It adds a FFAdd gate, + * followed by a Zero gate, + * a FFAdd gate for the bound check, + * a Zero gate after this bound check, + * and a Multi Range Check gadget. + * + * In false mode: + * It adds a FFAdd gate. + *) +let sub (type f) (module Circuit : Snark_intf.Run with type field = f) + ?(full = true) (left_input : f Element.Standard.t) + (right_input : f Element.Standard.t) + (foreign_field_modulus : f standard_limbs) : f Element.Standard.t = + match full with + | true -> + sum_chain + (module Circuit) + [ left_input; right_input ] + [ Sub ] foreign_field_modulus + | false -> + let result, _sign, _ovf = + sum_setup + (module Circuit) + left_input right_input Sub foreign_field_modulus + in + result + +(* FOREIGN FIELD MULTIPLICATION *) + +(* Compute non-zero intermediate products + * + * For more details see the "Intermediate products" Section of + * the [Foreign Field Multiplication RFC](https://o1-labs.github.io/proof-systems/rfcs/foreign_field_mul.html) + * + * Preconditions: this entire function is witness code and, therefore, must + * only be called from within an exists construct.
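+ * + * For intuition: p0, p1 and p2 are the limb-wise partial products of + * a * b + q * f' at weights 2^0, 2^L and 2^2L respectively, where + * f' = 2^t - f is the negated foreign field modulus.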
+ *) +let compute_intermediate_products (type f) + (module Circuit : Snark_intf.Run with type field = f) + (left_input : f Element.Standard.t) (right_input : f Element.Standard.t) + (quotient : f standard_limbs) (neg_foreign_field_modulus : f standard_limbs) + : f * f * f = + let open Circuit in + let left_input0, left_input1, left_input2 = + Element.Standard.to_field_limbs_as_prover (module Circuit) left_input + in + let right_input0, right_input1, right_input2 = + Element.Standard.to_field_limbs_as_prover (module Circuit) right_input + in + let quotient0, quotient1, quotient2 = quotient in + let ( neg_foreign_field_modulus0 + , neg_foreign_field_modulus1 + , neg_foreign_field_modulus2 ) = + neg_foreign_field_modulus + in + ( (* p0 = a0 * b0 + q0 * f'0 *) + Field.Constant.( + (left_input0 * right_input0) + (quotient0 * neg_foreign_field_modulus0)) + , (* p1 = a0 * b1 + a1 * b0 + q0 * f'1 + q1 * f'0 *) + Field.Constant.( + (left_input0 * right_input1) + + (left_input1 * right_input0) + + (quotient0 * neg_foreign_field_modulus1) + + (quotient1 * neg_foreign_field_modulus0)) + , (* p2 = a0 * b2 + a2 * b0 + a1 * b1 + q0 * f'2 + q2 * f'0 + q1 * f'1 *) + Field.Constant.( + (left_input0 * right_input2) + + (left_input2 * right_input0) + + (left_input1 * right_input1) + + (quotient0 * neg_foreign_field_modulus2) + + (quotient2 * neg_foreign_field_modulus0) + + (quotient1 * neg_foreign_field_modulus1)) ) + +(* Compute intermediate sums + * For more details see the "Optimizations" Section of + * the [Foreign Field Multiplication RFC](https://o1-labs.github.io/proof-systems/rfcs/foreign_field_mul.html) *) +let compute_intermediate_sums (type f) + (module Circuit : Snark_intf.Run with type field = f) + (quotient : f standard_limbs) (neg_foreign_field_modulus : f standard_limbs) + : f * f = + let open Circuit in + let quotient0, quotient1, quotient2 = quotient in + let ( neg_foreign_field_modulus0 + , neg_foreign_field_modulus1 + , neg_foreign_field_modulus2 ) = + neg_foreign_field_modulus + in + (* q01 = q0 + 2^L * q1 *) + let quotient01 = + Field.Constant.( + quotient0 + (two_to_limb_field (module Circuit) * quotient1)) + in + + (* f'01 = f'0 + 2^L * f'1 *) + let neg_foreign_field_modulus01 = + Field.Constant.( + neg_foreign_field_modulus0 + + (two_to_limb_field (module Circuit) * neg_foreign_field_modulus1)) + in + ( (* q'01 = q01 + f'01 *) + Field.Constant.(quotient01 + neg_foreign_field_modulus01) + , (* q'2 = q2 + f'2 *) + Field.Constant.(quotient2 + neg_foreign_field_modulus2) ) + +(* Compute witness variables related to foreign field multiplication *) +let compute_witness_variables (type f) + (module Circuit : Snark_intf.Run with type field = f) + (products : Bignum_bigint.t standard_limbs) + (remainder : Bignum_bigint.t standard_limbs) : f * f * f * f * f * f = + let products0, products1, products2 = products in + let remainder0, remainder1, remainder2 = remainder in + + (* C1-C2: Compute components of product1 *) + let product1_hi, product1_lo = + Common.(bignum_bigint_div_rem products1 two_to_limb) + in + let product1_hi_1, product1_hi_0 = + Common.(bignum_bigint_div_rem product1_hi two_to_limb) + in + + (* C3-C5: Compute v0 = the top 2 bits of (p0 + 2^L * p10 - r0 - 2^L * r1) / 2^2L + * N.b. To avoid an underflow error, the equation must sum the intermediate + * product terms before subtracting limbs of the remainder.
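+ * For an honest witness the low 2L bits of the numerator are zero (they + * coincide with the low limbs of the remainder), so the division below is + * exact and yields just the carry.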
*) + let carry0 = + Bignum_bigint.( + ( products0 + + (Common.two_to_limb * product1_lo) + - remainder0 + - (Common.two_to_limb * remainder1) ) + / two_to_2limb) + in + + (* C6-C7: Compute v1 = the top L + 3 bits of (p2 + p11 + v0 - r2) / 2^L + * N.b. Same as above, to avoid an underflow error, the equation must + * sum the intermediate product terms before subtracting the remainder. *) + let carry1 = + Bignum_bigint.( + (products2 + product1_hi + carry0 - remainder2) / Common.two_to_limb) + in + (* Compute v10 and v11 *) + let carry1_hi, carry1_lo = + Common.(bignum_bigint_div_rem carry1 two_to_limb) + in + + ( Common.bignum_bigint_to_field (module Circuit) product1_lo + , Common.bignum_bigint_to_field (module Circuit) product1_hi_0 + , Common.bignum_bigint_to_field (module Circuit) product1_hi_1 + , Common.bignum_bigint_to_field (module Circuit) carry0 + , Common.bignum_bigint_to_field (module Circuit) carry1_lo + , Common.bignum_bigint_to_field (module Circuit) carry1_hi ) + +(* Perform integer bound addition computation x' = x + f' *) +let compute_bound (x : Bignum_bigint.t) + (neg_foreign_field_modulus : Bignum_bigint.t) : Bignum_bigint.t = + let x_bound = Bignum_bigint.(x + neg_foreign_field_modulus) in + assert (Bignum_bigint.(x_bound < binary_modulus)) ; + x_bound + +(* Compute bound witness carry bit *) +let compute_bound_witness_carry (type f) + (module Circuit : Snark_intf.Run with type field = f) + (sums : Bignum_bigint.t compact_limbs) + (bound : Bignum_bigint.t compact_limbs) : f = + let sums01, _sums2 = sums in + let bound01, _bound2 = bound in + + (* C9: witness data is created externally by the caller and the multi-range-check gate *) + + (* C10-C11: Compute q'_carry01 = (s01 - q'01)/2^2L *) + let quotient_bound_carry, _ = + Common.bignum_bigint_div_rem Bignum_bigint.(sums01 - bound01) two_to_2limb + in + Common.bignum_bigint_to_field (module Circuit) quotient_bound_carry + +(* Foreign field multiplication gadget definition *) +let mul (type f) (module Circuit : Snark_intf.Run with type field = f) + (external_checks : f External_checks.t) ?(bound_check_result = true) + (left_input : f Element.Standard.t) (right_input : f Element.Standard.t) + (foreign_field_modulus : f standard_limbs) : f Element.Standard.t = + let open Circuit in + (* Check foreign field modulus < max allowed *) + check_modulus (module Circuit) foreign_field_modulus ; + + (* Compute gate coefficients + * These are computed when the circuit is created, i.e. not as part of witness code (e.g.
exists, As_prover code) + *) + let foreign_field_modulus0, foreign_field_modulus1, foreign_field_modulus2 = + foreign_field_modulus + in + let ( neg_foreign_field_modulus + , ( neg_foreign_field_modulus0 + , neg_foreign_field_modulus1 + , neg_foreign_field_modulus2 ) ) = + let foreign_field_modulus = + field_const_standard_limbs_to_bignum_bigint + (module Circuit) + foreign_field_modulus + in + (* Compute negated foreign field modulus f' = 2^t - f public parameter *) + let neg_foreign_field_modulus = + Bignum_bigint.(binary_modulus - foreign_field_modulus) + in + ( neg_foreign_field_modulus + , bignum_bigint_to_field_const_standard_limbs + (module Circuit) + neg_foreign_field_modulus ) + in + + (* Compute witness values *) + let ( carry1_lo + , carry1_hi + , product1_hi_1 + , carry0 + , quotient0 + , quotient1 + , quotient2 + , quotient_bound_carry + , remainder0 + , remainder1 + , remainder2 + , quotient_bound01 + , quotient_bound2 + , product1_lo + , product1_hi_0 ) = + exists (Typ.array ~length:15 Field.typ) ~compute:(fun () -> + (* Compute quotient remainder and negative foreign field modulus *) + let quotient, remainder = + (* Bignum_bigint computations *) + let left_input = + Element.Standard.to_bignum_bigint_as_prover + (module Circuit) + left_input + in + let right_input = + Element.Standard.to_bignum_bigint_as_prover + (module Circuit) + right_input + in + let foreign_field_modulus = + field_const_standard_limbs_to_bignum_bigint + (module Circuit) + foreign_field_modulus + in + + (* Compute quotient and remainder using foreign field modulus *) + let quotient, remainder = + Common.bignum_bigint_div_rem + Bignum_bigint.(left_input * right_input) + foreign_field_modulus + in + (quotient, remainder) + in + + (* Compute the intermediate products *) + let products = + let quotient = + bignum_bigint_to_field_const_standard_limbs + (module Circuit) + quotient + in + let neg_foreign_field_modulus = + bignum_bigint_to_field_const_standard_limbs + (module Circuit) + neg_foreign_field_modulus + in + let product0, product1, product2 = + compute_intermediate_products + (module Circuit) + left_input right_input quotient neg_foreign_field_modulus + in + + ( Common.field_to_bignum_bigint (module Circuit) product0 + , Common.field_to_bignum_bigint (module Circuit) product1 + , Common.field_to_bignum_bigint (module Circuit) product2 ) + in + + (* Compute the intermediate sums *) + let sums = + let quotient = + bignum_bigint_to_field_const_standard_limbs + (module Circuit) + quotient + in + let neg_foreign_field_modulus = + bignum_bigint_to_field_const_standard_limbs + (module Circuit) + neg_foreign_field_modulus + in + let sum01, sum2 = + compute_intermediate_sums + (module Circuit) + quotient neg_foreign_field_modulus + in + ( Common.field_to_bignum_bigint (module Circuit) sum01 + , Common.field_to_bignum_bigint (module Circuit) sum2 ) + in + + (* Compute witness variables *) + let ( product1_lo + , product1_hi_0 + , product1_hi_1 + , carry0 + , carry1_lo + , carry1_hi ) = + compute_witness_variables + (module Circuit) + products + (bignum_bigint_to_standard_limbs remainder) + in + + (* Compute bounds for multi-range-checks on quotient and remainder *) + let quotient_bound = compute_bound quotient neg_foreign_field_modulus in + + (* Compute quotient bound addition witness variables *) + let quotient_bound_carry = + compute_bound_witness_carry + (module Circuit) + sums + (bignum_bigint_to_compact_limbs quotient_bound) + in + + (* Compute the rest of the witness data *) + let quotient0, quotient1, 
quotient2 = + bignum_bigint_to_field_const_standard_limbs (module Circuit) quotient + in + let remainder0, remainder1, remainder2 = + bignum_bigint_to_field_const_standard_limbs (module Circuit) remainder + in + let quotient_bound01, quotient_bound2 = + bignum_bigint_to_field_const_compact_limbs + (module Circuit) + quotient_bound + in + + [| carry1_lo + ; carry1_hi + ; product1_hi_1 + ; carry0 + ; quotient0 + ; quotient1 + ; quotient2 + ; quotient_bound_carry + ; remainder0 + ; remainder1 + ; remainder2 + ; quotient_bound01 + ; quotient_bound2 + ; product1_lo + ; product1_hi_0 + |] ) + |> tuple15_of_array + in + + (* Add external checks *) + External_checks.append_multi_range_check external_checks + (carry1_lo, product1_lo, product1_hi_0) ; + External_checks.append_compact_multi_range_check external_checks + (quotient_bound01, quotient_bound2) ; + if bound_check_result then + External_checks.append_bound_check external_checks + (remainder0, remainder1, remainder2) ; + + let left_input0, left_input1, left_input2 = + Element.Standard.to_limbs left_input + in + let right_input0, right_input1, right_input2 = + Element.Standard.to_limbs right_input + in + + (* Create ForeignFieldMul gate *) + with_label "foreign_field_mul" (fun () -> + assert_ + { annotation = Some __LOC__ + ; basic = + Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T + (ForeignFieldMul + { (* Current row *) left_input0 + ; left_input1 + ; left_input2 + ; right_input0 + ; right_input1 + ; right_input2 + ; carry1_lo + ; carry1_hi + ; carry0 + ; quotient0 + ; quotient1 + ; quotient2 + ; quotient_bound_carry + ; product1_hi_1 + ; (* Next row *) remainder0 + ; remainder1 + ; remainder2 + ; quotient_bound01 + ; quotient_bound2 + ; product1_lo + ; product1_hi_0 + ; (* Coefficients *) foreign_field_modulus0 + ; foreign_field_modulus1 + ; foreign_field_modulus2 + ; neg_foreign_field_modulus0 + ; neg_foreign_field_modulus1 + ; neg_foreign_field_modulus2 + } ) + } ) ; + Element.Standard.of_limbs (remainder0, remainder1, remainder2) + +(* Gadget to constrain conversion of bytes array (output of Keccak gadget) + into foreign field element with standard limbs (input of ECDSA gadget). + Include the endianness of the bytes list. 
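+ + A hedged usage sketch (illustrative names; assumes a 32-byte big-endian + Keccak digest and the secp256k1 modulus limbs are in scope, and that the + endianness constructor is Keccak.Big): + + let elem = + bytes_to_standard_element (module Circuit) ~endian:Keccak.Big digest + secp256k1_limbs 256 + in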
*) +let bytes_to_standard_element (type f) + (module Circuit : Snark_intf.Run with type field = f) + ~(endian : Keccak.endianness) (bytestring : Circuit.Field.t list) + (fmod : f standard_limbs) (fmod_bitlen : int) = + let open Circuit in + (* Make the input bytestring a big-endian value *) + let bytestring = + match endian with Little -> List.rev bytestring | Big -> bytestring + in + + (* Convert the bytestring into a bigint *) + let bytestring = Array.of_list bytestring in + + (* C1: Check modulus_bit_length = # of bits you unpack + * This is partly implicit in the circuit given the number of byte outputs of Keccak: + * · input_bitlen < fmod_bitlen : OK + * · input_bitlen = fmod_bitlen : OK + * · input_bitlen > fmod_bitlen : CONSTRAIN + * Check that the most significant byte of the input is less than 2^(fmod_bitlen % 8) + *) + let input_bitlen = Array.length bytestring * 8 in + if input_bitlen > fmod_bitlen then + (* For the most significant byte, constrain that it has fewer bits than required *) + Lookup.less_than_bits + (module Circuit) + ~bits:(fmod_bitlen % 8) bytestring.(0) ; + (* C2: Constrain bytes into standard foreign field element limbs => foreign field element z *) + let elem = + Element.Standard.of_bignum_bigint (module Circuit) + @@ Common.cvar_field_bytes_to_bignum_bigint_as_prover (module Circuit) + @@ Array.to_list bytestring + in + (* C3: Reduce z modulo foreign_field_modulus + * + * Constrain z' = z + 0 modulo foreign_field_modulus using foreign field addition gate + * + * Note: this is sufficient because z cannot be double the size due to bit length constraint + *) + let zero = Element.Standard.of_limbs (Field.zero, Field.zero, Field.zero) in + (* C4: Range check z' < f *) + (* Altogether this is a call to Foreign_field.add in default mode *) + let output = add (module Circuit) elem zero fmod in + + (* return z' *) + output + +(*********) +(* Tests *) +(*********) + +let%test_unit "foreign_field arithmetics gadgets" = + if tests_enabled then ( + let (* Import the gadget test runner *) + open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Helper to test foreign_field_add gadget + * Inputs: + * - left_input + * - right_input + * - foreign_field_modulus + * Checks the size of the inputs with multi-range checks.
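+ * The optional ?cs argument lets the same compiled constraint system be + * reused across calls with different witnesses, as the tests below do.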
+ *) + let test_add ?cs (left_input : Bignum_bigint.t) + (right_input : Bignum_bigint.t) (foreign_field_modulus : Bignum_bigint.t) + = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test inputs *) + let expected = + Bignum_bigint.((left_input + right_input) % foreign_field_modulus) + in + let foreign_field_modulus = + bignum_bigint_to_field_const_standard_limbs + (module Runner.Impl) + foreign_field_modulus + in + let left_input = + Element.Standard.of_bignum_bigint (module Runner.Impl) left_input + in + let right_input = + Element.Standard.of_bignum_bigint (module Runner.Impl) right_input + in + (* Create the gadget *) + let sum = + add + (module Runner.Impl) + left_input right_input foreign_field_modulus + in + (* Create external checks context for tracking extra constraints *) + let external_checks = External_checks.create (module Runner.Impl) in + (* Check that the inputs were foreign field elements *) + let _out = + valid_element + (module Runner.Impl) + external_checks left_input foreign_field_modulus + in + let _out = + valid_element + (module Runner.Impl) + external_checks right_input foreign_field_modulus + in + + assert (Mina_stdlib.List.Length.equal external_checks.multi_ranges 2) ; + List.iter external_checks.multi_ranges ~f:(fun multi_range -> + let v0, v1, v2 = multi_range in + Range_check.multi (module Runner.Impl) v0 v1 v2 ; + () ) ; + + as_prover (fun () -> + let expected = + Element.Standard.of_bignum_bigint + (module Runner.Impl) + expected + in + assert ( + Element.Standard.equal_as_prover + (module Runner.Impl) + expected sum ) ) ; + () ) + in + cs + in + + (* Helper to test the foreign field addition chain gadget + * Inputs: + * - inputs + * - operations: list of operations (Add or Sub) to perform + * - foreign_field_modulus + *) + let test_add_chain ?cs (inputs : Bignum_bigint.t list) + (operations : op_mode list) (foreign_field_modulus : Bignum_bigint.t) = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* compute result of the chain *) + let n = List.length operations in + let chain_result = [| List.nth_exn inputs 0 |] in + for i = 0 to n - 1 do + let operation = List.nth_exn operations i in + let op_sign = + match operation with + | Add -> + Bignum_bigint.one + | Sub -> + Bignum_bigint.of_int (-1) + in + let inp = List.nth_exn inputs (i + 1) in + let sum = + Bignum_bigint.( + (chain_result.(0) + (op_sign * inp)) % foreign_field_modulus) + in + chain_result.(0) <- sum ; () + done ; + + let inputs = + List.map + ~f:(fun x -> + Element.Standard.of_bignum_bigint (module Runner.Impl) x ) + inputs + in + let foreign_field_modulus = + bignum_bigint_to_field_const_standard_limbs + (module Runner.Impl) + foreign_field_modulus + in + + (* Create the gadget *) + let sum = + sum_chain + (module Runner.Impl) + inputs operations foreign_field_modulus + in + (* Check sum matches expected result *) + as_prover (fun () -> + let expected = + Element.Standard.of_bignum_bigint + (module Runner.Impl) + chain_result.(0) + in + assert ( + Element.Standard.equal_as_prover + (module Runner.Impl) + expected sum ) ) ; + () ) + in + cs + in + + (* Helper to test foreign_field_mul gadget + * Inputs: + * cs := optional constraint system to reuse + * left_input := left multiplicand + * right_input := right multiplicand + * foreign_field_modulus := foreign field modulus + *) + let test_mul ?cs
(left_input : Bignum_bigint.t) + (right_input : Bignum_bigint.t) (foreign_field_modulus : Bignum_bigint.t) + = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test inputs *) + let expected = + Bignum_bigint.(left_input * right_input % foreign_field_modulus) + in + let foreign_field_modulus = + bignum_bigint_to_field_const_standard_limbs + (module Runner.Impl) + foreign_field_modulus + in + let left_input = + Element.Standard.of_bignum_bigint (module Runner.Impl) left_input + in + let right_input = + Element.Standard.of_bignum_bigint (module Runner.Impl) right_input + in + + (* Create external checks context for tracking extra constraints + that are required for soundness (unused in this simple test) *) + let unused_external_checks = + External_checks.create (module Runner.Impl) + in + + (* Create the gadget *) + let product = + mul + (module Runner.Impl) + unused_external_checks left_input right_input + foreign_field_modulus + in + (* Check product matches expected result *) + as_prover (fun () -> + let expected = + Element.Standard.of_bignum_bigint + (module Runner.Impl) + expected + in + assert ( + Element.Standard.equal_as_prover + (module Runner.Impl) + expected product ) ) ; + () ) + in + + cs + in + + (* Helper to test foreign_field_mul gadget with external checks + * Inputs: + * cs := optional constraint system to reuse + * left_input := left multiplicand + * right_input := right multiplicand + * foreign_field_modulus := foreign field modulus + *) + let test_mul_full ?cs (left_input : Bignum_bigint.t) + (right_input : Bignum_bigint.t) (foreign_field_modulus : Bignum_bigint.t) + = + (* Generate and verify first proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test inputs *) + let expected = + Bignum_bigint.(left_input * right_input % foreign_field_modulus) + in + let foreign_field_modulus = + bignum_bigint_to_field_const_standard_limbs + (module Runner.Impl) + foreign_field_modulus + in + let left_input = + Element.Standard.of_bignum_bigint (module Runner.Impl) left_input + in + let right_input = + Element.Standard.of_bignum_bigint (module Runner.Impl) right_input + in + + (* Create external checks context for tracking extra constraints + that are required for soundness *) + let external_checks = External_checks.create (module Runner.Impl) in + + (* External checks for this test (example, circuit designer has complete flexibility about organization) + * Layout + * 0) ForeignFieldMul + * 1) Zero + * 2) ForeignFieldAdd (result bound addition) + * 3) Zero (result bound addition) + * 4) ForeignFieldAdd (left bound addition) + * 5) Zero (left bound addition) + * 6) ForeignFieldAdd (right bound addition) + * 7) Zero (right bound addition) + * 8-11) multi-range-check (right bound) + * 12-15) multi-range-check (left bound) + * 16-19) multi-range-check (result bound) + * 20-23) multi-range-check (product1_lo, product1_hi_0, carry1_lo) + * 24-27) compact-multi-range-check (quotient) + *) + + (* Create the foreign field mul gadget *) + let product = + mul + (module Runner.Impl) + external_checks left_input right_input foreign_field_modulus + in + + (* Sanity check product matches expected result *) + as_prover (fun () -> + let expected = + Element.Standard.of_bignum_bigint + (module Runner.Impl) + expected + in + assert ( + Element.Standard.equal_as_prover + (module Runner.Impl) + expected product ) ) ; 
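+ + (* Register bound checks for both multiplicands so that, together with the + result bound that [mul] already appended, all three values end up + constrained to be valid foreign field elements *)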
+ + (* Add multi-range-check left input *) + External_checks.append_bound_check external_checks + @@ Element.Standard.to_limbs left_input ; + + (* Add multi-range-check right input *) + External_checks.append_bound_check external_checks + @@ Element.Standard.to_limbs right_input ; + + (* + * Perform external checks + *) + assert (Mina_stdlib.List.Length.equal external_checks.bounds 3) ; + assert (Mina_stdlib.List.Length.equal external_checks.multi_ranges 1) ; + assert ( + Mina_stdlib.List.Length.equal external_checks.compact_multi_ranges + 1 ) ; + + (* Add gates for bound checks, multi-range-checks and compact-multi-range-checks *) + constrain_external_checks + (module Runner.Impl) + external_checks foreign_field_modulus ) + in + + cs + in + + (* Helper to test foreign field arithmetic gadgets together + * It computes a * b + a - b + *) + let test_ff ?cs (left_input : Bignum_bigint.t) + (right_input : Bignum_bigint.t) (foreign_field_modulus : Bignum_bigint.t) + = + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + (* Prepare test inputs *) + let expected_mul = + Bignum_bigint.(left_input * right_input % foreign_field_modulus) + in + let expected_add = + Bignum_bigint.( + (expected_mul + left_input) % foreign_field_modulus) + in + let expected_sub = + Bignum_bigint.( + (expected_add - right_input) % foreign_field_modulus) + in + let foreign_field_modulus = + bignum_bigint_to_field_const_standard_limbs + (module Runner.Impl) + foreign_field_modulus + in + let left_input = + Element.Standard.of_bignum_bigint (module Runner.Impl) left_input + in + let right_input = + Element.Standard.of_bignum_bigint (module Runner.Impl) right_input + in + + (* Create external checks context for tracking extra constraints + that are required for soundness *) + let unused_external_checks = + External_checks.create (module Runner.Impl) + in + + let product = + mul + (module Runner.Impl) + unused_external_checks left_input right_input + foreign_field_modulus + in + + let addition = + add (module Runner.Impl) product left_input foreign_field_modulus + in + let subtraction = + sub + (module Runner.Impl) + addition right_input foreign_field_modulus + in + let external_checks = External_checks.create (module Runner.Impl) in + + (* Check that the inputs were foreign field elements *) + let _out = + valid_element + (module Runner.Impl) + external_checks left_input foreign_field_modulus + in + let _out = + valid_element + (module Runner.Impl) + external_checks right_input foreign_field_modulus + in + + assert (Mina_stdlib.List.Length.equal external_checks.multi_ranges 2) ; + List.iter external_checks.multi_ranges ~f:(fun multi_range -> + let v0, v1, v2 = multi_range in + Range_check.multi (module Runner.Impl) v0 v1 v2 ; + () ) ; + + (* Check the results match the expected values *) + as_prover (fun () -> + let expected_mul = + Element.Standard.of_bignum_bigint + (module Runner.Impl) + expected_mul + in + let expected_add = + Element.Standard.of_bignum_bigint + (module Runner.Impl) + expected_add + in + let expected_sub = + Element.Standard.of_bignum_bigint + (module Runner.Impl) + expected_sub + in + assert ( + Element.Standard.equal_as_prover + (module Runner.Impl) + expected_mul product ) ; + assert ( + Element.Standard.equal_as_prover + (module Runner.Impl) + expected_add addition ) ; + assert ( + Element.Standard.equal_as_prover + (module Runner.Impl) + expected_sub subtraction ) ) ) + in +
cs + in + + (* Test constants *) + let secp256k1_modulus = + Common.bignum_bigint_of_hex + "fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f" + in + let secp256k1_max = Bignum_bigint.(secp256k1_modulus - Bignum_bigint.one) in + let secp256k1_sqrt = Common.bignum_bigint_sqrt secp256k1_max in + let pallas_modulus = + Common.bignum_bigint_of_hex + "40000000000000000000000000000000224698fc094cf91b992d30ed00000001" + in + let pallas_max = Bignum_bigint.(pallas_modulus - Bignum_bigint.one) in + let pallas_sqrt = Common.bignum_bigint_sqrt pallas_max in + let vesta_modulus = + Common.bignum_bigint_of_hex + "40000000000000000000000000000000224698fc0994a8dd8c46eb2100000001" + in + let vesta_max = Bignum_bigint.(vesta_modulus - Bignum_bigint.one) in + + (* FFAdd TESTS *) + (* Single tests *) + let cs = + test_add + (Common.bignum_bigint_of_hex + "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" ) + (Common.bignum_bigint_of_hex + "80000000000000000000000000000000000000000000000000000000000000d0" ) + secp256k1_modulus + in + let _cs = test_add ~cs secp256k1_max secp256k1_max secp256k1_modulus in + let _cs = test_add ~cs pallas_max pallas_max secp256k1_modulus in + let _cs = test_add ~cs vesta_modulus pallas_modulus secp256k1_modulus in + let cs = test_add Bignum_bigint.zero Bignum_bigint.zero secp256k1_modulus in + let _cs = + test_add ~cs Bignum_bigint.zero Bignum_bigint.zero secp256k1_modulus + in + let _cs = + test_add ~cs + (Common.bignum_bigint_of_hex + "1f2d8f0d0cd52771bfb86ffdf651b7907e2e0fa87f7c9c2a41b0918e2a7820d" ) + (Common.bignum_bigint_of_hex + "b58c271d1f2b1c632a61a548872580228430495e9635842591d9118236bacfa2" ) + secp256k1_modulus + in + + assert ( + Common.is_error (fun () -> + (* check that the inputs must be smaller than the modulus *) + let _cs = + test_add ~cs secp256k1_modulus secp256k1_modulus secp256k1_modulus + in + () ) ) ; + + assert ( + Common.is_error (fun () -> + (* check wrong cs fails *) + let _cs = + test_add ~cs secp256k1_modulus secp256k1_modulus pallas_modulus + in + () ) ) ; + + (* Chain tests *) + let cs = + test_add_chain + [ pallas_max + ; pallas_max + ; Common.bignum_bigint_of_hex + "1f2d8f0d0cd52771bfb86ffdf651b7907e2e0fa87f7c9c2a41b0918e2a7820d" + ; Common.bignum_bigint_of_hex + "69cc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15" + ; vesta_max + ] + [ Add; Sub; Sub; Add ] vesta_modulus + in + let _cs = + test_add_chain ~cs + [ vesta_max + ; pallas_max + ; Common.bignum_bigint_of_hex + "b58c271d1f2b1c632a61a548872580228430495e9635842591d9118236" + ; Common.bignum_bigint_of_hex + "1342835834869e59534942304a03534963893045203528b523532232543" + ; Common.bignum_bigint_of_hex + "1f2d8f0d0cd52771bfb86ffdf651ddddbbddeeeebbbaaaaffccee20d" + ] + [ Add; Sub; Sub; Add ] vesta_modulus + in + (* Check that the number of inputs is coherent with the number of operations *) + assert ( + Common.is_error (fun () -> + let _cs = + test_add_chain ~cs [ pallas_max; pallas_max ] [ Add; Sub; Sub; Add ] + secp256k1_modulus + in + () ) ) ; + + (* FFMul TESTS *) + + (* Positive tests *) + (* zero_mul: 0 * 0 *) + let cs = test_mul Bignum_bigint.zero Bignum_bigint.zero secp256k1_modulus in + (* one_mul: max * 1 *) + let _cs = test_mul ~cs secp256k1_max Bignum_bigint.one secp256k1_modulus in + (* max_native_square: pallas_sqrt * pallas_sqrt *) + let _cs = test_mul ~cs pallas_sqrt pallas_sqrt secp256k1_modulus in + (* max_foreign_square: secp256k1_sqrt * secp256k1_sqrt *) + let _cs = test_mul ~cs secp256k1_sqrt secp256k1_sqrt
secp256k1_modulus in + (* max_native_multiplicands: pallas_max * pallas_max *) + let _cs = test_mul ~cs pallas_max pallas_max secp256k1_modulus in + (* max_foreign_multiplicands: secp256k1_max * secp256k1_max *) + let _cs = test_mul ~cs secp256k1_max secp256k1_max secp256k1_modulus in + (* nonzero carry0 bits *) + let _cs = + test_mul ~cs + (Common.bignum_bigint_of_hex + "fbbbd91e03b48cebbac38855289060f8b29fa6ad3cffffffffffffffffffffff" ) + (Common.bignum_bigint_of_hex + "d551c3d990f42b6d780275d9ca7e30e72941aa29dcffffffffffffffffffffff" ) + secp256k1_modulus + in + (* test nonzero carry10 *) + let _cs = + test_mul + (Common.bignum_bigint_of_hex + "4000000000000000000000000000000000000000000000000000000000000000" ) + (Common.bignum_bigint_of_hex + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0" ) + Bignum_bigint.(pow (of_int 2) (of_int 259)) + in + (* test nonzero carry1_hi *) + let _cs = + test_mul + (Common.bignum_bigint_of_hex + "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" ) + (Common.bignum_bigint_of_hex + "8000000000000000000000000000000000000000000000000000000000000000d0" ) + Bignum_bigint.(pow (of_int 2) (of_int 259) - one) + in + (* test nonzero_second_bit_carry1_hi *) + let _cs = + test_mul ~cs + (Common.bignum_bigint_of_hex + "ffffffffffffffffffffffffffffffffffffffffffffffff8a9dec7cfd1acdeb" ) + (Common.bignum_bigint_of_hex + "fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2e" ) + secp256k1_modulus + in + (* test random_multiplicands_carry1_lo *) + let _cs = + test_mul ~cs + (Common.bignum_bigint_of_hex + "ffd913aa9e17a63c7a0ff2354218037aafcd6ecaa67f56af1de882594a434dd3" ) + (Common.bignum_bigint_of_hex + "7d313d6b42719a39acea5f51de9d50cd6a4ec7147c003557e114289e9d57dffc" ) + secp256k1_modulus + in + (* test random_multiplicands_valid *) + let _cs = + test_mul ~cs + (Common.bignum_bigint_of_hex + "1f2d8f0d0cd52771bfb86ffdf651b7907e2e0fa87f7c9c2a41b0918e2a7820d" ) + (Common.bignum_bigint_of_hex + "b58c271d1f2b1c632a61a548872580228430495e9635842591d9118236bacfa2" ) + secp256k1_modulus + in + (* test smaller foreign field modulus *) + let _cs = + test_mul + (Common.bignum_bigint_of_hex + "5945fa400436f458cb9e994dcd315ded43e9b60eb68e2ae7b5cf1d07b48ca1c" ) + (Common.bignum_bigint_of_hex + "747109f882b8e26947dfcd887273c0b0720618cb7f6d407c9ba74dbe0eda22f" ) + (Common.bignum_bigint_of_hex + "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" ) + in + (* vesta non-native on pallas native modulus *) + let _cs = + test_mul + (Common.bignum_bigint_of_hex + "69cc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15" ) + (Common.bignum_bigint_of_hex + "1fffe27b14baa740db0c8bb6656de61d2871a64093908af6181f46351a1c1909" ) + vesta_modulus + in + + (* Full test including all external checks *) + let cs = + test_mul_full + (Common.bignum_bigint_of_hex "2") + (Common.bignum_bigint_of_hex "3") + secp256k1_modulus + in + + let _cs = + test_mul_full ~cs + (Common.bignum_bigint_of_hex + "1f2d8f0d0cd52771bfb86ffdf651b7907e2e0fa87f7c9c2a41b0918e2a7820d" ) + (Common.bignum_bigint_of_hex + "b58c271d1f2b1c632a61a548872580228430495e9635842591d9118236bacfa2" ) + secp256k1_modulus + in + + (* COMBINED TESTS *) + let _cs = + test_ff + (Common.bignum_bigint_of_hex + "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" ) + (Common.bignum_bigint_of_hex + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ) + secp256k1_modulus + in + () ) ; + () + +let%test_unit "foreign_field equal_as_prover" = + if tests_enabled then 
+ let open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + (* Check equal_as_prover *) + let _cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof (fun () -> + let open Runner.Impl in + let x = + Element.Standard.of_bignum_bigint (module Runner.Impl) + @@ Common.bignum_bigint_of_hex + "5925fa400436f458cb9e994dcd315ded43e9b60eb68e2ae7b5cf1d07b48ca1c" + in + let y = + Element.Standard.of_bignum_bigint (module Runner.Impl) + @@ Common.bignum_bigint_of_hex + "69bc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15" + in + let z = + Element.Standard.of_bignum_bigint (module Runner.Impl) + @@ Common.bignum_bigint_of_hex + "69bc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15" + in + as_prover (fun () -> + assert ( + not (Element.Standard.equal_as_prover (module Runner.Impl) x y) ) ; + assert (Element.Standard.equal_as_prover (module Runner.Impl) y z) ) ; + + let x = + Element.Compact.of_bignum_bigint (module Runner.Impl) + @@ Common.bignum_bigint_of_hex + "5925fa400436f458cb9e994dcd315ded43e9b60eb68e2ae7b5cf1d07b48ca1c" + in + let y = + Element.Compact.of_bignum_bigint (module Runner.Impl) + @@ Common.bignum_bigint_of_hex + "69bc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15" + in + let z = + Element.Compact.of_bignum_bigint (module Runner.Impl) + @@ Common.bignum_bigint_of_hex + "69bc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15" + in + as_prover (fun () -> + assert ( + not (Element.Compact.equal_as_prover (module Runner.Impl) x y) ) ; + assert (Element.Compact.equal_as_prover (module Runner.Impl) y z) ) ; + + (* Pad with a "dummy" constraint b/c Kimchi requires at least 2 *) + let fake = + exists Field.typ ~compute:(fun () -> Field.Constant.zero) + in + Boolean.Assert.is_true (Field.equal fake Field.zero) ; + () ) + in + () diff --git a/src/lib/crypto/kimchi_backend/gadgets/foreign_field.mli.disabled b/src/lib/crypto/kimchi_backend/gadgets/foreign_field.mli.disabled new file mode 100644 index 00000000000..e0c51dacca1 --- /dev/null +++ b/src/lib/crypto/kimchi_backend/gadgets/foreign_field.mli.disabled @@ -0,0 +1,348 @@ +module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint +module Snark_intf = Snarky_backendless.Snark_intf + +(** Conventions used + * 1. Functions prefixed with "as_prover_" only happen during proving + * and not during circuit creation + * 2. Functions suffixed with "_as_prover" can only be called outside + * the circuit.
Specifically, this means within an exists, within + * an as_prover or in an "as_prover_" prefixed function + *) + +(** Foreign field modulus is abstract over two parameters + * Field type + * Limbs structure + * + * There are 2 specific limb structures required + * Standard mode := 3 limbs of L-bits each + * Compact mode := 2 limbs where the lowest is 2L bits and the highest is L bits + *) +type 'field standard_limbs = 'field * 'field * 'field + +type 'field compact_limbs = 'field * 'field + +val bignum_bigint_to_field_const_standard_limbs : + (module Snark_intf.Run with type field = 'field) + -> Bignum_bigint.t + -> 'field standard_limbs + +val field_const_standard_limbs_to_bignum_bigint : + (module Snark_intf.Run with type field = 'field) + -> 'field standard_limbs + -> Bignum_bigint.t + +val check_modulus : + (module Snark_intf.Run with type field = 'field) + -> 'field standard_limbs + -> unit + +val check_modulus_bignum_bigint : + (module Snark_intf.Run with type field = 'field) -> Bignum_bigint.t -> unit + +(** Foreign field element base type - not used directly *) +module type Element_intf = sig + type 'field t + + type 'a limbs_type + + module Cvar = Snarky_backendless.Cvar + + (** Create foreign field element from Cvar limbs *) + val of_limbs : 'field Cvar.t limbs_type -> 'field t + + (** Create foreign field element from field limbs *) + val of_field_limbs : + (module Snark_intf.Run with type field = 'field) + -> 'field limbs_type + -> 'field t + + (** Create foreign field element from Bignum_bigint.t *) + val of_bignum_bigint : + (module Snark_intf.Run with type field = 'field) + -> Bignum_bigint.t + -> 'field t + + (** Create constant foreign field element from Bignum_bigint.t *) + val const_of_bignum_bigint : + (module Snark_intf.Run with type field = 'field) + -> Bignum_bigint.t + -> 'field t + + (** Convert foreign field element into Cvar limbs *) + val to_limbs : 'field t -> 'field Cvar.t limbs_type + + (** Map foreign field element's Cvar limbs into some other limbs with the mapping function func *) + val map : 'field t -> ('field Cvar.t -> 'g) -> 'g limbs_type + + (** One constant *) + val one : (module Snark_intf.Run with type field = 'field) -> 'field t + + (** Convert foreign field element into field limbs *) + val to_field_limbs_as_prover : + (module Snark_intf.Run with type field = 'field) + -> 'field t + -> 'field limbs_type + + (** Convert foreign field element into Bignum_bigint.t limbs *) + val to_bignum_bigint_limbs_as_prover : + (module Snark_intf.Run with type field = 'field) + -> 'field t + -> Bignum_bigint.t limbs_type + + (** Convert foreign field element into a Bignum_bigint.t *) + val to_bignum_bigint_as_prover : + (module Snark_intf.Run with type field = 'field) + -> 'field t + -> Bignum_bigint.t + + (** Convert foreign field element to string *) + val to_string_as_prover : + (module Snark_intf.Run with type field = 'field) -> 'field t -> string + + (** Constrain zero check computation with boolean output *) + val is_zero : + (module Snark_intf.Run with type field = 'field) + -> 'field t + -> 'field Cvar.t Snark_intf.Boolean0.t + + (** Compare if two foreign field elements are equal *) + val equal_as_prover : + (module Snark_intf.Run with type field = 'field) + -> 'field t + -> 'field t + -> bool + + (** Add copy constraints asserting that two foreign field elements are equal *) + val assert_equal : + (module Snark_intf.Run with type field = 'field) + -> 'field t + -> 'field t + -> unit + + (* Create and constrain foreign field element from Bignum_bigint.t *) +
val check_here_const_of_bignum_bigint : + (module Snark_intf.Run with type field = 'field) + -> Bignum_bigint.t + -> 'field t + + (** Add conditional constraints to select foreign field element *) + val if_ : + (module Snark_intf.Run with type field = 'field) + -> 'field Cvar.t Snark_intf.Boolean0.t + -> then_:'field t + -> else_:'field t + -> 'field t + + (** Decompose and constrain foreign field element into list of boolean cvars *) + val unpack : + (module Snark_intf.Run with type field = 'field) + -> 'field t + -> length:int + -> 'field Cvar.t Snark_intf.Boolean0.t list +end + +module Element : sig + (** Foreign field element type (standard limbs) *) + module Standard : sig + include Element_intf with type 'a limbs_type = 'a standard_limbs + end +end + +(** Context for tracking external checks that must be made + * (using other gadgets) in order to achieve soundness for a + * given multiplication + *) +module External_checks : sig + module Cvar = Snarky_backendless.Cvar + + type 'field t = + { mutable multi_ranges : 'field Cvar.t standard_limbs list + ; mutable compact_multi_ranges : 'field Cvar.t compact_limbs list + ; mutable bounds : 'field Cvar.t standard_limbs list + } + + val create : (module Snark_intf.Run with type field = 'field) -> 'field t + + val append_multi_range_check : + 'field t -> 'field Cvar.t standard_limbs -> unit + + val append_compact_multi_range_check : + 'field t -> 'field Cvar.t compact_limbs -> unit + + val append_bound_check : 'field t -> 'field Cvar.t standard_limbs -> unit +end + +(* Type of operation *) +type op_mode = Add | Sub + +(** Gadget to check the supplied value is a valid foreign field element for the + * supplied foreign field modulus + * + * This gadget checks in the circuit that a value is less than the foreign field modulus. + * Part of this involves computing a bound value that is both added to external_checks + * and also returned. The caller may use either one, depending on the situation. + * + * Inputs: + * external_checks := Context to track required external checks + * value := the value to check + * foreign_field_modulus := the modulus of the foreign field + * + * Outputs: + * Inserts the gates (described below) into the circuit + * Adds bound value to be multi-range-checked to external_checks + * Returns bound value + * + * Effects to the circuit: + * - 1 FFAdd gate + * - 1 Zero gate + *) +val valid_element : + (module Snark_intf.Run with type field = 'f) + -> 'f External_checks.t (* external_checks context *) + -> 'f Element.Standard.t (* value *) + -> 'f standard_limbs (* foreign_field_modulus *) + -> 'f Element.Standard.t +(* result *) + +(** Gadget to constrain external checks using supplied modulus *) +val constrain_external_checks : + (module Snark_intf.Run with type field = 'f) + -> 'f External_checks.t + -> 'f standard_limbs + -> unit + +(** Gadget for a chain of foreign field sums (additions or subtractions) + * + * Inputs: + * inputs := All the inputs to the chain of sums + * operations := List of operation modes Add or Sub indicating whether the + * corresponding addition is a subtraction + * foreign_field_modulus := The modulus of the foreign field (all the same) + * + * Outputs: + * Inserts the gates (described below) into the circuit + * Returns the final result of the chain of sums + * + * For n+1 inputs, the gadget creates n foreign field addition gates, followed by a final + * foreign field addition gate for the bound check (i.e. valid_element check).
For this, + * an additional multi-range check must also be performed. + * By default, the range check takes place right after the final Raw row. + *) +val sum_chain : + (module Snark_intf.Run with type field = 'f) + -> 'f Element.Standard.t list (* inputs *) + -> op_mode list (* operations *) + -> 'f standard_limbs (* foreign_field_modulus *) + -> 'f Element.Standard.t +(* result *) + +(** Gadget for a single foreign field addition + * + * Inputs: + * full := flag for whether to perform a full addition with valid_element check + * on the result (default true) or just a single FFAdd row (false) + * left_input := 3 limbs foreign field element + * right_input := 3 limbs foreign field element + * foreign_field_modulus := The modulus of the foreign field + * + * Outputs: + * Inserts the gates (described below) into the circuit + * Returns the result of the addition as a 3 limbs element + * + * In default mode: + * It adds a FFAdd gate, + * followed by a Zero gate, + * a FFAdd gate for the bound check, + * a Zero gate after this bound check, + * and a Multi Range Check gadget. + * + * In false mode: + * It adds a FFAdd gate. + *) +val add : + (module Snark_intf.Run with type field = 'f) + -> ?full:bool (* full *) + -> 'f Element.Standard.t (* left_input *) + -> 'f Element.Standard.t (* right_input *) + -> 'f standard_limbs (* foreign_field_modulus *) + -> 'f Element.Standard.t +(* result *) + +(** Gadget for a single foreign field subtraction + * + * Inputs: + * full := flag for whether to perform a full subtraction with valid_element check + * on the result (default true) or just a single FFAdd row (false) + * left_input := 3 limbs foreign field element + * right_input := 3 limbs foreign field element + * foreign_field_modulus := The modulus of the foreign field + * + * Outputs: + * Inserts the gates (described below) into the circuit + * Returns the result of the subtraction as a 3 limbs element + * + * In default mode: + * It adds a FFAdd gate, + * followed by a Zero gate, + * a FFAdd gate for the bound check, + * a Zero gate after this bound check, + * and a Multi Range Check gadget. + * + * In false mode: + * It adds a FFAdd gate. + *) +val sub : + (module Snark_intf.Run with type field = 'f) + -> ?full:bool (* full *) + -> 'f Element.Standard.t (* left_input *) + -> 'f Element.Standard.t (* right_input *) + -> 'f standard_limbs (* foreign_field_modulus *) + -> 'f Element.Standard.t +(* result *) + +(* Gadget for creating an addition or subtraction result row (Zero gate with result) *) +val result_row : + (module Snark_intf.Run with type field = 'f) + -> ?label:string + -> 'f Element.Standard.t + -> unit + +(** Gadget for foreign field multiplication + * + * Constrains that + * + * left_input * right_input = quotient * foreign_field_modulus + remainder + * + * where remainder is the product.
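+ * (that is, the result reduced modulo the foreign field modulus; as a + * hedged toy example with f = 7: 5 * 6 = 30 = 4 * 7 + 2, so the gadget's + * quotient is 4 and the remainder/product is 2)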
+ * + * Inputs: + * external_checks := Context to track required external checks + * left_input := Multiplicand foreign field element + * right_input := Multiplicand foreign field element + * foreign_field_modulus := Must be less than the max foreign field modulus + * + * Outputs: + * Inserts the ForeignFieldMul gate, followed by Zero gate into the circuit + * Appends required values to external_checks + * Returns the product + *) +val mul : + (module Snark_intf.Run with type field = 'f) + -> 'f External_checks.t (* external_checks *) + -> ?bound_check_result:bool + -> 'f Element.Standard.t (* left_input *) + -> 'f Element.Standard.t (* right_input *) + -> 'f standard_limbs (* foreign_field_modulus *) + -> 'f Element.Standard.t +(* product *) + +(** Gadget to constrain conversion of bytes list (output of Keccak gadget) + into foreign field element with standard limbs (input of ECDSA gadget). + Include the endianness of the bytes list. *) +val bytes_to_standard_element : + (module Snark_intf.Run with type field = 'f) + -> endian:Keccak.endianness + -> 'f Snarky_backendless.Cvar.t list + -> 'f standard_limbs + -> int + -> 'f Element.Standard.t diff --git a/src/lib/crypto/kimchi_backend/gadgets/generic.ml b/src/lib/crypto/kimchi_backend/gadgets/generic.ml index 535edec1974..7184e5754f6 100644 --- a/src/lib/crypto/kimchi_backend/gadgets/generic.ml +++ b/src/lib/crypto/kimchi_backend/gadgets/generic.ml @@ -2,7 +2,9 @@ open Core_kernel open Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint -(* EXAMPLE generic addition gate gadget *) +let tests_enabled = true + +(* Generic addition gate gadget *) let add (type f) (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) (left_input : Circuit.Field.t) (right_input : Circuit.Field.t) : @@ -16,6 +18,7 @@ let add (type f) Field.Constant.add left_input right_input ) in + let neg_one = Field.Constant.(negate one) in (* Set up generic add gate *) with_label "generic_add_gadget" (fun () -> assert_ @@ -25,14 +28,47 @@ let add (type f) (Basic { l = (Field.Constant.one, left_input) ; r = (Field.Constant.one, right_input) - ; o = (Option.value_exn Field.(to_constant (negate one)), sum) + ; o = (neg_one, sum) ; m = Field.Constant.zero ; c = Field.Constant.zero } ) } ; sum ) -(* EXAMPLE generic multiplication gate gadget *) +(* Generic subtraction gate gadget *) +let sub (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (left_input : Circuit.Field.t) (right_input : Circuit.Field.t) : + Circuit.Field.t = + let open Circuit in + (* Witness computation; difference = left_input - right_input *) + let difference = + exists Field.typ ~compute:(fun () -> + let left_input = As_prover.read Field.typ left_input in + let right_input = As_prover.read Field.typ right_input in + Field.Constant.sub left_input right_input ) + in + + (* Negative one gate coefficient *) + let neg_one = Field.Constant.(negate one) in + + (* Set up generic sub gate *) + with_label "generic_sub_gadget" (fun () -> + assert_ + { annotation = Some __LOC__ + ; basic = + Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T + (Basic + { l = (Field.Constant.one, left_input) + ; r = (neg_one, right_input) + ; o = (neg_one, difference) + ; m = Field.Constant.zero + ; c = Field.Constant.zero + } ) + } ; + difference ) + +(* Generic multiplication gate gadget *) let mul (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (left_input : Circuit.Field.t) (right_input : Circuit.Field.t) : @@ -46,6 +82,7 @@
let mul (type f)
 Field.Constant.mul left_input right_input )
 in
+ let neg_one = Field.Constant.(negate one) in
 (* Set up generic mul gate *)
 with_label "generic_mul_gadget" (fun () ->
 assert_
@@ -55,27 +92,34 @@ let mul (type f)
 (Basic
 { l = (Field.Constant.zero, left_input)
 ; r = (Field.Constant.zero, right_input)
- ; o = (Option.value_exn Field.(to_constant (negate one)), prod)
+ ; o = (neg_one, prod)
 ; m = Field.Constant.one
 ; c = Field.Constant.zero
 } )
 } ;
 prod )
+(*********)
+(* Tests *)
+(*********)
+
 let%test_unit "generic gadgets" =
- (* Import the gadget test runner *)
- let open Kimchi_gadgets_test_runner in
- (* Initialize the SRS cache. *)
- let () = Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] in
-
- (* Helper to test generic add gate gadget
- * Inputs operands and expected output: left_input + right_input = sum
- * Returns true if constraints are satisfied, false otherwise.
- *)
- let test_generic_add left_input right_input sum =
- try
- let _proof_keypair, _proof =
- Runner.generate_and_verify_proof (fun () ->
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test generic add gate gadget
+ * Input operands and expected output: left_input + right_input = sum
+ * Returns the constraint system (raises if the constraints are unsatisfied).
+ *)
+ let test_generic_add ?cs left_input right_input sum =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
 let open Runner.Impl in
 (* Set up snarky variables for inputs and outputs *)
 let left_input =
@@ -95,18 +139,50 @@ let%test_unit "generic gadgets" =
 (* Pad with a "dummy" constraint b/c Kimchi requires at least 2 *)
 Boolean.Assert.is_true (Field.equal sum sum) )
 in
- true
- with _ -> false
- in
- (* Helper to test generic multimplication gate gadget
- * Inputs operands and expected output: left_input * right_input = prod
- * Returns true if constraints are satisfied, false otherwise.
- *)
- let test_generic_mul left_input right_input prod =
- try
- let _proof_keypair, _proof =
- Runner.generate_and_verify_proof (fun () ->
+ cs
+ in
+
+ (* Helper to test generic sub gate gadget
+ * Input operands and expected output: left_input - right_input = difference
+ * Returns the constraint system (raises if the constraints are unsatisfied).
+ *)
+ let test_generic_sub ?cs left_input right_input difference =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Set up snarky variables for inputs and outputs *)
+ let left_input =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_int left_input )
+ in
+ let right_input =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_int right_input )
+ in
+ let difference =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_int difference )
+ in
+ (* Use the generic sub gate gadget *)
+ let result = sub (module Runner.Impl) left_input right_input in
+ Field.Assert.equal difference result ;
+ (* Pad with a "dummy" constraint b/c Kimchi requires at least 2 *)
+ Boolean.Assert.is_true (Field.equal difference difference) )
+ in
+
+ cs
+ in
+
+ (* Helper to test generic multiplication gate gadget
+ * Input operands and expected output: left_input * right_input = prod
+ * Returns the constraint system (raises if the constraints are unsatisfied).
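+ * For example (mirroring the tests further below), the returned constraint
+ * system can be passed back in via ?cs so that later calls reuse it instead
+ * of rebuilding it:
+ * let cs = test_generic_mul 0 0 0 in
+ * let _cs = test_generic_mul ~cs 1 2 2 in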
+ *)
+ let test_generic_mul ?cs left_input right_input prod =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
 let open Runner.Impl in
 (* Set up snarky variables for inputs and outputs *)
 let left_input =
@@ -126,24 +202,31 @@ let%test_unit "generic gadgets" =
 (* Pad with a "dummy" constraint b/c Kimchi requires at least 2 *)
 Boolean.Assert.is_true (Field.equal prod prod) )
 in
- true
- with _ -> false
- in
- (* TEST generic add gadget *)
- (* Positive tests *)
- assert (Bool.equal (test_generic_add 0 0 0) true) ;
- assert (Bool.equal (test_generic_add 1 2 3) true) ;
- (* Negatve tests *)
- assert (Bool.equal (test_generic_add 1 0 0) false) ;
- assert (Bool.equal (test_generic_add 2 4 7) false) ;
-
- (* TEST generic mul gadget *)
- (* Positive tests *)
- assert (Bool.equal (test_generic_mul 0 0 0) true) ;
- assert (Bool.equal (test_generic_mul 1 2 2) true) ;
- (* Negatve tests *)
- assert (Bool.equal (test_generic_mul 1 0 1) false) ;
- assert (Bool.equal (test_generic_mul 2 4 7) false) ;
+ cs
+ in
+
+ (* TEST generic add gadget *)
+ (* Positive tests *)
+ let cs = test_generic_add 0 0 0 in
+ let _cs = test_generic_add ~cs 1 2 3 in
+ (* Negative tests *)
+ assert (Common.is_error (fun () -> test_generic_add ~cs 1 0 0)) ;
+ assert (Common.is_error (fun () -> test_generic_add ~cs 2 4 7)) ;
+
+ (* TEST generic sub gadget *)
+ (* Positive tests *)
+ let cs = test_generic_sub 0 0 0 in
+ let _cs = test_generic_sub ~cs 2 1 1 in
+ (* Negative tests *)
+ assert (Common.is_error (fun () -> test_generic_sub ~cs 4 2 1)) ;
+ assert (Common.is_error (fun () -> test_generic_sub ~cs 13 4 10)) ;
+ (* TEST generic mul gadget *)
+ (* Positive tests *)
+ let cs = test_generic_mul 0 0 0 in
+ let _cs = test_generic_mul ~cs 1 2 2 in
+ (* Negative tests *)
+ assert (Common.is_error (fun () -> test_generic_mul ~cs 1 0 1)) ;
+ assert (Common.is_error (fun () -> test_generic_mul ~cs 2 4 7)) ) ;
 ()
diff --git a/src/lib/crypto/kimchi_backend/gadgets/generic.mli b/src/lib/crypto/kimchi_backend/gadgets/generic.mli
new file mode 100644
index 00000000000..32fd3035055
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/generic.mli
@@ -0,0 +1,32 @@
+(** Generic addition gate gadget
+ * Constrains left_input + right_input = sum
+ * Returns sum
+ *)
+val add :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t (* left_input *)
+ -> 'f Snarky_backendless.Cvar.t (* right_input *)
+ -> 'f Snarky_backendless.Cvar.t
+(* sum *)
+
+(** Generic subtraction gate gadget
+ * Constrains left_input - right_input = difference
+ * Returns difference
+ *)
+val sub :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t (* left_input *)
+ -> 'f Snarky_backendless.Cvar.t (* right_input *)
+ -> 'f Snarky_backendless.Cvar.t
+(* difference *)
+
+(** Generic multiplication gate gadget
+ * Constrains left_input * right_input = product
+ * Returns product
+ *)
+val mul :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t (* left_input *)
+ -> 'f Snarky_backendless.Cvar.t (* right_input *)
+ -> 'f Snarky_backendless.Cvar.t
+(* product *)
diff --git a/src/lib/crypto/kimchi_backend/gadgets/keccak.ml b/src/lib/crypto/kimchi_backend/gadgets/keccak.ml
new file mode 100644
index 00000000000..100d06c1439
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/keccak.ml
@@ -0,0 +1,831 @@
+open Core_kernel
+module Bignum_bigint = 
Snarky_backendless.Backend_extended.Bignum_bigint +module Snark_intf = Snarky_backendless.Snark_intf + +let tests_enabled = true + +(* Endianness type *) +type endianness = Big | Little + +(* DEFINITIONS OF CONSTANTS FOR KECCAK *) + +(* Length of the square matrix side of Keccak states *) +let keccak_dim = 5 + +(* value `l` in Keccak, ranges from 0 to 6 (7 possible values) *) +let keccak_ell = 6 + +(* width of the lane of the state, meaning the length of each word in bits (64) *) +let keccak_word = Int.pow 2 keccak_ell + +(* number of bytes that fit in a word (8) *) +let bytes_per_word = keccak_word / 8 + +(* length of the state in bits, meaning the 5x5 matrix of words in bits (1600) *) +let keccak_state_length = Int.pow keccak_dim 2 * keccak_word + +(* number of rounds of the Keccak permutation function depending on the value `l` (24) *) +let keccak_rounds = 12 + (2 * keccak_ell) + +(* Creates the 5x5 table of rotation offset for Keccak modulo 64 + * | x \ y | 0 | 1 | 2 | 3 | 4 | + * | ----- | -- | -- | -- | -- | -- | + * | 0 | 0 | 36 | 3 | 41 | 18 | + * | 1 | 1 | 44 | 10 | 45 | 2 | + * | 2 | 62 | 6 | 43 | 15 | 61 | + * | 3 | 28 | 55 | 25 | 21 | 56 | + * | 4 | 27 | 20 | 39 | 8 | 14 | +*) +let rot_table = + [| [| 0; 36; 3; 41; 18 |] + ; [| 1; 44; 10; 45; 2 |] + ; [| 62; 6; 43; 15; 61 |] + ; [| 28; 55; 25; 21; 56 |] + ; [| 27; 20; 39; 8; 14 |] + |] + +let round_consts = + [| "0000000000000001" + ; "0000000000008082" + ; "800000000000808A" + ; "8000000080008000" + ; "000000000000808B" + ; "0000000080000001" + ; "8000000080008081" + ; "8000000000008009" + ; "000000000000008A" + ; "0000000000000088" + ; "0000000080008009" + ; "000000008000000A" + ; "000000008000808B" + ; "800000000000008B" + ; "8000000000008089" + ; "8000000000008003" + ; "8000000000008002" + ; "8000000000000080" + ; "000000000000800A" + ; "800000008000000A" + ; "8000000080008081" + ; "8000000000008080" + ; "0000000080000001" + ; "8000000080008008" + |] + +(* Auxiliary function to check composition of 8 bytes into a 64-bit word *) +let check_bytes_to_word (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (word : Circuit.Field.t) (word_bytes : Circuit.Field.t array) = + let open Circuit in + let composition = + Array.foldi word_bytes ~init:Field.zero ~f:(fun i acc x -> + let shift = Field.constant @@ Common.two_pow (module Circuit) (8 * i) in + Field.(acc + (x * shift)) ) + in + Field.Assert.equal word composition + +(* Internal struct for Keccak State *) + +module State = struct + type 'a matrix = 'a array array + + (* Creates a state formed by a matrix of 5x5 Cvar zeros *) + let zeros (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) : + Circuit.Field.t matrix = + let open Circuit in + let state = + Array.make_matrix ~dimx:keccak_dim ~dimy:keccak_dim Field.zero + in + state + + (* Updates the cells of a state with new values *) + let update (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + ~(prev : Circuit.Field.t matrix) ~(next : Circuit.Field.t matrix) = + for x = 0 to keccak_dim - 1 do + prev.(x) <- next.(x) + done + + (* Converts a list of bytes to a matrix of Field elements *) + let of_bytes (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (bytestring : Circuit.Field.t list) : Circuit.Field.t matrix = + let open Circuit in + assert (List.length bytestring = 200) ; + let bytestring = Array.of_list bytestring in + let state = + Array.make_matrix ~dimx:keccak_dim ~dimy:keccak_dim 
Field.zero
+ in
+ for y = 0 to keccak_dim - 1 do
+ for x = 0 to keccak_dim - 1 do
+ let idx = bytes_per_word * ((keccak_dim * y) + x) in
+ (* Create an array containing the 8 bytes starting on idx that correspond to the word in [x,y] *)
+ let word_bytes = Array.sub bytestring ~pos:idx ~len:bytes_per_word in
+ for z = 0 to bytes_per_word - 1 do
+ (* Field element containing value 2^(8*z) *)
+ let shift_field =
+ Common.bignum_bigint_to_field
+ (module Circuit)
+ Bignum_bigint.(pow (of_int 2) (of_int (Int.( * ) 8 z)))
+ in
+ let shift = Field.constant shift_field in
+ state.(x).(y) <- Field.(state.(x).(y) + (shift * word_bytes.(z)))
+ done
+ done
+ done ;
+
+ state
+
+ (* Converts a state of cvars to a list of bytes as cvars and creates constraints for it *)
+ let as_prover_to_bytes (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t matrix) : Circuit.Field.t list =
+ let open Circuit in
+ assert (
+ Array.length state = keccak_dim && Array.length state.(0) = keccak_dim ) ;
+ let state_length_in_bytes = keccak_state_length / 8 in
+ let bytestring =
+ Array.init state_length_in_bytes ~f:(fun idx ->
+ exists Field.typ ~compute:(fun () ->
+ (* idx = z + 8 * ((dim * y) + x) *)
+ let z = idx % bytes_per_word in
+ let x = idx / bytes_per_word % keccak_dim in
+ let y = idx / bytes_per_word / keccak_dim in
+ (* [7 6 5 4 3 2 1 0] [x=0,y=1] [x=0,y=2] [x=0,y=3] [x=0,y=4]
+ * [x=1,y=0] [x=1,y=1] [x=1,y=2] [x=1,y=3] [x=1,y=4]
+ * [x=2,y=0] [x=2,y=1] [x=2,y=2] [x=2,y=3] [x=2,y=4]
+ * [x=3,y=0] [x=3,y=1] [x=3,y=2] [x=3,y=3] [x=3,y=4]
+ * [x=4,y=0] [x=4,y=1] [x=4,y=2] [x=4,y=3] [x=4,y=4]
+ *)
+ let word =
+ Common.cvar_field_to_bignum_bigint_as_prover
+ (module Circuit)
+ state.(x).(y)
+ in
+ let byte =
+ Common.bignum_bigint_to_field
+ (module Circuit)
+ Bignum_bigint.((word asr Int.(8 * z)) land of_int 0xff)
+ in
+ byte ) )
+ in
+ (* Check all words are composed correctly from bytes *)
+ for y = 0 to keccak_dim - 1 do
+ for x = 0 to keccak_dim - 1 do
+ let idx = bytes_per_word * ((keccak_dim * y) + x) in
+ (* Create an array containing the 8 bytes starting on idx that correspond to the word in [x,y] *)
+ let word_bytes = Array.sub bytestring ~pos:idx ~len:bytes_per_word in
+ (* Assert correct decomposition of bytes from state *)
+ check_bytes_to_word (module Circuit) state.(x).(y) word_bytes
+ done
+ done ;
+
+ Array.to_list bytestring
+
+ let xor (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (input1 : Circuit.Field.t matrix) (input2 : Circuit.Field.t matrix) :
+ Circuit.Field.t matrix =
+ assert (
+ Array.length input1 = keccak_dim && Array.length input1.(0) = keccak_dim ) ;
+ assert (
+ Array.length input2 = keccak_dim && Array.length input2.(0) = keccak_dim ) ;
+
+ (* Calls Bitwise.bxor64 on each pair (x,y) of the states input1 and input2
+ and outputs the output Cvars as a new matrix *)
+ Array.map2_exn input1 input2
+ ~f:(Array.map2_exn ~f:(Bitwise.bxor64 (module Circuit)))
+end
+
+(* KECCAK HASH FUNCTION IMPLEMENTATION *)
+
+(* Computes the number of required extra bytes to pad a message of `length` bytes *)
+let bytes_to_pad (rate : int) (length : int) =
+ (rate / 8) - (length mod (rate / 8))
+
+(* Pads a message M as:
+ * M || pad[x](|M|)
+ * Padding rule 0x06 ..0*..1.
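+ * (A worked example for illustration: with rate 1088 bits = 136 bytes, a
+ * 1-byte message receives 135 pad bytes: 0x06, then 133 zero bytes, then
+ * 0x80. A message that is already a multiple of 136 bytes receives one
+ * full extra block of padding.)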
+ * The padded message vector will start with the message vector
+ * followed by the 0*1 rule to fulfil a length that is a multiple of rate (in bytes)
+ * (This means a 0110 sequence, followed by as many 0s as needed, and a final 1 bit)
+ *)
+let pad_nist (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (message : Circuit.Field.t list) (rate : int) : Circuit.Field.t list =
+ let open Circuit in
+ (* Find out desired length of the padding in bytes *)
+ (* If message is already rate bits, need to pad full rate again *)
+ let extra_bytes = bytes_to_pad rate (List.length message) in
+ (* 0x06 0x00 ... 0x00 0x80 or 0x86 *)
+ let last_field = Common.two_pow (module Circuit) 7 in
+ let last = Field.constant last_field in
+ (* Create the padding vector *)
+ let pad = Array.init extra_bytes ~f:(fun _ -> Field.zero) in
+ pad.(0) <- Field.of_int 6 ;
+ pad.(extra_bytes - 1) <- Field.add pad.(extra_bytes - 1) last ;
+ (* Cast the padding array to a list *)
+ let pad = Array.to_list pad in
+ (* Return the padded message *)
+ message @ pad
+
+(* Pads a message M as:
+ * M || pad[x](|M|)
+ * Padding rule 10*1.
+ * The padded message vector will start with the message vector
+ * followed by the 10*1 rule to fulfil a length that is a multiple of rate (in bytes)
+ * (This means a 1 bit, followed by as many 0s as needed, and a final 1 bit)
+*)
+let pad_101 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (message : Circuit.Field.t list) (rate : int) : Circuit.Field.t list =
+ let open Circuit in
+ (* Find out desired length of the padding in bytes *)
+ (* If message is already rate bits, need to pad full rate again *)
+ let extra_bytes = bytes_to_pad rate (List.length message) in
+ (* 0x01 0x00 ... 0x00 0x80 or 0x81 *)
+ let last_field = Common.two_pow (module Circuit) 7 in
+ let last = Field.constant @@ last_field in
+ (* Create the padding vector *)
+ let pad = Array.init extra_bytes ~f:(fun _ -> Field.zero) in
+ pad.(0) <- Field.one ;
+ pad.(extra_bytes - 1) <- Field.add pad.(extra_bytes - 1) last ;
+ (* Cast the padding array to a list *)
+ (* Return the padded message *)
+ message @ Array.to_list pad
+
+(*
+ * First algorithm in the compression step of Keccak for 64-bit words.
+ * C[x] = A[x,0] xor A[x,1] xor A[x,2] xor A[x,3] xor A[x,4]
+ * D[x] = C[x-1] xor ROT(C[x+1], 1)
+ * E[x,y] = A[x,y] xor D[x]
+ * In the Keccak reference, it corresponds to the `theta` algorithm.
+ * We use the first index of the state array as the x coordinate and the second index as the y coordinate.
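+ * Indices into C are taken mod 5; the implementation below uses
+ * (x + 4) mod 5 for x-1 to avoid negative values, so for x = 0 it
+ * reads C[4] and C[1].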
+ *)
+let theta (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) : Circuit.Field.t State.matrix =
+ let state_a = state in
+ (* XOR the elements of each row together *)
+ (* for all x in {0..4}: C[x] = A[x,0] xor A[x,1] xor A[x,2] xor A[x,3] xor A[x,4] *)
+ let state_c =
+ Array.map state_a ~f:(Array.reduce_exn ~f:(Bitwise.bxor64 (module Circuit)))
+ in
+ (* for all x in {0..4}: D[x] = C[x-1] xor ROT(C[x+1], 1) *)
+ let state_d =
+ Array.init keccak_dim ~f:(fun x ->
+ Bitwise.(
+ bxor64
+ (module Circuit)
+ (* using ((x + m) mod m) to avoid negative values *)
+ state_c.((x + keccak_dim - 1) mod keccak_dim)
+ (rot64 (module Circuit) state_c.((x + 1) mod keccak_dim) 1 Left)) )
+ in
+ (* for all x in {0..4} and y in {0..4}: E[x,y] = A[x,y] xor D[x] *)
+ (* return E *)
+ Array.map2_exn state_a state_d ~f:(fun state_a state_d ->
+ Array.map state_a ~f:(Bitwise.bxor64 (module Circuit) state_d) )
+
+(*
+ * Second and third steps of the compression function of Keccak for 64-bit words.
+ * B[y,2x+3y] = ROT(E[x,y], r[x,y])
+ * which is equivalent to the `rho` algorithm followed by the `pi` algorithm in the Keccak reference as follows:
+ * rho:
+ * A[0,0] = a[0,0]
+ * | x | = | 1 |
+ * | y | = | 0 |
+ * for t = 0 to 23 do
+ * A[x,y] = ROT(a[x,y], (t+1)(t+2)/2 mod 64)
+ * | x | = | 0 1 | | x |
+ * | | = | | * | |
+ * | y | = | 2 3 | | y |
+ * end for
+ * pi:
+ * for x = 0 to 4 do
+ * for y = 0 to 4 do
+ * | X | = | 0 1 | | x |
+ * | | = | | * | |
+ * | Y | = | 2 3 | | y |
+ * A[X,Y] = a[x,y]
+ * end for
+ * end for
+ * We use the first index of the state array as the x coordinate and the second index as the y coordinate.
+ *)
+let pi_rho (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) : Circuit.Field.t State.matrix =
+ let state_e = state in
+ let state_b = State.zeros (module Circuit) in
+ (* for all x in {0..4} and y in {0..4}: B[y,2x+3y] = ROT(E[x,y], r[x,y]) *)
+ for x = 0 to keccak_dim - 1 do
+ for y = 0 to keccak_dim - 1 do
+ (* No need to add the modulus first since this index is always positive *)
+ state_b.(y).(((2 * x) + (3 * y)) mod keccak_dim) <-
+ Bitwise.rot64 (module Circuit) state_e.(x).(y) rot_table.(x).(y) Left
+ done
+ done ;
+ state_b
+
+(*
+ * Fourth step of the compression function of Keccak for 64-bit words.
+ * F[x,y] = B[x,y] xor ((not B[x+1,y]) and B[x+2,y])
+ * It corresponds to the chi algorithm in the Keccak reference.
+ * for y = 0 to 4 do
+ * for x = 0 to 4 do
+ * A[x,y] = a[x,y] xor ((not a[x+1,y]) and a[x+2,y])
+ * end for
+ * end for
+ *)
+let chi (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) : Circuit.Field.t State.matrix =
+ let state_b = state in
+ let state_f = State.zeros (module Circuit) in
+ (* for all x in {0..4} and y in {0..4}: F[x,y] = B[x,y] xor ((not B[x+1,y]) and B[x+2,y]) *)
+ for x = 0 to keccak_dim - 1 do
+ for y = 0 to keccak_dim - 1 do
+ state_f.(x).(y) <-
+ Bitwise.(
+ bxor64
+ (module Circuit)
+ state_b.(x).(y)
+ (band64
+ (module Circuit)
+ (bnot64_unchecked (module Circuit) state_b.((x + 1) mod 5).(y))
+ state_b.((x + 2) mod 5).(y) ))
+ done
+ done ;
+ (* We can use unchecked NOT because the length of the input is constrained to be
+ 64 bits thanks to the fact that it is the output of a previous Xor64 *)
+ state_f
+
+(*
+ * Fifth step of the permutation function of Keccak for 64-bit words.
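+ * It corresponds to the `iota` algorithm in the Keccak reference, using one
+ * 64-bit round constant per round (see round_consts above).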
+ * It takes the word located at the position (0,0) of the state and XORs it with the round constant.
+ *)
+let iota (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) (rc : Circuit.Field.t) :
+ Circuit.Field.t State.matrix =
+ (* Round constant for this round of the iota algorithm *)
+ let state_g = state in
+ state_g.(0).(0) <- Bitwise.(bxor64 (module Circuit) state_g.(0).(0) rc) ;
+ (* That this is the right round constant is implicit from reusing the right cvar *)
+ state_g
+
+(* The round applies the lambda function and then chi and iota.
+ * lambda is the composition of the theta, rho, and pi algorithms:
+ * lambda = pi o rho o theta
+ * Thus, the round is:
+ * iota o chi o pi o rho o theta
+ *)
+let round (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) (rc : Circuit.Field.t) :
+ Circuit.Field.t State.matrix =
+ let state_a = state in
+ let state_e = theta (module Circuit) state_a in
+ let state_b = pi_rho (module Circuit) state_e in
+ let state_f = chi (module Circuit) state_b in
+ let state_d = iota (module Circuit) state_f rc in
+ state_d
+
+(* Keccak permutation function with a constant number of rounds *)
+let permutation (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) (rc : Circuit.Field.t array) :
+ Circuit.Field.t State.matrix =
+ for i = 0 to keccak_rounds - 1 do
+ let state_i = round (module Circuit) state rc.(i) in
+ (* Update state for next step *)
+ State.update (module Circuit) ~prev:state ~next:state_i
+ done ;
+ state
+
+(* Absorb padded message into a keccak state with given rate and capacity *)
+let absorb (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (padded_message : Circuit.Field.t list) ~(capacity : int) ~(rate : int)
+ ~(rc : Circuit.Field.t array) : Circuit.Field.t State.matrix =
+ let open Circuit in
+ let root_state = State.zeros (module Circuit) in
+ let state = root_state in
+
+ (* split into blocks of rate bits *)
+ (* for each block of rate bits in the padded message -> this is rate/8 bytes *)
+ let chunks = List.chunks_of padded_message ~length:(rate / 8) in
+ (* (capacity / 8) zero bytes *)
+ let zeros = Array.to_list @@ Array.create ~len:(capacity / 8) Field.zero in
+ for i = 0 to List.length chunks - 1 do
+ let block = List.nth_exn chunks i in
+ (* pad the block with 0s up to 1600 bits *)
+ let padded_block = block @ zeros in
+ (* each block is padded with zeros until it is 1600 bits long *)
+ assert (List.length padded_block * 8 = keccak_state_length) ;
+ let block_state = State.of_bytes (module Circuit) padded_block in
+ (* xor the state with the padded block *)
+ let state_xor = State.xor (module Circuit) state block_state in
+ (* apply the permutation function to the xored state *)
+ let state_perm = permutation (module Circuit) state_xor rc in
+ State.update (module Circuit) ~prev:state ~next:state_perm
+ done ;
+
+ state
+
+(* Squeeze state until it has a desired length in bits *)
+let squeeze (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) ~(length : int) ~(rate : int)
+ ~(rc : Circuit.Field.t array) : Circuit.Field.t list =
+ let copy (bytestring : Circuit.Field.t list)
+ (output_array : Circuit.Field.t array) ~(start : int) ~(length : int) =
+ for i = 0 to length - 1 do
+ output_array.(start + i) <- 
List.nth_exn bytestring i
+ done ;
+ ()
+ in
+
+ let open Circuit in
+ (* bytes per squeeze *)
+ let bytes_per_squeeze = rate / 8 in
+ (* number of squeezes *)
+ let squeezes = (length / rate) + 1 in
+ (* multiple of rate that is larger than output_length, in bytes *)
+ let output_length = squeezes * bytes_per_squeeze in
+ (* array with sufficient space to store the output *)
+ let output_array = Array.create ~len:output_length Field.zero in
+ (* first state to be squeezed *)
+ let bytestring = State.as_prover_to_bytes (module Circuit) state in
+ let output_bytes = List.take bytestring bytes_per_squeeze in
+ copy output_bytes output_array ~start:0 ~length:bytes_per_squeeze ;
+ (* for the rest of the squeezes *)
+ for i = 1 to squeezes - 1 do
+ (* apply the permutation function to the state *)
+ let new_state = permutation (module Circuit) state rc in
+ State.update (module Circuit) ~prev:state ~next:new_state ;
+ (* append the output of the permutation function to the output *)
+ let bytestring_i = State.as_prover_to_bytes (module Circuit) state in
+ let output_bytes_i = List.take bytestring_i bytes_per_squeeze in
+ copy output_bytes_i output_array ~start:(bytes_per_squeeze * i)
+ ~length:bytes_per_squeeze ;
+ ()
+ done ;
+ (* Obtain the hash by selecting the first length/8 bytes of the output array *)
+ let hashed = Array.sub output_array ~pos:0 ~len:(length / 8) in
+
+ Array.to_list hashed
+
+(* Keccak sponge function for 1600 bits of state width
+ * Needs the message split into blocks of rate bits (e.g. 1088 for Keccak-256).
+ *)
+let sponge (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (padded_message : Circuit.Field.t list) ~(length : int) ~(capacity : int)
+ ~(rate : int) : Circuit.Field.t list =
+ let open Circuit in
+ (* check that the padded message is a multiple of rate *)
+ assert (List.length padded_message * 8 mod rate = 0) ;
+ (* setup cvars for round constants *)
+ let rc =
+ exists (Typ.array ~length:24 Field.typ) ~compute:(fun () ->
+ Array.map round_consts ~f:(Common.field_of_hex (module Circuit)) )
+ in
+ (* absorb *)
+ let state = absorb (module Circuit) padded_message ~capacity ~rate ~rc in
+ (* squeeze *)
+ let hashed = squeeze (module Circuit) state ~length ~rate ~rc in
+ hashed
+
+(* Checks in the circuit that a list of cvars are at most 8 bits each *)
+let check_bytes (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (inputs : Circuit.Field.t list) : unit =
+ let open Circuit in
+ (* Create a second list of shifted inputs with 4 more bits *)
+ let shifted =
+ Core_kernel.List.map ~f:(fun x -> Field.(of_int 16 * x)) inputs
+ in
+ (* We need to look up that both the inputs and the shifted values are less than 12 bits *)
+ (* Altogether this means that the inputs were less than 8 bits *)
+ let lookups = inputs @ shifted in
+ (* Make sure that a multiple of 3 cvars is in the list *)
+ let lookups =
+ match List.length lookups % 3 with
+ | 2 ->
+ lookups @ [ Field.zero ]
+ | 1 ->
+ lookups @ [ Field.zero; Field.zero ]
+ | _ ->
+ lookups
+ in
+ (* We can fit 3 12-bit lookups per row *)
+ for i = 0 to (List.length lookups / 3) - 1 do
+ Lookup.three_12bit
+ (module Circuit)
+ (List.nth_exn lookups (3 * i))
+ (List.nth_exn lookups ((3 * i) + 1))
+ (List.nth_exn lookups ((3 * i) + 2)) ;
+ ()
+ done ;
+ ()
+
+(*
+* Keccak hash function with input message passed as list of Cvar bytes.
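+* The sponge parameters are given in bits: the rate is derived as
+* 1600 - capacity, and the requested output length must be a multiple of 8.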
+* The message will be parsed as follows:
+* - the first byte of the message will be the least significant byte of the first word of the state (A[0][0])
+* - the 10*1 pad will take place after the message, until reaching the bit length rate.
+* - then, {0} pad will take place to finish the 1600 bits of the state.
+*)
+let hash (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(inp_endian = Big) ?(out_endian = Big) ?(byte_checks = false)
+ (message : Circuit.Field.t list) ~(length : int) ~(capacity : int)
+ (nist_version : bool) : Circuit.Field.t list =
+ assert (capacity > 0) ;
+ assert (capacity < keccak_state_length) ;
+ assert (length > 0) ;
+ assert (length mod 8 = 0) ;
+ (* Set input to Big Endian format *)
+ let message =
+ match inp_endian with Big -> message | Little -> List.rev message
+ in
+ (* Check each cvar input is 8 bits at most if it was not done before at creation time *)
+ if byte_checks then check_bytes (module Circuit) message ;
+ let rate = keccak_state_length - capacity in
+ let padded =
+ match nist_version with
+ | true ->
+ pad_nist (module Circuit) message rate
+ | false ->
+ pad_101 (module Circuit) message rate
+ in
+ let hash = sponge (module Circuit) padded ~length ~capacity ~rate in
+ (* Check each cvar output is 8 bits at most. Always, because they are created here *)
+ check_bytes (module Circuit) hash ;
+ (* Set output to desired endianness *)
+ let hash = match out_endian with Big -> hash | Little -> List.rev hash in
+ hash
+
+(* Gadget for NIST SHA-3 function for output lengths 224/256/384/512.
+ * Input and output endianness can be specified. Default is big endian.
+ *)
+let nist_sha3 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(inp_endian = Big) ?(out_endian = Big) ?(byte_checks = false) (len : int)
+ (message : Circuit.Field.t list) : Circuit.Field.t list =
+ let hash =
+ match len with
+ | 224 ->
+ hash
+ (module Circuit)
+ message ~length:224 ~capacity:448 true ~inp_endian ~out_endian
+ ~byte_checks
+ | 256 ->
+ hash
+ (module Circuit)
+ message ~length:256 ~capacity:512 true ~inp_endian ~out_endian
+ ~byte_checks
+ | 384 ->
+ hash
+ (module Circuit)
+ message ~length:384 ~capacity:768 true ~inp_endian ~out_endian
+ ~byte_checks
+ | 512 ->
+ hash
+ (module Circuit)
+ message ~length:512 ~capacity:1024 true ~inp_endian ~out_endian
+ ~byte_checks
+ | _ ->
+ assert false
+ in
+ hash
+
+(* Gadget for Keccak hash function for the parameters used in Ethereum.
+ * Input and output endianness can be specified. Default is big endian.
+ *)
+let ethereum (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(inp_endian = Big) ?(out_endian = Big) ?(byte_checks = false)
+ (message : Circuit.Field.t list) : Circuit.Field.t list =
+ hash
+ (module Circuit)
+ message ~length:256 ~capacity:512 false ~inp_endian ~out_endian ~byte_checks
+
+(* Gadget for pre-NIST SHA-3 function for output lengths 224/256/384/512.
+ * Input and output endianness can be specified. Default is big endian.
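+ * The only difference from nist_sha3 is the padding rule: the 10*1 padding
+ * (pad_101) is used instead of the 0x06-based NIST padding (pad_nist).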
+ * Note that when calling with output length 256 this is equivalent to the ethereum function + *) +let pre_nist (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + ?(inp_endian = Big) ?(out_endian = Big) ?(byte_checks = false) (len : int) + (message : Circuit.Field.t list) : Circuit.Field.t list = + match len with + | 224 -> + hash + (module Circuit) + message ~length:224 ~capacity:448 false ~inp_endian ~out_endian + ~byte_checks + | 256 -> + ethereum (module Circuit) message ~inp_endian ~out_endian ~byte_checks + | 384 -> + hash + (module Circuit) + message ~length:384 ~capacity:768 false ~inp_endian ~out_endian + ~byte_checks + | 512 -> + hash + (module Circuit) + message ~length:512 ~capacity:1024 false ~inp_endian ~out_endian + ~byte_checks + | _ -> + assert false + +(* KECCAK GADGET TESTS *) + +let%test_unit "keccak gadget" = + if tests_enabled then ( + let (* Import the gadget test runner *) + open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + let test_keccak ?cs ?inp_endian ?out_endian ~nist ~len message expected = + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + assert (String.length message % 2 = 0) ; + let message = + Array.to_list + @@ exists + (Typ.array ~length:(String.length message / 2) Field.typ) + ~compute:(fun () -> + Array.of_list + @@ Common.field_bytes_of_hex (module Runner.Impl) message + ) + in + let hashed = + Array.of_list + @@ + match nist with + | true -> + nist_sha3 + (module Runner.Impl) + len message ?inp_endian ?out_endian ~byte_checks:true + | false -> + pre_nist + (module Runner.Impl) + len message ?inp_endian ?out_endian ~byte_checks:true + in + + let expected = + Array.of_list + @@ Common.field_bytes_of_hex (module Runner.Impl) expected + in + (* Check expected hash output *) + as_prover (fun () -> + for i = 0 to Array.length hashed - 1 do + let byte_hash = + Common.cvar_field_to_bignum_bigint_as_prover + (module Runner.Impl) + hashed.(i) + in + let byte_exp = + Common.field_to_bignum_bigint + (module Runner.Impl) + expected.(i) + in + assert (Bignum_bigint.(byte_hash = byte_exp)) + done ; + () ) ; + () ) + in + cs + in + + (* Positive tests *) + let cs_eth256_1byte = + test_keccak ~nist:false ~len:256 "30" + "044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d" + in + + let cs_nist512_1byte = + test_keccak ~nist:true ~len:512 "30" + "2d44da53f305ab94b6365837b9803627ab098c41a6013694f9b468bccb9c13e95b3900365eb58924de7158a54467e984efcfdabdbcc9af9a940d49c51455b04c" + in + + (* I am the owner of the NFT with id X on the Ethereum chain *) + let _cs = + test_keccak ~nist:false ~len:256 + "4920616d20746865206f776e6572206f6620746865204e465420776974682069642058206f6e2074686520457468657265756d20636861696e" + "63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36" + in + let _cs = + test_keccak ~nist:false ~len:512 + "4920616d20746865206f776e6572206f6620746865204e465420776974682069642058206f6e2074686520457468657265756d20636861696e" + "848cf716c2d64444d2049f215326b44c25a007127d2871c1b6004a9c3d102f637f31acb4501e59f3a0160066c8814816f4dc58a869f37f740e09b9a8757fa259" + in + + (* The following two tests use 2 blocks instead *) + (* For Keccak *) + let _cs = + test_keccak ~nist:false ~len:256 + 
"044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116df9e2eaaa42d9fe9e558a9b8ef1bf366f190aacaa83bad2641ee106e9041096e42d44da53f305ab94b6365837b9803627ab098c41a6013694f9b468bccb9c13e95b3900365eb58924de7158a54467e984efcfdabdbcc9af9a940d49c51455b04c63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36" + "560deb1d387f72dba729f0bd0231ad45998dda4b53951645322cf95c7b6261d9" + in + (* For NIST *) + let _cs = + test_keccak ~nist:true ~len:256 + "044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116df9e2eaaa42d9fe9e558a9b8ef1bf366f190aacaa83bad2641ee106e9041096e42d44da53f305ab94b6365837b9803627ab098c41a6013694f9b468bccb9c13e95b3900365eb58924de7158a54467e984efcfdabdbcc9af9a940d49c51455b04c63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36" + "1784354c4bbfa5f54e5db23041089e65a807a7b970e3cfdba95e2fbe63b1c0e4" + in + + (* Padding of input 1080 bits and 1088 bits *) + (* 135 bits, uses the following single padding byte as 0x81 *) + let cs135 = + test_keccak ~nist:false ~len:256 + "391ccf9b5de23bb86ec6b2b142adb6e9ba6bee8519e7502fb8be8959fbd2672934cc3e13b7b45bf2b8a5cb48881790a7438b4a326a0c762e31280711e6b64fcc2e3e4e631e501d398861172ea98603618b8f23b91d0208b0b992dfe7fdb298b6465adafbd45e4f88ee9dc94e06bc4232be91587f78572c169d4de4d8b95b714ea62f1fbf3c67a4" + "7d5655391ede9ca2945f32ad9696f464be8004389151ce444c89f688278f2e1d" + in + + (* 136 bits, 2 blocks and second is just padding *) + let cs136 = + test_keccak ~nist:false ~len:256 + "ff391ccf9b5de23bb86ec6b2b142adb6e9ba6bee8519e7502fb8be8959fbd2672934cc3e13b7b45bf2b8a5cb48881790a7438b4a326a0c762e31280711e6b64fcc2e3e4e631e501d398861172ea98603618b8f23b91d0208b0b992dfe7fdb298b6465adafbd45e4f88ee9dc94e06bc4232be91587f78572c169d4de4d8b95b714ea62f1fbf3c67a4" + "37694fd4ba137be747eb25a85b259af5563e0a7a3010d42bd15963ac631b9d3f" + in + + (* Input already looks like padded *) + let _cs = + test_keccak ~cs:cs135 ~nist:false ~len:256 + "800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001" + "0edbbae289596c7da9fafe65931c5dce3439fb487b8286d6c1970e44eea39feb" + in + + let _cs = + test_keccak ~cs:cs136 ~nist:false ~len:256 + "80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001" + "bbf1f49a2cc5678aa62196d0c3108d89425b81780e1e90bcec03b4fb5f834714" + in + + (* Reusing *) + let _cs = + test_keccak ~cs:cs_eth256_1byte ~nist:false ~len:256 "00" + "bc36789e7a1e281436464229828f817d6612f7b477d66591ff96a9e064bcc98a" + in + + let cs2 = + test_keccak ~nist:false ~len:256 "a2c0" + "9856642c690c036527b8274db1b6f58c0429a88d9f3b9298597645991f4f58f0" + in + + let _cs = + test_keccak ~cs:cs2 ~nist:false ~len:256 "0a2c" + "295b48ad49eff61c3abfd399c672232434d89a4ef3ca763b9dbebb60dbb32a8b" + in + + (* Endianness *) + let _cs = + test_keccak ~nist:false ~len:256 ~inp_endian:Little ~out_endian:Little + "2c0a" + "8b2ab3db60bbbe9d3b76caf34e9ad834242372c699d3bf3a1cf6ef49ad485b29" + in + + (* Negative tests *) + (* Check cannot use bad hex inputs *) + assert ( + Common.is_error (fun () -> + test_keccak ~nist:false ~len:256 "a2c" + "07f02d241eeba9c909a1be75e08d9e8ac3e61d9e24fa452a6785083e1527c467" ) ) ; + + (* Check cannot use 
bad hex inputs *)
+ assert (
+ Common.is_error (fun () ->
+ test_keccak ~nist:true ~len:256 "0"
+ "f39f4526920bb4c096e5722d64161ea0eb6dbd0b4ff0d812f31d56fb96142084" ) ) ;
+
+ (* Cannot reuse CS for different output length *)
+ assert (
+ Common.is_error (fun () ->
+ test_keccak ~cs:cs_nist512_1byte ~nist:true ~len:256 "30"
+ "f9e2eaaa42d9fe9e558a9b8ef1bf366f190aacaa83bad2641ee106e9041096e4" ) ) ;
+
+ (* Check cannot reuse CS for same length but different padding *)
+ assert (
+ Common.is_error (fun () ->
+ test_keccak ~cs:cs_eth256_1byte ~nist:true ~len:256
+ "4920616d20746865206f776e6572206f6620746865204e465420776974682069642058206f6e2074686520457468657265756d20636861696e"
+ "63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36" ) ) ;
+
+ (* Cannot reuse cs with different endianness *)
+ assert (
+ Common.is_error (fun () ->
+ test_keccak ~cs:cs2 ~nist:false ~len:256 ~inp_endian:Little
+ ~out_endian:Little "2c0a"
+ "8b2ab3db60bbbe9d3b76caf34e9ad834242372c699d3bf3a1cf6ef49ad485b29" ) ) ;
+
+ () ) ;
+
+ ()
diff --git a/src/lib/crypto/kimchi_backend/gadgets/keccak.mli b/src/lib/crypto/kimchi_backend/gadgets/keccak.mli
new file mode 100644
index 00000000000..b3889a9cc4c
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/keccak.mli
@@ -0,0 +1,58 @@
+(* Endianness type *)
+type endianness = Big | Little
+
+(** Gadget for NIST SHA-3 function for output lengths 224/256/384/512
+ * Input:
+ * - Endianness of the input (default is Big).
+ * - Endianness of the output (default is Big).
+ * - Flag to enable input byte checks (default is false). Outputs are always constrained.
+ * - int representing the output length of the hash function (224|256|384|512)
+ * - Arbitrary length list of Cvars representing the input to the hash function where each of them is a byte
+ * Output:
+ * - List of Cvars representing the output of the hash function where each of them is a byte
+ *)
+val nist_sha3 :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?inp_endian:endianness
+ -> ?out_endian:endianness
+ -> ?byte_checks:bool
+ -> int
+ -> 'f Snarky_backendless.Cvar.t list
+ -> 'f Snarky_backendless.Cvar.t list
+
+(** Gadget for Keccak hash function for the parameters used in Ethereum
+ * Input:
+ * - Endianness of the input (default is Big).
+ * - Endianness of the output (default is Big).
+ * - Flag to enable input byte checks (default is false). Outputs are always constrained.
+ * - Arbitrary length list of Cvars representing the input to the hash function where each of them is a byte
+ * Output:
+ * - List of 32 Cvars representing the 256-bit output of the hash function where each of them is a byte
+ *)
+val ethereum :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?inp_endian:endianness
+ -> ?out_endian:endianness
+ -> ?byte_checks:bool
+ -> 'f Snarky_backendless.Cvar.t list
+ -> 'f Snarky_backendless.Cvar.t list
+
+(** Gadget for pre-NIST SHA-3 function for output lengths 224/256/384/512.
+ * Note that when calling with output length 256 this is equivalent to the ethereum function
+ * Input:
+ * - Endianness of the input (default is Big).
+ * - Endianness of the output (default is Big).
+ * - Flag to enable input byte checks (default is false). Outputs are always constrained.
+ * - int representing the output length of the hash function (224|256|384|512)
+ * - Arbitrary length list of Cvars representing the input to the hash function where each of them is a byte
+ * Output:
+ * - List of Cvars representing the output of the hash function where each of them is a byte
+ *)
+val pre_nist :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?inp_endian:endianness
+ -> ?out_endian:endianness
+ -> ?byte_checks:bool
+ -> int
+ -> 'f Snarky_backendless.Cvar.t list
+ -> 'f Snarky_backendless.Cvar.t list
diff --git a/src/lib/crypto/kimchi_backend/gadgets/lookup.ml b/src/lib/crypto/kimchi_backend/gadgets/lookup.ml
new file mode 100644
index 00000000000..dee61cc7983
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/lookup.ml
@@ -0,0 +1,101 @@
+open Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint
+
+let tests_enabled = true
+
+(* Looks up three values (at most 12 bits each)
+ * BEWARE: the circuit needs at least one gate (even a dummy one) that uses the 12-bit lookup table for this to work
+ *)
+let three_12bit (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (v0 : Circuit.Field.t) (v1 : Circuit.Field.t) (v2 : Circuit.Field.t) : unit
+ =
+ let open Circuit in
+ with_label "triple_lookup" (fun () ->
+ assert_
+ { annotation = Some __LOC__
+ ; basic =
+ Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+ (Lookup
+ { w0 = Field.one
+ ; w1 = v0
+ ; w2 = Field.zero
+ ; w3 = v1
+ ; w4 = Field.zero
+ ; w5 = v2
+ ; w6 = Field.zero
+ } )
+ } ) ;
+ ()
+
+(* Check that one value is at most X bits (at most 12), default is 12.
+ * BEWARE: the circuit needs at least one gate (even a dummy one) that uses the 12-bit lookup table for this to work
+ *)
+let less_than_bits (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(bits = 12) (value : Circuit.Field.t) : unit =
+ let open Circuit in
+ assert (bits > 0 && bits <= 12) ;
+ (* In order to check that a value is less than x bits (value < 2^x),
+ first check that value < 2^12 using the lookup table,
+ and then that value * shift < 2^12 where shift = 2^(12-x)
+ (moving shift to the right-hand side gives value < 2^x) *)
+ let shift =
+ exists Field.typ ~compute:(fun () ->
+ let power = Core_kernel.Int.pow 2 (12 - bits) in
+ Field.Constant.of_int power )
+ in
+ three_12bit (module Circuit) value Field.(value * shift) Field.zero ;
+ ()
+
+(*********)
+(* Tests *)
+(*********)
+
+let%test_unit "lookup gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test lookup less than gadget for both variables and constants
+ * Inputs: value to be checked and number of bits
+ * Returns the constraint system (raises if the constraints are unsatisfied).
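+ * For example, with ~bits:8 both the value and value * 16 are looked up
+ * (shift = 2^(12-8) = 16): 255 passes, since 255 * 16 = 4080 < 4096, while
+ * 256 fails, since 256 * 16 = 4096 falls outside the 12-bit table.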
+ *)
+ let test_lookup ?cs ~bits value =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Set up snarky constant *)
+ let const = Field.constant @@ Field.Constant.of_int value in
+ (* Set up snarky variable *)
+ let value =
+ exists Field.typ ~compute:(fun () -> Field.Constant.of_int value)
+ in
+ (* Use the lookup gadget *)
+ less_than_bits (module Runner.Impl) ~bits value ;
+ less_than_bits (module Runner.Impl) ~bits const ;
+ (* Use a dummy range check to load the table *)
+ Range_check.bits64 (module Runner.Impl) Field.zero ;
+ () )
+ in
+ cs
+ in
+
+ (* TEST lookup gadget *)
+ (* Positive tests *)
+ let cs12 = test_lookup ~bits:12 4095 in
+ let cs8 = test_lookup ~bits:8 255 in
+ let cs1 = test_lookup ~bits:1 0 in
+ let _cs = test_lookup ~cs:cs1 ~bits:1 1 in
+ (* Negative tests *)
+ assert (Common.is_error (fun () -> test_lookup ~cs:cs12 ~bits:12 4096)) ;
+ assert (Common.is_error (fun () -> test_lookup ~cs:cs12 ~bits:12 (-1))) ;
+ assert (Common.is_error (fun () -> test_lookup ~cs:cs8 ~bits:8 256)) ;
+ assert (Common.is_error (fun () -> test_lookup ~cs:cs1 ~bits:1 2)) ;
+ () ) ;
+
+ ()
diff --git a/src/lib/crypto/kimchi_backend/gadgets/lookup.mli b/src/lib/crypto/kimchi_backend/gadgets/lookup.mli
new file mode 100644
index 00000000000..dcaa9cc77be
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/lookup.mli
@@ -0,0 +1,20 @@
+(* TODO: perhaps move this to an internal file, as the dummy gate could be misleading for users *)
+
+(** Looks up three values (at most 12 bits each)
+ * BEWARE: the circuit needs at least one gate (even a dummy one) that uses the 12-bit lookup table for this to work
+ *)
+val three_12bit :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t (* v0 *)
+ -> 'f Snarky_backendless.Cvar.t (* v1 *)
+ -> 'f Snarky_backendless.Cvar.t (* v2 *)
+ -> unit
+
+(** Check that one value is at most X bits (at most 12). Default is 12.
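+ * e.g. ~bits:8 constrains the value to be in [0, 2^8), so 255 is accepted and 256 rejected.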
+ * BEWARE: the circuit needs at least one gate (even a dummy one) that uses the 12-bit lookup table for this to work
+ *)
val less_than_bits :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?bits:int (* bits *)
+ -> 'f Snarky_backendless.Cvar.t (* value *)
+ -> unit
diff --git a/src/lib/crypto/kimchi_backend/gadgets/range_check.ml b/src/lib/crypto/kimchi_backend/gadgets/range_check.ml
new file mode 100644
index 00000000000..83cd95b9bfa
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/range_check.ml
@@ -0,0 +1,425 @@
+open Core_kernel
+
+open Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint
+
+let tests_enabled = true
+
+(* Helper to create RangeCheck0 gate, configured in various ways
+ * - is_compact : compact limbs mode (only used by compact multi-range-check)
+ *)
+let range_check0 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ~(label : string) ?(is_compact : bool = false) (v0 : Circuit.Field.t)
+ (v0p0 : Circuit.Field.t) (v0p1 : Circuit.Field.t) =
+ let open Circuit in
+ (* Define shorthand helper *)
+ let of_bits =
+ Common.as_prover_cvar_field_bits_le_to_cvar_field (module Circuit)
+ in
+
+ (* Sanity check that v0p0 and v0p1 correspond to the correct bits of v0 *)
+ as_prover (fun () ->
+ let open Circuit.Field in
+ let v0p0_expected = of_bits v0 76 88 in
+ let v0p1_expected = of_bits v0 64 76 in
+
+ Assert.equal v0p0 v0p0_expected ;
+ Assert.equal v0p1 v0p1_expected ) ;
+
+ (* Create sublimbs *)
+ let v0p2 = of_bits v0 52 64 in
+ let v0p3 = of_bits v0 40 52 in
+ let v0p4 = of_bits v0 28 40 in
+ let v0p5 = of_bits v0 16 28 in
+ let v0c0 = of_bits v0 14 16 in
+ let v0c1 = of_bits v0 12 14 in
+ let v0c2 = of_bits v0 10 12 in
+ let v0c3 = of_bits v0 8 10 in
+ let v0c4 = of_bits v0 6 8 in
+ let v0c5 = of_bits v0 4 6 in
+ let v0c6 = of_bits v0 2 4 in
+ let v0c7 = of_bits v0 0 2 in
+
+ (* Set up compact mode coefficient *)
+ let compact =
+ if is_compact then Field.Constant.one else Field.Constant.zero
+ in
+
+ (* Create RangeCheck0 gate *)
+ with_label label (fun () ->
+ assert_
+ { annotation = Some __LOC__
+ ; basic =
+ Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+ (RangeCheck0
+ { (* Current row *) v0
+ ; v0p0
+ ; v0p1
+ ; v0p2
+ ; v0p3
+ ; v0p4
+ ; v0p5
+ ; v0c0
+ ; v0c1
+ ; v0c2
+ ; v0c3
+ ; v0c4
+ ; v0c5
+ ; v0c6
+ ; v0c7
+ ; (* Coefficients *)
+ compact
+ } )
+ } )
+
+(* Helper to create RangeCheck1 gate *)
+let range_check1 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ~(label : string) (v0p0 : Circuit.Field.t) (v0p1 : Circuit.Field.t)
+ (v1p0 : Circuit.Field.t) (v1p1 : Circuit.Field.t) (v2 : Circuit.Field.t)
+ (v12 : Circuit.Field.t) =
+ let open Circuit in
+ (* Define shorthand helper *)
+ let of_bits =
+ Common.as_prover_cvar_field_bits_le_to_cvar_field (module Circuit)
+ in
+
+ (* Create sublimbs - current row *)
+ let v2c0 = of_bits v2 86 88 in
+ let v2p0 = of_bits v2 74 86 in
+ let v2p1 = of_bits v2 62 74 in
+ let v2p2 = of_bits v2 50 62 in
+ let v2p3 = of_bits v2 38 50 in
+ let v2c1 = of_bits v2 36 38 in
+ let v2c2 = of_bits v2 34 36 in
+ let v2c3 = of_bits v2 32 34 in
+ let v2c4 = of_bits v2 30 32 in
+ let v2c5 = of_bits v2 28 30 in
+ let v2c6 = of_bits v2 26 28 in
+ let v2c7 = of_bits v2 24 26 in
+ let v2c8 = of_bits v2 22 24 in
+
+ (* Create sublimbs - next row *)
+ let v2c9 = of_bits v2 20 22 in
+ let v2c10 = of_bits v2 18 20 in
+ let v2c11 = of_bits v2 16 18 in
+ let v2c12 = of_bits 
v2 14 16 in
+ let v2c13 = of_bits v2 12 14 in
+ let v2c14 = of_bits v2 10 12 in
+ let v2c15 = of_bits v2 8 10 in
+ let v2c16 = of_bits v2 6 8 in
+ let v2c17 = of_bits v2 4 6 in
+ let v2c18 = of_bits v2 2 4 in
+ let v2c19 = of_bits v2 0 2 in
+
+ (* Create RangeCheck1 gate *)
+ with_label label (fun () ->
+ assert_
+ { annotation = Some __LOC__
+ ; basic =
+ Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+ (RangeCheck1
+ { (* Current row *) v2
+ ; v12
+ ; v2c0
+ ; v2p0
+ ; v2p1
+ ; v2p2
+ ; v2p3
+ ; v2c1
+ ; v2c2
+ ; v2c3
+ ; v2c4
+ ; v2c5
+ ; v2c6
+ ; v2c7
+ ; v2c8
+ ; (* Next row *) v2c9
+ ; v2c10
+ ; v2c11
+ ; v0p0
+ ; v0p1
+ ; v1p0
+ ; v1p1
+ ; v2c12
+ ; v2c13
+ ; v2c14
+ ; v2c15
+ ; v2c16
+ ; v2c17
+ ; v2c18
+ ; v2c19
+ } )
+ } )
+
+(* 64-bit range-check gadget - checks v0 \in [0, 2^64) *)
+let bits64 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (v0 : Circuit.Field.t) =
+ range_check0
+ (module Circuit)
+ ~label:"range_check64" ~is_compact:false v0 Circuit.Field.zero
+ Circuit.Field.zero
+
+(* multi-range-check gadget - checks v0,v1,v2 \in [0, 2^88) *)
+let multi (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (v0 : Circuit.Field.t) (v1 : Circuit.Field.t) (v2 : Circuit.Field.t) =
+ let open Circuit in
+ let of_bits =
+ Common.as_prover_cvar_field_bits_le_to_cvar_field (module Circuit)
+ in
+ let v0p0 = of_bits v0 76 88 in
+ let v0p1 = of_bits v0 64 76 in
+ range_check0
+ (module Circuit)
+ ~label:"multi_range_check" ~is_compact:false v0 v0p0 v0p1 ;
+ let v1p0 = of_bits v1 76 88 in
+ let v1p1 = of_bits v1 64 76 in
+ range_check0
+ (module Circuit)
+ ~label:"multi_range_check" ~is_compact:false v1 v1p0 v1p1 ;
+ let zero = exists Field.typ ~compute:(fun () -> Field.Constant.zero) in
+ range_check1
+ (module Circuit)
+ ~label:"multi_range_check" v0p0 v0p1 v1p0 v1p1 v2 zero
+
+(* compact multi-range-check gadget - checks
+ * - v0,v1,v2 \in [0, 2^88)
+ * - v01 = v0 + 2^88 * v1
+ *)
+let compact_multi (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (v01 : Circuit.Field.t) (v2 : Circuit.Field.t) =
+ let open Circuit in
+ (* Set up helper *)
+ let bignum_bigint_to_field = Common.bignum_bigint_to_field (module Circuit) in
+ (* Prepare range-check values *)
+ let v1, v0 =
+ exists
+ Typ.(Field.typ * Field.typ)
+ ~compute:(fun () ->
+ (* Decompose v0 and v1 from v01 = 2^L * v1 + v0 *)
+ let v01 =
+ Common.field_to_bignum_bigint
+ (module Circuit)
+ (As_prover.read Field.typ v01)
+ in
+ let v1, v0 = Common.(bignum_bigint_div_rem v01 two_to_limb) in
+ (bignum_bigint_to_field v1, bignum_bigint_to_field v0) )
+ in
+ let of_bits =
+ Common.as_prover_cvar_field_bits_le_to_cvar_field (module Circuit)
+ in
+ let v2p0 = of_bits v2 76 88 in
+ let v2p1 = of_bits v2 64 76 in
+ range_check0
+ (module Circuit)
+ ~label:"compact_multi_range_check" ~is_compact:false v2 v2p0 v2p1 ;
+ let v0p0 = of_bits v0 76 88 in
+ let v0p1 = of_bits v0 64 76 in
+ range_check0
+ (module Circuit)
+ ~label:"compact_multi_range_check" ~is_compact:true v0 v0p0 v0p1 ;
+ range_check1
+ (module Circuit)
+ ~label:"compact_multi_range_check" v2p0 v2p1 v0p0 v0p1 v1 v01
+
+(*********)
+(* Tests *)
+(*********)
+
+let%test_unit "range_check64 gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. 
*) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Helper to test range_check64 gadget + * Input: value to be range checked in [0, 2^64) + *) + let test_range_check64 ?cs base10 = + let open Runner.Impl in + let value = Common.field_of_base10 (module Runner.Impl) base10 in + + let make_circuit value = + (* Circuit definition *) + let value = exists Field.typ ~compute:(fun () -> value) in + bits64 (module Runner.Impl) value ; + (* Padding *) + Boolean.Assert.is_true (Field.equal value value) + in + + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> make_circuit value) + in + cs + in + + (* Positive tests *) + let cs = test_range_check64 "0" in + let _cs = test_range_check64 ~cs "4294967" in + let _cs = test_range_check64 ~cs "18446744073709551615" in + (* 2^64 - 1 *) + (* Negative tests *) + assert ( + Common.is_error (fun () -> + test_range_check64 ~cs "18446744073709551616" (* 2^64 *) ) ) ; + assert ( + Common.is_error (fun () -> + test_range_check64 ~cs "170141183460469231731687303715884105728" + (* 2^127 *) ) ) ) ; + () + +let%test_unit "multi_range_check gadget" = + if tests_enabled then ( + let (* Import the gadget test runner *) + open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Helper to test multi_range_check gadget *) + let test_multi_range_check ?cs v0 v1 v2 = + let open Runner.Impl in + let v0 = Common.field_of_base10 (module Runner.Impl) v0 in + let v1 = Common.field_of_base10 (module Runner.Impl) v1 in + let v2 = Common.field_of_base10 (module Runner.Impl) v2 in + + let make_circuit v0 v1 v2 = + (* Circuit definition *) + let values = + exists (Typ.array ~length:3 Field.typ) ~compute:(fun () -> + [| v0; v1; v2 |] ) + in + multi (module Runner.Impl) values.(0) values.(1) values.(2) + in + + (* Generate and verify proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> make_circuit v0 v1 v2) + in + + cs + in + + (* Positive tests *) + let cs = + test_multi_range_check "0" "4294967" "309485009821345068724781055" + in + let _cs = + test_multi_range_check ~cs "267475740839011166017999907" + "120402749546803056196583080" "1159834292458813579124542" + in + let _cs = + test_multi_range_check ~cs "309485009821345068724781055" + "309485009821345068724781055" "309485009821345068724781055" + in + let _cs = test_multi_range_check ~cs "0" "0" "0" in + (* Negative tests *) + assert ( + Common.is_error (fun () -> + test_multi_range_check ~cs "0" "4294967" "309485009821345068724781056" ) ) ; + assert ( + Common.is_error (fun () -> + test_multi_range_check ~cs "0" "309485009821345068724781056" + "309485009821345068724781055" ) ) ; + assert ( + Common.is_error (fun () -> + test_multi_range_check ~cs "309485009821345068724781056" "4294967" + "309485009821345068724781055" ) ) ; + assert ( + Common.is_error (fun () -> + test_multi_range_check ~cs + "28948022309329048855892746252171976963317496166410141009864396001978282409984" + "0170141183460469231731687303715884105728" + "170141183460469231731687303715884105728" ) ) ; + assert ( + Common.is_error (fun () -> + test_multi_range_check ~cs "0" "0" + "28948022309329048855892746252171976963317496166410141009864396001978282409984" ) ) ; + assert ( + Common.is_error (fun () -> + test_multi_range_check ~cs "0170141183460469231731687303715884105728" + "0" + 
"28948022309329048855892746252171976963317496166410141009864396001978282409984" ) ) + ) ; + () + +let%test_unit "compact_multi_range_check gadget" = + if tests_enabled then ( + let (* Import the gadget test runner *) + open Kimchi_gadgets_test_runner in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Helper to test compact_multi_range_check gadget *) + let test_compact_multi_range_check v01 v2 : unit = + let open Runner.Impl in + let v01 = Common.field_of_base10 (module Runner.Impl) v01 in + let v2 = Common.field_of_base10 (module Runner.Impl) v2 in + + let make_circuit v01 v2 = + (* Circuit definition *) + let v01, v2 = + exists Typ.(Field.typ * Field.typ) ~compute:(fun () -> (v01, v2)) + in + compact_multi (module Runner.Impl) v01 v2 + in + + (* Generate and verify first proof *) + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof (fun () -> make_circuit v01 v2) + in + + (* Set up another witness *) + let mutate_witness value = + Field.Constant.(if equal zero value then value + one else value - one) + in + let v01 = mutate_witness v01 in + let v2 = mutate_witness v2 in + + (* Generate and verify second proof, reusing constraint system *) + let _cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ~cs (fun () -> make_circuit v01 v2) + in + + () + in + + (* Positive tests *) + test_compact_multi_range_check "0" "0" ; + test_compact_multi_range_check + "95780971304118053647396689196894323976171195136475135" (* 2^176 - 1 *) + "309485009821345068724781055" + (* 2^88 - 1 *) ; + (* Negative tests *) + assert ( + Common.is_error (fun () -> + test_compact_multi_range_check + "28948022309329048855892746252171976963317496166410141009864396001978282409984" + "0" ) ) ; + assert ( + Common.is_error (fun () -> + test_compact_multi_range_check "0" + "28948022309329048855892746252171976963317496166410141009864396001978282409984" ) ) ; + assert ( + Common.is_error (fun () -> + test_compact_multi_range_check + "95780971304118053647396689196894323976171195136475136" (* 2^176 *) + "309485009821345068724781055" ) (* 2^88 - 1 *) ) ; + assert ( + Common.is_error (fun () -> + test_compact_multi_range_check + "95780971304118053647396689196894323976171195136475135" + (* 2^176 - 1 *) + "309485009821345068724781056" ) (* 2^88 *) ) ) ; + () diff --git a/src/lib/crypto/kimchi_backend/gadgets/range_check.mli b/src/lib/crypto/kimchi_backend/gadgets/range_check.mli new file mode 100644 index 00000000000..71d74849bfd --- /dev/null +++ b/src/lib/crypto/kimchi_backend/gadgets/range_check.mli @@ -0,0 +1,22 @@ +(** 64-bit range-check gadget - checks value \in [0, 2^64) *) +val bits64 : + (module Snarky_backendless.Snark_intf.Run with type field = 'f) + -> 'f Snarky_backendless.Cvar.t (* value *) + -> unit + +(** multi-range-check gadget - checks v0,v1,v2 \in [0, 2^88) *) +val multi : + (module Snarky_backendless.Snark_intf.Run with type field = 'f) + -> 'f Snarky_backendless.Cvar.t (* v0 *) + -> 'f Snarky_backendless.Cvar.t (* v1 *) + -> 'f Snarky_backendless.Cvar.t (* v2 *) + -> unit + +(** compact multi-range-check gadget - checks + * - v0,v1,v2 \in [0, 2^88) + * - v01 = v0 + 2^88 * v1 *) +val compact_multi : + (module Snarky_backendless.Snark_intf.Run with type field = 'f) + -> 'f Snarky_backendless.Cvar.t (* v01 *) + -> 'f Snarky_backendless.Cvar.t (* v2 *) + -> unit diff --git a/src/lib/crypto/kimchi_backend/gadgets/runner/example/example.ml b/src/lib/crypto/kimchi_backend/gadgets/runner/example/example.ml 
index fa0f2fb0254..6fbd886af1b 100644 --- a/src/lib/crypto/kimchi_backend/gadgets/runner/example/example.ml +++ b/src/lib/crypto/kimchi_backend/gadgets/runner/example/example.ml @@ -11,9 +11,9 @@ let () = Tick.Keypair.set_urs_info [] Note that this adds more than 1 constraint, because there is an assertion in kimchi that there is more than 1 gate (which is probably an error). *) -let example ~valid_witness () = - let _proof_keypair, _proof = - generate_and_verify_proof (fun () -> +let example ?cs ~valid_witness () = + let cs, _proof_keypair, _proof = + generate_and_verify_proof ?cs (fun () -> let open Impl in (* Create a fresh snarky variable. *) let a = @@ -36,16 +36,16 @@ let example ~valid_witness () = (* Assert equality directly via the permutation argument. *) Field.Assert.equal a_squared a_plus_b ) in - () + cs (* Generate a proof with a valid witness. *) -let () = example ~valid_witness:true () +let _cs = example ~valid_witness:true () (* Sanity-check: ensure that the proof with an invalid witness fails. *) let () = let test_failed = try - example ~valid_witness:false () ; + let _cs = example ~valid_witness:false () in false with _ -> true in diff --git a/src/lib/crypto/kimchi_backend/gadgets/runner/runner.ml b/src/lib/crypto/kimchi_backend/gadgets/runner/runner.ml index 1a4607a221e..2a2c0956673 100644 --- a/src/lib/crypto/kimchi_backend/gadgets/runner/runner.ml +++ b/src/lib/crypto/kimchi_backend/gadgets/runner/runner.ml @@ -2,11 +2,15 @@ module Tick = Kimchi_backend.Pasta.Vesta_based_plonk module Impl = Snarky_backendless.Snark.Run.Make (Tick) -let generate_and_verify_proof circuit = +let generate_and_verify_proof ?cs circuit = (* Generate constraint system for the circuit *) let constraint_system = - Impl.constraint_system ~input_typ:Impl.Typ.unit ~return_typ:Impl.Typ.unit - (fun () () -> circuit ()) + match cs with + | Some cs -> + cs + | None -> + Impl.constraint_system ~input_typ:Impl.Typ.unit + ~return_typ:Impl.Typ.unit (fun () () -> circuit ()) in (* Generate the indexes from the constraint system *) let proof_keypair = @@ -28,10 +32,11 @@ let generate_and_verify_proof circuit = (fun () () -> circuit ()) () in + (* Verify proof *) let verifier_index = Tick.Keypair.vk proof_keypair in (* We have an empty public input; create an empty vector. *) let public_input = Kimchi_bindings.FieldVectors.Fp.create () in (* Assert that the proof verifies. *) assert (Tick.Proof.verify ~message:[] proof verifier_index public_input) ; - (proof_keypair, proof) + (constraint_system, proof_keypair, proof) diff --git a/src/lib/crypto/kimchi_backend/gadgets/secp256k1.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/secp256k1.ml.disabled new file mode 100644 index 00000000000..78c3214645a --- /dev/null +++ b/src/lib/crypto/kimchi_backend/gadgets/secp256k1.ml.disabled @@ -0,0 +1,30 @@ +(* secp256k1 elliptic curve parameters *) + +module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint +module Snark_intf = Snarky_backendless.Snark_intf + +let params = + Curve_params. 
+ { modulus = + Bignum_bigint.of_string + "115792089237316195423570985008687907853269984665640564039457584007908834671663" + ; order = + Bignum_bigint.of_string + "115792089237316195423570985008687907852837564279074904382605163141518161494337" + ; a = Bignum_bigint.of_int 0 + ; b = Bignum_bigint.of_int 7 + ; gen = + ( Bignum_bigint.of_string + "55066263022277343669578718895168534326250603453777594175500187360389116729240" + , Bignum_bigint.of_string + "32670510020758816978083085130507043184471273380659243275938904335757337482424" + ) + ; ia = + Curve_params.ia_of_strings + ( "73748207725492941843355928046090697797026070566443284126849221438943867210749" + , "71805440039692371678177852429904809925653495989672587996663750265844216498843" + ) + ( "73748207725492941843355928046090697797026070566443284126849221438943867210749" + , "43986649197623823745393132578783097927616488675967976042793833742064618172820" + ) + } diff --git a/src/lib/crypto/kimchi_backend/gadgets/test.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/test.ml.disabled new file mode 100644 index 00000000000..d38285ec684 --- /dev/null +++ b/src/lib/crypto/kimchi_backend/gadgets/test.ml.disabled @@ -0,0 +1,156 @@ +let tests_enabled = true + +let%test_unit "custom gates integration" = + ( if tests_enabled then + let (* Import the gadget test runner *) + open Kimchi_gadgets_test_runner in + let open Foreign_field in + (* Initialize the SRS cache. *) + let () = + try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> () + in + + (* Convert Bignum_bigint.t to Bignum_bigint standard_limbs *) + let bignum_bigint_to_standard_limbs (bigint : Bignum_bigint.t) : + Bignum_bigint.t standard_limbs = + let l12, l0 = Common.(bignum_bigint_div_rem bigint two_to_limb) in + let l2, l1 = Common.(bignum_bigint_div_rem l12 two_to_limb) in + (l0, l1, l2) + in + + (* Convert Bignum_bigint.t to field standard_limbs *) + let bignum_bigint_to_field_const_standard_limbs (type f) + (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) + (bigint : Bignum_bigint.t) : f standard_limbs = + let l0, l1, l2 = bignum_bigint_to_standard_limbs bigint in + ( Common.bignum_bigint_to_field (module Circuit) l0 + , Common.bignum_bigint_to_field (module Circuit) l1 + , Common.bignum_bigint_to_field (module Circuit) l2 ) + in + + (* Helper to test all custom gates for Ethereum primitives. 
+ * The circuit being created is the following: + * - rotate first 64-bit word by 5 bits to the right + * - multiply by 2^176 + * - xor it with the second word which is a native field element (255 bits) + * - and it with the first word (254 bits) + * - not the output for 254 bits + * - create limbs for the output and decompose + * - multiply it with itself (256 bits) + * - ffadd it with the third input which is a foreign element (256 bits) + * - multi range check the 3 limbs of the output + *) + let test_gates ?cs word_64bit native_elem foreign_elem = + let cs, _proof_keypair, _proof = + Runner.generate_and_verify_proof ?cs (fun () -> + let open Runner.Impl in + let open Bitwise in + let secp256k1_modulus = + bignum_bigint_to_field_const_standard_limbs (module Runner.Impl) + @@ Common.bignum_bigint_of_hex + "fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f" + in + + (* Set up snarky variables for inputs and outputs *) + let word_64bit = + exists Field.typ ~compute:(fun () -> + Common.field_of_hex (module Runner.Impl) word_64bit ) + in + let native_elem = + exists Field.typ ~compute:(fun () -> + Common.field_of_hex (module Runner.Impl) native_elem ) + in + let foreign_elem = + Element.Standard.of_bignum_bigint (module Runner.Impl) + @@ Common.bignum_bigint_of_hex foreign_elem + in + let out_rot = rot64 (module Runner.Impl) word_64bit 5 Right in + + let two_to_88 = + exists Field.typ ~compute:(fun () -> + Common.field_of_hex + (module Runner.Impl) + "10000000000000000000000" ) + in + let two_to_176 = Field.(two_to_88 * two_to_88) in + + let out_mul = Generic.mul (module Runner.Impl) out_rot two_to_176 in + let out_xor = bxor (module Runner.Impl) out_mul native_elem 255 in + let out_and = band (module Runner.Impl) out_xor word_64bit 254 in + let out_not_c = bnot_checked (module Runner.Impl) out_and 254 in + let out_not_u = bnot_unchecked (module Runner.Impl) out_and 254 in + Field.Assert.equal out_not_u out_not_c ; + + let l0, l1, l2 = + exists (Typ.array ~length:3 Field.typ) ~compute:(fun () -> + let big = + Common.cvar_field_to_bignum_bigint_as_prover + (module Runner.Impl) + out_not_c + in + let two_to_88 = Bignum_bigint.(pow (of_int 2) (of_int 88)) in + let two_to_176 = + Bignum_bigint.(pow (of_int 2) (of_int 176)) + in + let l2 = Bignum_bigint.(big / two_to_176) in + let l1 = + Bignum_bigint.((big - (l2 * two_to_176)) / two_to_88) + in + let l0 = + Bignum_bigint.(big - (l2 * two_to_176) - (l1 * two_to_88)) + in + let l2 = + Common.bignum_bigint_to_field (module Runner.Impl) l2 + in + let l1 = + Common.bignum_bigint_to_field (module Runner.Impl) l1 + in + let l0 = + Common.bignum_bigint_to_field (module Runner.Impl) l0 + in + [| l0; l1; l2 |] ) + |> Common.tuple3_of_array + in + let out_l1 = Generic.mul (module Runner.Impl) l1 two_to_88 in + let out_l1l0 = Generic.add (module Runner.Impl) out_l1 l0 in + let out_l2 = Generic.mul (module Runner.Impl) l2 two_to_176 in + let out_limbs = Generic.add (module Runner.Impl) out_l1l0 out_l2 in + Field.Assert.equal out_limbs out_not_c ; + let limbs = Element.Standard.of_limbs (l0, l1, l2) in + + (* Create external checks context for tracking extra constraints + that are required for soundness (not used in this test) *) + let unused_external_checks = + External_checks.create (module Runner.Impl) + in + + let out_ffmul = + Foreign_field.mul + (module Runner.Impl) + unused_external_checks limbs limbs secp256k1_modulus + in + + let out_ffadd = + Foreign_field.add + (module Runner.Impl) + out_ffmul foreign_elem secp256k1_modulus + in + 
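(* Editor's note, not part of the diff: what follows is the final step of the
   walkthrough above ("multi range check the 3 limbs of the output").
   [Element.Standard.to_limbs] splits the foreign-field sum into its three
   base-2^88 digits, so out_ffadd = l0 + 2^88*l1 + 2^176*l2, and
   [Range_check.multi] constrains each digit to [0, 2^88); this is the same
   decomposition performed by hand for [out_not_c] earlier in this
   circuit. *)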
let l0, l1, l2 = Element.Standard.to_limbs out_ffadd in + Range_check.multi (module Runner.Impl) l0 l1 l2 ; + () ) + in + cs + in + + let cs = + test_gates "7b3f28d7496d75f0" + "3fffe27b14baa740db0c8bb6656de61d2871a64093908af6181f46351a1c1909" + "b58c271d1f2b1c632a61a548872580228430495e9635842591d9118236bacfa2" + in + let _cs = + test_gates ~cs "84c0d728b6928a0f" + "1f2d8f0d0cd52771bfb86ffdf651b7907e2e0fa87f7c9c2a41b0918e2a7820d" + "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + in + () ) ; + () diff --git a/src/lib/crypto/kimchi_backend/pasta/basic/kimchi_pasta_basic.ml b/src/lib/crypto/kimchi_backend/pasta/basic/kimchi_pasta_basic.ml index 8caba494c7f..245a7c0ad83 100644 --- a/src/lib/crypto/kimchi_backend/pasta/basic/kimchi_pasta_basic.ml +++ b/src/lib/crypto/kimchi_backend/pasta/basic/kimchi_pasta_basic.ml @@ -147,3 +147,9 @@ module Fp_poly_comm = Kimchi_backend_common.Poly_comm.Make (struct fun unshifted shifted : t -> { shifted; unshifted } end end) + +(* poseidon params *) + +let poseidon_params_fp = Sponge.Params.(map pasta_p_kimchi ~f:Fp.of_string) + +let poseidon_params_fq = Sponge.Params.(map pasta_q_kimchi ~f:Fq.of_string) diff --git a/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/pallas_constraint_system.ml b/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/pallas_constraint_system.ml index cb59e2d0344..1cec41aa925 100644 --- a/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/pallas_constraint_system.ml +++ b/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/pallas_constraint_system.ml @@ -4,8 +4,5 @@ open Kimchi_pasta_basic include Plonk_constraint_system.Make (Fq) (Kimchi_bindings.Protocol.Gates.Vector.Fq) (struct - let params = - Sponge.Params.( - map pasta_q_kimchi ~f:(fun x -> - Fq.of_bigint (Bigint256.of_decimal_string x) )) + let params = poseidon_params_fq end) diff --git a/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/vesta_constraint_system.ml b/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/vesta_constraint_system.ml index 364673a5b85..0fff63228cd 100644 --- a/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/vesta_constraint_system.ml +++ b/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/vesta_constraint_system.ml @@ -4,8 +4,5 @@ open Kimchi_pasta_basic include Plonk_constraint_system.Make (Fp) (Kimchi_bindings.Protocol.Gates.Vector.Fp) (struct - let params = - Sponge.Params.( - map pasta_p_kimchi ~f:(fun x -> - Fp.of_bigint (Bigint256.of_decimal_string x) )) + let params = poseidon_params_fp end) diff --git a/src/lib/crypto/kimchi_backend/pasta/constraint_system/intf.ml b/src/lib/crypto/kimchi_backend/pasta/constraint_system/intf.ml index f9bca06f792..6f9a0648a29 100644 --- a/src/lib/crypto/kimchi_backend/pasta/constraint_system/intf.ml +++ b/src/lib/crypto/kimchi_backend/pasta/constraint_system/intf.ml @@ -27,23 +27,43 @@ module type With_accessors = sig end module type Full = sig - include With_accessors - type fp type gates + include + With_accessors + with type t = (fp, gates) Kimchi_backend_common.Plonk_constraint_system.t + val add_constraint : ?label:string -> t -> (fp Snarky_backendless.Cvar.t, fp) Snarky_backendless.Constraint.basic -> unit - val compute_witness : t -> (int -> fp) -> fp array array + val compute_witness : + t -> (int -> fp) -> fp array array * fp Kimchi_types.runtime_table array val finalize : t -> unit - val finalize_and_get_gates : t -> gates + val finalize_and_get_gates : + t + -> gates + * fp Kimchi_types.lookup_table array + * 
fp Kimchi_types.runtime_table_cfg array + + (** Return the size of all the fixed lookup tables concatenated, without the + built-in XOR and RangeCheck tables *) + val get_concatenated_fixed_lookup_table_size : t -> int + + (** Return the size of all the runtime lookup tables concatenated *) + val get_concatenated_runtime_lookup_table_size : t -> int + + (** Finalize the fixed lookup tables. The function cannot be called twice. *) + val finalize_fixed_lookup_tables : t -> unit + + (** Finalize the runtime lookup table configurations. The function cannot be called twice. *) + val finalize_runtime_lookup_tables : t -> unit val digest : t -> Md5.t diff --git a/src/lib/crypto/kimchi_backend/pasta/pallas_based_plonk.ml b/src/lib/crypto/kimchi_backend/pasta/pallas_based_plonk.ml index e9c889716f3..cf747c75755 100644 --- a/src/lib/crypto/kimchi_backend/pasta/pallas_based_plonk.ml +++ b/src/lib/crypto/kimchi_backend/pasta/pallas_based_plonk.ml @@ -77,13 +77,18 @@ module Proof = Plonk_dlog_proof.Make (struct , Pasta_bindings.Fq.t ) Kimchi_types.prover_proof + type with_public_evals = + ( Pasta_bindings.Fp.t Kimchi_types.or_infinity + , Pasta_bindings.Fq.t ) + Kimchi_types.proof_with_public + include Kimchi_bindings.Protocol.Proof.Fq let batch_verify vks ts = Promise.run_in_thread (fun () -> batch_verify vks ts) - let create_aux ~f:create (pk : Keypair.t) primary auxiliary prev_chals - prev_comms = + let create_aux ~f:backend_create (pk : Keypair.t) ~primary ~auxiliary + ~prev_chals ~prev_comms = (* external values contains [1, primary..., auxiliary ] *) let external_values i = let open Field.Vector in @@ -92,7 +97,7 @@ module Proof = Plonk_dlog_proof.Make (struct in (* compute witness *) - let computed_witness = + let computed_witness, runtime_tables = R1CS_constraint_system.compute_witness pk.cs external_values in let num_rows = Array.length computed_witness.(0) in @@ -106,16 +111,19 @@ module Proof = Plonk_dlog_proof.Make (struct done ; witness ) in - create pk.index witness_cols prev_chals prev_comms + backend_create pk.index witness_cols runtime_tables prev_chals prev_comms - let create_async (pk : Keypair.t) primary auxiliary prev_chals prev_comms = - create_aux pk primary auxiliary prev_chals prev_comms - ~f:(fun pk auxiliary_input prev_challenges prev_sgs -> + let create_async (pk : Keypair.t) ~primary ~auxiliary ~prev_chals + ~prev_comms = + create_aux pk ~primary ~auxiliary ~prev_chals ~prev_comms + ~f:(fun index witness runtime_tables prev_chals prev_sgs -> Promise.run_in_thread (fun () -> - create pk auxiliary_input prev_challenges prev_sgs ) ) + Kimchi_bindings.Protocol.Proof.Fq.create index witness + runtime_tables prev_chals prev_sgs ) ) - let create (pk : Keypair.t) primary auxiliary prev_chals prev_comms = - create_aux pk primary auxiliary prev_chals prev_comms ~f:create + let create (pk : Keypair.t) ~primary ~auxiliary ~prev_chals ~prev_comms = + create_aux pk ~primary ~auxiliary ~prev_chals ~prev_comms + ~f:Kimchi_bindings.Protocol.Proof.Fq.create end module Verifier_index = Kimchi_bindings.Protocol.VerifierIndex.Fq @@ -165,5 +173,7 @@ module Oracles = Plonk_dlog_oracles.Make (struct include Kimchi_bindings.Protocol.Oracles.Fq let create = with_lagrange create + + let create_with_public_evals = with_lagrange create_with_public_evals end end) diff --git a/src/lib/crypto/kimchi_backend/pasta/vesta_based_plonk.ml b/src/lib/crypto/kimchi_backend/pasta/vesta_based_plonk.ml index e68427ebdad..4321b8963eb 100644 --- a/src/lib/crypto/kimchi_backend/pasta/vesta_based_plonk.ml +++ 
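(* Editor's sketch, not part of the diff: intended call pattern for the new
   lookup-table hooks declared in the [Full] signature above. [CS] stands for
   any module satisfying that signature (assumed here to be reachable as
   [Intf.Full]); per the doc comments, each finalizer may be called at most
   once, so a helper like this should run exactly once per constraint
   system. *)
let finalize_and_report_lookup_sizes (module CS : Intf.Full) (cs : CS.t) :
    int * int =
  (* Freeze the fixed tables and the runtime table configurations. *)
  CS.finalize_fixed_lookup_tables cs ;
  CS.finalize_runtime_lookup_tables cs ;
  (* Concatenated sizes: fixed tables (excluding the built-in XOR and
     RangeCheck tables) and runtime tables. *)
  ( CS.get_concatenated_fixed_lookup_table_size cs
  , CS.get_concatenated_runtime_lookup_table_size cs )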
b/src/lib/crypto/kimchi_backend/pasta/vesta_based_plonk.ml @@ -76,13 +76,18 @@ module Proof = Plonk_dlog_proof.Make (struct , Pasta_bindings.Fp.t ) Kimchi_types.prover_proof + type with_public_evals = + ( Pasta_bindings.Fq.t Kimchi_types.or_infinity + , Pasta_bindings.Fp.t ) + Kimchi_types.proof_with_public + include Kimchi_bindings.Protocol.Proof.Fp let batch_verify vks ts = Promise.run_in_thread (fun () -> batch_verify vks ts) - let create_aux ~f:create (pk : Keypair.t) primary auxiliary prev_chals - prev_comms = + let create_aux ~f:backend_create (pk : Keypair.t) primary auxiliary + prev_chals prev_comms = (* external values contains [1, primary..., auxiliary ] *) let external_values i = let open Field.Vector in @@ -91,7 +96,7 @@ module Proof = Plonk_dlog_proof.Make (struct in (* compute witness *) - let computed_witness = + let computed_witness, runtime_tables = R1CS_constraint_system.compute_witness pk.cs external_values in let num_rows = Array.length computed_witness.(0) in @@ -105,16 +110,19 @@ module Proof = Plonk_dlog_proof.Make (struct done ; witness ) in - create pk.index witness_cols prev_chals prev_comms + backend_create pk.index witness_cols runtime_tables prev_chals prev_comms - let create_async (pk : Keypair.t) primary auxiliary prev_chals prev_comms = + let create_async (pk : Keypair.t) ~primary ~auxiliary ~prev_chals + ~prev_comms = create_aux pk primary auxiliary prev_chals prev_comms - ~f:(fun pk auxiliary_input prev_challenges prev_sgs -> + ~f:(fun index witness runtime_tables prev_chals prev_sgs -> Promise.run_in_thread (fun () -> - create pk auxiliary_input prev_challenges prev_sgs ) ) + Kimchi_bindings.Protocol.Proof.Fp.create index witness + runtime_tables prev_chals prev_sgs ) ) - let create (pk : Keypair.t) primary auxiliary prev_chals prev_comms = - create_aux pk primary auxiliary prev_chals prev_comms ~f:create + let create (pk : Keypair.t) ~primary ~auxiliary ~prev_chals ~prev_comms = + create_aux pk primary auxiliary prev_chals prev_comms + ~f:Kimchi_bindings.Protocol.Proof.Fp.create end module Verifier_index = Kimchi_bindings.Protocol.VerifierIndex.Fp @@ -164,5 +172,7 @@ module Oracles = Plonk_dlog_oracles.Make (struct include Kimchi_bindings.Protocol.Oracles.Fp let create = with_lagrange create + + let create_with_public_evals = with_lagrange create_with_public_evals end end) diff --git a/src/lib/crypto/kimchi_backend/tests.ml b/src/lib/crypto/kimchi_backend/tests.ml index d6ea5f33e5a..0e849e44d2e 100644 --- a/src/lib/crypto/kimchi_backend/tests.ml +++ b/src/lib/crypto/kimchi_backend/tests.ml @@ -24,8 +24,6 @@ let%test_unit "of_affine" = | Infinity -> assert false in - Pasta_bindings.Fp.print x ; - Pasta_bindings.Fp.print y ; Pasta_bindings.Pallas.(ignore (of_affine_coordinates x y : t)) let%test_unit "vector test" = diff --git a/src/lib/crypto/kimchi_bindings/js/bindings-bigint256.js b/src/lib/crypto/kimchi_bindings/js/bindings-bigint256.js new file mode 100644 index 00000000000..50fa2a00048 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/bindings-bigint256.js @@ -0,0 +1,50 @@ +/* global tsBindings +*/ + +// Provides: caml_bigint_256_of_numeral +// Requires: tsBindings +var caml_bigint_256_of_numeral = tsBindings.caml_bigint_256_of_numeral; + +// Provides: caml_bigint_256_of_decimal_string +// Requires: tsBindings +var caml_bigint_256_of_decimal_string = tsBindings.caml_bigint_256_of_decimal_string; + +// Provides: caml_bigint_256_num_limbs +// Requires: tsBindings +var caml_bigint_256_num_limbs = tsBindings.caml_bigint_256_num_limbs; + +// Provides: 
caml_bigint_256_bytes_per_limb +// Requires: tsBindings +var caml_bigint_256_bytes_per_limb = tsBindings.caml_bigint_256_bytes_per_limb; + +// Provides: caml_bigint_256_div +// Requires: tsBindings +var caml_bigint_256_div = tsBindings.caml_bigint_256_div; + +// Provides: caml_bigint_256_compare +// Requires: tsBindings +var caml_bigint_256_compare = tsBindings.caml_bigint_256_compare; + +// Provides: caml_bigint_256_print +// Requires: tsBindings +var caml_bigint_256_print = tsBindings.caml_bigint_256_print; + +// Provides: caml_bigint_256_to_string +// Requires: tsBindings +var caml_bigint_256_to_string = tsBindings.caml_bigint_256_to_string; + +// Provides: caml_bigint_256_test_bit +// Requires: tsBindings +var caml_bigint_256_test_bit = tsBindings.caml_bigint_256_test_bit; + +// Provides: caml_bigint_256_to_bytes +// Requires: tsBindings +var caml_bigint_256_to_bytes = tsBindings.caml_bigint_256_to_bytes; + +// Provides: caml_bigint_256_of_bytes +// Requires: tsBindings +var caml_bigint_256_of_bytes = tsBindings.caml_bigint_256_of_bytes; + +// Provides: caml_bigint_256_deep_copy +// Requires: tsBindings +var caml_bigint_256_deep_copy = tsBindings.caml_bigint_256_deep_copy diff --git a/src/lib/crypto/kimchi_bindings/js/bindings-curve.js b/src/lib/crypto/kimchi_bindings/js/bindings-curve.js new file mode 100644 index 00000000000..18d67a0b28d --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/bindings-curve.js @@ -0,0 +1,118 @@ +/* global tsBindings +*/ + +// pallas + +// Provides: caml_pallas_one +// Requires: tsBindings +var caml_pallas_one = tsBindings.caml_pallas_one; + +// Provides: caml_pallas_add +// Requires: tsBindings +var caml_pallas_add = tsBindings.caml_pallas_add; + +// Provides: caml_pallas_sub +// Requires: tsBindings +var caml_pallas_sub = tsBindings.caml_pallas_sub; + +// Provides: caml_pallas_negate +// Requires: tsBindings +var caml_pallas_negate = tsBindings.caml_pallas_negate; + +// Provides: caml_pallas_double +// Requires: tsBindings +var caml_pallas_double = tsBindings.caml_pallas_double; + +// Provides: caml_pallas_scale +// Requires: tsBindings +var caml_pallas_scale = tsBindings.caml_pallas_scale; + +// Provides: caml_pallas_random +// Requires: tsBindings +var caml_pallas_random = tsBindings.caml_pallas_random; + +// Provides: caml_pallas_rng +// Requires: tsBindings +var caml_pallas_rng = tsBindings.caml_pallas_rng; + +// Provides: caml_pallas_endo_base +// Requires: tsBindings +var caml_pallas_endo_base = tsBindings.caml_pallas_endo_base; + +// Provides: caml_pallas_endo_scalar +// Requires: tsBindings +var caml_pallas_endo_scalar = tsBindings.caml_pallas_endo_scalar; + +// Provides: caml_pallas_to_affine +// Requires: tsBindings +var caml_pallas_to_affine = tsBindings.caml_pallas_to_affine; + +// Provides: caml_pallas_of_affine +// Requires: tsBindings +var caml_pallas_of_affine = tsBindings.caml_pallas_of_affine; + +// Provides: caml_pallas_of_affine_coordinates +// Requires: tsBindings +var caml_pallas_of_affine_coordinates = tsBindings.caml_pallas_of_affine_coordinates; + +// Provides: caml_pallas_affine_deep_copy +// Requires: tsBindings +var caml_pallas_affine_deep_copy = tsBindings.caml_pallas_affine_deep_copy; + +// vesta + +// Provides: caml_vesta_one +// Requires: tsBindings +var caml_vesta_one = tsBindings.caml_vesta_one; + +// Provides: caml_vesta_add +// Requires: tsBindings +var caml_vesta_add = tsBindings.caml_vesta_add; + +// Provides: caml_vesta_sub +// Requires: tsBindings +var caml_vesta_sub = tsBindings.caml_vesta_sub; + +// Provides: 
caml_vesta_negate +// Requires: tsBindings +var caml_vesta_negate = tsBindings.caml_vesta_negate; + +// Provides: caml_vesta_double +// Requires: tsBindings +var caml_vesta_double = tsBindings.caml_vesta_double; + +// Provides: caml_vesta_scale +// Requires: tsBindings +var caml_vesta_scale = tsBindings.caml_vesta_scale; + +// Provides: caml_vesta_random +// Requires: tsBindings +var caml_vesta_random = tsBindings.caml_vesta_random; + +// Provides: caml_vesta_rng +// Requires: tsBindings +var caml_vesta_rng = tsBindings.caml_vesta_rng; + +// Provides: caml_vesta_endo_base +// Requires: tsBindings +var caml_vesta_endo_base = tsBindings.caml_vesta_endo_base; + +// Provides: caml_vesta_endo_scalar +// Requires: tsBindings +var caml_vesta_endo_scalar = tsBindings.caml_vesta_endo_scalar; + +// Provides: caml_vesta_to_affine +// Requires: tsBindings +var caml_vesta_to_affine = tsBindings.caml_vesta_to_affine; + +// Provides: caml_vesta_of_affine +// Requires: tsBindings +var caml_vesta_of_affine = tsBindings.caml_vesta_of_affine; + +// Provides: caml_vesta_of_affine_coordinates +// Requires: tsBindings +var caml_vesta_of_affine_coordinates = tsBindings.caml_vesta_of_affine_coordinates; + +// Provides: caml_vesta_affine_deep_copy +// Requires: tsBindings +var caml_vesta_affine_deep_copy = tsBindings.caml_vesta_affine_deep_copy; diff --git a/src/lib/crypto/kimchi_bindings/js/bindings-field.js b/src/lib/crypto/kimchi_bindings/js/bindings-field.js new file mode 100644 index 00000000000..4b93a52cecd --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/bindings-field.js @@ -0,0 +1,253 @@ +/* global tsBindings +*/ + +// Provides: caml_pasta_fp_copy +// Requires: tsBindings +var caml_pasta_fp_copy = tsBindings.caml_pasta_fp_copy; + +// Provides: caml_pasta_fp_size_in_bits +// Requires: tsBindings +var caml_pasta_fp_size_in_bits = tsBindings.caml_pasta_fp_size_in_bits; + +// Provides: caml_pasta_fp_size +// Requires: tsBindings +var caml_pasta_fp_size = tsBindings.caml_pasta_fp_size; + +// Provides: caml_pasta_fp_add +// Requires: tsBindings +var caml_pasta_fp_add = tsBindings.caml_pasta_fp_add; + +// Provides: caml_pasta_fp_sub +// Requires: tsBindings +var caml_pasta_fp_sub = tsBindings.caml_pasta_fp_sub; + +// Provides: caml_pasta_fp_negate +// Requires: tsBindings +var caml_pasta_fp_negate = tsBindings.caml_pasta_fp_negate; + +// Provides: caml_pasta_fp_mul +// Requires: tsBindings +var caml_pasta_fp_mul = tsBindings.caml_pasta_fp_mul; + +// Provides: caml_pasta_fp_div +// Requires: tsBindings +var caml_pasta_fp_div = tsBindings.caml_pasta_fp_div; + +// Provides: caml_pasta_fp_inv +// Requires: tsBindings +var caml_pasta_fp_inv = tsBindings.caml_pasta_fp_inv; + +// Provides: caml_pasta_fp_square +// Requires: tsBindings +var caml_pasta_fp_square = tsBindings.caml_pasta_fp_square + +// Provides: caml_pasta_fp_is_square +// Requires: tsBindings +var caml_pasta_fp_is_square = tsBindings.caml_pasta_fp_is_square; + +// Provides: caml_pasta_fp_sqrt +// Requires: tsBindings +var caml_pasta_fp_sqrt = tsBindings.caml_pasta_fp_sqrt; + +// Provides: caml_pasta_fp_of_int +// Requires: tsBindings +var caml_pasta_fp_of_int = tsBindings.caml_pasta_fp_of_int + +// Provides: caml_pasta_fp_to_string +// Requires: tsBindings +var caml_pasta_fp_to_string = tsBindings.caml_pasta_fp_to_string; + +// Provides: caml_pasta_fp_of_string +// Requires: tsBindings +var caml_pasta_fp_of_string = tsBindings.caml_pasta_fp_of_string; + +// Provides: caml_pasta_fp_print +// Requires: tsBindings +var caml_pasta_fp_print = 
tsBindings.caml_pasta_fp_print; + +// Provides: caml_pasta_fp_mut_add +// Requires: tsBindings +var caml_pasta_fp_mut_add = tsBindings.caml_pasta_fp_mut_add; + +// Provides: caml_pasta_fp_mut_sub +// Requires: tsBindings +var caml_pasta_fp_mut_sub = tsBindings.caml_pasta_fp_mut_sub; + +// Provides: caml_pasta_fp_mut_mul +// Requires: tsBindings +var caml_pasta_fp_mut_mul = tsBindings.caml_pasta_fp_mut_mul; + +// Provides: caml_pasta_fp_mut_square +// Requires: tsBindings +var caml_pasta_fp_mut_square = tsBindings.caml_pasta_fp_mut_square; + +// Provides: caml_pasta_fp_compare +// Requires: tsBindings +var caml_pasta_fp_compare = tsBindings.caml_pasta_fp_compare; + +// Provides: caml_pasta_fp_equal +// Requires: tsBindings +var caml_pasta_fp_equal = tsBindings.caml_pasta_fp_equal; + +// Provides: caml_pasta_fp_random +// Requires: tsBindings +var caml_pasta_fp_random = tsBindings.caml_pasta_fp_random; + +// Provides: caml_pasta_fp_rng +// Requires: tsBindings +var caml_pasta_fp_rng = tsBindings.caml_pasta_fp_rng; + +// Provides: caml_pasta_fp_to_bigint +// Requires: tsBindings +var caml_pasta_fp_to_bigint = tsBindings.caml_pasta_fp_to_bigint; + +// Provides: caml_pasta_fp_of_bigint +// Requires: tsBindings +var caml_pasta_fp_of_bigint = tsBindings.caml_pasta_fp_of_bigint; + +// Provides: caml_pasta_fp_two_adic_root_of_unity +// Requires: tsBindings +var caml_pasta_fp_two_adic_root_of_unity = tsBindings.caml_pasta_fp_two_adic_root_of_unity; + +// Provides: caml_pasta_fp_domain_generator +// Requires: tsBindings +var caml_pasta_fp_domain_generator = tsBindings.caml_pasta_fp_domain_generator; + +// Provides: caml_pasta_fp_to_bytes +// Requires: tsBindings +var caml_pasta_fp_to_bytes = tsBindings.caml_pasta_fp_to_bytes; + +// Provides: caml_pasta_fp_of_bytes +// Requires: tsBindings +var caml_pasta_fp_of_bytes = tsBindings.caml_pasta_fp_of_bytes; + +// Provides: caml_pasta_fp_deep_copy +// Requires: tsBindings +var caml_pasta_fp_deep_copy = tsBindings.caml_pasta_fp_deep_copy; + + + + +// Provides: caml_pasta_fq_copy +// Requires: tsBindings +var caml_pasta_fq_copy = tsBindings.caml_pasta_fq_copy; + +// Provides: caml_pasta_fq_size_in_bits +// Requires: tsBindings +var caml_pasta_fq_size_in_bits = tsBindings.caml_pasta_fq_size_in_bits; + +// Provides: caml_pasta_fq_size +// Requires: tsBindings +var caml_pasta_fq_size = tsBindings.caml_pasta_fq_size; + +// Provides: caml_pasta_fq_add +// Requires: tsBindings +var caml_pasta_fq_add = tsBindings.caml_pasta_fq_add; + +// Provides: caml_pasta_fq_sub +// Requires: tsBindings +var caml_pasta_fq_sub = tsBindings.caml_pasta_fq_sub; + +// Provides: caml_pasta_fq_negate +// Requires: tsBindings +var caml_pasta_fq_negate = tsBindings.caml_pasta_fq_negate; + +// Provides: caml_pasta_fq_mul +// Requires: tsBindings +var caml_pasta_fq_mul = tsBindings.caml_pasta_fq_mul; + +// Provides: caml_pasta_fq_div +// Requires: tsBindings +var caml_pasta_fq_div = tsBindings.caml_pasta_fq_div; + +// Provides: caml_pasta_fq_inv +// Requires: tsBindings +var caml_pasta_fq_inv = tsBindings.caml_pasta_fq_inv; + +// Provides: caml_pasta_fq_square +// Requires: tsBindings +var caml_pasta_fq_square = tsBindings.caml_pasta_fq_square + +// Provides: caml_pasta_fq_is_square +// Requires: tsBindings +var caml_pasta_fq_is_square = tsBindings.caml_pasta_fq_is_square; + +// Provides: caml_pasta_fq_sqrt +// Requires: tsBindings +var caml_pasta_fq_sqrt = tsBindings.caml_pasta_fq_sqrt; + +// Provides: caml_pasta_fq_of_int +// Requires: tsBindings +var caml_pasta_fq_of_int = 
tsBindings.caml_pasta_fq_of_int; + +// Provides: caml_pasta_fq_to_string +// Requires: tsBindings +var caml_pasta_fq_to_string = tsBindings.caml_pasta_fq_to_string; + +// Provides: caml_pasta_fq_of_string +// Requires: tsBindings +var caml_pasta_fq_of_string = tsBindings.caml_pasta_fq_of_string; + +// Provides: caml_pasta_fq_print +// Requires: tsBindings +var caml_pasta_fq_print = tsBindings.caml_pasta_fq_print; + +// Provides: caml_pasta_fq_mut_add +// Requires: tsBindings +var caml_pasta_fq_mut_add = tsBindings.caml_pasta_fq_mut_add; + +// Provides: caml_pasta_fq_mut_sub +// Requires: tsBindings +var caml_pasta_fq_mut_sub = tsBindings.caml_pasta_fq_mut_sub; + +// Provides: caml_pasta_fq_mut_mul +// Requires: tsBindings +var caml_pasta_fq_mut_mul = tsBindings.caml_pasta_fq_mut_mul; + +// Provides: caml_pasta_fq_mut_square +// Requires: tsBindings +var caml_pasta_fq_mut_square = tsBindings.caml_pasta_fq_mut_square; + +// Provides: caml_pasta_fq_compare +// Requires: tsBindings +var caml_pasta_fq_compare = tsBindings.caml_pasta_fq_compare; + +// Provides: caml_pasta_fq_equal +// Requires: tsBindings +var caml_pasta_fq_equal = tsBindings.caml_pasta_fq_equal; + +// Provides: caml_pasta_fq_random +// Requires: tsBindings +var caml_pasta_fq_random = tsBindings.caml_pasta_fq_random; + +// Provides: caml_pasta_fq_rng +// Requires: tsBindings +var caml_pasta_fq_rng = tsBindings.caml_pasta_fq_rng; + +// Provides: caml_pasta_fq_to_bigint +// Requires: tsBindings +var caml_pasta_fq_to_bigint = tsBindings.caml_pasta_fq_to_bigint; + +// Provides: caml_pasta_fq_of_bigint +// Requires: tsBindings +var caml_pasta_fq_of_bigint = tsBindings.caml_pasta_fq_of_bigint; + +// Provides: caml_pasta_fq_two_adic_root_of_unity +// Requires: tsBindings +var caml_pasta_fq_two_adic_root_of_unity = tsBindings.caml_pasta_fq_two_adic_root_of_unity; + +// Provides: caml_pasta_fq_domain_generator +// Requires: tsBindings +var caml_pasta_fq_domain_generator = tsBindings.caml_pasta_fq_domain_generator; + +// Provides: caml_pasta_fq_to_bytes +// Requires: tsBindings +var caml_pasta_fq_to_bytes = tsBindings.caml_pasta_fq_to_bytes; + +// Provides: caml_pasta_fq_of_bytes +// Requires: tsBindings +var caml_pasta_fq_of_bytes = tsBindings.caml_pasta_fq_of_bytes; + +// Provides: caml_pasta_fq_deep_copy +// Requires: tsBindings +var caml_pasta_fq_deep_copy = tsBindings.caml_pasta_fq_deep_copy; diff --git a/src/lib/crypto/kimchi_bindings/js/bindings-vector.js b/src/lib/crypto/kimchi_bindings/js/bindings-vector.js new file mode 100644 index 00000000000..9ff2441bc33 --- /dev/null +++ b/src/lib/crypto/kimchi_bindings/js/bindings-vector.js @@ -0,0 +1,45 @@ +/* global tsBindings */ + +// fp + +// Provides: caml_fp_vector_create +// Requires: tsBindings +var caml_fp_vector_create = tsBindings.caml_fp_vector_create; + +// Provides: caml_fp_vector_length +// Requires: tsBindings +var caml_fp_vector_length = tsBindings.caml_fp_vector_length; + +// Provides: caml_fp_vector_emplace_back +// Requires: tsBindings +var caml_fp_vector_emplace_back = tsBindings.caml_fp_vector_emplace_back; + +// Provides: caml_fp_vector_get +// Requires: tsBindings +var caml_fp_vector_get = tsBindings.caml_fp_vector_get; + +// Provides: caml_fp_vector_set +// Requires: tsBindings +var caml_fp_vector_set = tsBindings.caml_fp_vector_set; + +// fq + +// Provides: caml_fq_vector_create +// Requires: tsBindings +var caml_fq_vector_create = tsBindings.caml_fq_vector_create; + +// Provides: caml_fq_vector_length +// Requires: tsBindings +var caml_fq_vector_length = 
tsBindings.caml_fq_vector_length; + +// Provides: caml_fq_vector_emplace_back +// Requires: tsBindings +var caml_fq_vector_emplace_back = tsBindings.caml_fq_vector_emplace_back; + +// Provides: caml_fq_vector_get +// Requires: tsBindings +var caml_fq_vector_get = tsBindings.caml_fq_vector_get; + +// Provides: caml_fq_vector_set +// Requires: tsBindings +var caml_fq_vector_set = tsBindings.caml_fq_vector_set; diff --git a/src/lib/crypto/kimchi_bindings/js/bindings.js b/src/lib/crypto/kimchi_bindings/js/bindings.js index 989d7c92551..32907233043 100644 --- a/src/lib/crypto/kimchi_bindings/js/bindings.js +++ b/src/lib/crypto/kimchi_bindings/js/bindings.js @@ -1,9 +1,25 @@ -/* global joo_global_object, plonk_wasm, caml_js_to_bool, caml_jsstring_of_string, - caml_string_of_jsstring +/* global plonk_wasm, caml_jsstring_of_string, caml_string_of_jsstring, caml_create_bytes, caml_bytes_unsafe_set, caml_bytes_unsafe_get, caml_ml_bytes_length, UInt64, caml_int64_of_int32 */ +// Provides: tsBindings +var tsBindings = globalThis.__snarkyTsBindings; + +// Provides: tsRustConversion +// Requires: tsBindings, plonk_wasm +var tsRustConversion = tsBindings.rustConversion(plonk_wasm); + +// Provides: tsSrs +// Requires: tsBindings, plonk_wasm +var tsSrs = tsBindings.srs(plonk_wasm); + +// Provides: getTsBindings +// Requires: tsBindings +function getTsBindings() { + return tsBindings; +} + // Provides: caml_bytes_of_uint8array // Requires: caml_create_bytes, caml_bytes_unsafe_set var caml_bytes_of_uint8array = function (uint8array) { @@ -20,7 +36,7 @@ var caml_bytes_of_uint8array = function (uint8array) { // Requires: caml_ml_bytes_length, caml_bytes_unsafe_get var caml_bytes_to_uint8array = function (ocaml_bytes) { var length = caml_ml_bytes_length(ocaml_bytes); - var bytes = new joo_global_object.Uint8Array(length); + var bytes = new globalThis.Uint8Array(length); for (var i = 0; i < length; i++) { // No need to convert here: OCaml Char.t is just an int under the hood. 
bytes[i] = caml_bytes_unsafe_get(ocaml_bytes, i); @@ -28,81 +44,6 @@ var caml_bytes_to_uint8array = function (ocaml_bytes) { return bytes; }; -// Provides: caml_bigint_256_of_numeral -// Requires: plonk_wasm, caml_jsstring_of_string -var caml_bigint_256_of_numeral = function (s, len, base) { - return plonk_wasm.caml_bigint_256_of_numeral( - caml_jsstring_of_string(s), - len, - base - ); -}; - -// Provides: caml_bigint_256_of_decimal_string -// Requires: plonk_wasm, caml_jsstring_of_string -var caml_bigint_256_of_decimal_string = function (s) { - return plonk_wasm.caml_bigint_256_of_decimal_string( - caml_jsstring_of_string(s) - ); -}; - -// Provides: caml_bigint_256_num_limbs -// Requires: plonk_wasm -var caml_bigint_256_num_limbs = plonk_wasm.caml_bigint_256_num_limbs; - -// Provides: caml_bigint_256_bytes_per_limb -// Requires: plonk_wasm -var caml_bigint_256_bytes_per_limb = plonk_wasm.caml_bigint_256_bytes_per_limb; - -// Provides: caml_bigint_256_div -// Requires: plonk_wasm -var caml_bigint_256_div = plonk_wasm.caml_bigint_256_div; - -// Provides: caml_bigint_256_compare -// Requires: plonk_wasm -var caml_bigint_256_compare = plonk_wasm.caml_bigint_256_compare; - -// Provides: caml_bigint_256_print -// Requires: plonk_wasm -var caml_bigint_256_print = plonk_wasm.caml_bigint_256_print; - -// Provides: caml_bigint_256_to_string -// Requires: plonk_wasm, caml_string_of_jsstring -var caml_bigint_256_to_string = function (x) { - return caml_string_of_jsstring(plonk_wasm.caml_bigint_256_to_string(x)); -}; - -// Provides: caml_bigint_256_test_bit -// Requires: plonk_wasm, caml_js_to_bool -var caml_bigint_256_test_bit = function (x, i) { - return caml_js_to_bool(plonk_wasm.caml_bigint_256_test_bit(x, i)); -}; - -// Provides: caml_bigint_256_to_bytes -// Requires: plonk_wasm, caml_bytes_of_uint8array -var caml_bigint_256_to_bytes = function (x) { - return caml_bytes_of_uint8array(plonk_wasm.caml_bigint_256_to_bytes(x)); -}; - -// Provides: caml_bigint_256_of_bytes -// Requires: plonk_wasm, caml_bytes_to_uint8array -var caml_bigint_256_of_bytes = function (ocaml_bytes) { - return plonk_wasm.caml_bigint_256_of_bytes( - caml_bytes_to_uint8array(ocaml_bytes) - ); -}; - -// Provides: caml_bigint_256_deep_copy -// Requires: plonk_wasm -var caml_bigint_256_deep_copy = plonk_wasm.caml_bigint_256_deep_copy; - -// Provides: caml_pasta_fp_copy -var caml_pasta_fp_copy = function (x, y) { - for (var i = 0, l = x.length; i < l; i++) { - x[i] = y[i]; - } -}; - // Provides: caml_option_of_maybe_undefined var caml_option_of_maybe_undefined = function (x) { if (x === undefined) { @@ -122,446 +63,12 @@ var caml_option_to_maybe_undefined = function (x) { } }; -// Provides: caml_pasta_fp_size_in_bits -// Requires: plonk_wasm -var caml_pasta_fp_size_in_bits = plonk_wasm.caml_pasta_fp_size_in_bits; - -// Provides: caml_pasta_fp_size -// Requires: plonk_wasm -var caml_pasta_fp_size = plonk_wasm.caml_pasta_fp_size; - -// Provides: caml_pasta_fp_add -// Requires: plonk_wasm -var caml_pasta_fp_add = plonk_wasm.caml_pasta_fp_add; - -// Provides: caml_pasta_fp_sub -// Requires: plonk_wasm -var caml_pasta_fp_sub = plonk_wasm.caml_pasta_fp_sub; - -// Provides: caml_pasta_fp_negate -// Requires: plonk_wasm -var caml_pasta_fp_negate = plonk_wasm.caml_pasta_fp_negate; - -// Provides: caml_pasta_fp_mul -// Requires: plonk_wasm -var caml_pasta_fp_mul = plonk_wasm.caml_pasta_fp_mul; - -// Provides: caml_pasta_fp_div -// Requires: plonk_wasm -var caml_pasta_fp_div = plonk_wasm.caml_pasta_fp_div; - -// Provides: caml_pasta_fp_inv 
-// Requires: plonk_wasm, caml_option_of_maybe_undefined -var caml_pasta_fp_inv = function (x) { - return caml_option_of_maybe_undefined(plonk_wasm.caml_pasta_fp_inv(x)); -}; - -// Provides: caml_pasta_fp_square -// Requires: plonk_wasm -var caml_pasta_fp_square = plonk_wasm.caml_pasta_fp_square; - -// Provides: caml_pasta_fp_is_square -// Requires: plonk_wasm, caml_js_to_bool -var caml_pasta_fp_is_square = function (x) { - return caml_js_to_bool(plonk_wasm.caml_pasta_fp_is_square(x)); -}; - -// Provides: caml_pasta_fp_sqrt -// Requires: plonk_wasm, caml_option_of_maybe_undefined -var caml_pasta_fp_sqrt = function (x) { - return caml_option_of_maybe_undefined(plonk_wasm.caml_pasta_fp_sqrt(x)); -}; - -// Provides: caml_pasta_fp_of_int -// Requires: plonk_wasm -var caml_pasta_fp_of_int = plonk_wasm.caml_pasta_fp_of_int; - -// Provides: caml_pasta_fp_to_string -// Requires: plonk_wasm, caml_string_of_jsstring -var caml_pasta_fp_to_string = function (x) { - return caml_string_of_jsstring(plonk_wasm.caml_pasta_fp_to_string(x)); -}; - -// Provides: caml_pasta_fp_of_string -// Requires: plonk_wasm, caml_jsstring_of_string -var caml_pasta_fp_of_string = function (x) { - return plonk_wasm.caml_pasta_fp_of_string(caml_jsstring_of_string(x)); -}; - -// Provides: caml_pasta_fp_print -// Requires: plonk_wasm -var caml_pasta_fp_print = plonk_wasm.caml_pasta_fp_print; - -// Provides: caml_pasta_fp_mut_add -// Requires: caml_pasta_fp_copy, caml_pasta_fp_add -var caml_pasta_fp_mut_add = function (x, y) { - caml_pasta_fp_copy(x, caml_pasta_fp_add(x, y)); -}; - -// Provides: caml_pasta_fp_mut_sub -// Requires: caml_pasta_fp_copy, caml_pasta_fp_sub -var caml_pasta_fp_mut_sub = function (x, y) { - caml_pasta_fp_copy(x, caml_pasta_fp_sub(x, y)); -}; - -// Provides: caml_pasta_fp_mut_mul -// Requires: caml_pasta_fp_copy, caml_pasta_fp_mul -var caml_pasta_fp_mut_mul = function (x, y) { - caml_pasta_fp_copy(x, caml_pasta_fp_mul(x, y)); -}; - -// Provides: caml_pasta_fp_mut_square -// Requires: caml_pasta_fp_copy, caml_pasta_fp_square -var caml_pasta_fp_mut_square = function (x) { - caml_pasta_fp_copy(x, caml_pasta_fp_square(x)); -}; - -// Provides: caml_pasta_fp_compare -// Requires: plonk_wasm -var caml_pasta_fp_compare = plonk_wasm.caml_pasta_fp_compare; - -// Provides: caml_pasta_fp_equal -// Requires: plonk_wasm -var caml_pasta_fp_equal = plonk_wasm.caml_pasta_fp_equal; - -// Provides: caml_pasta_fp_random -// Requires: plonk_wasm -var caml_pasta_fp_random = plonk_wasm.caml_pasta_fp_random; - -// Provides: caml_pasta_fp_rng -// Requires: plonk_wasm -var caml_pasta_fp_rng = plonk_wasm.caml_pasta_fp_rng; - -// Provides: caml_pasta_fp_to_bigint -// Requires: plonk_wasm -var caml_pasta_fp_to_bigint = plonk_wasm.caml_pasta_fp_to_bigint; - -// Provides: caml_pasta_fp_of_bigint -// Requires: plonk_wasm -var caml_pasta_fp_of_bigint = plonk_wasm.caml_pasta_fp_of_bigint; - -// Provides: caml_pasta_fp_two_adic_root_of_unity -// Requires: plonk_wasm -var caml_pasta_fp_two_adic_root_of_unity = - plonk_wasm.caml_pasta_fp_two_adic_root_of_unity; - -// Provides: caml_pasta_fp_domain_generator -// Requires: plonk_wasm -var caml_pasta_fp_domain_generator = plonk_wasm.caml_pasta_fp_domain_generator; - -// Provides: caml_pasta_fp_to_bytes -// Requires: plonk_wasm, caml_bytes_of_uint8array -var caml_pasta_fp_to_bytes = function (x) { - var res = plonk_wasm.caml_pasta_fp_to_bytes(x); - return caml_bytes_of_uint8array(plonk_wasm.caml_pasta_fp_to_bytes(x)); -}; - -// Provides: caml_pasta_fp_of_bytes -// Requires: plonk_wasm, 
caml_bytes_to_uint8array -var caml_pasta_fp_of_bytes = function (ocaml_bytes) { - return plonk_wasm.caml_pasta_fp_of_bytes( - caml_bytes_to_uint8array(ocaml_bytes) - ); -}; - -// Provides: caml_pasta_fp_deep_copy -// Requires: plonk_wasm -var caml_pasta_fp_deep_copy = plonk_wasm.caml_pasta_fp_deep_copy; - -// Provides: caml_pasta_fq_copy -var caml_pasta_fq_copy = function (x, y) { - for (var i = 0, l = x.length; i < l; i++) { - x[i] = y[i]; - } -}; - -// Provides: caml_pasta_fq_size_in_bits -// Requires: plonk_wasm -var caml_pasta_fq_size_in_bits = plonk_wasm.caml_pasta_fq_size_in_bits; - -// Provides: caml_pasta_fq_size -// Requires: plonk_wasm -var caml_pasta_fq_size = plonk_wasm.caml_pasta_fq_size; - -// Provides: caml_pasta_fq_add -// Requires: plonk_wasm -var caml_pasta_fq_add = plonk_wasm.caml_pasta_fq_add; - -// Provides: caml_pasta_fq_sub -// Requires: plonk_wasm -var caml_pasta_fq_sub = plonk_wasm.caml_pasta_fq_sub; - -// Provides: caml_pasta_fq_negate -// Requires: plonk_wasm -var caml_pasta_fq_negate = plonk_wasm.caml_pasta_fq_negate; - -// Provides: caml_pasta_fq_mul -// Requires: plonk_wasm -var caml_pasta_fq_mul = plonk_wasm.caml_pasta_fq_mul; - -// Provides: caml_pasta_fq_div -// Requires: plonk_wasm -var caml_pasta_fq_div = plonk_wasm.caml_pasta_fq_div; - -// Provides: caml_pasta_fq_inv -// Requires: plonk_wasm, caml_option_of_maybe_undefined -var caml_pasta_fq_inv = function (x) { - return caml_option_of_maybe_undefined(plonk_wasm.caml_pasta_fq_inv(x)); -}; - -// Provides: caml_pasta_fq_square -// Requires: plonk_wasm -var caml_pasta_fq_square = plonk_wasm.caml_pasta_fq_square; - -// Provides: caml_pasta_fq_is_square -// Requires: plonk_wasm, caml_js_to_bool -var caml_pasta_fq_is_square = function (x) { - return caml_js_to_bool(plonk_wasm.caml_pasta_fq_is_square(x)); -}; - -// Provides: caml_pasta_fq_sqrt -// Requires: plonk_wasm, caml_option_of_maybe_undefined -var caml_pasta_fq_sqrt = function (x) { - return caml_option_of_maybe_undefined(plonk_wasm.caml_pasta_fq_sqrt(x)); -}; - -// Provides: caml_pasta_fq_of_int -// Requires: plonk_wasm -var caml_pasta_fq_of_int = plonk_wasm.caml_pasta_fq_of_int; - -// Provides: caml_pasta_fq_to_string -// Requires: plonk_wasm, caml_string_of_jsstring -var caml_pasta_fq_to_string = function (x) { - return caml_string_of_jsstring(plonk_wasm.caml_pasta_fq_to_string(x)); -}; - -// Provides: caml_pasta_fq_of_string -// Requires: plonk_wasm, caml_jsstring_of_string -var caml_pasta_fq_of_string = function (x) { - return plonk_wasm.caml_pasta_fq_of_string(caml_jsstring_of_string(x)); -}; - -// Provides: caml_pasta_fq_print -// Requires: plonk_wasm -var caml_pasta_fq_print = plonk_wasm.caml_pasta_fq_print; - -// Provides: caml_pasta_fq_mut_add -// Requires: caml_pasta_fq_copy, caml_pasta_fq_add -var caml_pasta_fq_mut_add = function (x, y) { - caml_pasta_fq_copy(x, caml_pasta_fq_add(x, y)); -}; - -// Provides: caml_pasta_fq_mut_sub -// Requires: caml_pasta_fq_copy, caml_pasta_fq_sub -var caml_pasta_fq_mut_sub = function (x, y) { - caml_pasta_fq_copy(x, caml_pasta_fq_sub(x, y)); -}; - -// Provides: caml_pasta_fq_mut_mul -// Requires: caml_pasta_fq_copy, caml_pasta_fq_mul -var caml_pasta_fq_mut_mul = function (x, y) { - caml_pasta_fq_copy(x, caml_pasta_fq_mul(x, y)); -}; - -// Provides: caml_pasta_fq_mut_square -// Requires: caml_pasta_fq_copy, caml_pasta_fq_square -var caml_pasta_fq_mut_square = function (x) { - caml_pasta_fq_copy(x, caml_pasta_fq_square(x)); -}; - -// Provides: caml_pasta_fq_compare -// Requires: plonk_wasm -var 
caml_pasta_fq_compare = plonk_wasm.caml_pasta_fq_compare; - -// Provides: caml_pasta_fq_equal -// Requires: plonk_wasm -var caml_pasta_fq_equal = plonk_wasm.caml_pasta_fq_equal; - -// Provides: caml_pasta_fq_random -// Requires: plonk_wasm -var caml_pasta_fq_random = plonk_wasm.caml_pasta_fq_random; - -// Provides: caml_pasta_fq_rng -// Requires: plonk_wasm -var caml_pasta_fq_rng = plonk_wasm.caml_pasta_fq_rng; - -// Provides: caml_pasta_fq_to_bigint -// Requires: plonk_wasm -var caml_pasta_fq_to_bigint = plonk_wasm.caml_pasta_fq_to_bigint; - -// Provides: caml_pasta_fq_of_bigint -// Requires: plonk_wasm -var caml_pasta_fq_of_bigint = plonk_wasm.caml_pasta_fq_of_bigint; - -// Provides: caml_pasta_fq_two_adic_root_of_unity -// Requires: plonk_wasm -var caml_pasta_fq_two_adic_root_of_unity = - plonk_wasm.caml_pasta_fq_two_adic_root_of_unity; - -// Provides: caml_pasta_fq_domain_generator -// Requires: plonk_wasm -var caml_pasta_fq_domain_generator = plonk_wasm.caml_pasta_fq_domain_generator; - -// Provides: caml_pasta_fq_to_bytes -// Requires: plonk_wasm, caml_bytes_of_uint8array -var caml_pasta_fq_to_bytes = function (x) { - var res = plonk_wasm.caml_pasta_fq_to_bytes(x); - return caml_bytes_of_uint8array(plonk_wasm.caml_pasta_fq_to_bytes(x)); -}; - -// Provides: caml_pasta_fq_of_bytes -// Requires: plonk_wasm, caml_bytes_to_uint8array -var caml_pasta_fq_of_bytes = function (ocaml_bytes) { - return plonk_wasm.caml_pasta_fq_of_bytes( - caml_bytes_to_uint8array(ocaml_bytes) - ); -}; - -// Provides: caml_pasta_fq_deep_copy -// Requires: plonk_wasm -var caml_pasta_fq_deep_copy = plonk_wasm.caml_pasta_fq_deep_copy; - -// Provides: caml_u8array_vector_to_rust_flat_vector -var caml_u8array_vector_to_rust_flat_vector = function (v) { - var i = 1; // The first entry is the OCaml tag for arrays - var len = v.length - i; - if (len === 0) { - return new joo_global_object.Uint8Array(0); - } - var inner_len = v[i].length; - var res = new joo_global_object.Uint8Array(len * inner_len); - for (var pos = 0; i <= len; i++) { - for (var j = 0; j < inner_len; j++, pos++) { - res[pos] = v[i][j]; - } - } - return res; -}; - -// Provides: caml_u8array_vector_of_rust_flat_vector -var caml_u8array_vector_of_rust_flat_vector = function (v, inner_len) { - var len = v.length; - var output_len = len / inner_len; - var res = new Array(output_len + 1); - res[0] = 0; // OCaml tag before array contents, so that we can use this with arrays or vectors - for (var i = 1, pos = 0; i <= output_len; i++) { - var inner_res = new joo_global_object.Uint8Array(inner_len); - for (var j = 0; j < inner_len; j++, pos++) { - inner_res[j] = v[pos]; - } - res[i] = inner_res; - } - return res; -}; - -// Provides: js_class_vector_to_rust_vector -var js_class_vector_to_rust_vector = function (v) { - var len = v.length; - var res = new joo_global_object.Uint32Array(len); - for (var i = 0; i < len; i++) { - // Beware: caller may need to do finalizer things to avoid these - // pointers disappearing out from under us. - res[i] = v[i].ptr; - } - return res; -}; - -// Provides: js_class_vector_of_rust_vector -var js_class_vector_of_rust_vector = function (v, klass) { - // return v.map(klass.__wrap) - var len = v.length; - var res = new Array(len); - for (var i = 0; i < len; i++) { - // Beware: the caller may need to add finalizers to these. 
- res[i] = klass.__wrap(v[i]); - } - return res; -}; - -// Provides: caml_fp_vector_create -var caml_fp_vector_create = function () { - return [0]; // OCaml tag for arrays, so that we can use the same utility fns on both -}; - -// Provides: caml_fp_vector_length -var caml_fp_vector_length = function (v) { - return v.length - 1; -}; - -// Provides: caml_fp_vector_emplace_back -var caml_fp_vector_emplace_back = function (v, x) { - v.push(x); -}; - -// Provides: caml_fp_vector_get -var caml_fp_vector_get = function (v, i) { - var value = v[i + 1]; - if (value === undefined) { - throw Error( - 'caml_fp_vector_get: Index out of bounds, got ' + i + '/' + (v.length - 1) - ); - } - return new joo_global_object.Uint8Array(value); -}; - -// Provides: caml_fp_vector_to_rust -// Requires: caml_u8array_vector_to_rust_flat_vector -var caml_fp_vector_to_rust = function (v) { - return caml_u8array_vector_to_rust_flat_vector(v); -}; - -// Provides: caml_fp_vector_of_rust -// Requires: caml_u8array_vector_of_rust_flat_vector -var caml_fp_vector_of_rust = function (v) { - // TODO: Hardcoding this is a little brittle - return caml_u8array_vector_of_rust_flat_vector(v, 32); -}; - -// Provides: caml_fq_vector_create -var caml_fq_vector_create = function () { - return [0]; // OCaml tag for arrays, so that we can use the same utility fns on both -}; - -// Provides: caml_fq_vector_length -var caml_fq_vector_length = function (v) { - return v.length - 1; -}; - -// Provides: caml_fq_vector_emplace_back -var caml_fq_vector_emplace_back = function (v, x) { - v.push(x); -}; - -// Provides: caml_fq_vector_get -var caml_fq_vector_get = function (v, i) { - var value = v[i + 1]; - if (value === undefined) { - throw Error( - 'caml_fq_vector_get: Index out of bounds, got ' + i + '/' + (v.length - 1) - ); - } - return new joo_global_object.Uint8Array(value); -}; - -// Provides: caml_fq_vector_to_rust -// Requires: caml_u8array_vector_to_rust_flat_vector -var caml_fq_vector_to_rust = function (v) { - return caml_u8array_vector_to_rust_flat_vector(v); -}; - -// Provides: caml_fq_vector_of_rust -// Requires: caml_u8array_vector_of_rust_flat_vector -var caml_fq_vector_of_rust = function (v) { - // TODO: Hardcoding this is a little brittle - return caml_u8array_vector_of_rust_flat_vector(v, 32); -}; - // Provides: free_finalization_registry -var free_finalization_registry = new joo_global_object.FinalizationRegistry( - function (instance_representative) { - instance_representative.free(); - } -); +var free_finalization_registry = new globalThis.FinalizationRegistry(function ( + instance_representative +) { + instance_representative.free(); +}); // Provides: free_on_finalize // Requires: free_finalization_registry @@ -577,366 +84,16 @@ var free_on_finalize = function (x) { // directly, but unfortunately the destructor name is some mangled internal // string generated by wasm_bindgen. For now, this is the best, // least-brittle way to free once the original class instance gets collected. 
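// Editor's note, not part of the diff: free_on_finalize wraps the same wasm
// pointer in a second "representative" object so that, once the original JS
// object is garbage-collected, the FinalizationRegistry callback can still
// call .free() on the underlying Rust value. The one-line change below
// follows wasm-bindgen's rename of the generated raw-pointer field from
// `ptr` to `__wbg_ptr`.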
- var instance_representative = x.constructor.__wrap(x.ptr); + var instance_representative = x.constructor.__wrap(x.__wbg_ptr); free_finalization_registry.register(x, instance_representative, x); return x; }; -// Provides: rust_affine_to_caml_affine -var rust_affine_to_caml_affine = function (pt) { - var infinity = pt.infinity; - if (infinity) { - pt.free(); - return 0; - } else { - var x = pt.x; - var y = pt.y; - pt.free(); - return [0, [0, x, y]]; - } -}; - -// Provides: rust_affine_of_caml_affine -var rust_affine_of_caml_affine = function (pt, klass) { - var res = new klass(); - if (pt === 0) { - res.infinity = true; - } else { - // Layout is [0, [0, x, y]] - // First 0 is the tag (it's the 0th constructor that takes arguments) - // Second 0 is the block marker for the anonymous tuple arguments - res.x = pt[1][1]; - res.y = pt[1][2]; - } - return res; -}; - -// Provides: caml_pallas_one -// Requires: plonk_wasm, free_on_finalize -var caml_pallas_one = function () { - var res = plonk_wasm.caml_pallas_one(); - free_on_finalize(res); - return res; -}; - -// Provides: caml_pallas_add -// Requires: plonk_wasm, free_on_finalize -var caml_pallas_add = function (x, y) { - var res = plonk_wasm.caml_pallas_add(x, y); - free_on_finalize(res); - return res; -}; - -// Provides: caml_pallas_sub -// Requires: plonk_wasm, free_on_finalize -var caml_pallas_sub = function (x, y) { - var res = plonk_wasm.caml_pallas_sub(x, y); - free_on_finalize(res); - return res; -}; - -// Provides: caml_pallas_negate -// Requires: plonk_wasm, free_on_finalize -var caml_pallas_negate = function (x) { - var res = plonk_wasm.caml_pallas_negate(x); - free_on_finalize(res); - return res; -}; - -// Provides: caml_pallas_double -// Requires: plonk_wasm, free_on_finalize -var caml_pallas_double = function (x) { - var res = plonk_wasm.caml_pallas_double(x); - free_on_finalize(res); - return res; -}; - -// Provides: caml_pallas_scale -// Requires: plonk_wasm, free_on_finalize -var caml_pallas_scale = function (x, y) { - var res = plonk_wasm.caml_pallas_scale(x, y); - free_on_finalize(res); - return res; -}; - -// Provides: caml_pallas_random -// Requires: plonk_wasm, free_on_finalize -var caml_pallas_random = function () { - var res = plonk_wasm.caml_pallas_random(); - free_on_finalize(res); - return res; -}; - -// Provides: caml_pallas_rng -// Requires: plonk_wasm, free_on_finalize -var caml_pallas_rng = function (i) { - var res = plonk_wasm.caml_pallas_rng(i); - free_on_finalize(res); - return res; -}; - -// Provides: caml_pallas_to_affine -// Requires: plonk_wasm, rust_affine_to_caml_affine -var caml_pallas_to_affine = function (pt) { - var res = plonk_wasm.caml_pallas_to_affine(pt); - return rust_affine_to_caml_affine(res); -}; - -// Provides: caml_pallas_of_affine -// Requires: plonk_wasm, rust_affine_of_caml_affine, free_on_finalize -var caml_pallas_of_affine = function (pt) { - var res = plonk_wasm.caml_pallas_of_affine( - rust_affine_of_caml_affine(pt, plonk_wasm.caml_pallas_affine_one) - ); - free_on_finalize(res); - return res; -}; - -// Provides: caml_pallas_of_affine_coordinates -// Requires: plonk_wasm, free_on_finalize -var caml_pallas_of_affine_coordinates = function (x, y) { - var res = plonk_wasm.caml_pallas_of_affine_coordinates(x, y); - free_on_finalize(res); - return res; -}; - -// Provides: caml_pallas_endo_base -// Requires: plonk_wasm -var caml_pallas_endo_base = plonk_wasm.caml_pallas_endo_base; - -// Provides: caml_pallas_endo_scalar -// Requires: plonk_wasm -var caml_pallas_endo_scalar = 
plonk_wasm.caml_pallas_endo_scalar; - -// Provides: caml_pallas_affine_deep_copy -// Requires: plonk_wasm, rust_affine_of_caml_affine, rust_affine_to_caml_affine -var caml_pallas_affine_deep_copy = function (pt) { - return rust_affine_to_caml_affine( - plonk_wasm.caml_pallas_affine_deep_copy( - rust_affine_of_caml_affine(pt, plonk_wasm.caml_pallas_affine_one) - ) - ); -}; - -// Provides: caml_vesta_one -// Requires: plonk_wasm, free_on_finalize -var caml_vesta_one = function () { - var res = plonk_wasm.caml_vesta_one(); - free_on_finalize(res); - return res; -}; - -// Provides: caml_vesta_add -// Requires: plonk_wasm, free_on_finalize -var caml_vesta_add = function (x, y) { - var res = plonk_wasm.caml_vesta_add(x, y); - free_on_finalize(res); - return res; -}; - -// Provides: caml_vesta_sub -// Requires: plonk_wasm, free_on_finalize -var caml_vesta_sub = function (x, y) { - var res = plonk_wasm.caml_vesta_sub(x, y); - free_on_finalize(res); - return res; -}; - -// Provides: caml_vesta_negate -// Requires: plonk_wasm, free_on_finalize -var caml_vesta_negate = function (x) { - var res = plonk_wasm.caml_vesta_negate(x); - free_on_finalize(res); - return res; -}; - -// Provides: caml_vesta_double -// Requires: plonk_wasm, free_on_finalize -var caml_vesta_double = function (x) { - var res = plonk_wasm.caml_vesta_double(x); - free_on_finalize(res); - return res; -}; - -// Provides: caml_vesta_scale -// Requires: plonk_wasm, free_on_finalize -var caml_vesta_scale = function (x, y) { - var res = plonk_wasm.caml_vesta_scale(x, y); - free_on_finalize(res); - return res; -}; - -// Provides: caml_vesta_random -// Requires: plonk_wasm, free_on_finalize -var caml_vesta_random = function () { - var res = plonk_wasm.caml_vesta_random(); - free_on_finalize(res); - return res; -}; - -// Provides: caml_vesta_rng -// Requires: plonk_wasm, free_on_finalize -var caml_vesta_rng = function (i) { - var res = plonk_wasm.caml_vesta_rng(i); - free_on_finalize(res); - return res; -}; - -// Provides: caml_vesta_to_affine -// Requires: plonk_wasm, rust_affine_to_caml_affine -var caml_vesta_to_affine = function (pt) { - var res = plonk_wasm.caml_vesta_to_affine(pt); - return rust_affine_to_caml_affine(res); -}; - -// Provides: caml_vesta_of_affine -// Requires: plonk_wasm, rust_affine_of_caml_affine, free_on_finalize -var caml_vesta_of_affine = function (pt) { - var res = plonk_wasm.caml_vesta_of_affine( - rust_affine_of_caml_affine(pt, plonk_wasm.caml_vesta_affine_one) - ); - free_on_finalize(res); - return res; -}; - -// Provides: caml_vesta_of_affine_coordinates -// Requires: plonk_wasm, free_on_finalize -var caml_vesta_of_affine_coordinates = function (x, y) { - var res = plonk_wasm.caml_vesta_of_affine_coordinates(x, y); - free_on_finalize(res); - return res; -}; - -// Provides: caml_vesta_endo_base -// Requires: plonk_wasm -var caml_vesta_endo_base = plonk_wasm.caml_vesta_endo_base; - -// Provides: caml_vesta_endo_scalar -// Requires: plonk_wasm -var caml_vesta_endo_scalar = plonk_wasm.caml_vesta_endo_scalar; - -// Provides: caml_vesta_affine_deep_copy -// Requires: plonk_wasm, rust_affine_of_caml_affine, rust_affine_to_caml_affine -var caml_vesta_affine_deep_copy = function (pt) { - return rust_affine_to_caml_affine( - plonk_wasm.caml_vesta_affine_deep_copy( - rust_affine_of_caml_affine(pt, plonk_wasm.caml_vesta_affine_one) - ) - ); -}; - -// Provides: caml_array_of_rust_vector -// Requires: js_class_vector_of_rust_vector -var caml_array_of_rust_vector = function (v, klass, convert, should_free) { - v = 
js_class_vector_of_rust_vector(v, klass); - var len = v.length; - var res = new Array(len + 1); - res[0] = 0; // OCaml tag before array contents - for (var i = 0; i < len; i++) { - var rust_val = v[i]; - res[i + 1] = convert(rust_val); - if (should_free) { - rust_val.free(); - } - } - return res; -}; - -// Provides: caml_array_to_rust_vector -// Requires: js_class_vector_to_rust_vector, free_finalization_registry -var caml_array_to_rust_vector = function (v, convert, mk_new) { - v = v.slice(1); // Copy, dropping OCaml tag - for (var i = 0, l = v.length; i < l; i++) { - var class_val = convert(v[i], mk_new); - v[i] = class_val; - // Don't free when GC runs; rust will free on its end. - free_finalization_registry.unregister(class_val); - } - return js_class_vector_to_rust_vector(v); -}; - -// Provides: caml_poly_comm_of_rust_poly_comm -// Requires: rust_affine_to_caml_affine, caml_array_of_rust_vector -var caml_poly_comm_of_rust_poly_comm = function ( - poly_comm, - klass, - should_free -) { - var rust_shifted = poly_comm.shifted; - var rust_unshifted = poly_comm.unshifted; - var caml_shifted; - if (rust_shifted === undefined) { - caml_shifted = 0; - } else { - caml_shifted = [0, rust_affine_to_caml_affine(rust_shifted)]; - } - var caml_unshifted = caml_array_of_rust_vector( - rust_unshifted, - klass, - rust_affine_to_caml_affine, - should_free - ); - return [0, caml_unshifted, caml_shifted]; -}; - -// Provides: caml_poly_comm_to_rust_poly_comm -// Requires: rust_affine_of_caml_affine, caml_array_to_rust_vector -var caml_poly_comm_to_rust_poly_comm = function ( - poly_comm, - poly_comm_class, - mk_affine -) { - var caml_unshifted = poly_comm[1]; - var caml_shifted = poly_comm[2]; - var rust_shifted = undefined; - if (caml_shifted !== 0) { - rust_shifted = rust_affine_of_caml_affine(caml_shifted[1], mk_affine); - } - var rust_unshifted = caml_array_to_rust_vector( - caml_unshifted, - rust_affine_of_caml_affine, - mk_affine - ); - return new poly_comm_class(rust_unshifted, rust_shifted); -}; - -// Provides: caml_vesta_poly_comm_of_rust -// Requires: plonk_wasm, caml_poly_comm_of_rust_poly_comm -var caml_vesta_poly_comm_of_rust = function (x) { - return caml_poly_comm_of_rust_poly_comm(x, plonk_wasm.WasmGVesta, false); -}; - -// Provides: caml_vesta_poly_comm_to_rust -// Requires: plonk_wasm, caml_poly_comm_to_rust_poly_comm -var caml_vesta_poly_comm_to_rust = function (x) { - return caml_poly_comm_to_rust_poly_comm( - x, - plonk_wasm.WasmFpPolyComm, - plonk_wasm.caml_vesta_affine_one - ); -}; - -// Provides: caml_pallas_poly_comm_of_rust -// Requires: plonk_wasm, caml_poly_comm_of_rust_poly_comm -var caml_pallas_poly_comm_of_rust = function (x) { - return caml_poly_comm_of_rust_poly_comm(x, plonk_wasm.WasmGPallas, false); -}; - -// Provides: caml_pallas_poly_comm_to_rust -// Requires: plonk_wasm, caml_poly_comm_to_rust_poly_comm -var caml_pallas_poly_comm_to_rust = function (x) { - return caml_poly_comm_to_rust_poly_comm( - x, - plonk_wasm.WasmFqPolyComm, - plonk_wasm.caml_pallas_affine_one - ); -}; - +// srs + // Provides: caml_fp_srs_create -// Requires: plonk_wasm, free_on_finalize -var caml_fp_srs_create = function (i) { - return free_on_finalize(plonk_wasm.caml_fp_srs_create(i)); -}; +// Requires: tsSrs +var caml_fp_srs_create = tsSrs.fp.create; // Provides: caml_fp_srs_write // Requires: plonk_wasm, caml_jsstring_of_string @@ -966,42 +123,35 @@ var caml_fp_srs_read = function (offset, path) { }; // Provides: caml_fp_srs_lagrange_commitment -// Requires: plonk_wasm, 
caml_vesta_poly_comm_of_rust -var caml_fp_srs_lagrange_commitment = function (t, domain_size, i) { - var res = plonk_wasm.caml_fp_srs_lagrange_commitment(t, domain_size, i); - return caml_vesta_poly_comm_of_rust(res); -}; +// Requires: tsSrs +var caml_fp_srs_lagrange_commitment = tsSrs.fp.lagrangeCommitment; // Provides: caml_fp_srs_commit_evaluations -// Requires: plonk_wasm, caml_vesta_poly_comm_of_rust, caml_fp_vector_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_fp_srs_commit_evaluations = function (t, domain_size, fps) { var res = plonk_wasm.caml_fp_srs_commit_evaluations( t, domain_size, - caml_fp_vector_to_rust(fps) + tsRustConversion.fp.vectorToRust(fps) ); - return caml_vesta_poly_comm_of_rust(res); + return tsRustConversion.fp.polyCommFromRust(res); }; // Provides: caml_fp_srs_b_poly_commitment -// Requires: plonk_wasm, caml_vesta_poly_comm_of_rust, caml_u8array_vector_to_rust_flat_vector +// Requires: plonk_wasm, tsRustConversion var caml_fp_srs_b_poly_commitment = function (srs, chals) { var res = plonk_wasm.caml_fp_srs_b_poly_commitment( srs, - caml_u8array_vector_to_rust_flat_vector(chals) + tsRustConversion.fieldsToRustFlat(chals) ); - return caml_vesta_poly_comm_of_rust(res); + return tsRustConversion.fp.polyCommFromRust(res); }; // Provides: caml_fp_srs_batch_accumulator_check -// Requires: plonk_wasm, rust_affine_of_caml_affine, caml_array_to_rust_vector, caml_fp_vector_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_fp_srs_batch_accumulator_check = function (srs, comms, chals) { - var rust_comms = caml_array_to_rust_vector( - comms, - rust_affine_of_caml_affine, - plonk_wasm.caml_vesta_affine_one - ); - var rust_chals = caml_fp_vector_to_rust(chals); + var rust_comms = tsRustConversion.fp.pointsToRust(comms); + var rust_chals = tsRustConversion.fp.vectorToRust(chals); var ok = plonk_wasm.caml_fp_srs_batch_accumulator_check( srs, rust_comms, @@ -1011,33 +161,30 @@ var caml_fp_srs_batch_accumulator_check = function (srs, comms, chals) { }; // Provides: caml_fp_srs_batch_accumulator_generate -// Requires: plonk_wasm, rust_affine_to_caml_affine, caml_array_of_rust_vector, caml_fp_vector_to_rust -var caml_fp_srs_batch_accumulator_generate = function (srs, comms, chals) { - var rust_chals = caml_fp_vector_to_rust(chals); +// Requires: plonk_wasm, tsRustConversion +var caml_fp_srs_batch_accumulator_generate = function (srs, n_comms, chals) { + var rust_chals = tsRustConversion.fp.vectorToRust(chals); var rust_comms = plonk_wasm.caml_fp_srs_batch_accumulator_generate( srs, - comms, + n_comms, rust_chals ); - return caml_array_of_rust_vector( - rust_comms, - plonk_wasm.WasmGVesta, - rust_affine_to_caml_affine, - false - ); + return tsRustConversion.fp.pointsFromRust(rust_comms); }; // Provides: caml_fp_srs_h -// Requires: plonk_wasm, rust_affine_to_caml_affine +// Requires: plonk_wasm, tsRustConversion var caml_fp_srs_h = function (t) { - return rust_affine_to_caml_affine(plonk_wasm.caml_fp_srs_h(t)); + return tsRustConversion.fp.pointFromRust(plonk_wasm.caml_fp_srs_h(t)); }; +// Provides: caml_fp_srs_add_lagrange_basis +// Requires: tsSrs +var caml_fp_srs_add_lagrange_basis = tsSrs.fp.addLagrangeBasis; + // Provides: caml_fq_srs_create -// Requires: plonk_wasm, free_on_finalize -var caml_fq_srs_create = function (i) { - return free_on_finalize(plonk_wasm.caml_fq_srs_create(i)); -}; +// Requires: tsSrs +var caml_fq_srs_create = tsSrs.fq.create; // Provides: caml_fq_srs_write // Requires: plonk_wasm, caml_jsstring_of_string @@ -1067,42 +214,35 @@ var 
caml_fq_srs_read = function (offset, path) { }; // Provides: caml_fq_srs_lagrange_commitment -// Requires: plonk_wasm, caml_pallas_poly_comm_of_rust -var caml_fq_srs_lagrange_commitment = function (t, domain_size, i) { - var res = plonk_wasm.caml_fq_srs_lagrange_commitment(t, domain_size, i); - return caml_pallas_poly_comm_of_rust(res); -}; +// Requires: tsSrs +var caml_fq_srs_lagrange_commitment = tsSrs.fq.lagrangeCommitment; // Provides: caml_fq_srs_commit_evaluations -// Requires: plonk_wasm, caml_pallas_poly_comm_of_rust, caml_fq_vector_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_fq_srs_commit_evaluations = function (t, domain_size, fqs) { var res = plonk_wasm.caml_fq_srs_commit_evaluations( t, domain_size, - caml_fq_vector_to_rust(fqs) + tsRustConversion.fq.vectorToRust(fqs) ); - return caml_pallas_poly_comm_of_rust(res); + return tsRustConversion.fq.polyCommFromRust(res); }; // Provides: caml_fq_srs_b_poly_commitment -// Requires: plonk_wasm, caml_pallas_poly_comm_of_rust, caml_u8array_vector_to_rust_flat_vector +// Requires: plonk_wasm, tsRustConversion var caml_fq_srs_b_poly_commitment = function (srs, chals) { var res = plonk_wasm.caml_fq_srs_b_poly_commitment( srs, - caml_u8array_vector_to_rust_flat_vector(chals) + tsRustConversion.fieldsToRustFlat(chals) ); - return caml_pallas_poly_comm_of_rust(res); + return tsRustConversion.fq.polyCommFromRust(res); }; // Provides: caml_fq_srs_batch_accumulator_check -// Requires: plonk_wasm, rust_affine_of_caml_affine, caml_array_to_rust_vector, caml_fq_vector_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_fq_srs_batch_accumulator_check = function (srs, comms, chals) { - var rust_comms = caml_array_to_rust_vector( - comms, - rust_affine_of_caml_affine, - plonk_wasm.caml_pallas_affine_one - ); - var rust_chals = caml_fq_vector_to_rust(chals); + var rust_comms = tsRustConversion.fq.pointsToRust(comms); + var rust_chals = tsRustConversion.fq.vectorToRust(chals); var ok = plonk_wasm.caml_fq_srs_batch_accumulator_check( srs, rust_comms, @@ -1112,112 +252,28 @@ var caml_fq_srs_batch_accumulator_check = function (srs, comms, chals) { }; // Provides: caml_fq_srs_batch_accumulator_generate -// Requires: plonk_wasm, rust_affine_to_caml_affine, caml_array_of_rust_vector, caml_fq_vector_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_fq_srs_batch_accumulator_generate = function (srs, comms, chals) { - var rust_chals = caml_fq_vector_to_rust(chals); + var rust_chals = tsRustConversion.fq.vectorToRust(chals); var rust_comms = plonk_wasm.caml_fq_srs_batch_accumulator_generate( srs, comms, rust_chals ); - return caml_array_of_rust_vector( - rust_comms, - plonk_wasm.WasmGPallas, - rust_affine_to_caml_affine, - false - ); + return tsRustConversion.fq.pointsFromRust(rust_comms); }; // Provides: caml_fq_srs_h -// Requires: plonk_wasm, rust_affine_to_caml_affine +// Requires: plonk_wasm, tsRustConversion var caml_fq_srs_h = function (t) { - return rust_affine_to_caml_affine(plonk_wasm.caml_fq_srs_h(t)); + return tsRustConversion.fq.pointFromRust(plonk_wasm.caml_fq_srs_h(t)); }; // Provides: caml_fq_srs_add_lagrange_basis -// Requires: plonk_wasm -function caml_fq_srs_add_lagrange_basis(srs, log2_size) { - return plonk_wasm.caml_fq_srs_add_lagrange_basis(srs, log2_size); -} - -// Provides: caml_plonk_wire_of_rust -var caml_plonk_wire_of_rust = function (wire) { - var res = [0, wire.row, wire.col]; - wire.free(); - return res; -}; - -// Provides: caml_plonk_wire_to_rust -// Requires: plonk_wasm -var caml_plonk_wire_to_rust 
= function (wire) { - return plonk_wasm.Wire.create(wire[1], wire[2]); -}; - -// Provides: caml_plonk_wires_of_rust -// Requires: caml_plonk_wire_of_rust -var caml_plonk_wires_of_rust = function (wires) { - var res = [ - 0, - caml_plonk_wire_of_rust(wires[0]), - caml_plonk_wire_of_rust(wires[1]), - caml_plonk_wire_of_rust(wires[2]), - caml_plonk_wire_of_rust(wires[3]), - caml_plonk_wire_of_rust(wires[4]), - caml_plonk_wire_of_rust(wires[5]), - caml_plonk_wire_of_rust(wires[6]), - ]; - wires.free(); - return res; -}; - -// Provides: caml_plonk_wires_to_rust -// Requires: plonk_wasm, caml_plonk_wire_to_rust -var caml_plonk_wires_to_rust = function (wires) { - return new plonk_wasm.WasmGateWires( - caml_plonk_wire_to_rust(wires[1]), - caml_plonk_wire_to_rust(wires[2]), - caml_plonk_wire_to_rust(wires[3]), - caml_plonk_wire_to_rust(wires[4]), - caml_plonk_wire_to_rust(wires[5]), - caml_plonk_wire_to_rust(wires[6]), - caml_plonk_wire_to_rust(wires[7]) - ); -}; - -// Provides: caml_plonk_gate_of_rust -// Requires: caml_plonk_wires_of_rust, caml_u8array_vector_of_rust_flat_vector -var caml_plonk_gate_of_rust = function (gate) { - // TODO: Hardcoding 32 here is a little brittle - var res = [ - 0, - gate.typ, - caml_plonk_wires_of_rust(gate.wires), - caml_u8array_vector_of_rust_flat_vector(gate.c, 32), - ]; - gate.free(); - return res; -}; - -// Provides: caml_fp_plonk_gate_to_rust -// Requires: plonk_wasm, caml_plonk_wires_to_rust, caml_u8array_vector_to_rust_flat_vector -var caml_fp_plonk_gate_to_rust = function (gate) { - return new plonk_wasm.WasmFpGate( - gate[1], - caml_plonk_wires_to_rust(gate[2]), - caml_u8array_vector_to_rust_flat_vector(gate[3]) - ); -}; +// Requires: tsSrs +var caml_fq_srs_add_lagrange_basis = tsSrs.fq.addLagrangeBasis; -// Provides: caml_fq_plonk_gate_to_rust -// Requires: plonk_wasm, caml_plonk_wires_to_rust, caml_u8array_vector_to_rust_flat_vector -var caml_fq_plonk_gate_to_rust = function (gate) { - // TODO: Hardcoding 32 here is a little brittle - return new plonk_wasm.WasmFqGate( - gate[1], - caml_plonk_wires_to_rust(gate[2]), - caml_u8array_vector_to_rust_flat_vector(gate[3]) - ); -}; +// gate vector // Provides: caml_pasta_fp_plonk_gate_vector_create // Requires: plonk_wasm, free_on_finalize @@ -1226,18 +282,18 @@ var caml_pasta_fp_plonk_gate_vector_create = function () { }; // Provides: caml_pasta_fp_plonk_gate_vector_add -// Requires: plonk_wasm, caml_fp_plonk_gate_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fp_plonk_gate_vector_add = function (v, x) { return plonk_wasm.caml_pasta_fp_plonk_gate_vector_add( v, - caml_fp_plonk_gate_to_rust(x) + tsRustConversion.fp.gateToRust(x) ); }; // Provides: caml_pasta_fp_plonk_gate_vector_get -// Requires: plonk_wasm, caml_plonk_gate_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fp_plonk_gate_vector_get = function (v, i) { - return caml_plonk_gate_of_rust( + return tsRustConversion.fp.gateFromRust( plonk_wasm.caml_pasta_fp_plonk_gate_vector_get(v, i) ); }; @@ -1249,12 +305,12 @@ var caml_pasta_fp_plonk_gate_vector_len = function (v) { }; // Provides: caml_pasta_fp_plonk_gate_vector_wrap -// Requires: plonk_wasm, caml_plonk_wire_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fp_plonk_gate_vector_wrap = function (v, x, y) { return plonk_wasm.caml_pasta_fp_plonk_gate_vector_wrap( v, - caml_plonk_wire_to_rust(x), - caml_plonk_wire_to_rust(y) + tsRustConversion.wireToRust(x), + tsRustConversion.wireToRust(y) ); }; @@ -1285,6 +341,8 @@ var 
caml_pasta_fp_plonk_circuit_serialize = function ( ); }; +// prover index + // Provides: caml_pasta_fq_plonk_gate_vector_create // Requires: plonk_wasm, free_on_finalize var caml_pasta_fq_plonk_gate_vector_create = function () { @@ -1292,18 +350,18 @@ var caml_pasta_fq_plonk_gate_vector_create = function () { }; // Provides: caml_pasta_fq_plonk_gate_vector_add -// Requires: plonk_wasm, caml_fq_plonk_gate_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fq_plonk_gate_vector_add = function (v, x) { return plonk_wasm.caml_pasta_fq_plonk_gate_vector_add( v, - caml_fq_plonk_gate_to_rust(x) + tsRustConversion.fq.gateToRust(x) ); }; // Provides: caml_pasta_fq_plonk_gate_vector_get -// Requires: plonk_wasm, caml_plonk_gate_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fq_plonk_gate_vector_get = function (v, i) { - return caml_plonk_gate_of_rust( + return tsRustConversion.fq.gateFromRust( plonk_wasm.caml_pasta_fq_plonk_gate_vector_get(v, i) ); }; @@ -1315,12 +373,12 @@ var caml_pasta_fq_plonk_gate_vector_len = function (v) { }; // Provides: caml_pasta_fq_plonk_gate_vector_wrap -// Requires: plonk_wasm, caml_plonk_wire_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fq_plonk_gate_vector_wrap = function (v, x, y) { return plonk_wasm.caml_pasta_fq_plonk_gate_vector_wrap( v, - caml_plonk_wire_to_rust(x), - caml_plonk_wire_to_rust(y) + tsRustConversion.wireToRust(x), + tsRustConversion.wireToRust(y) ); }; @@ -1352,22 +410,52 @@ var caml_pasta_fq_plonk_circuit_serialize = function ( }; // Provides: caml_pasta_fp_plonk_index_create -// Requires: plonk_wasm, free_on_finalize +// Requires: plonk_wasm, free_on_finalize, tsRustConversion var caml_pasta_fp_plonk_index_create = function ( gates, public_inputs, + caml_lookup_tables, + caml_runtime_table_cfgs, prev_challenges, urs ) { + var wasm_lookup_tables = + tsRustConversion.fp.lookupTablesToRust(caml_lookup_tables); + var wasm_runtime_table_cfgs = tsRustConversion.fp.runtimeTableCfgsToRust( + caml_runtime_table_cfgs + ); + var t = plonk_wasm.caml_pasta_fp_plonk_index_create( gates, public_inputs, + wasm_lookup_tables, + wasm_runtime_table_cfgs, prev_challenges, urs ); return free_on_finalize(t); }; +// Provides: caml_pasta_fp_plonk_index_create_bytecode +// Requires: caml_pasta_fp_plonk_index_create +var caml_pasta_fp_plonk_index_create_bytecode = function ( + gates, + public_inputs, + caml_lookup_tables, + caml_runtime_table_cfgs, + prev_challenges, + urs +) { + return caml_pasta_fp_plonk_index_create( + gates, + public_inputs, + caml_lookup_tables, + caml_runtime_table_cfgs, + prev_challenges, + urs + ); +}; + // Provides: caml_pasta_fp_plonk_index_max_degree // Requires: plonk_wasm var caml_pasta_fp_plonk_index_max_degree = @@ -1424,23 +512,53 @@ var caml_pasta_fp_plonk_index_write = function (append, t, path) { }; // Provides: caml_pasta_fq_plonk_index_create -// Requires: plonk_wasm, free_on_finalize +// Requires: plonk_wasm, free_on_finalize, tsRustConversion var caml_pasta_fq_plonk_index_create = function ( gates, public_inputs, + caml_lookup_tables, + caml_runtime_table_cfgs, prev_challenges, urs ) { + var wasm_lookup_tables = + tsRustConversion.fq.lookupTablesToRust(caml_lookup_tables); + var wasm_runtime_table_cfgs = tsRustConversion.fq.runtimeTableCfgsToRust( + caml_runtime_table_cfgs + ); + return free_on_finalize( plonk_wasm.caml_pasta_fq_plonk_index_create( gates, public_inputs, + wasm_lookup_tables, + wasm_runtime_table_cfgs, prev_challenges, urs ) ); }; +// Provides: 
caml_pasta_fq_plonk_index_create_bytecode +// Requires: caml_pasta_fq_plonk_index_create +var caml_pasta_fq_plonk_index_create_bytecode = function ( + gates, + public_inputs, + caml_lookup_tables, + caml_runtime_table_cfgs, + prev_challenges, + urs +) { + return caml_pasta_fq_plonk_index_create( + gates, + public_inputs, + caml_lookup_tables, + caml_runtime_table_cfgs, + prev_challenges, + urs + ); +}; + // Provides: caml_pasta_fq_plonk_index_max_degree // Requires: plonk_wasm var caml_pasta_fq_plonk_index_max_degree = @@ -1496,628 +614,44 @@ var caml_pasta_fq_plonk_index_write = function (append, t, path) { ); }; -// Provides: caml_plonk_domain_of_rust -var caml_plonk_domain_of_rust = function (x) { - var log_size_of_group = x.log_size_of_group; - var group_gen = x.group_gen; - x.free(); - return [0, log_size_of_group, group_gen]; -}; - -// Provides: caml_plonk_domain_to_rust -// Requires: free_on_finalize -var caml_plonk_domain_to_rust = function (x, klass) { - // TODO: Check if this gets finalized - return new klass(x[1], x[2]); -}; - -// Provides: caml_plonk_verification_evals_of_rust -// Requires: caml_poly_comm_of_rust_poly_comm, js_class_vector_of_rust_vector, plonk_wasm -var caml_plonk_verification_evals_of_rust = function (x, affine_klass) { - var convert = function (poly_comm) { - return caml_poly_comm_of_rust_poly_comm(poly_comm, affine_klass, false); - }; - - // var convertArray = function(comms) { - // var n = comms.length; - // var res = new Array(n-1); - // for (var i = 1; i < n; i++) { - // res[i-1] = convert(comms[i]); - // } - // return js_class_vector_to_rust_vector(res); - // }; - - // should be inverse of the above ^ - // TODO: make work for both Wasm..PolyComm types - var convertArray = function (comms) { - comms = js_class_vector_of_rust_vector(comms, plonk_wasm.WasmFqPolyComm); - // comms = js_class_vector_of_rust_vector(comms, plonk_wasm.WasmFpPolyComm); - return [0].concat(comms.map(convert)); - }; - - var sigma_comm = convertArray(x.sigma_comm); - var coefficients_comm = convertArray(x.coefficients_comm); - var generic_comm = convert(x.generic_comm); - var psm_comm = convert(x.psm_comm); - var complete_add_comm = convert(x.complete_add_comm); - var mul_comm = convert(x.mul_comm); - var emul_comm = convert(x.emul_comm); - var endomul_scalar_comm = convert(x.endomul_scalar_comm); - - x.free(); - return [ - 0, - sigma_comm, - coefficients_comm, - generic_comm, - psm_comm, - complete_add_comm, - mul_comm, - emul_comm, - endomul_scalar_comm, - 0, - ]; -}; - -// Provides: caml_plonk_verification_evals_to_rust -// Requires: caml_poly_comm_to_rust_poly_comm, js_class_vector_to_rust_vector -var caml_plonk_verification_evals_to_rust = function ( - x, - klass, - poly_comm_to_rust -) { - var convertArray = function (comms) { - var n = comms.length; - var res = new Array(n - 1); - for (var i = 1; i < n; i++) { - res[i - 1] = poly_comm_to_rust(comms[i]); - } - return js_class_vector_to_rust_vector(res); - }; - - var sigma_comm = convertArray(x[1]); - var coefficients_comm = convertArray(x[2]); - var generic_comm = poly_comm_to_rust(x[3]); - var psm_comm = poly_comm_to_rust(x[4]); - var complete_add_comm = poly_comm_to_rust(x[5]); - var mul_comm = poly_comm_to_rust(x[6]); - var emul_comm = poly_comm_to_rust(x[7]); - var endomul_scalar_comm = poly_comm_to_rust(x[8]); - - return new klass( - sigma_comm, - coefficients_comm, - generic_comm, - psm_comm, - complete_add_comm, - mul_comm, - emul_comm, - endomul_scalar_comm - ); -}; - -// Provides: 
caml_plonk_verification_shifts_of_rust -var caml_plonk_verification_shifts_of_rust = function (x) { - var res = [0, x.s0, x.s1, x.s2, x.s3, x.s4, x.s5, x.s6]; - x.free(); - return res; -}; - -// Provides: caml_plonk_verification_shifts_to_rust -var caml_plonk_verification_shifts_to_rust = function (x, klass) { - return new klass(x[1], x[2], x[3], x[4], x[5], x[6], x[7]); -}; - -// Provides: column_of_rust -function column_of_rust(col) { - // type nonrec column = Witness of int | Z | LookupSorted of int | LookupAggreg | LookupTable | LookupKindIndex of int | Index of gate_type | Coefficient of int - var tag = col.tag; - var gate_type = col.gate_type; - var i = col.i; - col.free(); - return ( - { - 0: [tag, i], - 2: [tag, i], - 5: [tag, i], - 6: [tag, gate_type], - 7: [tag, i], - }[tag] || tag - ); -} - -// Provides: variable_of_rust -// Requires: column_of_rust -function variable_of_rust(variable) { - // col * row - var col = variable.col; - var row = variable.row; // 0, 1 - variable.free(); - return [0, column_of_rust(col), row]; -} - -// Provides: polish_token_of_rust -// Requires: variable_of_rust -function polish_token_of_rust(token) { - var tag = token.tag; - var i0 = token.i0; - var i1 = token.i1; - var f = token.f; - var v = variable_of_rust(token.v); - token.free(); - return ( - { - 5: [5, i0, i1], - 6: [6, f], - 7: [7, v], - 9: [9, i0], - 14: [14, i0], - 16: [16, i0], - }[tag] || tag - ); -} - -// Provides: index_term_of_rust -// Requires: column_of_rust, js_class_vector_of_rust_vector, polish_token_of_rust -function index_term_of_rust(term, token_class) { - // pub column: WasmColumn, - // pub coefficient: WasmVector, - var column = column_of_rust(term.column); - var coefficient = js_class_vector_of_rust_vector( - term.coefficient, - token_class - ); - coefficient = coefficient.map(polish_token_of_rust); - coefficient = [0].concat(coefficient); - term.free(); - return [0, column, coefficient]; -} - -// Provides: wrap -function wrap(ptr, klass) { - var obj = Object.create(klass.prototype); - obj.ptr = ptr; - return obj; -} - -// Provides: linearization_of_rust -// Requires: plonk_wasm, js_class_vector_of_rust_vector, polish_token_of_rust, wrap, index_term_of_rust -function linearization_of_rust(linearization, affine_class) { - var F = affine_class === plonk_wasm.WasmGVesta ? 
'Fq' : 'Fp'; - var WasmPolishToken = plonk_wasm['Wasm' + F + 'PolishToken']; - var WasmIndexTerm = plonk_wasm['Wasm' + F + 'IndexTerm']; - - var constant_term = js_class_vector_of_rust_vector( - linearization.constant_term, - WasmPolishToken - ); - constant_term = constant_term.map(polish_token_of_rust); - constant_term = [0].concat(constant_term); - - var index_terms = Array.from(linearization.index_terms).map(function (ptr) { - var wasmIndexTerm = wrap(ptr, WasmIndexTerm); - return index_term_of_rust(wasmIndexTerm, WasmPolishToken); - }); - index_terms = [0].concat(index_terms); - - linearization.free(); - return [0, constant_term, index_terms]; -} - -// Provides: None -var None = 0; - -// Provides: caml_is_none -// Requires: None -var caml_is_none = function (v) { - return v === None; -}; - -// Provides: caml_map_of_rust_vector -// Requires: js_class_vector_of_rust_vector -var caml_map_of_rust_vector = function (v, klass, converter_to_rust) { - var a = js_class_vector_of_rust_vector(v, klass); - var res = [0]; - for (var i = 0; i < a.length; ++i) { - res.push(converter_to_rust(a[i])); - } - return res; -}; +// verifier index // Provides: caml_opt_of_rust -// Requires: None var caml_opt_of_rust = function (value, value_of_rust) { if (value === undefined) { - return None; + return 0; } else { return [0, value_of_rust(value)]; } }; -// Provides: caml_bool_of_rust -var caml_bool_of_rust = function (bool) { - return bool; -}; - -// Provides: caml_lookup_patterns_of_rust -// Requires: plonk_wasm, caml_bool_of_rust -var caml_lookup_patterns_of_rust = function (wasm_lookup_patterns) { - return [ - 0, - caml_bool_of_rust(wasm_lookup_patterns.xor), - caml_bool_of_rust(wasm_lookup_patterns.lookup), - caml_bool_of_rust(wasm_lookup_patterns.range_check), - caml_bool_of_rust(wasm_lookup_patterns.foreign_field_mul), - ]; -}; - -// Provides: caml_lookup_features_of_rust -// Requires: plonk_wasm, caml_lookup_patterns_of_rust, caml_bool_of_rust -var caml_lookup_features_of_rust = function (wasm_lookup_features) { - var caml_lookup_patterns = caml_lookup_patterns_of_rust( - wasm_lookup_features.patterns - ); - var caml_joint_lookup_used = caml_bool_of_rust( - wasm_lookup_features.joint_lookup_used - ); - var caml_uses_runtime_tables = caml_bool_of_rust( - wasm_lookup_features.uses_runtime_tables - ); - - return [ - 0, - caml_lookup_patterns, - caml_joint_lookup_used, - caml_uses_runtime_tables, - ]; -}; - -// Provides: caml_lookup_info_of_rust -// Requires: plonk_wasm, caml_lookup_features_of_rust -var caml_lookup_info_of_rust = function (wasm_lookup_info) { - var caml_max_per_row = wasm_lookup_info.max_per_row; - var caml_max_joint_size = wasm_lookup_info.max_joint_size; - var caml_lookup_features = caml_lookup_features_of_rust( - wasm_lookup_info.features - ); - - return [0, caml_max_per_row, caml_max_joint_size, caml_lookup_features]; -}; - -// Provides: caml_lookup_selectors_of_rust -// Requires: plonk_wasm, caml_opt_of_rust -var caml_lookup_selectors_of_rust = function ( - wasm_lookup_selectors, - poly_comm_of_rust -) { - var xor = caml_opt_of_rust(wasm_lookup_selectors.xor, poly_comm_of_rust); - var lookup = caml_opt_of_rust( - wasm_lookup_selectors.lookup, - poly_comm_of_rust - ); - var range_check = caml_opt_of_rust( - wasm_lookup_selectors.range_check, - poly_comm_of_rust - ); - var ffmul = caml_opt_of_rust(wasm_lookup_selectors.ffmul, poly_comm_of_rust); - - return [0, xor, lookup, range_check, ffmul]; -}; - -// Provides: caml_lookup_verifier_index_of_rust -// Requires: plonk_wasm, 
caml_map_of_rust_vector, caml_bool_of_rust, caml_lookup_selectors_of_rust, caml_opt_of_rust, caml_lookup_info_of_rust -var caml_lookup_verifier_index_of_rust = function ( - wasm_lookup_index, - poly_comm_class, - poly_comm_of_rust -) { - var caml_joint_lookup_used = caml_bool_of_rust( - wasm_lookup_index.join_lookup_used - ); - - // lookup table - var caml_lookup_table = caml_map_of_rust_vector( - wasm_lookup_index.lookup_table, - poly_comm_class, - poly_comm_of_rust - ); - - var caml_lookup_selectors = caml_lookup_selectors_of_rust( - wasm_lookup_index.lookup_selectors, - poly_comm_of_rust - ); - - var caml_table_ids = caml_opt_of_rust( - wasm_lookup_index.table_ids, - poly_comm_of_rust - ); - - var caml_lookup_info = caml_lookup_info_of_rust( - wasm_lookup_index.lookup_info - ); - - var caml_runtime_tables_selector = caml_opt_of_rust( - wasm_lookup_index.runtime_tables_selector, - poly_comm_of_rust - ); - - return [ - 0, - caml_joint_lookup_used, - caml_lookup_table, - caml_lookup_selectors, - caml_table_ids, - caml_lookup_info, - caml_runtime_tables_selector, - ]; -}; - -// Provides: caml_plonk_verifier_index_of_rust -// Requires: linearization_of_rust, caml_plonk_domain_of_rust, caml_plonk_verification_evals_of_rust, caml_plonk_verification_shifts_of_rust, free_on_finalize, None, caml_opt_of_rust, caml_lookup_verifier_index_of_rust -var caml_plonk_verifier_index_of_rust = function (x, affine_class) { - var domain = caml_plonk_domain_of_rust(x.domain); - var max_poly_size = x.max_poly_size; - var public_ = x.public_; - var prev_challenges = x.prev_challenges; - var srs = free_on_finalize(x.srs); - var evals = caml_plonk_verification_evals_of_rust(x.evals, affine_class); - var shifts = caml_plonk_verification_shifts_of_rust(x.shifts); - // TODO: Handle linearization correctly! - // var linearization = linearization_of_rust(x.linearization, affine_class); - - var caml_lookup_index = caml_opt_of_rust( - x.lookup_index, - caml_lookup_verifier_index_of_rust - ); - x.free(); - return [ - 0, - domain, - max_poly_size, - public_, - prev_challenges, - srs, - evals, - shifts, - caml_lookup_index, - ]; -}; -// Provides: caml_plonk_verifier_index_to_rust -// Requires: caml_plonk_domain_to_rust, caml_plonk_verification_evals_to_rust, caml_plonk_verification_shifts_to_rust, free_finalization_registry, caml_plonk_lookup_verifier_index_to_rust, caml_opt_to_rust, caml_poly_comm_to_rust_poly_comm -var caml_plonk_verifier_index_to_rust = function ( - x, - klass, - domain_class, - verification_evals_class, - poly_comm_class, - mk_affine, - verification_shifts_class, - lookup_verifier_index_class, - lookup_selectors_class -) { - // Defining how to translate polynomial commitments from OCaml back to Rust - var poly_comm_to_rust = function (poly_comm) { - return caml_poly_comm_to_rust_poly_comm( - poly_comm, - poly_comm_class, - mk_affine - ); - }; - var domain = caml_plonk_domain_to_rust(x[1], domain_class); - var max_poly_size = x[2]; - var public_ = x[3]; - var prev_challenges = x[4]; - var srs = x[5]; - var evals = caml_plonk_verification_evals_to_rust( - x[6], - verification_evals_class, - poly_comm_to_rust - ); - var shifts = caml_plonk_verification_shifts_to_rust( - x[7], - verification_shifts_class - ); - - ////////////////////////////////////////////////////////////////////////////// - // The lookup_index_to_rust function is defined only in order to be able to // - // use the generic caml_opt_to_rust function. 
// - // // - // We could as well inline it but I preferred not having to think about // - // internal representation of values yet again. // - ////////////////////////////////////////////////////////////////////////////// - var lookup_index_to_rust = function (lidx) { - return caml_plonk_lookup_verifier_index_to_rust( - lidx, - lookup_verifier_index_class, - poly_comm_to_rust, - lookup_selectors_class, - mk_affine - ); - }; - - var wasm_lookup_index = caml_opt_to_rust(x[8], lookup_index_to_rust); - - return new klass( - domain, - max_poly_size, - public_, - prev_challenges, - srs, - evals, - shifts, - wasm_lookup_index - ); -}; - // Provides: caml_opt_to_rust -// Requires: caml_is_none, None var caml_opt_to_rust = function (caml_optional_value, to_rust) { // to_rust expects the parameters of the variant. A `Some vx` is represented // as [0, vx] - if (caml_is_none(caml_optional_value)) { + if (caml_optional_value === 0) { return undefined; } else { return to_rust(caml_optional_value[1]); } }; -// Provides: caml_lookup_selectors_to_rust -// Requires: plonk_wasm, caml_opt_to_rust -var caml_lookup_selectors_to_rust = function ( - wasm_lookup_selectors, - klass, - poly_comm_to_rust -) { - var xor = caml_opt_to_rust(wasm_lookup_selectors[1], poly_comm_to_rust); - var lookup = caml_opt_to_rust(wasm_lookup_selectors[2], poly_comm_to_rust); - var range_check = caml_opt_to_rust( - wasm_lookup_selectors[3], - poly_comm_to_rust - ); - var ffmul = caml_opt_to_rust(wasm_lookup_selectors[4], poly_comm_to_rust); - - return new klass(xor, lookup, range_check, ffmul); -}; - -// Provides: caml_lookup_table_to_rust -// Requires: plonk_wasm, caml_array_to_rust_vector, rust_affine_of_caml_affine -var caml_lookup_table_to_rust = function (wasm_lookup_table, mk_affine) { - return caml_array_to_rust_vector( - wasm_lookup_table, - rust_affine_of_caml_affine, - mk_affine - ); -}; - -// Provides: caml_lookup_patterns_to_rust -// Requires: plonk_wasm -var caml_lookup_patterns_to_rust = function (wasm_lookup_patterns, klass) { - var xor = wasm_lookup_patterns[1]; - var lookup = wasm_lookup_patterns[2]; - var range_check = wasm_lookup_patterns[3]; - var foreign_field_mul = wasm_lookup_patterns[4]; - - return new plonk_wasm.LookupPatterns( - xor, - lookup, - range_check, - foreign_field_mul - ); -}; - -// Provides: caml_lookup_features_to_rust -// Requires: plonk_wasm, caml_lookup_patterns_to_rust -var caml_lookup_features_to_rust = function (wasm_lookup_features) { - var patterns = caml_lookup_patterns_to_rust(wasm_lookup_features[1]); - var joint_lookup_used = wasm_lookup_features[2]; - var uses_runtime_tables = wasm_lookup_features[3]; - - return new plonk_wasm.LookupFeatures( - patterns, - joint_lookup_used, - uses_runtime_tables - ); -}; - -// Provides: caml_lookup_info_to_rust -// Requires: plonk_wasm, caml_lookup_features_to_rust -var caml_lookup_info_to_rust = function (wasm_lookup_info) { - var max_per_row = wasm_lookup_info[1]; - var max_poly_size = wasm_lookup_info[2]; - var features = caml_lookup_features_to_rust(wasm_lookup_info[3]); - - return new plonk_wasm.LookupInfo(max_per_row, max_poly_size, features); -}; - -// Provides: caml_plonk_lookup_verifier_index_to_rust -// Requires: plonk_wasm, caml_lookup_table_to_rust, caml_lookup_selectors_to_rust, caml_lookup_info_to_rust, caml_opt_to_rust -var caml_plonk_lookup_verifier_index_to_rust = function ( - caml_lookup_verifier_index, - klass, - poly_comm_to_rust, - lookup_selectors_class, - mk_affine -) { - // joint_lookup_used - var joint_lookup_used = 
caml_lookup_verifier_index[1]; - - // lookup_table - var lookup_table = caml_lookup_table_to_rust( - caml_lookup_verifier_index[2], - mk_affine - ); - - // lookup_selectors - var lookup_selectors = caml_lookup_selectors_to_rust( - caml_lookup_verifier_index[3], - lookup_selectors_class, - poly_comm_to_rust - ); - - // table_ids - var table_ids = caml_opt_to_rust( - caml_lookup_verifier_index[4], - poly_comm_to_rust - ); - - // lookup_info - var lookup_info = caml_lookup_info_to_rust(caml_lookup_verifier_index[5]); - - // runtime_tables_selector - var runtime_tables_selector = caml_opt_to_rust( - caml_lookup_verifier_index[6], - poly_comm_to_rust - ); - - return new klass( - joint_lookup_used, - lookup_table, - lookup_selectors, - table_ids, - lookup_info, - runtime_tables_selector - ); -}; - -// Provides: caml_pasta_fp_plonk_verifier_index_of_rust -// Requires: plonk_wasm, caml_plonk_verifier_index_of_rust -var caml_pasta_fp_plonk_verifier_index_of_rust = function (x) { - return caml_plonk_verifier_index_of_rust(x, plonk_wasm.WasmGVesta); -}; - -// Provides: caml_pasta_fp_plonk_verifier_index_to_rust -// Requires: plonk_wasm, caml_plonk_verifier_index_to_rust -var caml_pasta_fp_plonk_verifier_index_to_rust = function (x) { - return caml_plonk_verifier_index_to_rust( - x, - plonk_wasm.WasmFpPlonkVerifierIndex, - plonk_wasm.WasmFpDomain, - plonk_wasm.WasmFpPlonkVerificationEvals, - plonk_wasm.WasmFpPolyComm, - plonk_wasm.caml_vesta_affine_one, - plonk_wasm.WasmFpShifts, - plonk_wasm.WasmFpLookupVerifierIndex, - plonk_wasm.WasmFpLookupSelectors - ); -}; - // Provides: caml_pasta_fp_plonk_verifier_index_create -// Requires: plonk_wasm, caml_pasta_fp_plonk_verifier_index_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fp_plonk_verifier_index_create = function (x) { var vk = plonk_wasm.caml_pasta_fp_plonk_verifier_index_create(x); - var vk_caml = caml_pasta_fp_plonk_verifier_index_of_rust(vk); - return vk_caml; + return tsRustConversion.fp.verifierIndexFromRust(vk); }; // Provides: caml_pasta_fp_plonk_verifier_index_read -// Requires: plonk_wasm, caml_jsstring_of_string, caml_pasta_fp_plonk_verifier_index_of_rust +// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion var caml_pasta_fp_plonk_verifier_index_read = function (offset, urs, path) { if (offset === 0) { offset = undefined; } else { offset = offset[1]; } - return caml_pasta_fp_plonk_verifier_index_of_rust( + return tsRustConversion.fp.verifierIndexFromRust( plonk_wasm.caml_pasta_fp_plonk_verifier_index_read( offset, urs, @@ -2127,7 +661,7 @@ var caml_pasta_fp_plonk_verifier_index_read = function (offset, urs, path) { }; // Provides: caml_pasta_fp_plonk_verifier_index_write -// Requires: plonk_wasm, caml_jsstring_of_string, caml_pasta_fp_plonk_verifier_index_to_rust +// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion var caml_pasta_fp_plonk_verifier_index_write = function (append, t, path) { if (append === 0) { append = undefined; @@ -2136,76 +670,53 @@ var caml_pasta_fp_plonk_verifier_index_write = function (append, t, path) { } return plonk_wasm.caml_pasta_fp_plonk_verifier_index_write( append, - caml_pasta_fp_plonk_verifier_index_to_rust(t), + tsRustConversion.fp.verifierIndexToRust(t), caml_jsstring_of_string(path) ); }; // Provides: caml_pasta_fp_plonk_verifier_index_shifts -// Requires: plonk_wasm, caml_plonk_verification_shifts_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fp_plonk_verifier_index_shifts = function (log2_size) { - return 
caml_plonk_verification_shifts_of_rust( + return tsRustConversion.fp.shiftsFromRust( plonk_wasm.caml_pasta_fp_plonk_verifier_index_shifts(log2_size) ); }; // Provides: caml_pasta_fp_plonk_verifier_index_dummy -// Requires: plonk_wasm, caml_pasta_fp_plonk_verifier_index_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fp_plonk_verifier_index_dummy = function () { var res = plonk_wasm.caml_pasta_fp_plonk_verifier_index_dummy(); - return caml_pasta_fp_plonk_verifier_index_of_rust(res); - // return caml_pasta_fp_plonk_verifier_index_of_rust(plonk_wasm.caml_pasta_fp_plonk_verifier_index_dummy()); + return tsRustConversion.fp.verifierIndexFromRust(res); }; // Provides: caml_pasta_fp_plonk_verifier_index_deep_copy -// Requires: plonk_wasm, caml_pasta_fp_plonk_verifier_index_of_rust, caml_pasta_fp_plonk_verifier_index_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fp_plonk_verifier_index_deep_copy = function (x) { - return caml_pasta_fp_plonk_verifier_index_of_rust( + return tsRustConversion.fp.verifierIndexFromRust( plonk_wasm.caml_pasta_fp_plonk_verifier_index_deep_copy( - caml_pasta_fp_plonk_verifier_index_to_rust(x) + tsRustConversion.fp.verifierIndexToRust(x) ) ); }; -// Provides: caml_pasta_fq_plonk_verifier_index_of_rust -// Requires: plonk_wasm, caml_plonk_verifier_index_of_rust -var caml_pasta_fq_plonk_verifier_index_of_rust = function (x) { - return caml_plonk_verifier_index_of_rust(x, plonk_wasm.WasmGPallas); -}; - -// Provides: caml_pasta_fq_plonk_verifier_index_to_rust -// Requires: plonk_wasm, caml_plonk_verifier_index_to_rust -var caml_pasta_fq_plonk_verifier_index_to_rust = function (x) { - return caml_plonk_verifier_index_to_rust( - x, - plonk_wasm.WasmFqPlonkVerifierIndex, - plonk_wasm.WasmFqDomain, - plonk_wasm.WasmFqPlonkVerificationEvals, - plonk_wasm.WasmFqPolyComm, - plonk_wasm.caml_pallas_affine_one, - plonk_wasm.WasmFqShifts, - plonk_wasm.WasmFqLookupVerifierIndex, - plonk_wasm.WasmFqLookupSelectors - ); -}; - // Provides: caml_pasta_fq_plonk_verifier_index_create -// Requires: plonk_wasm, caml_pasta_fq_plonk_verifier_index_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fq_plonk_verifier_index_create = function (x) { - return caml_pasta_fq_plonk_verifier_index_of_rust( + return tsRustConversion.fq.verifierIndexFromRust( plonk_wasm.caml_pasta_fq_plonk_verifier_index_create(x) ); }; // Provides: caml_pasta_fq_plonk_verifier_index_read -// Requires: plonk_wasm, caml_jsstring_of_string, caml_pasta_fq_plonk_verifier_index_of_rust +// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion var caml_pasta_fq_plonk_verifier_index_read = function (offset, urs, path) { if (offset === 0) { offset = undefined; } else { offset = offset[1]; } - return caml_pasta_fq_plonk_verifier_index_of_rust( + return tsRustConversion.fq.verifierIndexFromRust( plonk_wasm.caml_pasta_fq_plonk_verifier_index_read( offset, urs, @@ -2215,7 +726,7 @@ var caml_pasta_fq_plonk_verifier_index_read = function (offset, urs, path) { }; // Provides: caml_pasta_fq_plonk_verifier_index_write -// Requires: plonk_wasm, caml_jsstring_of_string, caml_pasta_fq_plonk_verifier_index_to_rust +// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion var caml_pasta_fq_plonk_verifier_index_write = function (append, t, path) { if (append === 0) { append = undefined; @@ -2224,771 +735,238 @@ var caml_pasta_fq_plonk_verifier_index_write = function (append, t, path) { } return plonk_wasm.caml_pasta_fq_plonk_verifier_index_write( append, - 
caml_pasta_fq_plonk_verifier_index_to_rust(t), + tsRustConversion.fq.verifierIndexToRust(t), caml_jsstring_of_string(path) ); }; // Provides: caml_pasta_fq_plonk_verifier_index_shifts -// Requires: plonk_wasm, caml_plonk_verification_shifts_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fq_plonk_verifier_index_shifts = function (log2_size) { - return caml_plonk_verification_shifts_of_rust( + return tsRustConversion.fq.shiftsFromRust( plonk_wasm.caml_pasta_fq_plonk_verifier_index_shifts(log2_size) ); }; // Provides: caml_pasta_fq_plonk_verifier_index_dummy -// Requires: plonk_wasm, caml_pasta_fq_plonk_verifier_index_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fq_plonk_verifier_index_dummy = function () { - return caml_pasta_fq_plonk_verifier_index_of_rust( + return tsRustConversion.fq.verifierIndexFromRust( plonk_wasm.caml_pasta_fq_plonk_verifier_index_dummy() ); }; // Provides: caml_pasta_fq_plonk_verifier_index_deep_copy -// Requires: plonk_wasm, caml_pasta_fq_plonk_verifier_index_of_rust, caml_pasta_fq_plonk_verifier_index_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fq_plonk_verifier_index_deep_copy = function (x) { - return caml_pasta_fq_plonk_verifier_index_of_rust( + return tsRustConversion.fq.verifierIndexFromRust( plonk_wasm.caml_pasta_fq_plonk_verifier_index_deep_copy( - caml_pasta_fq_plonk_verifier_index_to_rust(x) + tsRustConversion.fq.verifierIndexToRust(x) ) ); }; -// Provides: COLUMNS -var COLUMNS = 15; -// Provides: PERMUTS_MINUS_1 -var PERMUTS_MINUS_1 = 6; - -// Provides: caml_pasta_fp_proof_evaluations_to_rust -// Requires: plonk_wasm, caml_fp_vector_to_rust, PERMUTS_MINUS_1, COLUMNS -var caml_pasta_fp_proof_evaluations_to_rust = function (x) { - return x; -}; - -// Provides: caml_pasta_fp_proof_evaluations_of_rust -// Requires: plonk_wasm, caml_fp_vector_of_rust, COLUMNS, PERMUTS_MINUS_1 -var caml_pasta_fp_proof_evaluations_of_rust = function (x) { - return x; -}; - -// Provides: caml_pasta_fp_opening_proof_to_rust -// Requires: plonk_wasm, caml_array_to_rust_vector, rust_affine_of_caml_affine -var caml_pasta_fp_opening_proof_to_rust = function (x) { - var convert_affines = function (affines) { - return caml_array_to_rust_vector( - affines, - rust_affine_of_caml_affine, - plonk_wasm.caml_vesta_affine_one - ); - }; - var lr = x[1]; - var delta = rust_affine_of_caml_affine( - x[2], - plonk_wasm.caml_vesta_affine_one - ); - var z1 = x[3]; - var z2 = x[4]; - var sg = rust_affine_of_caml_affine(x[5], plonk_wasm.caml_vesta_affine_one); - var len = lr.length; - // We pass l and r as separate vectors over the FFI - var l_ocaml = new Array(len); - var r_ocaml = new Array(len); - for (var i = 1; i < len; i++) { - l_ocaml[i] = lr[i][1]; - r_ocaml[i] = lr[i][2]; - } - var l = convert_affines(l_ocaml); - var r = convert_affines(r_ocaml); - return new plonk_wasm.WasmFpOpeningProof(l, r, delta, z1, z2, sg); -}; - -// Provides: caml_pasta_fp_opening_proof_of_rust -// Requires: plonk_wasm, caml_array_of_rust_vector, rust_affine_to_caml_affine -var caml_pasta_fp_opening_proof_of_rust = function (x) { - var convert_affines = function (affines) { - return caml_array_of_rust_vector( - affines, - plonk_wasm.WasmGVesta, - rust_affine_to_caml_affine, - false - ); - }; - var l = convert_affines(x.lr_0); - var r = convert_affines(x.lr_1); - var delta = rust_affine_to_caml_affine(x.delta); - var z1 = x.z1; - var z2 = x.z2; - var sg = rust_affine_to_caml_affine(x.sg); - x.free(); - var len = l.length; - if (len !== 
r.length) { - throw new Error("l and r lengths don't match"); - } - var lr = new Array(len); - lr[0] = 0; - for (var i = 1; i < len; i++) { - var tuple = new Array(3); - tuple[0] = 0; - tuple[1] = l[i]; - tuple[2] = r[i]; - lr[i] = tuple; - } - return [0, lr, delta, z1, z2, sg]; -}; - -// Provides: caml_fp_lookup_commitments_to_rust -// Requires: plonk_wasm, caml_vesta_poly_comm_to_rust, js_class_vector_to_rust_vector, caml_opt_to_rust -var caml_fp_lookup_commitments_to_rust = function (caml_lc) { - var convertArray = function (v) { - var n = v.length - 1; - var res = new Array(n); - for (var i = 0; i < n; ++i) { - res[i] = caml_vesta_poly_comm_to_rust(v[i + 1]); - } - return js_class_vector_to_rust_vector(res); - }; - - var wasm_sorted = convertArray(caml_lc[1]); - var wasm_aggreg = caml_vesta_poly_comm_to_rust(caml_lc[2]); - var wasm_runtime; - if (caml_lc[3] === 0) { - wasm_runtime = undefined; - } else { - wasm_runtime = caml_vesta_poly_comm_to_rust(caml_lc[3][1]); - } - return plonk_wasm.WasmFpLookupCommitments( - wasm_sorted, - wasm_aggreg, - wasm_runtime - ); -}; - -// Provides: caml_pasta_fp_commitments_to_rust -// Requires: plonk_wasm, caml_vesta_poly_comm_to_rust, js_class_vector_to_rust_vector, caml_fp_lookup_commitments_to_rust, caml_opt_to_rust -var caml_pasta_fp_commitments_to_rust = function (x) { - var convertArray = function (v) { - var n = v.length - 1; - var res = new Array(n); - for (var i = 0; i < n; ++i) { - res[i] = caml_vesta_poly_comm_to_rust(v[i + 1]); - } - // TODO need to do finalizer things? - return js_class_vector_to_rust_vector(res); - }; - - var w_comm = convertArray(x[1]); - var z_comm = caml_vesta_poly_comm_to_rust(x[2]); - var t_comm = caml_vesta_poly_comm_to_rust(x[3]); - var lookup = caml_opt_to_rust(x[4], caml_fp_lookup_commitments_to_rust); - return new plonk_wasm.WasmFpProverCommitments(w_comm, z_comm, t_comm, lookup); -}; - -// Provides: caml_fp_lookup_commitments_of_rust -// Requires: caml_vesta_poly_comm_of_rust, js_class_vector_of_rust_vector, plonk_wasm -var caml_fp_lookup_commitments_of_rust = function (wasm_lc) { - var convertArray = function (v) { - var a = js_class_vector_of_rust_vector(v, plonk_wasm.WasmFpPolyComm); - var res = [0]; - for (var i = 0; i < a.length; ++i) { - res.push(caml_vesta_poly_comm_of_rust(a[i])); - } - return res; - }; - - var sorted = convertArray(wasm_lc.sorted); - var aggreg = caml_vesta_poly_comm_of_rust(wasm_lc.aggreg); - var wasm_lc_runtime = wasm_lc.runtime; - var caml_runtime; - if (wasm_lc_runtime === undefined) { - caml_runtime = 0; - } else { - caml_runtime = [0, caml_vesta_poly_comm_of_rust(wasm_lc_runtime)]; - } - wasm_lc.free(); - return [0, sorted, aggreg, caml_runtime]; -}; - -// Provides: caml_pasta_fp_commitments_of_rust -// Requires: caml_vesta_poly_comm_of_rust, js_class_vector_of_rust_vector, plonk_wasm, caml_fp_lookup_commitments_of_rust, caml_opt_of_rust -var caml_pasta_fp_commitments_of_rust = function (x) { - var convertArray = function (v) { - var a = js_class_vector_of_rust_vector(v, plonk_wasm.WasmFpPolyComm); - var res = [0]; - for (var i = 0; i < a.length; ++i) { - // TODO Check this. 
Could be off by 1 - res.push(caml_vesta_poly_comm_of_rust(a[i])); - } - return res; - }; - - var w_comm = convertArray(x.w_comm); - var z_comm = caml_vesta_poly_comm_of_rust(x.z_comm); - var t_comm = caml_vesta_poly_comm_of_rust(x.t_comm); - var caml_lookup = caml_opt_of_rust( - x.lookup, - caml_fp_lookup_commitments_of_rust - ); - x.free(); - return [0, w_comm, z_comm, t_comm, caml_lookup]; -}; - -// Provides: caml_pasta_fp_proof_to_rust -// Requires: plonk_wasm, caml_pasta_fp_commitments_to_rust, caml_pasta_fp_opening_proof_to_rust, caml_pasta_fp_proof_evaluations_to_rust, caml_fp_vector_to_rust, caml_vesta_poly_comm_to_rust, js_class_vector_to_rust_vector -var caml_pasta_fp_proof_to_rust = function (x) { - var commitments = caml_pasta_fp_commitments_to_rust(x[1]); - var proof = caml_pasta_fp_opening_proof_to_rust(x[2]); - var evals = caml_pasta_fp_proof_evaluations_to_rust(x[3]); - var ft_eval1 = x[4]; - var public_ = caml_fp_vector_to_rust(x[5]); - var prev_challenges = x[6]; - var chals_len = prev_challenges.length; - var prev_challenges_scalars = new plonk_wasm.WasmVecVecFp(chals_len - 1); - var prev_challenges_comms = new Array(chals_len - 1); - for (var i = 1; i < chals_len; i++) { - prev_challenges_scalars.push(caml_fp_vector_to_rust(prev_challenges[i][1])); - prev_challenges_comms[i - 1] = caml_vesta_poly_comm_to_rust( - prev_challenges[i][2] - ); - } - prev_challenges_comms = js_class_vector_to_rust_vector(prev_challenges_comms); - return new plonk_wasm.WasmFpProverProof( - commitments, - proof, - evals, - ft_eval1, - public_, - prev_challenges_scalars, - prev_challenges_comms - ); -}; - -// Provides: caml_pasta_fp_proof_of_rust -// Requires: plonk_wasm, caml_pasta_fp_commitments_of_rust, caml_pasta_fp_opening_proof_of_rust, caml_pasta_fp_proof_evaluations_of_rust, caml_fp_vector_of_rust, js_class_vector_of_rust_vector, caml_vesta_poly_comm_of_rust -var caml_pasta_fp_proof_of_rust = function (x) { - var messages = caml_pasta_fp_commitments_of_rust(x.commitments); - var proof = caml_pasta_fp_opening_proof_of_rust(x.proof); - var evals = caml_pasta_fp_proof_evaluations_of_rust(x.evals); - var ft_eval1 = x.ft_eval1; - var public_ = caml_fp_vector_of_rust(x.public_); - var prev_challenges_scalars = x.prev_challenges_scalars; - var prev_challenges_comms = js_class_vector_of_rust_vector( - x.prev_challenges_comms, - plonk_wasm.WasmFpPolyComm - ); - var chals_len = prev_challenges_comms.length; - var prev_challenges = new Array(chals_len); - prev_challenges[0] = 0; - for (var i = 1; i < chals_len; i++) { - var res = new Array(3); - res[0] = 0; - res[1] = caml_fp_vector_of_rust(prev_challenges_scalars.get(i - 1)); - // TODO Check this. 
Could be off by 1 - res[2] = caml_vesta_poly_comm_of_rust(prev_challenges_comms[i]); - prev_challenges[i] = res; - } - return [0, messages, proof, evals, ft_eval1, public_, prev_challenges]; -}; +// proof // Provides: caml_pasta_fp_plonk_proof_create -// Requires: plonk_wasm, caml_fp_vector_to_rust, caml_array_to_rust_vector, rust_affine_of_caml_affine, caml_pasta_fp_proof_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fp_plonk_proof_create = function ( index, witness_cols, + caml_runtime_tables, prev_challenges, prev_sgs ) { var w = new plonk_wasm.WasmVecVecFp(witness_cols.length - 1); for (var i = 1; i < witness_cols.length; i++) { - w.push(caml_fp_vector_to_rust(witness_cols[i])); + w.push(tsRustConversion.fp.vectorToRust(witness_cols[i])); } witness_cols = w; - prev_challenges = caml_fp_vector_to_rust(prev_challenges); - prev_sgs = caml_array_to_rust_vector( - prev_sgs, - rust_affine_of_caml_affine, - plonk_wasm.caml_vesta_affine_one - ); - var res = plonk_wasm.caml_pasta_fp_plonk_proof_create( + prev_challenges = tsRustConversion.fp.vectorToRust(prev_challenges); + var wasm_runtime_tables = + tsRustConversion.fp.runtimeTablesToRust(caml_runtime_tables); + prev_sgs = tsRustConversion.fp.pointsToRust(prev_sgs); + var proof = plonk_wasm.caml_pasta_fp_plonk_proof_create( index, witness_cols, + wasm_runtime_tables, prev_challenges, prev_sgs ); - var proof = caml_pasta_fp_proof_of_rust(res); - return proof; + return tsRustConversion.fp.proofFromRust(proof); }; // Provides: caml_pasta_fp_plonk_proof_verify -// Requires: plonk_wasm, caml_array_to_rust_vector, caml_vesta_poly_comm_to_rust, caml_pasta_fp_plonk_verifier_index_to_rust, caml_pasta_fp_proof_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fp_plonk_proof_verify = function (index, proof) { - index = caml_pasta_fp_plonk_verifier_index_to_rust(index); - proof = caml_pasta_fp_proof_to_rust(proof); + index = tsRustConversion.fp.verifierIndexToRust(index); + proof = tsRustConversion.fp.proofToRust(proof); return plonk_wasm.caml_pasta_fp_plonk_proof_verify(index, proof); }; // Provides: caml_pasta_fp_plonk_proof_batch_verify -// Requires: plonk_wasm, caml_array_to_rust_vector, caml_vesta_poly_comm_to_rust, caml_pasta_fp_plonk_verifier_index_to_rust, caml_pasta_fp_proof_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fp_plonk_proof_batch_verify = function (indexes, proofs) { - indexes = caml_array_to_rust_vector( + indexes = tsRustConversion.mapMlArrayToRustVector( indexes, - caml_pasta_fp_plonk_verifier_index_to_rust + tsRustConversion.fp.verifierIndexToRust + ); + proofs = tsRustConversion.mapMlArrayToRustVector( + proofs, + tsRustConversion.fp.proofToRust ); - proofs = caml_array_to_rust_vector(proofs, caml_pasta_fp_proof_to_rust); return plonk_wasm.caml_pasta_fp_plonk_proof_batch_verify(indexes, proofs); }; // Provides: caml_pasta_fp_plonk_proof_dummy -// Requires: plonk_wasm, caml_pasta_fp_proof_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fp_plonk_proof_dummy = function () { - return caml_pasta_fp_proof_of_rust( + return tsRustConversion.fp.proofFromRust( plonk_wasm.caml_pasta_fp_plonk_proof_dummy() ); }; // Provides: caml_pasta_fp_plonk_proof_deep_copy -// Requires: plonk_wasm, caml_pasta_fp_proof_to_rust, caml_pasta_fp_proof_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fp_plonk_proof_deep_copy = function (proof) { - return caml_pasta_fp_proof_of_rust( + return tsRustConversion.fp.proofFromRust( 
plonk_wasm.caml_pasta_fp_plonk_proof_deep_copy( - caml_pasta_fp_proof_to_rust(proof) + tsRustConversion.fp.proofToRust(proof) ) ); }; -// Provides: caml_pasta_fq_proof_evaluations_to_rust -// Requires: plonk_wasm, caml_fq_vector_to_rust, PERMUTS_MINUS_1, COLUMNS -var caml_pasta_fq_proof_evaluations_to_rust = function (x) { - return x; -}; - -// Provides: caml_pasta_fq_proof_evaluations_of_rust -// Requires: plonk_wasm, caml_fq_vector_of_rust, COLUMNS, PERMUTS_MINUS_1 -var caml_pasta_fq_proof_evaluations_of_rust = function (x) { - return x; -}; - -// Provides: caml_pasta_fq_opening_proof_to_rust -// Requires: plonk_wasm, caml_array_to_rust_vector, rust_affine_of_caml_affine -var caml_pasta_fq_opening_proof_to_rust = function (x) { - var convert_affines = function (affines) { - return caml_array_to_rust_vector( - affines, - rust_affine_of_caml_affine, - plonk_wasm.caml_pallas_affine_one - ); - }; - var lr = x[1]; - var delta = rust_affine_of_caml_affine( - x[2], - plonk_wasm.caml_pallas_affine_one - ); - var z1 = x[3]; - var z2 = x[4]; - var sg = rust_affine_of_caml_affine(x[5], plonk_wasm.caml_pallas_affine_one); - var len = lr.length; - // We pass l and r as separate vectors over the FFI - var l_ocaml = new Array(len); - var r_ocaml = new Array(len); - for (var i = 1; i < len; i++) { - l_ocaml[i] = lr[i][1]; - r_ocaml[i] = lr[i][2]; - } - var l = convert_affines(l_ocaml); - var r = convert_affines(r_ocaml); - return new plonk_wasm.WasmFqOpeningProof(l, r, delta, z1, z2, sg); -}; - -// Provides: caml_pasta_fq_opening_proof_of_rust -// Requires: plonk_wasm, caml_array_of_rust_vector, rust_affine_to_caml_affine -var caml_pasta_fq_opening_proof_of_rust = function (x) { - var convert_affines = function (affines) { - return caml_array_of_rust_vector( - affines, - plonk_wasm.WasmGPallas, - rust_affine_to_caml_affine, - false - ); - }; - var l = convert_affines(x.lr_0); - var r = convert_affines(x.lr_1); - var delta = rust_affine_to_caml_affine(x.delta); - var z1 = x.z1; - var z2 = x.z2; - var sg = rust_affine_to_caml_affine(x.sg); - x.free(); - var len = l.length; - if (len !== r.length) { - throw new Error("l and r lengths don't match"); - } - var lr = new Array(len); - lr[0] = 0; - for (var i = 1; i < len; i++) { - var tuple = new Array(3); - tuple[0] = 0; - tuple[1] = l[i]; - tuple[2] = r[i]; - lr[i] = tuple; - } - return [0, lr, delta, z1, z2, sg]; -}; - -// Provides: caml_fq_lookup_commitments_to_rust -// Requires: plonk_wasm, caml_pallas_poly_comm_to_rust, js_class_vector_to_rust_vector, caml_opt_to_rust -var caml_fq_lookup_commitments_to_rust = function (caml_lc) { - var convertArray = function (v) { - var n = v.length - 1; - var res = new Array(n); - for (var i = 0; i < n; ++i) { - res[i] = caml_pallas_poly_comm_to_rust(v[i + 1]); - } - return js_class_vector_to_rust_vector(res); - }; - - var wasm_sorted = convertArray(caml_lc[1]); - var wasm_aggreg = caml_pallas_poly_comm_to_rust(caml_lc[2]); - var wasm_runtime = caml_opt_to_rust( - caml_lc[3], - caml_pallas_poly_comm_to_rust - ); - return plonk_wasm.WasmFqLookupCommitments( - wasm_sorted, - wasm_aggreg, - wasm_runtime - ); -}; - -// Provides: caml_pasta_fq_commitments_to_rust -// Requires: plonk_wasm, caml_pallas_poly_comm_to_rust, js_class_vector_to_rust_vector, caml_fq_lookup_commitments_to_rust, caml_opt_to_rust -var caml_pasta_fq_commitments_to_rust = function (x) { - var convertArray = function (v) { - var n = v.length - 1; - var res = new Array(n); - for (var i = 0; i < n; ++i) { - res[i] = caml_pallas_poly_comm_to_rust(v[i + 1]); 
- } - return js_class_vector_to_rust_vector(res); - }; - - var w_comm = convertArray(x[1]); - var z_comm = caml_pallas_poly_comm_to_rust(x[2]); - var t_comm = caml_pallas_poly_comm_to_rust(x[3]); - var lookup = caml_opt_to_rust(x[4], caml_fq_lookup_commitments_to_rust); - return new plonk_wasm.WasmFqProverCommitments(w_comm, z_comm, t_comm, lookup); -}; - -// Provides: caml_fq_lookup_commitments_of_rust -// Requires: caml_pallas_poly_comm_of_rust, js_class_vector_of_rust_vector, plonk_wasm -var caml_fq_lookup_commitments_of_rust = function (wasm_lc) { - var convertArray = function (v) { - var a = js_class_vector_of_rust_vector(v, plonk_wasm.WasmFqPolyComm); - var res = [0]; - for (var i = 0; i < a.length; ++i) { - res.push(caml_pallas_poly_comm_of_rust(a[i])); - } - return res; - }; - - var sorted = convertArray(wasm_lc.sorted); - var aggreg = caml_pallas_poly_comm_of_rust(wasm_lc.aggreg); - var wasm_lc_runtime = wasm_lc.runtime; - var caml_runtime; - if (wasm_lc_runtime === undefined) { - caml_runtime = 0; - } else { - caml_runtime = [0, caml_pallas_poly_comm_of_rust(wasm_lc_runtime)]; - } - wasm_lc.free(); - return [0, sorted, aggreg, caml_runtime]; -}; - -// Provides: caml_pasta_fq_commitments_of_rust -// Requires: caml_pallas_poly_comm_of_rust, js_class_vector_of_rust_vector, plonk_wasm, caml_fq_lookup_commitments_of_rust, caml_opt_of_rust -var caml_pasta_fq_commitments_of_rust = function (x) { - var convertArray = function (v) { - var a = js_class_vector_of_rust_vector(v, plonk_wasm.WasmFqPolyComm); - var res = [0]; - for (var i = 0; i < a.length; ++i) { - // TODO Check this. Could be off by 1 - res.push(caml_pallas_poly_comm_of_rust(a[i])); - } - return res; - }; - - var w_comm = convertArray(x.w_comm); - var z_comm = caml_pallas_poly_comm_of_rust(x.z_comm); - var t_comm = caml_pallas_poly_comm_of_rust(x.t_comm); - var caml_lookup = caml_opt_of_rust( - x.lookup, - caml_fq_lookup_commitments_of_rust - ); - x.free(); - return [0, w_comm, z_comm, t_comm, caml_lookup]; -}; - -// Provides: caml_pasta_fq_proof_to_rust -// Requires: plonk_wasm, caml_pasta_fq_commitments_to_rust, caml_pasta_fq_opening_proof_to_rust, caml_pasta_fq_proof_evaluations_to_rust, caml_fq_vector_to_rust, caml_pallas_poly_comm_to_rust, js_class_vector_to_rust_vector -var caml_pasta_fq_proof_to_rust = function (x) { - var messages = caml_pasta_fq_commitments_to_rust(x[1]); - var proof = caml_pasta_fq_opening_proof_to_rust(x[2]); - var evals = caml_pasta_fq_proof_evaluations_to_rust(x[3]); - var ft_eval1 = x[4]; - var public_ = caml_fq_vector_to_rust(x[5]); - var prev_challenges = x[6]; - var chals_len = prev_challenges.length; - var prev_challenges_scalars = new plonk_wasm.WasmVecVecFq(chals_len - 1); - var prev_challenges_comms = new Array(chals_len - 1); - for (var i = 1; i < chals_len; i++) { - prev_challenges_scalars.push(caml_fq_vector_to_rust(prev_challenges[i][1])); - prev_challenges_comms[i - 1] = caml_pallas_poly_comm_to_rust( - prev_challenges[i][2] - ); - } - prev_challenges_comms = js_class_vector_to_rust_vector(prev_challenges_comms); - return new plonk_wasm.WasmFqProverProof( - messages, - proof, - evals, - ft_eval1, - public_, - prev_challenges_scalars, - prev_challenges_comms - ); -}; - -// Provides: caml_pasta_fq_proof_of_rust -// Requires: plonk_wasm, caml_pasta_fq_commitments_of_rust, caml_pasta_fq_opening_proof_of_rust, caml_pasta_fq_proof_evaluations_of_rust, caml_fq_vector_of_rust, js_class_vector_of_rust_vector, caml_pallas_poly_comm_of_rust -var caml_pasta_fq_proof_of_rust = function (x) { - 
var messages = caml_pasta_fq_commitments_of_rust(x.commitments); - var proof = caml_pasta_fq_opening_proof_of_rust(x.proof); - var evals = caml_pasta_fq_proof_evaluations_of_rust(x.evals); - var evals1 = caml_pasta_fq_proof_evaluations_of_rust(x.evals1); - var ft_eval1 = x.ft_eval1; - var public_ = caml_fq_vector_of_rust(x.public_); - var prev_challenges_scalars = x.prev_challenges_scalars; - var prev_challenges_comms = js_class_vector_of_rust_vector( - x.prev_challenges_comms, - plonk_wasm.WasmFqPolyComm - ); - var chals_len = prev_challenges_comms.length; - var prev_challenges = new Array(chals_len); - prev_challenges[0] = 0; - for (var i = 1; i < chals_len; i++) { - var res = new Array(3); - res[0] = 0; - res[1] = caml_fq_vector_of_rust(prev_challenges_scalars.get(i - 1)); - res[2] = caml_pallas_poly_comm_of_rust(prev_challenges_comms[i]); - prev_challenges[i] = res; - } - return [0, messages, proof, evals, ft_eval1, public_, prev_challenges]; -}; - // Provides: caml_pasta_fq_plonk_proof_create -// Requires: plonk_wasm, caml_fq_vector_to_rust, caml_array_to_rust_vector, rust_affine_of_caml_affine, caml_pasta_fq_proof_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fq_plonk_proof_create = function ( index, witness_cols, + caml_runtime_tables, prev_challenges, prev_sgs ) { var w = new plonk_wasm.WasmVecVecFq(witness_cols.length - 1); for (var i = 1; i < witness_cols.length; i++) { - w.push(caml_fq_vector_to_rust(witness_cols[i])); + w.push(tsRustConversion.fq.vectorToRust(witness_cols[i])); } witness_cols = w; - prev_challenges = caml_fq_vector_to_rust(prev_challenges); - prev_sgs = caml_array_to_rust_vector( - prev_sgs, - rust_affine_of_caml_affine, - plonk_wasm.caml_pallas_affine_one - ); - var res = plonk_wasm.caml_pasta_fq_plonk_proof_create( + prev_challenges = tsRustConversion.fq.vectorToRust(prev_challenges); + var wasm_runtime_tables = + tsRustConversion.fq.runtimeTablesToRust(caml_runtime_tables); + prev_sgs = tsRustConversion.fq.pointsToRust(prev_sgs); + var proof = plonk_wasm.caml_pasta_fq_plonk_proof_create( index, witness_cols, + wasm_runtime_tables, prev_challenges, prev_sgs ); - var proof = caml_pasta_fq_proof_of_rust(res); - return proof; + return tsRustConversion.fq.proofFromRust(proof); }; // Provides: caml_pasta_fq_plonk_proof_verify -// Requires: plonk_wasm, caml_array_to_rust_vector, caml_pallas_poly_comm_to_rust, caml_pasta_fq_plonk_verifier_index_to_rust, caml_pasta_fq_proof_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fq_plonk_proof_verify = function (index, proof) { - index = caml_pasta_fq_plonk_verifier_index_to_rust(index); - proof = caml_pasta_fq_proof_to_rust(proof); + index = tsRustConversion.fq.verifierIndexToRust(index); + proof = tsRustConversion.fq.proofToRust(proof); return plonk_wasm.caml_pasta_fq_plonk_proof_verify(index, proof); }; // Provides: caml_pasta_fq_plonk_proof_batch_verify -// Requires: plonk_wasm, caml_array_to_rust_vector, caml_pallas_poly_comm_to_rust, caml_pasta_fq_plonk_verifier_index_to_rust, caml_pasta_fq_proof_to_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fq_plonk_proof_batch_verify = function (indexes, proofs) { - indexes = caml_array_to_rust_vector( + indexes = tsRustConversion.mapMlArrayToRustVector( indexes, - caml_pasta_fq_plonk_verifier_index_to_rust + tsRustConversion.fq.verifierIndexToRust + ); + proofs = tsRustConversion.mapMlArrayToRustVector( + proofs, + tsRustConversion.fq.proofToRust ); - proofs = caml_array_to_rust_vector(proofs, caml_pasta_fq_proof_to_rust); 
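// Editor's note: `tsRustConversion.mapMlArrayToRustVector`, used above,
// subsumes the per-type `caml_array_to_rust_vector` helpers that this patch
// deletes. A hedged sketch of the shape such a mapper takes (illustrative,
// not the TS module's actual internals; `js_class_vector_to_rust_vector` is
// the utility used throughout this file):
var mapMlArrayToRustVectorSketch = function (mlArray, convert) {
  var n = mlArray.length - 1; // slot 0 is the OCaml block tag
  var res = new Array(n);
  for (var i = 0; i < n; i++) {
    res[i] = convert(mlArray[i + 1]);
  }
  return js_class_vector_to_rust_vector(res);
};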
return plonk_wasm.caml_pasta_fq_plonk_proof_batch_verify(indexes, proofs); }; // Provides: caml_pasta_fq_plonk_proof_dummy -// Requires: plonk_wasm, caml_pasta_fq_proof_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fq_plonk_proof_dummy = function () { - return caml_pasta_fq_proof_of_rust( + return tsRustConversion.fq.proofFromRust( plonk_wasm.caml_pasta_fq_plonk_proof_dummy() ); }; // Provides: caml_pasta_fq_plonk_proof_deep_copy -// Requires: plonk_wasm, caml_pasta_fq_proof_to_rust, caml_pasta_fq_proof_of_rust +// Requires: plonk_wasm, tsRustConversion var caml_pasta_fq_plonk_proof_deep_copy = function (proof) { - return caml_pasta_fq_proof_of_rust( + return tsRustConversion.fq.proofFromRust( plonk_wasm.caml_pasta_fq_plonk_proof_deep_copy( - caml_pasta_fq_proof_to_rust(proof) + tsRustConversion.fq.proofToRust(proof) ) ); }; -// Provides: caml_random_oracles_of_rust -// Requires: caml_u8array_vector_of_rust_flat_vector, caml_option_of_maybe_undefined -var caml_random_oracles_of_rust = function (x) { - var joint_combiner_chal = x.joint_combiner_chal; - var joint_combiner = x.joint_combiner; - var joint_combiner_ocaml = undefined; - if (joint_combiner_chal !== undefined && joint_combiner !== undefined) { - joint_combiner_ocaml = [0, [0, joint_combiner_chal], joint_combiner]; - } - return [ - 0, - caml_option_of_maybe_undefined(joint_combiner_ocaml), - x.beta, - x.gamma, - [0, x.alpha_chal], - x.alpha, - x.zeta, - x.v, - x.u, - [0, x.zeta_chal], - [0, x.v_chal], - [0, x.u_chal], - ]; -}; - -// Provides: caml_random_oracles_to_rust -// Requires: caml_u8array_vector_to_rust_flat_vector, caml_option_to_maybe_undefined -var caml_random_oracles_to_rust = function (x, roKlass) { - // var caml_vector = [0, x[1], x[2], x[3][1], x[4], x[5], x[6], x[7], x[8][1], x[9][1], x[10][1]]; - var joint_combiner_ocaml = caml_option_to_maybe_undefined(x[1]); - var joint_combiner_chal = undefined; - var joint_combiner = undefined; - if (joint_combiner_ocaml !== undefined) { - joint_combiner_chal = joint_combiner_ocaml[1][1]; - joint_combiner = joint_combiner_ocaml[2]; - } - return new roKlass( - joint_combiner_chal, - joint_combiner, - x[2], - x[3], - x[4][1], - x[5], - x[6], - x[7], - x[8], - x[9][1], - x[10][1], - x[11][1] - ); -}; - -// Provides: caml_oracles_of_rust -// Requires: caml_u8array_vector_of_rust_flat_vector, caml_random_oracles_of_rust -var caml_oracles_of_rust = function (x) { - return [ - 0, - caml_random_oracles_of_rust(x.o), - [0, x.p_eval0, x.p_eval1], - caml_u8array_vector_of_rust_flat_vector( - x.opening_prechallenges, - 32 /* TODO: Don't hardcode */ - ), - x.digest_before_evaluations, - ]; -}; - -// Provides: caml_oracles_to_rust -// Requires: caml_u8array_vector_to_rust_flat_vector, caml_random_oracles_to_rust -var caml_oracles_to_rust = function (x, klass, roKlass) { - return new klass( - caml_random_oracles_to_rust(x[1], roKlass), - x[2][1], - x[2][2], - caml_u8array_vector_to_rust_flat_vector(x[3]), - x[4] - ); -}; +// oracles // Provides: fp_oracles_create -// Requires: plonk_wasm, caml_oracles_of_rust, caml_array_to_rust_vector, caml_vesta_poly_comm_to_rust, caml_pasta_fp_plonk_verifier_index_to_rust, caml_pasta_fp_proof_to_rust +// Requires: plonk_wasm, tsRustConversion var fp_oracles_create = function (lgr_comm, verifier_index, proof) { - return caml_oracles_of_rust( + return tsRustConversion.fp.oraclesFromRust( plonk_wasm.fp_oracles_create( - caml_array_to_rust_vector(lgr_comm, caml_vesta_poly_comm_to_rust), - 
caml_pasta_fp_plonk_verifier_index_to_rust(verifier_index), - caml_pasta_fp_proof_to_rust(proof) + tsRustConversion.fp.polyCommsToRust(lgr_comm), + tsRustConversion.fp.verifierIndexToRust(verifier_index), + tsRustConversion.fp.proofToRust(proof) ) ); }; +// Provides: fp_oracles_create_no_public +// Requires: fp_oracles_create +var fp_oracles_create_no_public = function (lgr_comm, verifier_index, proof) { + return fp_oracles_create(lgr_comm, verifier_index, [0, 0, proof]); +}; + // Provides: fp_oracles_dummy -// Requires: plonk_wasm, caml_oracles_of_rust +// Requires: plonk_wasm, tsRustConversion var fp_oracles_dummy = function () { - return caml_oracles_of_rust(plonk_wasm.fp_oracles_dummy()); + return tsRustConversion.fp.oraclesFromRust(plonk_wasm.fp_oracles_dummy()); }; // Provides: fp_oracles_deep_copy -// Requires: plonk_wasm, caml_oracles_of_rust, caml_oracles_to_rust +// Requires: plonk_wasm, tsRustConversion var fp_oracles_deep_copy = function (x) { - return caml_oracles_of_rust( - plonk_wasm.fp_oracles_deep_copy( - caml_oracles_to_rust( - x, - plonk_wasm.WasmFpOracles, - plonk_wasm.WasmFpRandomOracles - ) - ) + return tsRustConversion.fp.oraclesFromRust( + plonk_wasm.fp_oracles_deep_copy(tsRustConversion.fp.oraclesToRust(x)) ); }; // Provides: fq_oracles_create -// Requires: plonk_wasm, caml_oracles_of_rust, caml_array_to_rust_vector, caml_pallas_poly_comm_to_rust, caml_pasta_fq_plonk_verifier_index_to_rust, caml_pasta_fq_proof_to_rust +// Requires: plonk_wasm, tsRustConversion var fq_oracles_create = function (lgr_comm, verifier_index, proof) { - return caml_oracles_of_rust( + return tsRustConversion.fq.oraclesFromRust( plonk_wasm.fq_oracles_create( - caml_array_to_rust_vector(lgr_comm, caml_pallas_poly_comm_to_rust), - caml_pasta_fq_plonk_verifier_index_to_rust(verifier_index), - caml_pasta_fq_proof_to_rust(proof) + tsRustConversion.fq.polyCommsToRust(lgr_comm), + tsRustConversion.fq.verifierIndexToRust(verifier_index), + tsRustConversion.fq.proofToRust(proof) ) ); }; +// Provides: fq_oracles_create_no_public +// Requires: fq_oracles_create +var fq_oracles_create_no_public = function (lgr_comm, verifier_index, proof) { + return fq_oracles_create(lgr_comm, verifier_index, [0, 0, proof]); +}; + // Provides: fq_oracles_dummy -// Requires: plonk_wasm, caml_oracles_of_rust +// Requires: plonk_wasm, tsRustConversion var fq_oracles_dummy = function () { - return caml_oracles_of_rust(plonk_wasm.fq_oracles_dummy()); + return tsRustConversion.fq.oraclesFromRust(plonk_wasm.fq_oracles_dummy()); }; // Provides: fq_oracles_deep_copy -// Requires: plonk_wasm, caml_oracles_of_rust, caml_oracles_to_rust +// Requires: plonk_wasm, tsRustConversion var fq_oracles_deep_copy = function (x) { - return caml_oracles_of_rust( - plonk_wasm.fq_oracles_deep_copy( - caml_oracles_to_rust( - x, - plonk_wasm.WasmFqOracles, - plonk_wasm.WasmFqRandomOracles - ) - ) + return tsRustConversion.fq.oraclesFromRust( + plonk_wasm.fq_oracles_deep_copy(tsRustConversion.fq.oraclesToRust(x)) ); }; @@ -3003,32 +981,41 @@ function caml_pasta_fq_poseidon_params_create() { } // Provides: caml_pasta_fp_poseidon_block_cipher -// Requires: plonk_wasm, caml_fp_vector_to_rust, caml_fp_vector_of_rust +// Requires: plonk_wasm, tsRustConversion, tsRustConversion function caml_pasta_fp_poseidon_block_cipher(_fake_params, fp_vector) { // 1. 
get permuted field vector from rust var wasm_flat_vector = plonk_wasm.caml_pasta_fp_poseidon_block_cipher( - caml_fp_vector_to_rust(fp_vector) + tsRustConversion.fp.vectorToRust(fp_vector) ); - var new_fp_vector = caml_fp_vector_of_rust(wasm_flat_vector); + var new_fp_vector = tsRustConversion.fp.vectorFromRust(wasm_flat_vector); // 2. write back modified field vector to original one new_fp_vector.forEach(function (a, i) { fp_vector[i] = a; }); } + // Provides: caml_pasta_fq_poseidon_block_cipher -// Requires: plonk_wasm, caml_fq_vector_to_rust, caml_fq_vector_of_rust +// Requires: plonk_wasm, tsRustConversion, tsRustConversion function caml_pasta_fq_poseidon_block_cipher(_fake_params, fq_vector) { // 1. get permuted field vector from rust var wasm_flat_vector = plonk_wasm.caml_pasta_fq_poseidon_block_cipher( - caml_fq_vector_to_rust(fq_vector) + tsRustConversion.fq.vectorToRust(fq_vector) ); - var new_fq_vector = caml_fq_vector_of_rust(wasm_flat_vector); + var new_fq_vector = tsRustConversion.fq.vectorFromRust(wasm_flat_vector); // 2. write back modified field vector to original one new_fq_vector.forEach(function (a, i) { fq_vector[i] = a; }); } +// Provides: caml_pasta_fp_plonk_proof_example_with_lookup +function caml_pasta_fp_plonk_proof_example_with_lookup() { + // This is only used in the pickles unit tests + throw new Error( + 'Unimplemented caml_pasta_fp_plonk_proof_example_with_lookup' + ); +} + // Provides: prover_to_json // Requires: plonk_wasm var prover_to_json = plonk_wasm.prover_to_json; @@ -3079,10 +1066,3 @@ function caml_pasta_fp_plonk_proof_example_with_rot() { function caml_pasta_fp_plonk_proof_example_with_xor() { throw new Error('Unimplemented caml_pasta_fp_plonk_proof_example_with_xor'); } - -// Provides: caml_pasta_fp_plonk_proof_example_with_lookup -function caml_pasta_fp_plonk_proof_example_with_lookup() { - throw new Error( - 'Unimplemented caml_pasta_fp_plonk_proof_example_with_lookup' - ); -} diff --git a/src/lib/crypto/kimchi_bindings/js/dune b/src/lib/crypto/kimchi_bindings/js/dune index 68b86f3fa2f..f8746ce705d 100644 --- a/src/lib/crypto/kimchi_bindings/js/dune +++ b/src/lib/crypto/kimchi_bindings/js/dune @@ -1,6 +1,14 @@ (library (name bindings_js) (public_name bindings_js) - (js_of_ocaml (javascript_files bindings.js)) - (instrumentation (backend bisect_ppx)) - (preprocess (pps ppx_version))) + (js_of_ocaml + (javascript_files + bindings.js + bindings-bigint256.js + bindings-field.js + bindings-curve.js + bindings-vector.js)) + (instrumentation + (backend bisect_ppx)) + (preprocess + (pps ppx_version))) diff --git a/src/lib/crypto/kimchi_bindings/js/node_js/build.sh b/src/lib/crypto/kimchi_bindings/js/node_js/build.sh index 5e230e41020..41a7c57bc5c 100755 --- a/src/lib/crypto/kimchi_bindings/js/node_js/build.sh +++ b/src/lib/crypto/kimchi_bindings/js/node_js/build.sh @@ -2,9 +2,9 @@ set -euo pipefail if [[ -z "${PLONK_WASM_NODEJS-}" ]]; then - # The version should stay in line with the one in kimchi_bindings/wasm/rust-toolchain.toml export RUSTFLAGS="-C target-feature=+atomics,+bulk-memory,+mutable-globals -C link-arg=--no-check-features -C link-arg=--max-memory=4294967296" - rustup run nightly-2022-09-12 wasm-pack build --target nodejs --out-dir ../js/node_js ../../wasm -- -Z build-std=panic_abort,std --features nodejs + # The version should stay in line with the one in kimchi_bindings/wasm/rust-toolchain.toml + rustup run nightly-2023-09-01 wasm-pack build --target nodejs --out-dir ../js/node_js ../../wasm -- -Z build-std=panic_abort,std --features 
nodejs else cp "$PLONK_WASM_NODEJS"/* -R . fi diff --git a/src/lib/crypto/kimchi_bindings/js/node_js/node_backend.js b/src/lib/crypto/kimchi_bindings/js/node_js/node_backend.js index 39f363ed2e5..76e22cc82b2 100644 --- a/src/lib/crypto/kimchi_bindings/js/node_js/node_backend.js +++ b/src/lib/crypto/kimchi_bindings/js/node_js/node_backend.js @@ -1,2 +1,2 @@ // Provides: plonk_wasm -var plonk_wasm = require('./plonk_wasm.js') \ No newline at end of file +var plonk_wasm = require('./plonk_wasm.js'); diff --git a/src/lib/crypto/kimchi_bindings/js/test/bindings_js_test.ml b/src/lib/crypto/kimchi_bindings/js/test/bindings_js_test.ml index 2296d95f0fd..5a7148337a1 100644 --- a/src/lib/crypto/kimchi_bindings/js/test/bindings_js_test.ml +++ b/src/lib/crypto/kimchi_bindings/js/test/bindings_js_test.ml @@ -643,7 +643,7 @@ let _ = |> Backend.Keypair.create ~prev_challenges:0 ) in let x = Backend.Field.of_int 2 in - let (pi : Backend.Proof.t) = + let (pi : Backend.Proof.with_public_evals) = time "generate witness conv" (fun () -> Impl.generate_witness_conv ~input_typ:Typ.field ~return_typ:Typ.unit main @@ -920,8 +920,16 @@ let _ = vec in let urs = Pasta_fp_urs.create 16 in - let index0 = create gate_vector 0 0 urs in - let index2 = create gate_vector 2 0 urs in + (* TODO(dw) write tests with lookup tables *) + let lookup_tables = [||] in + (* TODO(dw) write tests with runtime tables *) + let runtime_table_cfg = [||] in + let index0 = + create gate_vector 0 lookup_tables runtime_table_cfg 0 urs + in + let index2 = + create gate_vector 2 lookup_tables runtime_table_cfg 0 urs + in assert (max_degree index0 = 16) ; assert (max_degree index2 = 16) ; assert (public_inputs index0 = 0) ; @@ -967,8 +975,16 @@ let _ = vec in let urs = Pasta_fq_urs.create 16 in - let index0 = create gate_vector 0 0 urs in - let index2 = create gate_vector 2 0 urs in + (* TODO(dw) write tests with lookup tables *) + let lookup_tables = [||] in + (* TODO(dw) write tests with runtime tables *) + let runtime_table_cfg = [||] in + let index0 = + create gate_vector 0 lookup_tables runtime_table_cfg 0 urs + in + let index2 = + create gate_vector 2 lookup_tables runtime_table_cfg 0 urs + in assert (max_degree index0 = 16) ; assert (max_degree index2 = 16) ; assert (public_inputs index0 = 0) ; @@ -992,10 +1008,28 @@ let verification_evals_to_list ; mul_comm : 'PolyComm ; emul_comm : 'PolyComm ; endomul_scalar_comm : 'PolyComm + ; xor_comm : 'PolyComm option + ; range_check0_comm : 'PolyComm option + ; range_check1_comm : 'PolyComm option + ; foreign_field_add_comm : 'PolyComm option + ; foreign_field_mul_comm : 'PolyComm option + ; rot_comm : 'PolyComm option } = - generic_comm :: psm_comm :: complete_add_comm :: mul_comm :: emul_comm - :: endomul_scalar_comm - :: (Array.append sigma_comm coefficients_comm |> Array.to_list) + let non_opt_comms = + generic_comm :: psm_comm :: complete_add_comm :: mul_comm :: emul_comm + :: endomul_scalar_comm + :: (Array.append sigma_comm coefficients_comm |> Array.to_list) + in + let opt_comms = + [ xor_comm + ; range_check0_comm + ; range_check1_comm + ; foreign_field_add_comm + ; foreign_field_mul_comm + ; rot_comm + ] + in + List.map Option.some non_opt_comms @ opt_comms let eq_verifier_index ~field_equal ~other_field_equal { VerifierIndex.domain = { log_size_of_group = i1_1; group_gen = f1 } @@ -1006,6 +1040,7 @@ let eq_verifier_index ~field_equal ~other_field_equal ; lookup_index = _ ; public = public1 ; prev_challenges = prev_challenges1 + ; zk_rows = zk_rows1 } { VerifierIndex.domain = { 
log_size_of_group = i2_1; group_gen = f2 } ; max_poly_size = i2_2 @@ -1015,15 +1050,24 @@ let eq_verifier_index ~field_equal ~other_field_equal ; lookup_index = _ ; public = public2 ; prev_challenges = prev_challenges2 + ; zk_rows = zk_rows2 } = i1_1 = i2_1 && field_equal f1 f2 && i1_2 = i2_2 && List.for_all2 - (eq_poly_comm ~field_equal:other_field_equal) + (fun x y -> + match (x, y) with + | Some x, Some y -> + eq_poly_comm ~field_equal:other_field_equal x y + | None, None -> + true + | _, _ -> + false ) (verification_evals_to_list evals1) (verification_evals_to_list evals2) && eq_verification_shifts ~field_equal shifts1 shifts2 && public1 = public2 && prev_challenges1 = prev_challenges2 + && zk_rows1 = zk_rows2 let _ = let open Pasta_fp_verifier_index in @@ -1059,8 +1103,18 @@ let _ = ~other_field_equal:Pasta_fq.equal in let urs = Pasta_fp_urs.create 16 in - let index0 = Pasta_fp_index.create gate_vector 0 0 urs in - let index2 = Pasta_fp_index.create gate_vector 2 0 urs in + (* TODO(dw) write tests with lookup tables *) + let lookup_tables = [||] in + (* TODO(dw) write tests with runtime tables *) + let runtime_table_cfg = [||] in + let index0 = + Pasta_fp_index.create gate_vector 0 lookup_tables runtime_table_cfg 0 + urs + in + let index2 = + Pasta_fp_index.create gate_vector 2 lookup_tables runtime_table_cfg 0 + urs + in let vindex0_0 = create index0 in let vindex0_1 = create index0 in assert (eq vindex0_0 vindex0_1) ; @@ -1109,8 +1163,18 @@ let _ = ~other_field_equal:Pasta_fp.equal in let urs = Pasta_fq_urs.create 16 in - let index0 = Pasta_fq_index.create gate_vector 0 0 urs in - let index2 = Pasta_fq_index.create gate_vector 2 0 urs in + (* TODO(dw) write tests with lookup tables *) + let lookup_tables = [||] in + (* TODO(dw) write tests with runtime tables *) + let runtime_table_cfg = [||] in + let index0 = + Pasta_fq_index.create gate_vector 0 lookup_tables runtime_table_cfg 0 + urs + in + let index2 = + Pasta_fq_index.create gate_vector 2 lookup_tables runtime_table_cfg 0 + urs + in let vindex0_0 = create index0 in let vindex0_1 = create index0 in assert (eq vindex0_0 vindex0_1) ; diff --git a/src/lib/crypto/kimchi_bindings/js/web/build.sh b/src/lib/crypto/kimchi_bindings/js/web/build.sh index c63fadb4ccb..f99564ef81d 100755 --- a/src/lib/crypto/kimchi_bindings/js/web/build.sh +++ b/src/lib/crypto/kimchi_bindings/js/web/build.sh @@ -2,9 +2,9 @@ set -euo pipefail if [[ -z "${PLONK_WASM_WEB-}" ]]; then - # The version should stay in line with the one in kimchi_bindings/wasm/rust-toolchain.toml export RUSTFLAGS="-C target-feature=+atomics,+bulk-memory,+mutable-globals -C link-arg=--no-check-features -C link-arg=--max-memory=4294967296" - rustup run nightly-2022-09-12 wasm-pack build --target web --out-dir ../js/web ../../wasm -- -Z build-std=panic_abort,std + # The version should stay in line with the one in kimchi_bindings/wasm/rust-toolchain.toml + rustup run nightly-2023-09-01 wasm-pack build --target web --out-dir ../js/web ../../wasm -- -Z build-std=panic_abort,std else cp "$PLONK_WASM_WEB"/* -R . 
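# Editor's note: this web build and the node_js build above pin the same
# nightly, and the comment in each ties it to kimchi_bindings/wasm's
# rust-toolchain.toml. A hedged sketch of a drift guard one could add to
# either script (the sed pattern and relative path are illustrative):
#   want="nightly-2023-09-01"
#   have=$(sed -n 's/^channel = "\(.*\)"$/\1/p' ../../wasm/rust-toolchain.toml)
#   [ "$have" = "$want" ] || { echo "toolchain drift: $have != $want" >&2; exit 1; }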
fi diff --git a/src/lib/crypto/kimchi_bindings/js/web/web_backend.js b/src/lib/crypto/kimchi_bindings/js/web/web_backend.js index 57c4928ec20..53db813af7f 100644 --- a/src/lib/crypto/kimchi_bindings/js/web/web_backend.js +++ b/src/lib/crypto/kimchi_bindings/js/web/web_backend.js @@ -1,2 +1,2 @@ // Provides: plonk_wasm -var plonk_wasm = joo_global_object.plonk_wasm; +var plonk_wasm = globalThis.plonk_wasm; diff --git a/src/lib/crypto/kimchi_bindings/stubs/Cargo.lock b/src/lib/crypto/kimchi_bindings/stubs/Cargo.lock index daafb5ddf20..effa7be4fff 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/Cargo.lock +++ b/src/lib/crypto/kimchi_bindings/stubs/Cargo.lock @@ -373,9 +373,9 @@ checksum = "d102f1a462fdcdddce88d6d46c06c074a2d2749b262230333726b06c52bb7585" [[package]] name = "either" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "fnv" @@ -439,6 +439,9 @@ name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +dependencies = [ + "serde", +] [[package]] name = "ident_case" @@ -446,6 +449,14 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "internal-tracing" +version = "0.1.0" +dependencies = [ + "ocaml", + "ocaml-gen", +] + [[package]] name = "itertools" version = "0.10.5" @@ -473,6 +484,7 @@ dependencies = [ "disjoint-set", "groupmap", "hex", + "internal-tracing", "itertools", "mina-curves", "mina-poseidon", @@ -488,7 +500,7 @@ dependencies = [ "rand", "rand_core", "rayon", - "rmp-serde 1.1.1", + "rmp-serde 1.1.2", "serde", "serde_with", "strum", @@ -622,9 +634,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ "autocfg", "libm", @@ -781,6 +793,7 @@ dependencies = [ "rand", "rand_core", "rayon", + "rmp-serde 1.1.2", "serde", "serde_with", "thiserror", @@ -809,9 +822,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.31" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0" +checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" dependencies = [ "proc-macro2", ] @@ -876,9 +889,9 @@ dependencies = [ [[package]] name = "rmp" -version = "0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44519172358fd6d58656c86ab8e7fbc9e1490c3e8f14d35ed78ca0dd07403c9f" +checksum = "7f9860a6cc38ed1da53456442089b4dfa35e7cedaa326df63017af88385e6b20" dependencies = [ "byteorder", "num-traits", @@ -898,9 +911,9 @@ dependencies = [ [[package]] name = "rmp-serde" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5b13be192e0220b8afb7222aa5813cb62cc269ebb5cac346ca6487681d2913e" +checksum = "bffea85eea980d8a74453e5d02a8d93028f3c34725de143085a844ebe953258a" dependencies = [ "byteorder", "rmp", @@ -954,29 
+967,29 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.171" +version = "1.0.176" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9" +checksum = "76dc28c9523c5d70816e393136b86d48909cfb27cecaa902d338c19ed47164dc" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.171" +version = "1.0.176" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682" +checksum = "a4e7b8c5dc823e3b90651ff1d3808419cd14e5ad76de04feaf37da114e7a306f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.27", ] [[package]] name = "serde_json" -version = "1.0.103" +version = "1.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b" +checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c" dependencies = [ "itoa", "ryu", @@ -1087,9 +1100,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.26" +version = "2.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970" +checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0" dependencies = [ "proc-macro2", "quote", @@ -1110,22 +1123,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.43" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42" +checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.43" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" +checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.27", ] [[package]] @@ -1234,5 +1247,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.27", ] diff --git a/src/lib/crypto/kimchi_bindings/stubs/Cargo.toml b/src/lib/crypto/kimchi_bindings/stubs/Cargo.toml index 855f46310cd..39354e4c3a1 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/Cargo.toml +++ b/src/lib/crypto/kimchi_bindings/stubs/Cargo.toml @@ -40,4 +40,4 @@ kimchi = { path = "../../proof-systems/kimchi", features = ["ocaml_types"] } # ocaml-specific ocaml = { version = "0.22.2", features = ["no-caml-startup"] } -ocaml-gen = "0.1.0" +ocaml-gen = "0.1.5" diff --git a/src/lib/crypto/kimchi_bindings/stubs/kimchi_bindings.ml b/src/lib/crypto/kimchi_bindings/stubs/kimchi_bindings.ml index 257e4ba84f9..07a4d89b681 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/kimchi_bindings.ml +++ b/src/lib/crypto/kimchi_bindings/stubs/kimchi_bindings.ml @@ -194,8 +194,15 @@ module Protocol = struct module Fp = struct type nonrec t - external create : Gates.Vector.Fp.t -> int -> int -> SRS.Fp.t -> t - = "caml_pasta_fp_plonk_index_create" + external create : + Gates.Vector.Fp.t + -> int + -> Pasta_bindings.Fp.t Kimchi_types.lookup_table array + -> Pasta_bindings.Fp.t Kimchi_types.runtime_table_cfg array + -> int + -> SRS.Fp.t + -> t + = "caml_pasta_fp_plonk_index_create_bytecode" 
"caml_pasta_fp_plonk_index_create" external max_degree : t -> int = "caml_pasta_fp_plonk_index_max_degree" @@ -221,8 +228,15 @@ module Protocol = struct module Fq = struct type nonrec t - external create : Gates.Vector.Fq.t -> int -> int -> SRS.Fq.t -> t - = "caml_pasta_fq_plonk_index_create" + external create : + Gates.Vector.Fq.t + -> int + -> Pasta_bindings.Fq.t Kimchi_types.lookup_table array + -> Pasta_bindings.Fq.t Kimchi_types.runtime_table_cfg array + -> int + -> SRS.Fq.t + -> t + = "caml_pasta_fq_plonk_index_create_bytecode" "caml_pasta_fq_plonk_index_create" external max_degree : t -> int = "caml_pasta_fq_plonk_index_max_degree" @@ -315,6 +329,19 @@ module Protocol = struct -> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , Pasta_bindings.Fp.t ) Kimchi_types.prover_proof + -> t = "fp_oracles_create_no_public" + + external create_with_public_evals : + Pasta_bindings.Fq.t Kimchi_types.or_infinity Kimchi_types.poly_comm + array + -> ( Pasta_bindings.Fp.t + , SRS.Fp.t + , Pasta_bindings.Fq.t Kimchi_types.or_infinity Kimchi_types.poly_comm + ) + Kimchi_types.VerifierIndex.verifier_index + -> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity + , Pasta_bindings.Fp.t ) + Kimchi_types.proof_with_public -> t = "fp_oracles_create" external dummy : unit -> Pasta_bindings.Fp.t Kimchi_types.random_oracles @@ -340,6 +367,19 @@ module Protocol = struct -> ( Pasta_bindings.Fp.t Kimchi_types.or_infinity , Pasta_bindings.Fq.t ) Kimchi_types.prover_proof + -> t = "fq_oracles_create_no_public" + + external create_with_public_evals : + Pasta_bindings.Fp.t Kimchi_types.or_infinity Kimchi_types.poly_comm + array + -> ( Pasta_bindings.Fq.t + , SRS.Fq.t + , Pasta_bindings.Fp.t Kimchi_types.or_infinity Kimchi_types.poly_comm + ) + Kimchi_types.VerifierIndex.verifier_index + -> ( Pasta_bindings.Fp.t Kimchi_types.or_infinity + , Pasta_bindings.Fq.t ) + Kimchi_types.proof_with_public -> t = "fq_oracles_create" external dummy : unit -> Pasta_bindings.Fq.t Kimchi_types.random_oracles @@ -357,20 +397,31 @@ module Protocol = struct external create : Index.Fp.t -> FieldVectors.Fp.t array + -> Pasta_bindings.Fp.t Kimchi_types.runtime_table array -> Pasta_bindings.Fp.t array -> Pasta_bindings.Fq.t Kimchi_types.or_infinity array -> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof = "caml_pasta_fp_plonk_proof_create" + Kimchi_types.proof_with_public = "caml_pasta_fp_plonk_proof_create" + + external create_and_verify : + Index.Fp.t + -> FieldVectors.Fp.t array + -> Pasta_bindings.Fp.t Kimchi_types.runtime_table array + -> Pasta_bindings.Fp.t array + -> Pasta_bindings.Fq.t Kimchi_types.or_infinity array + -> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity + , Pasta_bindings.Fp.t ) + Kimchi_types.proof_with_public + = "caml_pasta_fp_plonk_proof_create_and_verify" external example_with_lookup : SRS.Fp.t - -> bool -> Index.Fp.t * Pasta_bindings.Fp.t * ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof + Kimchi_types.proof_with_public = "caml_pasta_fp_plonk_proof_example_with_lookup" external example_with_ffadd : @@ -379,7 +430,7 @@ module Protocol = struct * Pasta_bindings.Fp.t * ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof + Kimchi_types.proof_with_public = "caml_pasta_fp_plonk_proof_example_with_ffadd" external example_with_xor : @@ -388,7 +439,7 @@ module Protocol = struct * (Pasta_bindings.Fp.t * Pasta_bindings.Fp.t) * ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , 
Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof + Kimchi_types.proof_with_public = "caml_pasta_fp_plonk_proof_example_with_xor" external example_with_rot : @@ -397,7 +448,7 @@ module Protocol = struct * (Pasta_bindings.Fp.t * Pasta_bindings.Fp.t) * ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof + Kimchi_types.proof_with_public = "caml_pasta_fp_plonk_proof_example_with_rot" external example_with_foreign_field_mul : @@ -405,7 +456,7 @@ module Protocol = struct -> Index.Fp.t * ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof + Kimchi_types.proof_with_public = "caml_pasta_fp_plonk_proof_example_with_foreign_field_mul" external example_with_range_check : @@ -413,7 +464,7 @@ module Protocol = struct -> Index.Fp.t * ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof + Kimchi_types.proof_with_public = "caml_pasta_fp_plonk_proof_example_with_range_check" external example_with_range_check0 : @@ -421,7 +472,7 @@ module Protocol = struct -> Index.Fp.t * ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof + Kimchi_types.proof_with_public = "caml_pasta_fp_plonk_proof_example_with_range_check0" external verify : @@ -432,7 +483,7 @@ module Protocol = struct Kimchi_types.VerifierIndex.verifier_index -> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof + Kimchi_types.proof_with_public -> bool = "caml_pasta_fp_plonk_proof_verify" external batch_verify : @@ -444,7 +495,7 @@ module Protocol = struct array -> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof + Kimchi_types.proof_with_public array -> bool = "caml_pasta_fp_plonk_proof_batch_verify" @@ -452,26 +503,28 @@ module Protocol = struct unit -> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof = "caml_pasta_fp_plonk_proof_dummy" + Kimchi_types.proof_with_public = "caml_pasta_fp_plonk_proof_dummy" external deep_copy : ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof + Kimchi_types.proof_with_public -> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity , Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof = "caml_pasta_fp_plonk_proof_deep_copy" + Kimchi_types.proof_with_public + = "caml_pasta_fp_plonk_proof_deep_copy" end module Fq = struct external create : Index.Fq.t -> FieldVectors.Fq.t array + -> Pasta_bindings.Fq.t Kimchi_types.runtime_table array -> Pasta_bindings.Fq.t array -> Pasta_bindings.Fp.t Kimchi_types.or_infinity array -> ( Pasta_bindings.Fp.t Kimchi_types.or_infinity , Pasta_bindings.Fq.t ) - Kimchi_types.prover_proof = "caml_pasta_fq_plonk_proof_create" + Kimchi_types.proof_with_public = "caml_pasta_fq_plonk_proof_create" external verify : ( Pasta_bindings.Fq.t @@ -481,7 +534,7 @@ module Protocol = struct Kimchi_types.VerifierIndex.verifier_index -> ( Pasta_bindings.Fp.t Kimchi_types.or_infinity , Pasta_bindings.Fq.t ) - Kimchi_types.prover_proof + Kimchi_types.proof_with_public -> bool = "caml_pasta_fq_plonk_proof_verify" external batch_verify : @@ -493,7 +546,7 @@ module Protocol = struct array -> ( Pasta_bindings.Fp.t Kimchi_types.or_infinity , Pasta_bindings.Fq.t ) - Kimchi_types.prover_proof + Kimchi_types.proof_with_public array -> bool = "caml_pasta_fq_plonk_proof_batch_verify" @@ -501,15 +554,16 @@ module Protocol = struct unit -> ( 
Pasta_bindings.Fp.t Kimchi_types.or_infinity , Pasta_bindings.Fq.t ) - Kimchi_types.prover_proof = "caml_pasta_fq_plonk_proof_dummy" + Kimchi_types.proof_with_public = "caml_pasta_fq_plonk_proof_dummy" external deep_copy : ( Pasta_bindings.Fp.t Kimchi_types.or_infinity , Pasta_bindings.Fq.t ) - Kimchi_types.prover_proof + Kimchi_types.proof_with_public -> ( Pasta_bindings.Fp.t Kimchi_types.or_infinity , Pasta_bindings.Fq.t ) - Kimchi_types.prover_proof = "caml_pasta_fq_plonk_proof_deep_copy" + Kimchi_types.proof_with_public + = "caml_pasta_fq_plonk_proof_deep_copy" end end end diff --git a/src/lib/crypto/kimchi_bindings/stubs/kimchi_types.ml b/src/lib/crypto/kimchi_bindings/stubs/kimchi_types.ml index a9e97d76e89..6bec0f01f84 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/kimchi_types.ml +++ b/src/lib/crypto/kimchi_bindings/stubs/kimchi_types.ml @@ -98,6 +98,13 @@ type nonrec 'caml_g lookup_commitments = ; runtime : 'caml_g poly_comm option } +type nonrec 'caml_f runtime_table_cfg = + { id : int32; first_column : 'caml_f array } + +type nonrec 'caml_f lookup_table = { id : int32; data : 'caml_f array array } + +type nonrec 'caml_f runtime_table = { id : int32; data : 'caml_f array } + type nonrec 'caml_g prover_commitments = { w_comm : 'caml_g poly_comm @@ -129,6 +136,11 @@ type nonrec ('caml_g, 'caml_f) prover_proof = ; prev_challenges : ('caml_g, 'caml_f) recursion_challenge array } +type nonrec ('caml_g, 'caml_f) proof_with_public = + { public_evals : 'caml_f array point_evaluations option + ; proof : ('caml_g, 'caml_f) prover_proof + } + type nonrec wire = { row : int; col : int } type nonrec gate_type = @@ -225,6 +237,12 @@ module VerifierIndex = struct ; mul_comm : 'poly_comm ; emul_comm : 'poly_comm ; endomul_scalar_comm : 'poly_comm + ; xor_comm : 'poly_comm option + ; range_check0_comm : 'poly_comm option + ; range_check1_comm : 'poly_comm option + ; foreign_field_add_comm : 'poly_comm option + ; foreign_field_mul_comm : 'poly_comm option + ; rot_comm : 'poly_comm option } type nonrec ('fr, 'srs, 'poly_comm) verifier_index = @@ -236,5 +254,6 @@ module VerifierIndex = struct ; evals : 'poly_comm verification_evals ; shifts : 'fr array ; lookup_index : 'poly_comm Lookup.t option + ; zk_rows : int } end diff --git a/src/lib/crypto/kimchi_bindings/stubs/rust-toolchain.toml b/src/lib/crypto/kimchi_bindings/stubs/rust-toolchain.toml index 8ecda82d680..5a82cfe5ab4 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/rust-toolchain.toml +++ b/src/lib/crypto/kimchi_bindings/stubs/rust-toolchain.toml @@ -11,4 +11,4 @@ # 4. 
figure out the hashes of the (now obsolete) docker images used in CI rules that are failing, grep for these hashes and replace them with the new hashes [toolchain] -channel = "1.67.0" +channel = "1.72" diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/bigint_256.rs b/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/bigint_256.rs index 90570304d3c..e9f54f65219 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/bigint_256.rs +++ b/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/bigint_256.rs @@ -239,12 +239,12 @@ mod tests { #[test] fn biguint() { let x = 10000.to_biguint().unwrap(); - println!("biguint.to_string: {}", x.to_string()); + println!("biguint.to_string: {}", x); let y = CamlBigInteger256::try_from(x.clone()).unwrap(); println!("camlbigint.to_string: {}", y.to_string()); //assert!(&y.to_string() == "10000"); let x2: BigUint = y.into(); assert!(x2 == x); - println!("biguint.to_string: {}", x2.to_string()); + println!("biguint.to_string: {}", x2); } } diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/lib.rs b/src/lib/crypto/kimchi_bindings/stubs/src/lib.rs index 11cb493b9c4..a3b3463b0e3 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/src/lib.rs +++ b/src/lib/crypto/kimchi_bindings/stubs/src/lib.rs @@ -61,7 +61,9 @@ pub use { wires::caml::CamlWire, }, kimchi::proof::caml::CamlProofEvaluations, - kimchi::prover::caml::{CamlLookupCommitments, CamlProverCommitments, CamlProverProof}, + kimchi::prover::caml::{ + CamlLookupCommitments, CamlProofWithPublic, CamlProverCommitments, CamlProverProof, + }, mina_poseidon::sponge::caml::CamlScalarChallenge, poly_commitment::commitment::caml::{CamlOpeningProof, CamlPolyComm}, }; diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/main.rs b/src/lib/crypto/kimchi_bindings/stubs/src/main.rs index e9d8d785f9b..5b74b542a3f 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/src/main.rs +++ b/src/lib/crypto/kimchi_bindings/stubs/src/main.rs @@ -1,6 +1,10 @@ use kimchi::circuits::{ expr::FeatureFlag, - lookup::lookups::{LookupFeatures, LookupPattern, LookupPatterns}, + lookup::{ + lookups::{LookupFeatures, LookupPattern, LookupPatterns}, + runtime_tables::caml::{CamlRuntimeTable, CamlRuntimeTableCfg}, + tables::caml::CamlLookupTable, + }, }; use kimchi::proof::{caml::CamlRecursionChallenge, PointEvaluations}; use ocaml_gen::{decl_fake_generic, decl_func, decl_module, decl_type, decl_type_alias, Env}; @@ -29,6 +33,7 @@ use wires_15_stubs::{ CamlOpeningProof, CamlPolyComm, CamlProofEvaluations, + CamlProofWithPublic, CamlProverCommitments, CamlProverProof, CamlRandomOracles, @@ -100,8 +105,13 @@ fn generate_types_bindings(mut w: impl std::io::Write, env: &mut Env) { decl_type!(w, env, CamlRecursionChallenge:: => "recursion_challenge"); decl_type!(w, env, CamlOpeningProof:: => "opening_proof"); decl_type!(w, env, CamlLookupCommitments:: => "lookup_commitments"); + + decl_type!(w, env, CamlRuntimeTableCfg:: => "runtime_table_cfg"); + decl_type!(w, env, CamlLookupTable:: => "lookup_table"); + decl_type!(w, env, CamlRuntimeTable:: => "runtime_table"); decl_type!(w, env, CamlProverCommitments:: => "prover_commitments"); decl_type!(w, env, CamlProverProof => "prover_proof"); + decl_type!(w, env, CamlProofWithPublic => "proof_with_public"); decl_type!(w, env, CamlWire => "wire"); decl_type!(w, env, GateType => "gate_type"); @@ -433,7 +443,8 @@ fn generate_kimchi_bindings(mut w: impl std::io::Write, env: &mut Env) { decl_module!(w, env, "Fp", { decl_type_alias!(w, env, "t" => CamlOracles); - decl_func!(w, env, fp_oracles_create => 
"create"); + decl_func!(w, env, fp_oracles_create_no_public => "create"); + decl_func!(w, env, fp_oracles_create => "create_with_public_evals"); decl_func!(w, env, fp_oracles_dummy => "dummy"); decl_func!(w, env, fp_oracles_deep_copy => "deep_copy"); }); @@ -441,7 +452,8 @@ fn generate_kimchi_bindings(mut w: impl std::io::Write, env: &mut Env) { decl_module!(w, env, "Fq", { decl_type_alias!(w, env, "t" => CamlOracles); - decl_func!(w, env, fq_oracles_create => "create"); + decl_func!(w, env, fq_oracles_create_no_public => "create"); + decl_func!(w, env, fq_oracles_create => "create_with_public_evals"); decl_func!(w, env, fq_oracles_dummy => "dummy"); decl_func!(w, env, fq_oracles_deep_copy => "deep_copy"); }); @@ -450,6 +462,7 @@ fn generate_kimchi_bindings(mut w: impl std::io::Write, env: &mut Env) { decl_module!(w, env, "Proof", { decl_module!(w, env, "Fp", { decl_func!(w, env, caml_pasta_fp_plonk_proof_create => "create"); + decl_func!(w, env, caml_pasta_fp_plonk_proof_create_and_verify => "create_and_verify"); decl_func!(w, env, caml_pasta_fp_plonk_proof_example_with_lookup => "example_with_lookup"); decl_func!(w, env, caml_pasta_fp_plonk_proof_example_with_ffadd => "example_with_ffadd"); decl_func!(w, env, caml_pasta_fp_plonk_proof_example_with_xor => "example_with_xor"); diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/oracles.rs b/src/lib/crypto/kimchi_bindings/stubs/src/oracles.rs index c6a714f23af..db3acc979a6 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/src/oracles.rs +++ b/src/lib/crypto/kimchi_bindings/stubs/src/oracles.rs @@ -2,7 +2,10 @@ use crate::pasta_fp_plonk_verifier_index::CamlPastaFpPlonkVerifierIndex; use ark_ff::One; use kimchi::circuits::scalars::{caml::CamlRandomOracles, RandomOracles}; use kimchi::proof::ProverProof; -use kimchi::{prover::caml::CamlProverProof, verifier_index::VerifierIndex}; +use kimchi::{ + prover::caml::{CamlProofWithPublic, CamlProverProof}, + verifier_index::VerifierIndex, +}; use mina_poseidon::{ self, constants::PlonkSpongeConstantsKimchi, @@ -11,6 +14,8 @@ use mina_poseidon::{ }; use paste::paste; use poly_commitment::commitment::{caml::CamlPolyComm, shift_scalar, PolyComm}; +use poly_commitment::evaluation_proof::OpeningProof; +use poly_commitment::SRS; #[derive(ocaml::IntoValue, ocaml::FromValue, ocaml_gen::Struct)] pub struct CamlOracles { @@ -22,20 +27,95 @@ pub struct CamlOracles { macro_rules! impl_oracles { ($CamlF: ty, $F: ty, $CamlG: ty, $G: ty, $index: ty, $curve_params: ty) => { - paste! 
{ #[ocaml_gen::func] #[ocaml::func] pub fn [<$F:snake _oracles_create>]( + lgr_comm: Vec>, + index: $index, + proof: CamlProofWithPublic<$CamlG, $CamlF>, + ) -> Result, ocaml::Error> { + let index: VerifierIndex<$G, OpeningProof<$G>> = index.into(); + + let lgr_comm: Vec> = lgr_comm + .into_iter() + .take(proof.proof.public.len()) + .map(Into::into) + .collect(); + let lgr_comm_refs: Vec<_> = lgr_comm.iter().collect(); + + let p_comm = PolyComm::<$G>::multi_scalar_mul( + &lgr_comm_refs, + &proof + .proof + .public + .iter() + .map(Into::<$F>::into) + .map(|s| -s) + .collect::>(), + ); + + let p_comm = { + index + .srs() + .mask_custom( + p_comm.clone(), + &p_comm.map(|_| $F::one()), + ) + .unwrap() + .commitment + }; + + let (proof, public_input): (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) = proof.into(); + + let oracles_result = + proof.oracles::< + DefaultFqSponge<$curve_params, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<$F, PlonkSpongeConstantsKimchi>, + >(&index, &p_comm, Some(&public_input))?; + + let (mut sponge, combined_inner_product, p_eval, digest, oracles) = ( + oracles_result.fq_sponge, + oracles_result.combined_inner_product, + oracles_result.public_evals, + oracles_result.digest, + oracles_result.oracles, + ); + + sponge.absorb_fr(&[shift_scalar::<$G>(combined_inner_product)]); + + let opening_prechallenges = proof + .proof + .prechallenges(&mut sponge) + .into_iter() + .map(|x| x.0.into()) + .collect(); + + Ok(CamlOracles { + o: oracles.into(), + p_eval: (p_eval[0][0].into(), p_eval[1][0].into()), + opening_prechallenges, + digest_before_evaluations: digest.into(), + }) + } + + #[ocaml_gen::func] + #[ocaml::func] + pub fn [<$F:snake _oracles_create_no_public>]( lgr_comm: Vec>, index: $index, proof: CamlProverProof<$CamlG, $CamlF>, ) -> Result, ocaml::Error> { - let index: VerifierIndex<$G> = index.into(); + let proof = CamlProofWithPublic { + proof, + public_evals: None, + }; + + let index: VerifierIndex<$G, OpeningProof<$G>> = index.into(); let lgr_comm: Vec> = lgr_comm .into_iter() - .take(proof.public.len()) + .take(proof.proof.public.len()) .map(Into::into) .collect(); let lgr_comm_refs: Vec<_> = lgr_comm.iter().collect(); @@ -43,6 +123,7 @@ macro_rules! impl_oracles { let p_comm = PolyComm::<$G>::multi_scalar_mul( &lgr_comm_refs, &proof + .proof .public .iter() .map(Into::<$F>::into) @@ -61,10 +142,13 @@ macro_rules! 
impl_oracles { .commitment }; - let (proof, public_input): (ProverProof<$G>, Vec<$F>) = proof.into(); + let (proof, public_input): (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) = proof.into(); let oracles_result = - proof.oracles::, DefaultFrSponge<$F, PlonkSpongeConstantsKimchi>>(&index, &p_comm, &public_input)?; + proof.oracles::< + DefaultFqSponge<$curve_params, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<$F, PlonkSpongeConstantsKimchi>, + >(&index, &p_comm, Some(&public_input))?; let (mut sponge, combined_inner_product, p_eval, digest, oracles) = ( oracles_result.fq_sponge, diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_index.rs index 494d262a243..4f1a3cba8a1 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_index.rs +++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_index.rs @@ -1,9 +1,15 @@ +use crate::arkworks::CamlFp; use crate::{gate_vector::fp::CamlPastaFpPlonkGateVectorPtr, srs::fp::CamlFpSrs}; use ark_poly::EvaluationDomain; +use kimchi::circuits::lookup::runtime_tables::caml::CamlRuntimeTableCfg; +use kimchi::circuits::lookup::runtime_tables::RuntimeTableCfg; +use kimchi::circuits::lookup::tables::caml::CamlLookupTable; +use kimchi::circuits::lookup::tables::LookupTable; use kimchi::circuits::{constraints::ConstraintSystem, gate::CircuitGate}; use kimchi::{linearization::expr_linearization, prover_index::ProverIndex}; use mina_curves::pasta::{Fp, Pallas, Vesta, VestaParameters}; use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge}; +use poly_commitment::{evaluation_proof::OpeningProof, SRS as _}; use serde::{Deserialize, Serialize}; use std::{ fs::{File, OpenOptions}, @@ -12,7 +18,7 @@ use std::{ /// Boxed so that we don't store large proving indexes in the OCaml heap. 
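// Editor's note: the heart of this file's change is that index creation now
// threads lookup tables and runtime-table configurations into the constraint
// system builder. A hedged sketch of the conversion-plus-builder chain the
// patch introduces below (generic parameters restored for readability):
//
//   let lookup_tables: Vec<LookupTable<Fp>> =
//       lookup_tables.into_iter().map(Into::into).collect();
//   let runtime_tables: Vec<RuntimeTableCfg<Fp>> =
//       runtime_tables.into_iter().map(Into::into).collect();
//   let cs = ConstraintSystem::<Fp>::create(gates)
//       .public(public as usize)
//       .prev_challenges(prev_challenges as usize)
//       .max_poly_size(Some(srs.0.max_poly_size()))
//       .lookup(lookup_tables)
//       .runtime(if runtime_tables.is_empty() { None } else { Some(runtime_tables) })
//       .build()?;
//
// The builder error is now surfaced via `Err(e) => return Err(e.into())`
// instead of a generic failwith, so OCaml callers see the real cause.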
#[derive(ocaml_gen::CustomType)] -pub struct CamlPastaFpPlonkIndex(pub Box>); +pub struct CamlPastaFpPlonkIndex(pub Box>>); pub type CamlPastaFpPlonkIndexPtr<'a> = ocaml::Pointer<'a, CamlPastaFpPlonkIndex>; extern "C" fn caml_pasta_fp_plonk_index_finalize(v: ocaml::Raw) { @@ -39,6 +45,8 @@ impl ocaml::custom::Custom for CamlPastaFpPlonkIndex { pub fn caml_pasta_fp_plonk_index_create( gates: CamlPastaFpPlonkGateVectorPtr, public: ocaml::Int, + lookup_tables: Vec>, + runtime_tables: Vec>, prev_challenges: ocaml::Int, srs: CamlFpSrs, ) -> Result { @@ -53,18 +61,26 @@ pub fn caml_pasta_fp_plonk_index_create( }) .collect(); + let runtime_tables: Vec> = + runtime_tables.into_iter().map(Into::into).collect(); + + let lookup_tables: Vec> = lookup_tables.into_iter().map(Into::into).collect(); + // create constraint system let cs = match ConstraintSystem::::create(gates) .public(public as usize) .prev_challenges(prev_challenges as usize) + .max_poly_size(Some(srs.0.max_poly_size())) + .lookup(lookup_tables) + .runtime(if runtime_tables.is_empty() { + None + } else { + Some(runtime_tables) + }) .build() { - Err(_) => { - return Err(ocaml::Error::failwith( - "caml_pasta_fp_plonk_index_create: could not create constraint system", - ) - .err() - .unwrap()) + Err(e) => { + return Err(e.into()) } Ok(cs) => cs, }; @@ -80,7 +96,7 @@ pub fn caml_pasta_fp_plonk_index_create( } // create index - let mut index = ProverIndex::::create(cs, endo_q, srs.clone()); + let mut index = ProverIndex::>::create(cs, endo_q, srs.clone()); // Compute and cache the verifier index digest index.compute_verifier_index_digest::>(); @@ -143,7 +159,9 @@ pub fn caml_pasta_fp_plonk_index_read( } // deserialize the index - let mut t = ProverIndex::::deserialize(&mut rmp_serde::Deserializer::new(r))?; + let mut t = ProverIndex::>::deserialize( + &mut rmp_serde::Deserializer::new(r), + )?; t.srs = srs.clone(); let (linearization, powers_of_alpha) = expr_linearization(Some(&t.cs.feature_flags), true); diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_proof.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_proof.rs index 75e36747a7e..1c6f00408b0 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_proof.rs +++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_proof.rs @@ -9,7 +9,11 @@ use ark_ec::AffineCurve; use ark_ff::One; use array_init::array_init; use groupmap::GroupMap; -use kimchi::prover_index::ProverIndex; +use kimchi::verifier::verify; +use kimchi::{ + circuits::lookup::runtime_tables::{caml::CamlRuntimeTable, RuntimeTable}, + prover_index::ProverIndex, +}; use kimchi::{circuits::polynomial::COLUMNS, verifier::batch_verify}; use kimchi::{ proof::{ @@ -17,7 +21,7 @@ use kimchi::{ }, verifier::Context, }; -use kimchi::{prover::caml::CamlProverProof, verifier_index::VerifierIndex}; +use kimchi::{prover::caml::CamlProofWithPublic, verifier_index::VerifierIndex}; use mina_curves::pasta::{Fp, Fq, Pallas, Vesta, VestaParameters}; use mina_poseidon::{ constants::PlonkSpongeConstantsKimchi, @@ -36,9 +40,10 @@ type EFrSponge = DefaultFrSponge; pub fn caml_pasta_fp_plonk_proof_create( index: CamlPastaFpPlonkIndexPtr<'static>, witness: Vec, + runtime_tables: Vec>, prev_challenges: Vec, prev_sgs: Vec, -) -> Result, ocaml::Error> { +) -> Result, ocaml::Error> { { let ptr: &mut poly_commitment::srs::SRS = unsafe { &mut *(std::sync::Arc::as_ptr(&index.as_ref().0.srs) as *mut _) }; @@ -57,10 +62,7 @@ pub fn caml_pasta_fp_plonk_proof_create( .iter() .map(Into::::into) .collect(); - let comm = 
PolyComm:: { - unshifted: vec![sg], - shifted: None, - }; + let comm = PolyComm:: { elems: vec![sg] }; RecursionChallenge { chals, comm } }) .collect() @@ -70,7 +72,9 @@ pub fn caml_pasta_fp_plonk_proof_create( let witness: [Vec<_>; COLUMNS] = witness .try_into() .map_err(|_| ocaml::Error::Message("the witness should be a column of 15 vectors"))?; - let index: &ProverIndex = &index.as_ref().0; + let index: &ProverIndex> = &index.as_ref().0; + let runtime_tables: Vec> = + runtime_tables.into_iter().map(Into::into).collect(); // public input let public_input = witness[0][0..index.cs.public].to_vec(); @@ -86,7 +90,7 @@ pub fn caml_pasta_fp_plonk_proof_create( let proof = ProverProof::create_recursive::( &group_map, witness, - &[], + &runtime_tables, index, prev, None, @@ -96,41 +100,119 @@ pub fn caml_pasta_fp_plonk_proof_create( }) } +#[ocaml_gen::func] +#[ocaml::func] +pub fn caml_pasta_fp_plonk_proof_create_and_verify( + index: CamlPastaFpPlonkIndexPtr<'static>, + witness: Vec, + runtime_tables: Vec>, + prev_challenges: Vec, + prev_sgs: Vec, +) -> Result, ocaml::Error> { + { + let ptr: &mut poly_commitment::srs::SRS = + unsafe { &mut *(std::sync::Arc::as_ptr(&index.as_ref().0.srs) as *mut _) }; + ptr.add_lagrange_basis(index.as_ref().0.cs.domain.d1); + } + let prev = if prev_challenges.is_empty() { + Vec::new() + } else { + let challenges_per_sg = prev_challenges.len() / prev_sgs.len(); + prev_sgs + .into_iter() + .map(Into::::into) + .enumerate() + .map(|(i, sg)| { + let chals = prev_challenges[(i * challenges_per_sg)..(i + 1) * challenges_per_sg] + .iter() + .map(Into::::into) + .collect(); + let comm = PolyComm:: { elems: vec![sg] }; + RecursionChallenge { chals, comm } + }) + .collect() + }; + + let witness: Vec> = witness.iter().map(|x| (*x.0).clone()).collect(); + let witness: [Vec<_>; COLUMNS] = witness + .try_into() + .map_err(|_| ocaml::Error::Message("the witness should be a column of 15 vectors"))?; + let index: &ProverIndex> = &index.as_ref().0; + let runtime_tables: Vec> = + runtime_tables.into_iter().map(Into::into).collect(); + + // public input + let public_input = witness[0][0..index.cs.public].to_vec(); + + // NB: This method is designed only to be used by tests. However, since creating a new reference will cause `drop` to be called on it once we are done with it. Since `drop` calls `caml_shutdown` internally, we *really, really* do not want to do this, but we have no other way to get at the active runtime. + // TODO: There's actually a way to get a handle to the runtime as a function argument. Switch + // to doing this instead. + let runtime = unsafe { ocaml::Runtime::recover_handle() }; + + // Release the runtime lock so that other threads can run using it while we generate the proof. 
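// Editor's note: this is ocaml-rs's pattern for long-running native calls —
// recover a handle to the already-initialised runtime, then do the heavy
// work inside `releasing_runtime` so other OCaml threads can proceed; the
// lock is re-acquired when the closure returns. Skeleton of the flow below
// (error plumbing and some generics elided):
//
//   let runtime = unsafe { ocaml::Runtime::recover_handle() };
//   runtime.releasing_runtime(|| {
//       // no OCaml values may be touched while the lock is released
//       let proof = ProverProof::create_recursive::<EFqSponge, EFrSponge>(
//           &group_map, witness, &runtime_tables, index, prev, None,
//       )?;
//       verify::<Vesta, EFqSponge, EFrSponge, OpeningProof<Vesta>>(
//           &group_map, &verifier_index, &proof, &public_input,
//       )?;
//       Ok((proof, public_input).into())
//   })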
+ runtime.releasing_runtime(|| { + let group_map = GroupMap::::setup(); + let proof = ProverProof::create_recursive::( + &group_map, + witness, + &runtime_tables, + index, + prev, + None, + ) + .map_err(|e| ocaml::Error::Error(e.into()))?; + + let verifier_index = index.verifier_index(); + + // Verify proof + verify::>( + &group_map, + &verifier_index, + &proof, + &public_input, + )?; + + Ok((proof, public_input).into()) + }) +} + #[ocaml_gen::func] #[ocaml::func] pub fn caml_pasta_fp_plonk_proof_example_with_lookup( srs: CamlFpSrs, - indexed: bool, ) -> ( CamlPastaFpPlonkIndex, CamlFp, - CamlProverProof, + CamlProofWithPublic, ) { use ark_ff::Zero; use kimchi::circuits::{ constraints::ConstraintSystem, gate::{CircuitGate, GateType}, - lookup::runtime_tables::{RuntimeTable, RuntimeTableCfg, RuntimeTableSpec}, + lookup::{ + runtime_tables::{RuntimeTable, RuntimeTableCfg}, + tables::LookupTable, + }, polynomial::COLUMNS, wires::Wire, }; use poly_commitment::srs::{endos, SRS}; let num_gates = 1000; - let num_tables = 5; + let num_tables: usize = 5; + + // Even if using runtime tables, we need a fixed table with a zero row. + let fixed_tables = vec![LookupTable { + id: 0, + data: vec![vec![0, 0, 0, 0, 0].into_iter().map(Into::into).collect()], + }]; let mut runtime_tables_setup = vec![]; + let first_column: Vec<_> = [8u32, 9, 8, 7, 1].into_iter().map(Into::into).collect(); for table_id in 0..num_tables { - let cfg = if indexed { - RuntimeTableCfg::Indexed(RuntimeTableSpec { - id: table_id as i32, - len: 5, - }) - } else { - RuntimeTableCfg::Custom { - id: table_id as i32, - first_column: [8u32, 9, 8, 7, 1].into_iter().map(Into::into).collect(), - } + let cfg = RuntimeTableCfg { + id: table_id as i32, + first_column: first_column.clone(), }; runtime_tables_setup.push(cfg); } @@ -163,13 +245,19 @@ pub fn caml_pasta_fp_plonk_proof_example_with_lookup( for row in 0..num_gates { // the first register is the table id - lookup_cols[0][row] = 0u32.into(); + lookup_cols[0][row] = ((row % num_tables) as u64).into(); // create queries into our runtime lookup table let lookup_cols = &mut lookup_cols[1..]; - for chunk in lookup_cols.chunks_mut(2) { - chunk[0][row] = if indexed { 1u32.into() } else { 9u32.into() }; // index - chunk[1][row] = 2u32.into(); // value + for (chunk_id, chunk) in lookup_cols.chunks_mut(2).enumerate() { + // this could be properly fully random + if (row + chunk_id) % 2 == 0 { + chunk[0][row] = 9u32.into(); // index + chunk[1][row] = 2u32.into(); // value + } else { + chunk[0][row] = 8u32.into(); // index + chunk[1][row] = 3u32.into(); // value + } } } cols @@ -180,6 +268,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_lookup( // not sure if theres a smarter way instead of the double unwrap, but should be fine in the test let cs = ConstraintSystem::::create(gates) .runtime(Some(runtime_tables_setup)) + .lookup(fixed_tables) .public(num_public_inputs) .build() .unwrap(); @@ -188,7 +277,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_lookup( ptr.add_lagrange_basis(cs.domain.d1); let (endo_q, _endo_r) = endos::(); - let index = ProverIndex::::create(cs, endo_q, srs.0); + let index = ProverIndex::>::create(cs, endo_q, srs.0); let group_map = ::Map::setup(); let public_input = witness[0][0]; let proof = ProverProof::create_recursive::( @@ -214,7 +303,10 @@ pub fn caml_pasta_fp_plonk_proof_example_with_lookup( #[ocaml::func] pub fn caml_pasta_fp_plonk_proof_example_with_foreign_field_mul( srs: CamlFpSrs, -) -> (CamlPastaFpPlonkIndex, CamlProverProof) { +) -> ( + 
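The runtime-table plumbing above pairs a setup-time RuntimeTableCfg with a proving-time RuntimeTable sharing the same id and length. A minimal sketch with illustrative values, chosen so the (9 -> 2) and (8 -> 3) queries from the witness loop above are satisfied:

    use kimchi::circuits::lookup::runtime_tables::{RuntimeTable, RuntimeTableCfg};
    use mina_curves::pasta::Fp;

    fn main() {
        // Setup time: the id and the index (first) column are baked into the circuit.
        let first_column: Vec<Fp> = [8u32, 9, 8, 7, 1].into_iter().map(Into::into).collect();
        let cfg = RuntimeTableCfg { id: 0, first_column };

        // Proving time: the prover supplies the value column, one entry per row,
        // so row i pairs cfg.first_column[i] with data[i].
        let data: Vec<Fp> = [3u32, 2, 3, 0, 0].into_iter().map(Into::into).collect();
        let table = RuntimeTable { id: 0, data };

        assert_eq!(cfg.first_column.len(), table.data.len());
    }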
CamlPastaFpPlonkIndex, + CamlProofWithPublic, +) { use ark_ff::Zero; use kimchi::circuits::{ constraints::ConstraintSystem, @@ -343,16 +435,13 @@ pub fn caml_pasta_fp_plonk_proof_example_with_foreign_field_mul( } // Create constraint system - let cs = ConstraintSystem::::create(gates) - .lookup(vec![foreign_field_mul::gadget::lookup_table()]) - .build() - .unwrap(); + let cs = ConstraintSystem::::create(gates).build().unwrap(); let ptr: &mut SRS = unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) }; ptr.add_lagrange_basis(cs.domain.d1); let (endo_q, _endo_r) = endos::(); - let index = ProverIndex::::create(cs, endo_q, srs.0); + let index = ProverIndex::>::create(cs, endo_q, srs.0); let group_map = ::Map::setup(); let proof = ProverProof::create_recursive::( &group_map, @@ -373,7 +462,10 @@ pub fn caml_pasta_fp_plonk_proof_example_with_foreign_field_mul( #[ocaml::func] pub fn caml_pasta_fp_plonk_proof_example_with_range_check( srs: CamlFpSrs, -) -> (CamlPastaFpPlonkIndex, CamlProverProof) { +) -> ( + CamlPastaFpPlonkIndex, + CamlProofWithPublic, +) { use ark_ff::Zero; use kimchi::circuits::{ constraints::ConstraintSystem, gate::CircuitGate, polynomials::range_check, wires::Wire, @@ -409,16 +501,13 @@ pub fn caml_pasta_fp_plonk_proof_example_with_range_check( } // Create constraint system - let cs = ConstraintSystem::::create(gates) - .lookup(vec![range_check::gadget::lookup_table()]) - .build() - .unwrap(); + let cs = ConstraintSystem::::create(gates).build().unwrap(); let ptr: &mut SRS = unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) }; ptr.add_lagrange_basis(cs.domain.d1); let (endo_q, _endo_r) = endos::(); - let index = ProverIndex::::create(cs, endo_q, srs.0); + let index = ProverIndex::>::create(cs, endo_q, srs.0); let group_map = ::Map::setup(); let proof = ProverProof::create_recursive::( &group_map, @@ -439,7 +528,10 @@ pub fn caml_pasta_fp_plonk_proof_example_with_range_check( #[ocaml::func] pub fn caml_pasta_fp_plonk_proof_example_with_range_check0( srs: CamlFpSrs, -) -> (CamlPastaFpPlonkIndex, CamlProverProof) { +) -> ( + CamlPastaFpPlonkIndex, + CamlProofWithPublic, +) { use ark_ff::Zero; use kimchi::circuits::{ constraints::ConstraintSystem, @@ -481,16 +573,13 @@ pub fn caml_pasta_fp_plonk_proof_example_with_range_check0( }; // not sure if theres a smarter way instead of the double unwrap, but should be fine in the test - let cs = ConstraintSystem::::create(gates) - .lookup(vec![range_check::gadget::lookup_table()]) - .build() - .unwrap(); + let cs = ConstraintSystem::::create(gates).build().unwrap(); let ptr: &mut SRS = unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) }; ptr.add_lagrange_basis(cs.domain.d1); let (endo_q, _endo_r) = endos::(); - let index = ProverIndex::::create(cs, endo_q, srs.0); + let index = ProverIndex::>::create(cs, endo_q, srs.0); let group_map = ::Map::setup(); let proof = ProverProof::create_recursive::( &group_map, @@ -514,7 +603,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_ffadd( ) -> ( CamlPastaFpPlonkIndex, CamlFp, - CamlProverProof, + CamlProofWithPublic, ) { use ark_ff::Zero; use kimchi::circuits::{ @@ -607,7 +696,6 @@ pub fn caml_pasta_fp_plonk_proof_example_with_ffadd( // not sure if theres a smarter way instead of the double unwrap, but should be fine in the test let cs = ConstraintSystem::::create(gates) .public(num_public_inputs) - .lookup(vec![range_check::gadget::lookup_table()]) .build() .unwrap(); @@ -615,7 +703,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_ffadd( 
ptr.add_lagrange_basis(cs.domain.d1); let (endo_q, _endo_r) = endos::(); - let index = ProverIndex::::create(cs, endo_q, srs.0); + let index = ProverIndex::>::create(cs, endo_q, srs.0); let group_map = ::Map::setup(); let public_input = witness[0][0]; let proof = ProverProof::create_recursive::( @@ -641,7 +729,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_xor( ) -> ( CamlPastaFpPlonkIndex, (CamlFp, CamlFp), - CamlProverProof, + CamlProofWithPublic, ) { use ark_ff::Zero; use kimchi::circuits::{ @@ -696,7 +784,6 @@ pub fn caml_pasta_fp_plonk_proof_example_with_xor( // not sure if theres a smarter way instead of the double unwrap, but should be fine in the test let cs = ConstraintSystem::::create(gates) .public(num_public_inputs) - .lookup(vec![xor::lookup_table()]) .build() .unwrap(); @@ -704,7 +791,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_xor( ptr.add_lagrange_basis(cs.domain.d1); let (endo_q, _endo_r) = endos::(); - let index = ProverIndex::::create(cs, endo_q, srs.0); + let index = ProverIndex::>::create(cs, endo_q, srs.0); let group_map = ::Map::setup(); let public_input = (witness[0][0], witness[0][1]); let proof = ProverProof::create_recursive::( @@ -730,7 +817,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_rot( ) -> ( CamlPastaFpPlonkIndex, (CamlFp, CamlFp), - CamlProverProof, + CamlProofWithPublic, ) { use ark_ff::Zero; use kimchi::circuits::{ @@ -790,7 +877,6 @@ pub fn caml_pasta_fp_plonk_proof_example_with_rot( // not sure if theres a smarter way instead of the double unwrap, but should be fine in the test let cs = ConstraintSystem::::create(gates) .public(num_public_inputs) - .lookup(vec![rot::lookup_table()]) .build() .unwrap(); @@ -798,7 +884,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_rot( ptr.add_lagrange_basis(cs.domain.d1); let (endo_q, _endo_r) = endos::(); - let index = ProverIndex::::create(cs, endo_q, srs.0); + let index = ProverIndex::>::create(cs, endo_q, srs.0); let group_map = ::Map::setup(); let public_input = (witness[0][0], witness[0][1]); let proof = ProverProof::create_recursive::( @@ -821,7 +907,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_rot( #[ocaml::func] pub fn caml_pasta_fp_plonk_proof_verify( index: CamlPastaFpPlonkVerifierIndex, - proof: CamlProverProof, + proof: CamlProofWithPublic, ) -> bool { let group_map = ::Map::setup(); @@ -837,6 +923,7 @@ pub fn caml_pasta_fp_plonk_proof_verify( Vesta, DefaultFqSponge, DefaultFrSponge, + OpeningProof, >(&group_map, &[context]) .is_ok() } @@ -845,18 +932,19 @@ pub fn caml_pasta_fp_plonk_proof_verify( #[ocaml::func] pub fn caml_pasta_fp_plonk_proof_batch_verify( indexes: Vec, - proofs: Vec>, + proofs: Vec>, ) -> bool { let ts: Vec<_> = indexes .into_iter() .zip(proofs.into_iter()) .map(|(caml_index, caml_proof)| { - let verifier_index: VerifierIndex = caml_index.into(); - let (proof, public_input): (ProverProof<_>, Vec<_>) = caml_proof.into(); + let verifier_index: VerifierIndex> = caml_index.into(); + let (proof, public_input): (ProverProof>, Vec<_>) = + caml_proof.into(); (verifier_index, proof, public_input) }) .collect(); - let ts_ref: Vec<_> = ts + let ts_ref: Vec>> = ts .iter() .map(|(verifier_index, proof, public_input)| Context { verifier_index, @@ -870,18 +958,18 @@ pub fn caml_pasta_fp_plonk_proof_batch_verify( Vesta, DefaultFqSponge, DefaultFrSponge, + OpeningProof, >(&group_map, &ts_ref) .is_ok() } #[ocaml_gen::func] #[ocaml::func] -pub fn caml_pasta_fp_plonk_proof_dummy() -> CamlProverProof { +pub fn caml_pasta_fp_plonk_proof_dummy() -> CamlProofWithPublic { fn comm() 
-> PolyComm { let g = Vesta::prime_subgroup_generator(); PolyComm { - shifted: Some(g), - unshifted: vec![g, g, g], + elems: vec![g, g, g], } } @@ -904,6 +992,7 @@ pub fn caml_pasta_fp_plonk_proof_dummy() -> CamlProverProof zeta_omega: vec![Fp::one()], }; let evals = ProofEvaluations { + public: Some(eval()), w: array_init(|_| eval()), coefficients: array_init(|_| eval()), z: eval(), @@ -951,7 +1040,7 @@ pub fn caml_pasta_fp_plonk_proof_dummy() -> CamlProverProof #[ocaml_gen::func] #[ocaml::func] pub fn caml_pasta_fp_plonk_proof_deep_copy( - x: CamlProverProof, -) -> CamlProverProof { + x: CamlProofWithPublic, +) -> CamlProofWithPublic { x } diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_verifier_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_verifier_index.rs index 8ac7e0b2ff7..115fa6a2771 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_verifier_index.rs +++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_verifier_index.rs @@ -10,20 +10,22 @@ use ark_poly::{EvaluationDomain, Radix2EvaluationDomain as Domain}; use kimchi::circuits::constraints::FeatureFlags; use kimchi::circuits::lookup::lookups::{LookupFeatures, LookupPatterns}; use kimchi::circuits::polynomials::permutation::Shifts; -use kimchi::circuits::polynomials::permutation::{zk_polynomial, zk_w3}; +use kimchi::circuits::polynomials::permutation::{permutation_vanishing_polynomial, zk_w}; use kimchi::circuits::wires::{COLUMNS, PERMUTS}; use kimchi::{linearization::expr_linearization, verifier_index::VerifierIndex}; use mina_curves::pasta::{Fp, Pallas, Vesta}; use poly_commitment::commitment::caml::CamlPolyComm; +use poly_commitment::evaluation_proof::OpeningProof; use poly_commitment::{commitment::PolyComm, srs::SRS}; use std::convert::TryInto; use std::path::Path; +use std::sync::Arc; pub type CamlPastaFpPlonkVerifierIndex = CamlPlonkVerifierIndex>; -impl From> for CamlPastaFpPlonkVerifierIndex { - fn from(vi: VerifierIndex) -> Self { +impl From>> for CamlPastaFpPlonkVerifierIndex { + fn from(vi: VerifierIndex>) -> Self { Self { domain: CamlPlonkDomain { log_size_of_group: vi.domain.log_size_of_group as isize, @@ -32,7 +34,7 @@ impl From> for CamlPastaFpPlonkVerifierIndex { max_poly_size: vi.max_poly_size as isize, public: vi.public as isize, prev_challenges: vi.prev_challenges as isize, - srs: CamlFpSrs(vi.srs.get().expect("have an srs").clone()), + srs: CamlFpSrs(vi.srs.clone()), evals: CamlPlonkVerificationEvals { sigma_comm: vi.sigma_comm.to_vec().iter().map(Into::into).collect(), coefficients_comm: vi @@ -47,15 +49,23 @@ impl From> for CamlPastaFpPlonkVerifierIndex { mul_comm: vi.mul_comm.into(), emul_comm: vi.emul_comm.into(), endomul_scalar_comm: vi.endomul_scalar_comm.into(), + + xor_comm: vi.xor_comm.map(Into::into), + range_check0_comm: vi.range_check0_comm.map(Into::into), + range_check1_comm: vi.range_check1_comm.map(Into::into), + foreign_field_add_comm: vi.foreign_field_add_comm.map(Into::into), + foreign_field_mul_comm: vi.foreign_field_mul_comm.map(Into::into), + rot_comm: vi.rot_comm.map(Into::into), }, shifts: vi.shift.to_vec().iter().map(Into::into).collect(), lookup_index: vi.lookup_index.map(Into::into), + zk_rows: vi.zk_rows as isize, } } } // TODO: This should really be a TryFrom or TryInto -impl From for VerifierIndex { +impl From for VerifierIndex> { fn from(index: CamlPastaFpPlonkVerifierIndex) -> Self { let evals = index.evals; let shifts = index.shifts; @@ -76,38 +86,42 @@ impl From for VerifierIndex { let shift: [Fp; PERMUTS] = 
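Both conversion directions here carry zk_rows because the zero-knowledge masking is now parameterized: zk_w(domain, zk_rows) is w^(n - zk_rows) and permutation_vanishing_polynomial(domain, zk_rows) vanishes on the last zk_rows rows of the domain. A self-contained sketch of those quantities using only the ark-poly domain accessors (the kimchi helpers themselves are the ones imported above):

    use ark_ff::Field;
    use ark_poly::{EvaluationDomain, Radix2EvaluationDomain};
    use mina_curves::pasta::Fp;

    fn main() {
        let domain = Radix2EvaluationDomain::<Fp>::new(8).unwrap();
        let (n, w) = (domain.size, domain.group_gen);
        let zk_rows = 3u64;

        // zk_w(domain, zk_rows) is the first masked row: w^(n - zk_rows).
        let zk_w = w.pow([n - zk_rows]);
        // Sanity check: multiplying by w^zk_rows wraps around to w^n = 1.
        assert_eq!(zk_w * w.pow([zk_rows]), Fp::from(1u64));

        // permutation_vanishing_polynomial(domain, zk_rows) is zero exactly at
        // w^(n-1), w^(n-2), ..., w^(n - zk_rows).
        for i in 1..=zk_rows {
            let row = w.pow([n - i]);
            let eval: Fp = (1..=zk_rows).map(|j| row - w.pow([n - j])).product();
            assert_eq!(eval, Fp::from(0u64));
        }
    }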
shifts.try_into().expect("wrong size"); let feature_flags = FeatureFlags { - range_check0: false, - range_check1: false, - foreign_field_add: false, - foreign_field_mul: false, - rot: false, - xor: false, - lookup_features: LookupFeatures { - patterns: LookupPatterns { - xor: false, - lookup: false, - range_check: false, - foreign_field_mul: false, - }, - joint_lookup_used: false, - uses_runtime_tables: false, + range_check0: evals.range_check0_comm.is_some(), + range_check1: evals.range_check1_comm.is_some(), + foreign_field_add: evals.foreign_field_add_comm.is_some(), + foreign_field_mul: evals.foreign_field_mul_comm.is_some(), + rot: evals.rot_comm.is_some(), + xor: evals.xor_comm.is_some(), + lookup_features: { + if let Some(li) = index.lookup_index.as_ref() { + li.lookup_info.features + } else { + LookupFeatures { + patterns: LookupPatterns { + xor: false, + lookup: false, + range_check: false, + foreign_field_mul: false, + }, + joint_lookup_used: false, + uses_runtime_tables: false, + } + } }, }; // TODO dummy_lookup_value ? let (linearization, powers_of_alpha) = expr_linearization(Some(&feature_flags), true); - VerifierIndex:: { + VerifierIndex::> { domain, max_poly_size: index.max_poly_size as usize, public: index.public as usize, prev_challenges: index.prev_challenges as usize, powers_of_alpha, - srs: { - let res = once_cell::sync::OnceCell::new(); - res.set(index.srs.0).unwrap(); - res - }, + srs: { Arc::clone(&index.srs.0) }, + + zk_rows: index.zk_rows as u64, sigma_comm, coefficients_comm, @@ -120,23 +134,26 @@ impl From for VerifierIndex { emul_comm: evals.emul_comm.into(), endomul_scalar_comm: evals.endomul_scalar_comm.into(), - xor_comm: None, - - range_check0_comm: None, - range_check1_comm: None, - foreign_field_add_comm: None, - foreign_field_mul_comm: None, - rot_comm: None, + xor_comm: evals.xor_comm.map(Into::into), + range_check0_comm: evals.range_check0_comm.map(Into::into), + range_check1_comm: evals.range_check1_comm.map(Into::into), + foreign_field_add_comm: evals.foreign_field_add_comm.map(Into::into), + foreign_field_mul_comm: evals.foreign_field_mul_comm.map(Into::into), + rot_comm: evals.rot_comm.map(Into::into), shift, - zkpm: { + permutation_vanishing_polynomial_m: { let res = once_cell::sync::OnceCell::new(); - res.set(zk_polynomial(domain)).unwrap(); + res.set(permutation_vanishing_polynomial( + domain, + index.zk_rows as u64, + )) + .unwrap(); res }, w: { let res = once_cell::sync::OnceCell::new(); - res.set(zk_w3(domain)).unwrap(); + res.set(zk_w(domain, index.zk_rows as u64)).unwrap(); res }, endo: endo_q, @@ -151,16 +168,20 @@ pub fn read_raw( offset: Option, srs: CamlFpSrs, path: String, -) -> Result, ocaml::Error> { +) -> Result>, ocaml::Error> { let path = Path::new(&path); let (endo_q, _endo_r) = poly_commitment::srs::endos::(); - VerifierIndex::::from_file(Some(srs.0), path, offset.map(|x| x as u64), endo_q).map_err( - |_e| { - ocaml::Error::invalid_argument("caml_pasta_fp_plonk_verifier_index_raw_read") - .err() - .unwrap() - }, + VerifierIndex::>::from_file( + srs.0, + path, + offset.map(|x| x as u64), + endo_q, ) + .map_err(|_e| { + ocaml::Error::invalid_argument("caml_pasta_fp_plonk_verifier_index_raw_read") + .err() + .unwrap() + }) } // @@ -185,7 +206,7 @@ pub fn caml_pasta_fp_plonk_verifier_index_write( index: CamlPastaFpPlonkVerifierIndex, path: String, ) -> Result<(), ocaml::Error> { - let index: VerifierIndex = index.into(); + let index: VerifierIndex> = index.into(); let path = Path::new(&path); index.to_file(path, append).map_err(|_e| 
{ ocaml::Error::invalid_argument("caml_pasta_fp_plonk_verifier_index_raw_read") @@ -222,7 +243,7 @@ pub fn caml_pasta_fp_plonk_verifier_index_dummy() -> CamlPastaFpPlonkVerifierInd fn comm() -> CamlPolyComm { let g: CamlGVesta = Vesta::prime_subgroup_generator().into(); CamlPolyComm { - shifted: Some(g), + shifted: None, unshifted: vec![g, g, g], } } @@ -248,9 +269,16 @@ pub fn caml_pasta_fp_plonk_verifier_index_dummy() -> CamlPastaFpPlonkVerifierInd mul_comm: comm(), emul_comm: comm(), endomul_scalar_comm: comm(), + xor_comm: None, + range_check0_comm: None, + range_check1_comm: None, + foreign_field_add_comm: None, + foreign_field_mul_comm: None, + rot_comm: None, }, shifts: (0..PERMUTS - 1).map(|_| Fp::one().into()).collect(), lookup_index: None, + zk_rows: 3, } } diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_index.rs index 0203b98bf18..0229fb9c469 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_index.rs +++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_index.rs @@ -1,9 +1,15 @@ +use crate::arkworks::CamlFq; use crate::{gate_vector::fq::CamlPastaFqPlonkGateVectorPtr, srs::fq::CamlFqSrs}; use ark_poly::EvaluationDomain; +use kimchi::circuits::lookup::runtime_tables::caml::CamlRuntimeTableCfg; +use kimchi::circuits::lookup::runtime_tables::RuntimeTableCfg; +use kimchi::circuits::lookup::tables::caml::CamlLookupTable; +use kimchi::circuits::lookup::tables::LookupTable; use kimchi::circuits::{constraints::ConstraintSystem, gate::CircuitGate}; use kimchi::{linearization::expr_linearization, prover_index::ProverIndex}; use mina_curves::pasta::{Fq, Pallas, PallasParameters, Vesta}; use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge}; +use poly_commitment::{evaluation_proof::OpeningProof}; use serde::{Deserialize, Serialize}; use std::{ fs::{File, OpenOptions}, @@ -12,7 +18,7 @@ use std::{ /// Boxed so that we don't store large proving indexes in the OCaml heap. 
#[derive(ocaml_gen::CustomType)] -pub struct CamlPastaFqPlonkIndex(pub Box>); +pub struct CamlPastaFqPlonkIndex(pub Box>>); pub type CamlPastaFqPlonkIndexPtr<'a> = ocaml::Pointer<'a, CamlPastaFqPlonkIndex>; extern "C" fn caml_pasta_fq_plonk_index_finalize(v: ocaml::Raw) { @@ -39,6 +45,8 @@ impl ocaml::custom::Custom for CamlPastaFqPlonkIndex { pub fn caml_pasta_fq_plonk_index_create( gates: CamlPastaFqPlonkGateVectorPtr, public: ocaml::Int, + lookup_tables: Vec>, + runtime_tables: Vec>, prev_challenges: ocaml::Int, srs: CamlFqSrs, ) -> Result { @@ -53,18 +61,25 @@ pub fn caml_pasta_fq_plonk_index_create( }) .collect(); + let runtime_tables: Vec> = + runtime_tables.into_iter().map(Into::into).collect(); + + let lookup_tables: Vec> = lookup_tables.into_iter().map(Into::into).collect(); + // create constraint system let cs = match ConstraintSystem::::create(gates) .public(public as usize) .prev_challenges(prev_challenges as usize) + .lookup(lookup_tables) + .runtime(if runtime_tables.is_empty() { + None + } else { + Some(runtime_tables) + }) .build() { - Err(_) => { - return Err(ocaml::Error::failwith( - "caml_pasta_fq_plonk_index_create: could not create constraint system", - ) - .err() - .unwrap()) + Err(e) => { + return Err(e.into()) } Ok(cs) => cs, }; @@ -80,7 +95,7 @@ pub fn caml_pasta_fq_plonk_index_create( } // create index - let mut index = ProverIndex::::create(cs, endo_q, srs.clone()); + let mut index = ProverIndex::>::create(cs, endo_q, srs.clone()); // Compute and cache the verifier index digest index.compute_verifier_index_digest::>(); @@ -143,7 +158,9 @@ pub fn caml_pasta_fq_plonk_index_read( } // deserialize the index - let mut t = ProverIndex::::deserialize(&mut rmp_serde::Deserializer::new(r))?; + let mut t = ProverIndex::>::deserialize( + &mut rmp_serde::Deserializer::new(r), + )?; t.srs = srs.clone(); let (linearization, powers_of_alpha) = expr_linearization(Some(&t.cs.feature_flags), true); diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_proof.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_proof.rs index 3bea626a23b..a82b6677c07 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_proof.rs +++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_proof.rs @@ -8,7 +8,10 @@ use ark_ec::AffineCurve; use ark_ff::One; use array_init::array_init; use groupmap::GroupMap; -use kimchi::prover_index::ProverIndex; +use kimchi::{ + circuits::lookup::runtime_tables::{caml::CamlRuntimeTable, RuntimeTable}, + prover_index::ProverIndex, +}; use kimchi::{circuits::polynomial::COLUMNS, verifier::batch_verify}; use kimchi::{ proof::{ @@ -16,7 +19,7 @@ use kimchi::{ }, verifier::Context, }; -use kimchi::{prover::caml::CamlProverProof, verifier_index::VerifierIndex}; +use kimchi::{prover::caml::CamlProofWithPublic, verifier_index::VerifierIndex}; use mina_curves::pasta::{Fp, Fq, Pallas, PallasParameters}; use mina_poseidon::{ constants::PlonkSpongeConstantsKimchi, @@ -32,9 +35,10 @@ use std::convert::TryInto; pub fn caml_pasta_fq_plonk_proof_create( index: CamlPastaFqPlonkIndexPtr<'static>, witness: Vec, + runtime_tables: Vec>, prev_challenges: Vec, prev_sgs: Vec, -) -> Result, ocaml::Error> { +) -> Result, ocaml::Error> { { let ptr: &mut poly_commitment::srs::SRS = unsafe { &mut *(std::sync::Arc::as_ptr(&index.as_ref().0.srs) as *mut _) }; @@ -53,10 +57,7 @@ pub fn caml_pasta_fq_plonk_proof_create( .iter() .map(Into::::into) .collect(); - let comm = PolyComm:: { - unshifted: vec![sg], - shifted: None, - }; + let comm = PolyComm:: { elems: 
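The PolyComm change running through these hunks is representational only: the old { unshifted, shifted } pair becomes a single elems vector, one entry per commitment chunk. A minimal sketch (Pallas shown; the Vesta side is identical):

    use ark_ec::AffineCurve;
    use mina_curves::pasta::Pallas;
    use poly_commitment::commitment::PolyComm;

    fn main() {
        let g = Pallas::prime_subgroup_generator();
        // Before this change: PolyComm { unshifted: vec![g], shifted: None }.
        // After: a single vector of chunk commitments.
        let comm = PolyComm::<Pallas> { elems: vec![g] };
        assert_eq!(comm.elems.len(), 1);
    }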
vec![sg] }; RecursionChallenge { chals, comm } }) .collect() @@ -66,7 +67,10 @@ pub fn caml_pasta_fq_plonk_proof_create( let witness: [Vec<_>; COLUMNS] = witness .try_into() .expect("the witness should be a column of 15 vectors"); - let index: &ProverIndex = &index.as_ref().0; + let index: &ProverIndex> = &index.as_ref().0; + + let runtime_tables: Vec> = + runtime_tables.into_iter().map(Into::into).collect(); // public input let public_input = witness[0][0..index.cs.public].to_vec(); @@ -82,7 +86,7 @@ pub fn caml_pasta_fq_plonk_proof_create( let proof = ProverProof::create_recursive::< DefaultFqSponge, DefaultFrSponge, - >(&group_map, witness, &[], index, prev, None) + >(&group_map, witness, &runtime_tables, index, prev, None) .map_err(|e| ocaml::Error::Error(e.into()))?; Ok((proof, public_input).into()) }) @@ -92,7 +96,7 @@ pub fn caml_pasta_fq_plonk_proof_create( #[ocaml::func] pub fn caml_pasta_fq_plonk_proof_verify( index: CamlPastaFqPlonkVerifierIndex, - proof: CamlProverProof, + proof: CamlProofWithPublic, ) -> bool { let group_map = ::Map::setup(); @@ -108,6 +112,7 @@ pub fn caml_pasta_fq_plonk_proof_verify( Pallas, DefaultFqSponge, DefaultFrSponge, + OpeningProof, >(&group_map, &[context]) .is_ok() } @@ -116,18 +121,19 @@ pub fn caml_pasta_fq_plonk_proof_verify( #[ocaml::func] pub fn caml_pasta_fq_plonk_proof_batch_verify( indexes: Vec, - proofs: Vec>, + proofs: Vec>, ) -> bool { let ts: Vec<_> = indexes .into_iter() .zip(proofs.into_iter()) .map(|(caml_index, caml_proof)| { - let verifier_index: VerifierIndex = caml_index.into(); - let (proof, public_input): (ProverProof<_>, Vec<_>) = caml_proof.into(); + let verifier_index: VerifierIndex> = caml_index.into(); + let (proof, public_input): (ProverProof>, Vec<_>) = + caml_proof.into(); (verifier_index, proof, public_input) }) .collect(); - let ts_ref: Vec<_> = ts + let ts_ref: Vec>> = ts .iter() .map(|(verifier_index, proof, public_input)| Context { verifier_index, @@ -141,18 +147,18 @@ pub fn caml_pasta_fq_plonk_proof_batch_verify( Pallas, DefaultFqSponge, DefaultFrSponge, + OpeningProof, >(&group_map, &ts_ref) .is_ok() } #[ocaml_gen::func] #[ocaml::func] -pub fn caml_pasta_fq_plonk_proof_dummy() -> CamlProverProof { +pub fn caml_pasta_fq_plonk_proof_dummy() -> CamlProofWithPublic { fn comm() -> PolyComm { let g = Pallas::prime_subgroup_generator(); PolyComm { - shifted: Some(g), - unshifted: vec![g, g, g], + elems: vec![g, g, g], } } @@ -175,6 +181,7 @@ pub fn caml_pasta_fq_plonk_proof_dummy() -> CamlProverProof zeta_omega: vec![Fq::one()], }; let evals = ProofEvaluations { + public: Some(eval()), w: array_init(|_| eval()), coefficients: array_init(|_| eval()), z: eval(), @@ -222,7 +229,7 @@ pub fn caml_pasta_fq_plonk_proof_dummy() -> CamlProverProof #[ocaml_gen::func] #[ocaml::func] pub fn caml_pasta_fq_plonk_proof_deep_copy( - x: CamlProverProof, -) -> CamlProverProof { + x: CamlProofWithPublic, +) -> CamlProofWithPublic { x } diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_verifier_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_verifier_index.rs index d470c8d1de3..678448755c0 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_verifier_index.rs +++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_verifier_index.rs @@ -10,20 +10,21 @@ use ark_poly::{EvaluationDomain, Radix2EvaluationDomain as Domain}; use kimchi::circuits::constraints::FeatureFlags; use kimchi::circuits::lookup::lookups::{LookupFeatures, LookupPatterns}; use 
kimchi::circuits::polynomials::permutation::Shifts; -use kimchi::circuits::polynomials::permutation::{zk_polynomial, zk_w3}; +use kimchi::circuits::polynomials::permutation::{permutation_vanishing_polynomial, zk_w}; use kimchi::circuits::wires::{COLUMNS, PERMUTS}; use kimchi::{linearization::expr_linearization, verifier_index::VerifierIndex}; use mina_curves::pasta::{Fq, Pallas, Vesta}; -use poly_commitment::commitment::caml::CamlPolyComm; +use poly_commitment::{commitment::caml::CamlPolyComm, evaluation_proof::OpeningProof}; use poly_commitment::{commitment::PolyComm, srs::SRS}; use std::convert::TryInto; use std::path::Path; +use std::sync::Arc; pub type CamlPastaFqPlonkVerifierIndex = CamlPlonkVerifierIndex>; -impl From> for CamlPastaFqPlonkVerifierIndex { - fn from(vi: VerifierIndex) -> Self { +impl From>> for CamlPastaFqPlonkVerifierIndex { + fn from(vi: VerifierIndex>) -> Self { Self { domain: CamlPlonkDomain { log_size_of_group: vi.domain.log_size_of_group as isize, @@ -32,7 +33,7 @@ impl From> for CamlPastaFqPlonkVerifierIndex { max_poly_size: vi.max_poly_size as isize, public: vi.public as isize, prev_challenges: vi.prev_challenges as isize, - srs: CamlFqSrs(vi.srs.get().expect("have an srs").clone()), + srs: CamlFqSrs(vi.srs.clone()), evals: CamlPlonkVerificationEvals { sigma_comm: vi.sigma_comm.to_vec().iter().map(Into::into).collect(), coefficients_comm: vi @@ -47,15 +48,23 @@ impl From> for CamlPastaFqPlonkVerifierIndex { mul_comm: vi.mul_comm.into(), emul_comm: vi.emul_comm.into(), endomul_scalar_comm: vi.endomul_scalar_comm.into(), + + xor_comm: vi.xor_comm.map(Into::into), + range_check0_comm: vi.range_check0_comm.map(Into::into), + range_check1_comm: vi.range_check1_comm.map(Into::into), + foreign_field_add_comm: vi.foreign_field_add_comm.map(Into::into), + foreign_field_mul_comm: vi.foreign_field_mul_comm.map(Into::into), + rot_comm: vi.rot_comm.map(Into::into), }, shifts: vi.shift.to_vec().iter().map(Into::into).collect(), lookup_index: vi.lookup_index.map(Into::into), + zk_rows: vi.zk_rows as isize, } } } // TODO: This should really be a TryFrom or TryInto -impl From for VerifierIndex { +impl From for VerifierIndex> { fn from(index: CamlPastaFqPlonkVerifierIndex) -> Self { let evals = index.evals; let shifts = index.shifts; @@ -76,38 +85,42 @@ impl From for VerifierIndex { let shift: [Fq; PERMUTS] = shifts.try_into().expect("wrong size"); let feature_flags = FeatureFlags { - range_check0: false, - range_check1: false, - foreign_field_add: false, - foreign_field_mul: false, - rot: false, - xor: false, - lookup_features: LookupFeatures { - patterns: LookupPatterns { - xor: false, - lookup: false, - range_check: false, - foreign_field_mul: false, - }, - joint_lookup_used: false, - uses_runtime_tables: false, + range_check0: evals.range_check0_comm.is_some(), + range_check1: evals.range_check1_comm.is_some(), + foreign_field_add: evals.foreign_field_add_comm.is_some(), + foreign_field_mul: evals.foreign_field_mul_comm.is_some(), + rot: evals.rot_comm.is_some(), + xor: evals.xor_comm.is_some(), + lookup_features: { + if let Some(li) = index.lookup_index.as_ref() { + li.lookup_info.features + } else { + LookupFeatures { + patterns: LookupPatterns { + xor: false, + lookup: false, + range_check: false, + foreign_field_mul: false, + }, + joint_lookup_used: false, + uses_runtime_tables: false, + } + } }, }; // TODO dummy_lookup_value ? 
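The lazy verifier-index fields seeded in the hunk below follow the once_cell pattern: the conversion computes the value eagerly and stores it, and later readers going through the same cell never recompute. A tiny self-contained illustration:

    use once_cell::sync::OnceCell;

    fn main() {
        let cell: OnceCell<u64> = OnceCell::new();
        // Pre-seed the cell, as the conversion does for the vanishing
        // polynomial and for w.
        cell.set(42).unwrap();
        // A later get_or_init sees the seeded value and never runs its closure.
        assert_eq!(*cell.get_or_init(|| unreachable!()), 42);
    }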
let (linearization, powers_of_alpha) = expr_linearization(Some(&feature_flags), true); - VerifierIndex:: { + VerifierIndex::> { domain, max_poly_size: index.max_poly_size as usize, public: index.public as usize, prev_challenges: index.prev_challenges as usize, powers_of_alpha, - srs: { - let res = once_cell::sync::OnceCell::new(); - res.set(index.srs.0).unwrap(); - res - }, + srs: { Arc::clone(&index.srs.0) }, + + zk_rows: index.zk_rows as u64, sigma_comm, coefficients_comm, @@ -120,23 +133,26 @@ impl From for VerifierIndex { emul_comm: evals.emul_comm.into(), endomul_scalar_comm: evals.endomul_scalar_comm.into(), - xor_comm: None, - - range_check0_comm: None, - range_check1_comm: None, - foreign_field_add_comm: None, - foreign_field_mul_comm: None, - rot_comm: None, + xor_comm: evals.xor_comm.map(Into::into), + range_check0_comm: evals.range_check0_comm.map(Into::into), + range_check1_comm: evals.range_check1_comm.map(Into::into), + foreign_field_add_comm: evals.foreign_field_add_comm.map(Into::into), + foreign_field_mul_comm: evals.foreign_field_mul_comm.map(Into::into), + rot_comm: evals.rot_comm.map(Into::into), shift, - zkpm: { + permutation_vanishing_polynomial_m: { let res = once_cell::sync::OnceCell::new(); - res.set(zk_polynomial(domain)).unwrap(); + res.set(permutation_vanishing_polynomial( + domain, + index.zk_rows as u64, + )) + .unwrap(); res }, w: { let res = once_cell::sync::OnceCell::new(); - res.set(zk_w3(domain)).unwrap(); + res.set(zk_w(domain, index.zk_rows as u64)).unwrap(); res }, endo: endo_q, @@ -151,16 +167,20 @@ pub fn read_raw( offset: Option, srs: CamlFqSrs, path: String, -) -> Result, ocaml::Error> { +) -> Result>, ocaml::Error> { let path = Path::new(&path); let (endo_q, _endo_r) = poly_commitment::srs::endos::(); - VerifierIndex::::from_file(Some(srs.0), path, offset.map(|x| x as u64), endo_q).map_err( - |_e| { - ocaml::Error::invalid_argument("caml_pasta_fq_plonk_verifier_index_raw_read") - .err() - .unwrap() - }, + VerifierIndex::>::from_file( + srs.0, + path, + offset.map(|x| x as u64), + endo_q, ) + .map_err(|_e| { + ocaml::Error::invalid_argument("caml_pasta_fq_plonk_verifier_index_raw_read") + .err() + .unwrap() + }) } // @@ -185,7 +205,7 @@ pub fn caml_pasta_fq_plonk_verifier_index_write( index: CamlPastaFqPlonkVerifierIndex, path: String, ) -> Result<(), ocaml::Error> { - let index: VerifierIndex = index.into(); + let index: VerifierIndex> = index.into(); let path = Path::new(&path); index.to_file(path, append).map_err(|_e| { ocaml::Error::invalid_argument("caml_pasta_fq_plonk_verifier_index_raw_read") @@ -222,7 +242,7 @@ pub fn caml_pasta_fq_plonk_verifier_index_dummy() -> CamlPastaFqPlonkVerifierInd fn comm() -> CamlPolyComm { let g: CamlGPallas = Pallas::prime_subgroup_generator().into(); CamlPolyComm { - shifted: Some(g), + shifted: None, unshifted: vec![g, g, g], } } @@ -248,9 +268,16 @@ pub fn caml_pasta_fq_plonk_verifier_index_dummy() -> CamlPastaFqPlonkVerifierInd mul_comm: comm(), endomul_scalar_comm: comm(), emul_comm: comm(), + xor_comm: None, + range_check0_comm: None, + range_check1_comm: None, + foreign_field_add_comm: None, + foreign_field_mul_comm: None, + rot_comm: None, }, shifts: (0..PERMUTS - 1).map(|_| Fq::one().into()).collect(), lookup_index: None, + zk_rows: 3, } } diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/plonk_verifier_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/plonk_verifier_index.rs index 33b3278c60b..52d45fa3527 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/src/plonk_verifier_index.rs +++ 
b/src/lib/crypto/kimchi_bindings/stubs/src/plonk_verifier_index.rs @@ -20,6 +20,12 @@ pub struct CamlPlonkVerificationEvals { pub mul_comm: PolyComm, pub emul_comm: PolyComm, pub endomul_scalar_comm: PolyComm, + pub xor_comm: Option, + pub range_check0_comm: Option, + pub range_check1_comm: Option, + pub foreign_field_add_comm: Option, + pub foreign_field_mul_comm: Option, + pub rot_comm: Option, } #[derive(ocaml::IntoValue, ocaml::FromValue, ocaml_gen::Enum)] @@ -188,4 +194,5 @@ pub struct CamlPlonkVerifierIndex { pub evals: CamlPlonkVerificationEvals, pub shifts: Vec, pub lookup_index: Option>, + pub zk_rows: ocaml::Int, } diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/srs.rs b/src/lib/crypto/kimchi_bindings/stubs/src/srs.rs index 0b306adb83b..32bb14faadc 100644 --- a/src/lib/crypto/kimchi_bindings/stubs/src/srs.rs +++ b/src/lib/crypto/kimchi_bindings/stubs/src/srs.rs @@ -1,6 +1,7 @@ use ark_poly::UVPolynomial; use ark_poly::{univariate::DensePolynomial, EvaluationDomain, Evaluations}; use paste::paste; +use poly_commitment::SRS as _; use poly_commitment::{ commitment::{b_poly_coefficients, caml::CamlPolyComm}, srs::SRS, @@ -121,7 +122,7 @@ macro_rules! impl_srs { let evals = evals.into_iter().map(Into::into).collect(); let p = Evaluations::<$F>::from_vec_and_domain(evals, x_domain).interpolate(); - Ok(srs.commit_non_hiding(&p, None).into()) + Ok(srs.commit_non_hiding(&p, 1).into()) } #[ocaml_gen::func] @@ -134,7 +135,7 @@ macro_rules! impl_srs { let coeffs = b_poly_coefficients(&chals); let p = DensePolynomial::<$F>::from_coefficients_vec(coeffs); - Ok(srs.commit_non_hiding(&p, None).into()) + Ok(srs.commit_non_hiding(&p, 1).into()) } #[ocaml_gen::func] diff --git a/src/lib/crypto/kimchi_bindings/wasm/Cargo.lock b/src/lib/crypto/kimchi_bindings/wasm/Cargo.lock index 204b924271a..bfafd70a5a5 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/Cargo.lock +++ b/src/lib/crypto/kimchi_bindings/wasm/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "ahash" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" dependencies = [ "getrandom", "once_cell", @@ -24,6 +24,12 @@ dependencies = [ "num-traits", ] +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + [[package]] name = "android_system_properties" version = "0.1.5" @@ -83,7 +89,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" dependencies = [ "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -95,7 +101,7 @@ dependencies = [ "num-bigint", "num-traits", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -131,7 +137,7 @@ checksum = "8dd4e5f0bf8285d5ed538d27fab7411f3e297908fd93c62195de8bee3f199e82" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -165,9 +171,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "bcs" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bd3ffe8b19a604421a5d461d4a70346223e535903fbc3067138bddbebddcf77" +checksum = "85b6598a2f5d564fb7855dc6b06fd1c38cff5a72bd8b863a4d021938497b440a" dependencies = [ "serde", 
"thiserror", @@ -179,35 +185,38 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" dependencies = [ - "digest 0.10.6", + "digest 0.10.7", ] [[package]] name = "block-buffer" -version = "0.10.3" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "bumpalo" -version = "3.12.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "cc" -version = "1.0.79" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +dependencies = [ + "libc", +] [[package]] name = "cfg-if" @@ -217,25 +226,15 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.23" +version = "0.4.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f" +checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" dependencies = [ + "android-tzdata", "iana-time-zone", - "num-integer", "num-traits", "serde", - "winapi", -] - -[[package]] -name = "codespan-reporting" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" -dependencies = [ - "termcolor", - "unicode-width", + "windows-targets", ] [[package]] @@ -250,34 +249,24 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" [[package]] name = "cpufeatures" -version = "0.2.5" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" +checksum = "3fbc60abd742b35f2492f808e1abbb83d45f72db402e14c55057edc9c7b1e9e4" dependencies = [ "libc", ] -[[package]] -name = "crossbeam-channel" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" -dependencies = [ - "cfg-if", - "crossbeam-utils", -] - [[package]] name = "crossbeam-deque" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" dependencies = [ 
"cfg-if", "crossbeam-epoch", @@ -286,9 +275,9 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.13" +version = "0.9.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", "cfg-if", @@ -299,9 +288,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.14" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" dependencies = [ "cfg-if", ] @@ -316,50 +305,6 @@ dependencies = [ "typenum", ] -[[package]] -name = "cxx" -version = "1.0.91" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86d3488e7665a7a483b57e25bdd90d0aeb2bc7608c8d0346acf2ad3f1caf1d62" -dependencies = [ - "cc", - "cxxbridge-flags", - "cxxbridge-macro", - "link-cplusplus", -] - -[[package]] -name = "cxx-build" -version = "1.0.91" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48fcaf066a053a41a81dfb14d57d99738b767febb8b735c3016e469fac5da690" -dependencies = [ - "cc", - "codespan-reporting", - "once_cell", - "proc-macro2", - "quote", - "scratch", - "syn", -] - -[[package]] -name = "cxxbridge-flags" -version = "1.0.91" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2ef98b8b717a829ca5603af80e1f9e2e48013ab227b68ef37872ef84ee479bf" - -[[package]] -name = "cxxbridge-macro" -version = "1.0.91" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "086c685979a698443656e5cf7856c95c642295a38599f12fb1ff76fb28d19892" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "darling" version = "0.13.4" @@ -372,12 +317,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.14.3" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0808e1bd8671fb44a113a14e13497557533369847788fa2ae912b6ebfce9fa8" +checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e" dependencies = [ - "darling_core 0.14.3", - "darling_macro 0.14.3", + "darling_core 0.20.3", + "darling_macro 0.20.3", ] [[package]] @@ -391,21 +336,21 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn", + "syn 1.0.109", ] [[package]] name = "darling_core" -version = "0.14.3" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "001d80444f28e193f30c2f293455da62dcf9a6b29918a4253152ae2b1de592cb" +checksum = "177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn", + "syn 2.0.20", ] [[package]] @@ -416,18 +361,27 @@ checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ "darling_core 0.13.4", "quote", - "syn", + "syn 1.0.109", ] [[package]] name = "darling_macro" -version = "0.14.3" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b36230598a2d5de7ec1c6f51f72d8a99a9208daff41de2084d06e3fd3ea56685" +checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ - "darling_core 0.14.3", + "darling_core 0.20.3", "quote", - "syn", + "syn 2.0.20", +] + +[[package]] +name = "deranged" +version = "0.3.9" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3" +dependencies = [ + "serde", ] [[package]] @@ -438,7 +392,7 @@ checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -452,9 +406,9 @@ dependencies = [ [[package]] name = "digest" -version = "0.10.6" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", @@ -469,9 +423,9 @@ checksum = "d102f1a462fdcdddce88d6d46c06c074a2d2749b262230333726b06c52bb7585" [[package]] name = "either" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "fnv" @@ -481,9 +435,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "generic-array" -version = "0.14.6" +version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", @@ -491,9 +445,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.8" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", "js-sys", @@ -534,41 +488,40 @@ checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "hermit-abi" -version = "0.2.6" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" -dependencies = [ - "libc", -] +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" [[package]] name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +dependencies = [ + "serde", +] [[package]] name = "iana-time-zone" -version = "0.1.53" +version = "0.1.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765" +checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "winapi", + "windows-core", ] [[package]] name = "iana-time-zone-haiku" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ - "cxx", - "cxx-build", + "cc", ] [[package]] @@ -579,15 +532,19 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "indexmap" -version 
= "1.9.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown 0.12.3", "serde", ] +[[package]] +name = "internal-tracing" +version = "0.1.0" + [[package]] name = "itertools" version = "0.10.5" @@ -599,15 +556,15 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.5" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "js-sys" -version = "0.3.61" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" dependencies = [ "wasm-bindgen", ] @@ -624,6 +581,7 @@ dependencies = [ "disjoint-set", "groupmap", "hex", + "internal-tracing", "itertools", "mina-curves", "mina-poseidon", @@ -649,48 +607,43 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.139" +version = "0.2.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" +checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "libm" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb" - -[[package]] -name = "link-cplusplus" -version = "1.0.8" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5" -dependencies = [ - "cc", -] +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "log" -version = "0.4.17" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" -dependencies = [ - "cfg-if", -] +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" [[package]] name = "matrixmultiply" -version = "0.3.2" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "add85d4dd35074e6fedc608f8c8f513a3548619a9024b751949ef0e8e45a4d84" +checksum = "7574c1cf36da4798ab73da5b215bbf444f50718207754cb522201d78d1cd0ff2" dependencies = [ + "autocfg", "rawpointer", ] +[[package]] +name = "memchr" +version = "2.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" + [[package]] name = "memoffset" -version = "0.7.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" dependencies = [ "autocfg", ] @@ -726,7 +679,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adb12d4e967ec485a5f71c6311fe28158e9d6f4bc4a447b474184d0f91a8fa32" dependencies = [ "matrixmultiply", - "num-complex 0.4.3", + "num-complex 0.4.4", "num-integer", "num-traits", "rawpointer", @@ -734,9 +687,9 @@ 
dependencies = [ [[package]] name = "num-bigint" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" dependencies = [ "autocfg", "num-integer", @@ -757,9 +710,9 @@ dependencies = [ [[package]] name = "num-complex" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d" +checksum = "1ba157ca0885411de85d6ca030ba7e2a83a28636056c7c699b07c8b6f7383214" dependencies = [ "num-traits", ] @@ -772,7 +725,7 @@ checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -787,9 +740,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.15" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" dependencies = [ "autocfg", "libm", @@ -797,9 +750,9 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ "hermit-abi", "libc", @@ -829,22 +782,23 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.17.1" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "paste" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" [[package]] name = "pest" -version = "2.5.5" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "028accff104c4e513bad663bbcd2ad7cfd5304144404c31ed0a77ac103d00660" +checksum = "ae9cee2a55a544be8b89dc6848072af97a20f2422603c10865be2a42b580fff5" dependencies = [ + "memchr", "thiserror", "ucd-trie", ] @@ -872,13 +826,14 @@ dependencies = [ "once_cell", "paste", "poly-commitment", + "proc-macro2", "rand", "rayon", "rmp-serde", "serde", "serde-wasm-bindgen", "serde_json", - "serde_with 2.2.0", + "serde_with 2.3.3", "spmc", "sprs", "wasm-bindgen", @@ -904,6 +859,7 @@ dependencies = [ "rand", "rand_core", "rayon", + "rmp-serde", "serde", "serde_with 1.14.0", "thiserror", @@ -917,18 +873,18 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "proc-macro2" -version = "1.0.51" +version = "1.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6" +checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.23" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" +checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" dependencies = [ "proc-macro2", ] @@ -971,9 +927,9 @@ checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" [[package]] name = "rayon" -version = "1.6.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" dependencies = [ "either", "rayon-core", @@ -981,21 +937,19 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.10.2" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" dependencies = [ - "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "num_cpus", ] [[package]] name = "rmp" -version = "0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44519172358fd6d58656c86ab8e7fbc9e1490c3e8f14d35ed78ca0dd07403c9f" +checksum = "7f9860a6cc38ed1da53456442089b4dfa35e7cedaa326df63017af88385e6b20" dependencies = [ "byteorder", "num-traits", @@ -1004,9 +958,9 @@ dependencies = [ [[package]] name = "rmp-serde" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5b13be192e0220b8afb7222aa5813cb62cc269ebb5cac346ca6487681d2913e" +checksum = "bffea85eea980d8a74453e5d02a8d93028f3c34725de143085a844ebe953258a" dependencies = [ "byteorder", "rmp", @@ -1024,15 +978,15 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" [[package]] name = "ryu" -version = "1.0.12" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" [[package]] name = "scoped-tls" @@ -1042,15 +996,9 @@ checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" [[package]] name = "scopeguard" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" - -[[package]] -name = "scratch" -version = "1.0.3" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddccb15bcce173023b3fedd9436f882a0739b8dfb45e4f6b6002bee5929f61b2" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "semver" @@ -1072,9 +1020,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.152" +version = "1.0.164" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" +checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d" dependencies = [ "serde_derive", ] @@ -1092,20 +1040,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.152" +version = "1.0.164" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" +checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.20", ] [[package]] name = "serde_json" -version = "1.0.93" +version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76" +checksum = "46266871c240a00b8f503b877622fe33430b3c7d963bdc0f2adc511e54a1eae3" dependencies = [ "itoa", "ryu", @@ -1124,9 +1072,9 @@ dependencies = [ [[package]] name = "serde_with" -version = "2.2.0" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30d904179146de381af4c93d3af6ca4984b3152db687dacb9c3c35e86f39809c" +checksum = "07ff71d2c147a7b57362cead5e22f772cd52f6ab31cfcd9edcd7f6aeb2a0afbe" dependencies = [ "base64", "chrono", @@ -1134,7 +1082,7 @@ dependencies = [ "indexmap", "serde", "serde_json", - "serde_with_macros 2.2.0", + "serde_with_macros 2.3.3", "time", ] @@ -1147,37 +1095,37 @@ dependencies = [ "darling 0.13.4", "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] name = "serde_with_macros" -version = "2.2.0" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1966009f3c05f095697c537312f5415d1e3ed31ce0a56942bac4c771c5c335e" +checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f" dependencies = [ - "darling 0.14.3", + "darling 0.20.3", "proc-macro2", "quote", - "syn", + "syn 2.0.20", ] [[package]] name = "sha2" -version = "0.10.6" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", - "digest 0.10.6", + "digest 0.10.7", ] [[package]] name = "smallvec" -version = "1.10.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a" [[package]] name = "spmc" @@ -1187,13 +1135,13 @@ checksum = "02a8428da277a8e3a15271d79943e80ccc2ef254e78813a166a08d65e4c3ece5" [[package]] name = "sprs" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ea71e48b3eab4c4b153e8e35dcaeac132720809ef68359097b8cb54a18edd70" +checksum = "88bab60b0a18fb9b3e0c26e92796b3c3a278bf5fa4880f5ad5cc3bdfb843d0b1" dependencies = [ "alga", "ndarray", - "num-complex 0.4.3", + "num-complex 0.4.4", "num-traits", "num_cpus", "rayon", @@ -1222,20 +1170,20 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn", + "syn 1.0.109", ] [[package]] name = "subtle" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "syn" -version = "1.0.107" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ 
"proc-macro2", "quote", @@ -1243,52 +1191,43 @@ dependencies = [ ] [[package]] -name = "synstructure" -version = "0.12.6" +name = "syn" +version = "2.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" +checksum = "fcb8d4cebc40aa517dfb69618fa647a346562e67228e2236ae0042ee6ac14775" dependencies = [ "proc-macro2", "quote", - "syn", - "unicode-xid", -] - -[[package]] -name = "termcolor" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" -dependencies = [ - "winapi-util", + "unicode-ident", ] [[package]] name = "thiserror" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" +checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.20", ] [[package]] name = "time" -version = "0.3.19" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53250a3b3fed8ff8fd988587d8925d26a83ac3845d9e03b220b37f34c2b8d6c2" +checksum = "a79d09ac6b08c1ab3906a2f7cc2e81a0e27c7ae89c63812df75e52bef0751e07" dependencies = [ + "deranged", "itoa", "serde", "time-core", @@ -1297,15 +1236,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" +checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" [[package]] name = "time-macros" -version = "0.2.7" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a460aeb8de6dcb0f381e1ee05f1cd56fcf5a5f6eb8187ff3d8f0b11078d38b7c" +checksum = "75c65469ed6b3a4809d987a41eb1dc918e9bc1d92211cbad7ae82931846f7451" dependencies = [ "time-core", ] @@ -1321,33 +1260,21 @@ dependencies = [ [[package]] name = "typenum" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ucd-trie" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" [[package]] name = "unicode-ident" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" - -[[package]] -name = "unicode-width" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" - -[[package]] -name = "unicode-xid" -version = "0.2.4" +version = "1.0.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "version_check" @@ -1363,9 +1290,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -1373,24 +1300,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn", + "syn 2.0.20", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.34" +version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454" +checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" dependencies = [ "cfg-if", "js-sys", @@ -1400,9 +1327,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -1410,28 +1337,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.20", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasm-bindgen-test" -version = "0.3.34" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db36fc0f9fb209e88fb3642590ae0205bb5a56216dabd963ba15879fe53a30b" +checksum = "6e6e302a7ea94f83a6d09e78e7dc7d9ca7b186bc2829c24a22d0753efd680671" dependencies = [ "console_error_panic_hook", "js-sys", @@ -1443,9 +1370,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-test-macro" -version = "0.3.34" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0734759ae6b3b1717d661fe4f016efcfb9828f5edb4520c18eaee05af3b43be9" +checksum = "ecb993dd8c836930ed130e020e77d9b2e65dd0fbab1b67c790b0f5d80b11a575" dependencies = [ "proc-macro2", "quote", @@ -1453,62 +1380,96 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.61" +version = "0.3.64" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" dependencies = [ "js-sys", "wasm-bindgen", ] [[package]] -name = "winapi" -version = "0.3.9" +name = "windows-core" +version = "0.51.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", ] [[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" +name = "windows_aarch64_gnullvm" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] -name = "winapi-util" -version = "0.1.5" +name = "windows_aarch64_msvc" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" -dependencies = [ - "winapi", -] +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" +name = "windows_x86_64_msvc" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "zeroize" -version = "1.5.7" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" +checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" dependencies = [ "zeroize_derive", ] [[package]] name = "zeroize_derive" -version = "1.3.3" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44bf07cb3e50ea2003396695d58bf46bc9887a1f362260446fad6bc4e79bd36c" +checksum = 
"ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn", - "synstructure", + "syn 2.0.20", ] diff --git a/src/lib/crypto/kimchi_bindings/wasm/Cargo.toml b/src/lib/crypto/kimchi_bindings/wasm/Cargo.toml index 42a91c70480..6751722de1b 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/Cargo.toml +++ b/src/lib/crypto/kimchi_bindings/wasm/Cargo.toml @@ -14,7 +14,8 @@ crate-type = ["cdylib"] ################################# Dependencies ################################ [dependencies] -wasm-bindgen = { version = "0.2.78" } +# Strictly enforcing 0.2.87 +wasm-bindgen = { version = "=0.2.87" } console_error_panic_hook = { version = "0.1.6" } web-sys = { version = "0.3.35", features = ["Window", "Document", "HtmlElement", "Text", "Node", "Element" ] } @@ -50,6 +51,9 @@ serde_json = "1.0" serde_with = "2.0.1" serde-wasm-bindgen = "0.4" js-sys = "0.3" +# Version for proc-macro2 needs to be 1.0.60+ to be compatible with newer rust versions +# https://github.com/rust-lang/rust/issues/113152 +proc-macro2 = { version = "=1.0.60", features=["default", "proc-macro"] } [dev-dependencies] wasm-bindgen-test = "0.3.0" diff --git a/src/lib/crypto/kimchi_bindings/wasm/rust-toolchain.toml b/src/lib/crypto/kimchi_bindings/wasm/rust-toolchain.toml index 1f6f0ea957b..b04d9639e65 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/rust-toolchain.toml +++ b/src/lib/crypto/kimchi_bindings/wasm/rust-toolchain.toml @@ -1,2 +1,6 @@ +# A nightly is necessary to enable the use of #![feature] [toolchain] -channel = "nightly-2023-02-05" +# This should stay in line with the versions in +# - kimchi_bindings/js/node_js/build.sh +# - kimchi_bindings/js/web/build.sh +channel = "nightly-2023-09-01" # roughly matches 1.72 diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/oracles.rs b/src/lib/crypto/kimchi_bindings/wasm/src/oracles.rs index e333e8ce056..96a17baa432 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/src/oracles.rs +++ b/src/lib/crypto/kimchi_bindings/wasm/src/oracles.rs @@ -9,6 +9,8 @@ use mina_poseidon::{ }; use paste::paste; use poly_commitment::commitment::{shift_scalar, PolyComm}; +use poly_commitment::evaluation_proof::OpeningProof; +use poly_commitment::SRS; use wasm_bindgen::prelude::*; // use wasm_bindgen::convert::{IntoWasmAbi, FromWasmAbi}; use crate::wasm_vector::WasmVector; @@ -178,7 +180,7 @@ macro_rules! impl_oracles { ) -> Result<[], JsError> { // conversions let result = crate::rayon::run_in_pool(|| { - let index: DlogVerifierIndex<$G> = index.into(); + let index: DlogVerifierIndex<$G, OpeningProof<$G>> = index.into(); let lgr_comm: Vec> = lgr_comm .into_iter() @@ -207,10 +209,13 @@ macro_rules! 
impl_oracles { .commitment }; - let (proof, public_input): (ProverProof<$G>, Vec<$F>) = proof.into(); + let (proof, public_input): (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) = proof.into(); let oracles_result = - proof.oracles::<DefaultFqSponge<$curve_params, PlonkSpongeConstantsKimchi>, DefaultFrSponge<$F, PlonkSpongeConstantsKimchi>>(&index, &p_comm,&public_input); + proof.oracles::< + DefaultFqSponge<$curve_params, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<$F, PlonkSpongeConstantsKimchi> + >(&index, &p_comm, Some(&public_input)); let oracles_result = match oracles_result { Err(e) => { return Err(format!("oracles_create: {}", e)); diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fp_plonk_index.rs b/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fp_plonk_index.rs index 5b48d79c494..b76170a2931 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fp_plonk_index.rs +++ b/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fp_plonk_index.rs @@ -1,9 +1,15 @@ use ark_poly::EvaluationDomain; +use kimchi::circuits::lookup::runtime_tables::RuntimeTableCfg; +use crate::arkworks::WasmPastaFp; use crate::gate_vector::fp::WasmGateVector; use crate::srs::fp::WasmFpSrs as WasmSrs; +use crate::wasm_flat_vector::WasmFlatVector; +use crate::wasm_vector::{fp::*, WasmVector}; +use kimchi::circuits::lookup::tables::LookupTable; use kimchi::circuits::{constraints::ConstraintSystem, gate::CircuitGate}; use kimchi::linearization::expr_linearization; +use kimchi::poly_commitment::evaluation_proof::OpeningProof; use kimchi::prover_index::ProverIndex; use mina_curves::pasta::{Fp, Pallas as GAffineOther, Vesta as GAffine, VestaParameters}; use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge}; @@ -20,16 +26,81 @@ use wasm_bindgen::prelude::*; /// Boxed so that we don't store large proving indexes in the OCaml heap. #[wasm_bindgen] -pub struct WasmPastaFpPlonkIndex(#[wasm_bindgen(skip)] pub Box<ProverIndex<GAffine>>); +pub struct WasmPastaFpPlonkIndex( + #[wasm_bindgen(skip)] pub Box<ProverIndex<GAffine, OpeningProof<GAffine>>>, +); + +// This should mimic LookupTable structure +#[wasm_bindgen] +pub struct WasmPastaFpLookupTable { + #[wasm_bindgen(skip)] + pub id: i32, + #[wasm_bindgen(skip)] + pub data: WasmVecVecFp, +} + +// Converter from WasmPastaFpLookupTable to LookupTable, used by the binding +// below.
+impl From<WasmPastaFpLookupTable> for LookupTable<Fp> { + fn from(wasm_lt: WasmPastaFpLookupTable) -> LookupTable<Fp> { + LookupTable { + id: wasm_lt.id.into(), + data: wasm_lt.data.0, + } + } +} + +// JS constructor for js/bindings.js +#[wasm_bindgen] +impl WasmPastaFpLookupTable { + #[wasm_bindgen(constructor)] + pub fn new(id: i32, data: WasmVecVecFp) -> WasmPastaFpLookupTable { + WasmPastaFpLookupTable { id, data } + } +} + +// Runtime table config + +#[wasm_bindgen] +pub struct WasmPastaFpRuntimeTableCfg { + #[wasm_bindgen(skip)] + pub id: i32, + #[wasm_bindgen(skip)] + pub first_column: WasmFlatVector<WasmPastaFp>, +} + +// JS constructor for js/bindings.js +#[wasm_bindgen] +impl WasmPastaFpRuntimeTableCfg { + #[wasm_bindgen(constructor)] + pub fn new(id: i32, first_column: WasmFlatVector<WasmPastaFp>) -> Self { + Self { id, first_column } + } +} + +impl From<WasmPastaFpRuntimeTableCfg> for RuntimeTableCfg<Fp> { + fn from(wasm_rt_table_cfg: WasmPastaFpRuntimeTableCfg) -> Self { + Self { + id: wasm_rt_table_cfg.id, + first_column: wasm_rt_table_cfg + .first_column + .into_iter() + .map(Into::into) + .collect(), + } + } +} -// // CamlPastaFpPlonkIndex methods // +// Change js/web/worker-spec.js accordingly #[wasm_bindgen] pub fn caml_pasta_fp_plonk_index_create( gates: &WasmGateVector, public_: i32, + lookup_tables: WasmVector<WasmPastaFpLookupTable>, + runtime_table_cfgs: WasmVector<WasmPastaFpRuntimeTableCfg>, prev_challenges: i32, srs: &WasmSrs, ) -> Result<WasmPastaFpPlonkIndex, JsError> { @@ -46,10 +117,22 @@ pub fn caml_pasta_fp_plonk_index_create( }) .collect(); + let rust_runtime_table_cfgs: Vec<RuntimeTableCfg<Fp>> = + runtime_table_cfgs.into_iter().map(Into::into).collect(); + + let rust_lookup_tables: Vec<LookupTable<Fp>> = + lookup_tables.into_iter().map(Into::into).collect(); + // create constraint system let cs = match ConstraintSystem::<Fp>::create(gates) .public(public_ as usize) .prev_challenges(prev_challenges as usize) + .lookup(rust_lookup_tables) + .runtime(if rust_runtime_table_cfgs.is_empty() { + None + } else { + Some(rust_runtime_table_cfgs) + }) .build() { Err(_) => { @@ -68,7 +151,8 @@ pub fn caml_pasta_fp_plonk_index_create( ptr.add_lagrange_basis(cs.domain.d1); } - let mut index = ProverIndex::<GAffine>::create(cs, endo_q, srs.0.clone()); + let mut index = + ProverIndex::<GAffine, OpeningProof<GAffine>>::create(cs, endo_q, srs.0.clone()); // Compute and cache the verifier index digest index.compute_verifier_index_digest::<DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>>(); Ok(index) @@ -106,6 +190,35 @@ pub fn caml_pasta_fp_plonk_index_domain_d8_size(index: &WasmPastaFpPlonkIndex) - index.0.cs.domain.d8.size() as i32 +#[wasm_bindgen] +pub fn caml_pasta_fp_plonk_index_decode( + bytes: &[u8], + srs: &WasmSrs, +) -> Result<WasmPastaFpPlonkIndex, JsError> { + let mut deserializer = rmp_serde::Deserializer::new(bytes); + let mut index = + ProverIndex::<GAffine, OpeningProof<GAffine>>::deserialize(&mut deserializer) + .map_err(|e| JsError::new(&format!("caml_pasta_fp_plonk_index_decode: {}", e)))?; + + index.srs = srs.0.clone(); + let (linearization, powers_of_alpha) = expr_linearization(Some(&index.cs.feature_flags), true); + index.linearization = linearization; + index.powers_of_alpha = powers_of_alpha; + + Ok(WasmPastaFpPlonkIndex(Box::new(index))) +} + +#[wasm_bindgen] +pub fn caml_pasta_fp_plonk_index_encode(index: &WasmPastaFpPlonkIndex) -> Result<Vec<u8>, JsError> { + let mut buffer = Vec::new(); + let mut serializer = rmp_serde::Serializer::new(&mut buffer); + index + .0 + .serialize(&mut serializer) + .map_err(|e| JsError::new(&format!("caml_pasta_fp_plonk_index_encode: {}", e)))?; + Ok(buffer) +} + #[wasm_bindgen] pub fn caml_pasta_fp_plonk_index_read( offset: Option<i32>, @@ -126,8 +239,10 @@ pub fn caml_pasta_fp_plonk_index_read( } // deserialize the index - let mut t = ProverIndex::<GAffine>::deserialize(&mut 
rmp_serde::Deserializer::new(r)) - .map_err(|err| JsValue::from_str(&format!("caml_pasta_fp_plonk_index_read: {err}")))?; + let mut t = ProverIndex::<GAffine, OpeningProof<GAffine>>::deserialize( + &mut rmp_serde::Deserializer::new(r), + ) + .map_err(|err| JsValue::from_str(&format!("caml_pasta_fp_plonk_index_read: {err}")))?; t.srs = srs.0.clone(); let (linearization, powers_of_alpha) = expr_linearization(Some(&t.cs.feature_flags), true); t.linearization = linearization; diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fq_plonk_index.rs b/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fq_plonk_index.rs index be56f0bd88d..07d1fabcf87 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fq_plonk_index.rs +++ b/src/lib/crypto/kimchi_bindings/wasm/src/pasta_fq_plonk_index.rs @@ -1,9 +1,15 @@ use ark_poly::EvaluationDomain; +use kimchi::circuits::lookup::runtime_tables::RuntimeTableCfg; +use crate::arkworks::WasmPastaFq; use crate::gate_vector::fq::WasmGateVector; use crate::srs::fq::WasmFqSrs as WasmSrs; +use crate::wasm_flat_vector::WasmFlatVector; +use crate::wasm_vector::{fq::*, WasmVector}; +use kimchi::circuits::lookup::tables::LookupTable; use kimchi::circuits::{constraints::ConstraintSystem, gate::CircuitGate}; use kimchi::linearization::expr_linearization; +use kimchi::poly_commitment::evaluation_proof::OpeningProof; use kimchi::prover_index::ProverIndex; use mina_curves::pasta::{Fq, Pallas as GAffine, PallasParameters, Vesta as GAffineOther}; use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge}; @@ -20,16 +26,79 @@ use wasm_bindgen::prelude::*; /// Boxed so that we don't store large proving indexes in the OCaml heap. #[wasm_bindgen] -pub struct WasmPastaFqPlonkIndex(#[wasm_bindgen(skip)] pub Box<ProverIndex<GAffine>>); +pub struct WasmPastaFqPlonkIndex( + #[wasm_bindgen(skip)] pub Box<ProverIndex<GAffine, OpeningProof<GAffine>>>, +); + +#[wasm_bindgen] +pub struct WasmPastaFqLookupTable { + #[wasm_bindgen(skip)] + pub id: i32, + #[wasm_bindgen(skip)] + pub data: WasmVecVecFq, +} + +impl From<WasmPastaFqLookupTable> for LookupTable<Fq> { + fn from(wasm_lt: WasmPastaFqLookupTable) -> LookupTable<Fq> { + LookupTable { + id: wasm_lt.id.into(), + data: wasm_lt.data.0, + } + } +} + +// JS constructor for js/bindings.js +#[wasm_bindgen] +impl WasmPastaFqLookupTable { + #[wasm_bindgen(constructor)] + pub fn new(id: i32, data: WasmVecVecFq) -> WasmPastaFqLookupTable { + WasmPastaFqLookupTable { id, data } + } +} + +// Runtime table config + +#[wasm_bindgen] +pub struct WasmPastaFqRuntimeTableCfg { + #[wasm_bindgen(skip)] + pub id: i32, + #[wasm_bindgen(skip)] + pub first_column: WasmFlatVector<WasmPastaFq>, +} + +impl From<WasmPastaFqRuntimeTableCfg> for RuntimeTableCfg<Fq> { + fn from(wasm_rt_cfg: WasmPastaFqRuntimeTableCfg) -> Self { + Self { + id: wasm_rt_cfg.id, + first_column: wasm_rt_cfg + .first_column + .into_iter() + .map(Into::into) + .collect(), + } + } +} + +// JS constructor for js/bindings.js +#[wasm_bindgen] +impl WasmPastaFqRuntimeTableCfg { + #[wasm_bindgen(constructor)] + pub fn new(id: i32, first_column: WasmFlatVector<WasmPastaFq>) -> Self { + Self { id, first_column } + } +} // // CamlPastaFqPlonkIndex methods // +// Change js/web/worker-spec.js accordingly #[wasm_bindgen] pub fn caml_pasta_fq_plonk_index_create( gates: &WasmGateVector, public_: i32, + lookup_tables: WasmVector<WasmPastaFqLookupTable>, + runtime_table_cfgs: WasmVector<WasmPastaFqRuntimeTableCfg>, prev_challenges: i32, srs: &WasmSrs, ) -> Result<WasmPastaFqPlonkIndex, JsError> { @@ -46,10 +115,22 @@ pub fn caml_pasta_fq_plonk_index_create( }) .collect(); + let rust_runtime_table_cfgs: Vec<RuntimeTableCfg<Fq>> = + runtime_table_cfgs.into_iter().map(Into::into).collect(); + + let rust_lookup_tables: Vec<LookupTable<Fq>> = + 
lookup_tables.into_iter().map(Into::into).collect(); + // create constraint system let cs = match ConstraintSystem::<Fq>::create(gates) .public(public_ as usize) .prev_challenges(prev_challenges as usize) + .lookup(rust_lookup_tables) + .runtime(if rust_runtime_table_cfgs.is_empty() { + None + } else { + Some(rust_runtime_table_cfgs) + }) .build() { Err(_) => { @@ -68,7 +149,8 @@ pub fn caml_pasta_fq_plonk_index_create( ptr.add_lagrange_basis(cs.domain.d1); } - let mut index = ProverIndex::<GAffine>::create(cs, endo_q, srs.0.clone()); + let mut index = + ProverIndex::<GAffine, OpeningProof<GAffine>>::create(cs, endo_q, srs.0.clone()); // Compute and cache the verifier index digest index.compute_verifier_index_digest::<DefaultFqSponge<PallasParameters, PlonkSpongeConstantsKimchi>>(); @@ -107,6 +189,35 @@ pub fn caml_pasta_fq_plonk_index_domain_d8_size(index: &WasmPastaFqPlonkIndex) - index.0.cs.domain.d8.size() as i32 +#[wasm_bindgen] +pub fn caml_pasta_fq_plonk_index_decode( + bytes: &[u8], + srs: &WasmSrs, +) -> Result<WasmPastaFqPlonkIndex, JsError> { + let mut deserializer = rmp_serde::Deserializer::new(bytes); + let mut index = + ProverIndex::<GAffine, OpeningProof<GAffine>>::deserialize(&mut deserializer) + .map_err(|e| JsError::new(&format!("caml_pasta_fq_plonk_index_decode: {}", e)))?; + + index.srs = srs.0.clone(); + let (linearization, powers_of_alpha) = expr_linearization(Some(&index.cs.feature_flags), true); + index.linearization = linearization; + index.powers_of_alpha = powers_of_alpha; + + Ok(WasmPastaFqPlonkIndex(Box::new(index))) +} + +#[wasm_bindgen] +pub fn caml_pasta_fq_plonk_index_encode(index: &WasmPastaFqPlonkIndex) -> Result<Vec<u8>, JsError> { + let mut buffer = Vec::new(); + let mut serializer = rmp_serde::Serializer::new(&mut buffer); + index + .0 + .serialize(&mut serializer) + .map_err(|e| JsError::new(&format!("caml_pasta_fq_plonk_index_encode: {}", e)))?; + Ok(buffer) +} + #[wasm_bindgen] pub fn caml_pasta_fq_plonk_index_read( offset: Option<i32>, @@ -127,8 +238,10 @@ pub fn caml_pasta_fq_plonk_index_read( } // deserialize the index - let mut t = ProverIndex::<GAffine>::deserialize(&mut rmp_serde::Deserializer::new(r)) - .map_err(|err| JsValue::from_str(&format!("caml_pasta_fq_plonk_index_read: {err}")))?; + let mut t = ProverIndex::<GAffine, OpeningProof<GAffine>>::deserialize( + &mut rmp_serde::Deserializer::new(r), + ) + .map_err(|err| JsValue::from_str(&format!("caml_pasta_fq_plonk_index_read: {err}")))?; t.srs = srs.0.clone(); let (linearization, powers_of_alpha) = expr_linearization(Some(&t.cs.feature_flags), true); t.linearization = linearization; diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/plonk_proof.rs b/src/lib/crypto/kimchi_bindings/wasm/src/plonk_proof.rs index 8ce1f8869d0..6ce6724b9ad 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/src/plonk_proof.rs +++ b/src/lib/crypto/kimchi_bindings/wasm/src/plonk_proof.rs @@ -1,12 +1,15 @@ // use kimchi::circuits::expr::{Linearization, PolishToken, Variable, Column}; // use kimchi::circuits::gate::{GateType, CurrOrNext}; use crate::wasm_flat_vector::WasmFlatVector; +use crate::wasm_vector::fp::WasmVecVecFp; +use crate::wasm_vector::fq::WasmVecVecFq; use crate::wasm_vector::WasmVector; use paste::paste; use std::convert::TryInto; use wasm_bindgen::prelude::*; // use std::sync::Arc; // use poly_commitment::srs::SRS; +use kimchi::circuits::lookup::runtime_tables::RuntimeTable; // use kimchi::index::{expr_linearization, VerifierIndex as DlogVerifierIndex}; // use ark_poly::{EvaluationDomain, Radix2EvaluationDomain as Domain}; use ark_ec::AffineCurve; @@ -55,35 +58,9 @@ macro_rules! impl_proof { $WasmVerifierIndex: ty, $field_name: ident ) => { - paste! 
{ - #[wasm_bindgen] - pub struct [](Vec>); type WasmVecVecF = []; - #[wasm_bindgen] - impl [] { - #[wasm_bindgen(constructor)] - pub fn create(n: i32) -> Self { - [](Vec::with_capacity(n as usize)) - } - - #[wasm_bindgen] - pub fn push(&mut self, x: WasmFlatVector<$WasmF>) { - self.0.push(x.into_iter().map(Into::into).collect()) - } - - #[wasm_bindgen] - pub fn get(&self, i: i32) -> WasmFlatVector<$WasmF> { - self.0[i as usize].clone().into_iter().map(Into::into).collect() - } - - #[wasm_bindgen] - pub fn set(&mut self, i: i32, x: WasmFlatVector<$WasmF>) { - self.0[i as usize] = x.into_iter().map(Into::into).collect() - } - } - #[derive(Clone)] pub struct []( ProofEvaluations>> @@ -251,7 +228,7 @@ macro_rules! impl_proof { #[wasm_bindgen(skip)] pub t_comm: $WasmPolyComm, #[wasm_bindgen(skip)] - pub lookup: Option + pub lookup: Option, } type WasmProverCommitments = []; @@ -310,7 +287,7 @@ macro_rules! impl_proof { w_comm: x.w_comm.iter().map(Into::into).collect(), z_comm: x.z_comm.clone().into(), t_comm: x.t_comm.clone().into(), - lookup: x.lookup.clone().map(Into::into), + lookup: x.lookup.clone().map(Into::into) } } } @@ -484,8 +461,8 @@ macro_rules! impl_proof { } type WasmProverProof = []; - impl From<(&ProverProof<$G>, &Vec<$F>)> for WasmProverProof { - fn from((x, public): (&ProverProof<$G>, &Vec<$F>)) -> Self { + impl From<(&ProverProof<$G, OpeningProof<$G>>, &Vec<$F>)> for WasmProverProof { + fn from((x, public): (&ProverProof<$G, OpeningProof<$G>>, &Vec<$F>)) -> Self { let (scalars, comms) = x.prev_challenges .iter() @@ -505,8 +482,8 @@ macro_rules! impl_proof { } } - impl From<(ProverProof<$G>, Vec<$F>)> for WasmProverProof { - fn from((x, public): (ProverProof<$G>, Vec<$F>)) -> Self { + impl From<(ProverProof<$G, OpeningProof<$G>>, Vec<$F>)> for WasmProverProof { + fn from((x, public): (ProverProof<$G, OpeningProof<$G>>, Vec<$F>)) -> Self { let ProverProof {ft_eval1, commitments, proof, evals , prev_challenges} = x; let (scalars, comms) = prev_challenges @@ -525,7 +502,7 @@ macro_rules! impl_proof { } } - impl From<&WasmProverProof> for (ProverProof<$G>, Vec<$F>) { + impl From<&WasmProverProof> for (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) { fn from(x: &WasmProverProof) -> Self { let proof = ProverProof { commitments: x.commitments.clone().into(), @@ -549,7 +526,7 @@ macro_rules! impl_proof { } } - impl From for (ProverProof<$G>, Vec<$F>) { + impl From for (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) { fn from(x: WasmProverProof) -> Self { let proof =ProverProof { commitments: x.commitments.into(), @@ -653,10 +630,35 @@ macro_rules! impl_proof { } } + #[wasm_bindgen] + pub struct [] { + id: i32, + data: WasmFlatVector<$WasmF> + } + type WasmRuntimeTable = []; + + #[wasm_bindgen] + impl [] { + #[wasm_bindgen(constructor)] + pub fn new(id: i32, data: WasmFlatVector<$WasmF>) -> WasmRuntimeTable { + WasmRuntimeTable {id, data} + } + } + + impl From<[]> for RuntimeTable<$F> { + fn from(wasm_rt: WasmRuntimeTable) -> RuntimeTable<$F> { + RuntimeTable { + id: wasm_rt.id.into(), + data: wasm_rt.data.into_iter().map(Into::into).collect() + } + } + } + #[wasm_bindgen] pub fn [<$name:snake _create>]( index: &$WasmIndex, witness: WasmVecVecF, + wasm_runtime_tables: WasmVector, prev_challenges: WasmFlatVector<$WasmF>, prev_sgs: WasmVector<$WasmG>, ) -> Result { @@ -683,8 +685,7 @@ macro_rules! 
impl_proof { .map(|a| a.clone().into()) .collect(); let comm = PolyComm::<$G> { - unshifted: vec![sg], - shifted: None, + elems: vec![sg], }; RecursionChallenge { chals, comm } }) @@ -692,11 +693,13 @@ macro_rules! impl_proof { } }; + let rust_runtime_tables: Vec> = wasm_runtime_tables.into_iter().map(Into::into).collect(); + let witness: [Vec<_>; COLUMNS] = witness.0 .try_into() .expect("the witness should be a column of 15 vectors"); - let index: &ProverIndex<$G> = &index.0.as_ref(); + let index: &ProverIndex<$G, OpeningProof<$G>> = &index.0.as_ref(); let public_input = witness[0][0..index.cs.public].to_vec(); @@ -705,7 +708,7 @@ macro_rules! impl_proof { let maybe_proof = ProverProof::create_recursive::< DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, - >(&group_map, witness, &[], index, prev, None); + >(&group_map, witness, &rust_runtime_tables, index, prev, None); (maybe_proof, public_input) }); @@ -728,6 +731,7 @@ macro_rules! impl_proof { $G, DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, + OpeningProof<$G> >( &group_map, &[Context { verifier_index, proof, public_input }] @@ -769,6 +773,7 @@ macro_rules! impl_proof { $G, DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, + OpeningProof<$G> >(&group_map, &ts) .is_ok() }) @@ -779,8 +784,7 @@ macro_rules! impl_proof { fn comm() -> PolyComm<$G> { let g = $G::prime_subgroup_generator(); PolyComm { - shifted: Some(g), - unshifted: vec![g, g, g], + elems: vec![g, g, g], } } @@ -828,6 +832,7 @@ macro_rules! impl_proof { lookup_gate_lookup_selector: None, range_check_lookup_selector: None, foreign_field_mul_lookup_selector: None, + public: None, }; let dlogproof = ProverProof { diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/plonk_verifier_index.rs b/src/lib/crypto/kimchi_bindings/wasm/src/plonk_verifier_index.rs index 9ae817de621..21d663ba7f3 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/src/plonk_verifier_index.rs +++ b/src/lib/crypto/kimchi_bindings/wasm/src/plonk_verifier_index.rs @@ -7,15 +7,15 @@ use kimchi::circuits::{ constraints::FeatureFlags, lookup::index::LookupSelectors, lookup::lookups::{LookupFeatures, LookupInfo, LookupPatterns}, - polynomials::permutation::Shifts, - polynomials::permutation::{zk_polynomial, zk_w3}, + polynomials::permutation::{permutation_vanishing_polynomial, zk_w, Shifts}, wires::{COLUMNS, PERMUTS}, }; use kimchi::linearization::expr_linearization; +use kimchi::poly_commitment::evaluation_proof::OpeningProof; use kimchi::verifier_index::{LookupVerifierIndex, VerifierIndex as DlogVerifierIndex}; use paste::paste; -use poly_commitment::srs::SRS; use poly_commitment::commitment::PolyComm; +use poly_commitment::srs::SRS; use std::path::Path; use std::sync::Arc; use wasm_bindgen::prelude::*; @@ -71,11 +71,26 @@ macro_rules! 
impl_verification_key { pub emul_comm: $WasmPolyComm, #[wasm_bindgen(skip)] pub endomul_scalar_comm: $WasmPolyComm, + #[wasm_bindgen(skip)] + pub xor_comm: Option<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub range_check0_comm: Option<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub range_check1_comm: Option<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub foreign_field_add_comm: Option<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub foreign_field_mul_comm: Option<$WasmPolyComm>, + #[wasm_bindgen(skip)] + pub rot_comm: Option<$WasmPolyComm> } + type WasmPlonkVerificationEvals = []; + #[wasm_bindgen] impl [] { + #[allow(clippy::too_many_arguments)] #[wasm_bindgen(constructor)] pub fn new( sigma_comm: WasmVector<$WasmPolyComm>, @@ -86,6 +101,12 @@ macro_rules! impl_verification_key { mul_comm: &$WasmPolyComm, emul_comm: &$WasmPolyComm, endomul_scalar_comm: &$WasmPolyComm, + xor_comm: Option<$WasmPolyComm>, + range_check0_comm: Option<$WasmPolyComm>, + range_check1_comm: Option<$WasmPolyComm>, + foreign_field_add_comm: Option<$WasmPolyComm>, + foreign_field_mul_comm: Option<$WasmPolyComm>, + rot_comm: Option<$WasmPolyComm>, ) -> Self { WasmPlonkVerificationEvals { sigma_comm: sigma_comm.clone(), @@ -96,6 +117,12 @@ macro_rules! impl_verification_key { mul_comm: mul_comm.clone(), emul_comm: emul_comm.clone(), endomul_scalar_comm: endomul_scalar_comm.clone(), + xor_comm: xor_comm.clone(), + range_check0_comm: range_check0_comm.clone(), + range_check1_comm: range_check1_comm.clone(), + foreign_field_mul_comm: foreign_field_mul_comm.clone(), + foreign_field_add_comm: foreign_field_add_comm.clone(), + rot_comm: rot_comm.clone(), } } @@ -178,6 +205,67 @@ macro_rules! impl_verification_key { pub fn set_endomul_scalar_comm(&mut self, x: $WasmPolyComm) { self.endomul_scalar_comm = x; } + + #[wasm_bindgen(getter)] + pub fn xor_comm(&self) -> Option<$WasmPolyComm> { + self.xor_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_xor_comm(&mut self, x: Option<$WasmPolyComm>) { + self.xor_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn rot_comm(&self) -> Option<$WasmPolyComm> { + self.rot_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_rot_comm(&mut self, x: Option<$WasmPolyComm>) { + self.rot_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn range_check0_comm(&self) -> Option<$WasmPolyComm> { + self.range_check0_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_range_check0_comm(&mut self, x: Option<$WasmPolyComm>) { + self.range_check0_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn range_check1_comm(&self) -> Option<$WasmPolyComm> { + self.range_check1_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_range_check1_comm(&mut self, x: Option<$WasmPolyComm>) { + self.range_check1_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn foreign_field_add_comm(&self) -> Option<$WasmPolyComm> { + self.foreign_field_add_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_foreign_field_add_comm(&mut self, x: Option<$WasmPolyComm>) { + self.foreign_field_add_comm = x; + } + + #[wasm_bindgen(getter)] + pub fn foreign_field_mul_comm(&self) -> Option<$WasmPolyComm> { + self.foreign_field_mul_comm.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_foreign_field_mul_comm(&mut self, x: Option<$WasmPolyComm>) { + self.foreign_field_mul_comm = x; + } + } #[derive(Clone, Copy)] @@ -410,7 +498,7 @@ macro_rules! 
impl_verification_key { lookup_table: WasmVector<$WasmPolyComm>, lookup_selectors: WasmLookupSelectors, table_ids: Option<$WasmPolyComm>, - lookup_info: LookupInfo, + lookup_info: &LookupInfo, runtime_tables_selector: Option<$WasmPolyComm> ) -> WasmLookupVerifierIndex { WasmLookupVerifierIndex { @@ -418,7 +506,7 @@ macro_rules! impl_verification_key { lookup_table, lookup_selectors, table_ids, - lookup_info, + lookup_info: lookup_info.clone(), runtime_tables_selector } } @@ -453,6 +541,16 @@ macro_rules! impl_verification_key { self.table_ids = x } + #[wasm_bindgen(getter)] + pub fn lookup_info(&self) -> LookupInfo { + self.lookup_info.clone() + } + + #[wasm_bindgen(setter)] + pub fn set_lookup_info(&mut self, x: LookupInfo) { + self.lookup_info = x + } + #[wasm_bindgen(getter)] pub fn runtime_tables_selector(&self) -> Option<$WasmPolyComm> { self.runtime_tables_selector.clone() @@ -478,6 +576,7 @@ macro_rules! impl_verification_key { pub shifts: WasmShifts, #[wasm_bindgen(skip)] pub lookup_index: Option, + pub zk_rows: isize, } type WasmPlonkVerifierIndex = []; @@ -493,6 +592,7 @@ macro_rules! impl_verification_key { evals: &WasmPlonkVerificationEvals, shifts: &WasmShifts, lookup_index: Option, + zk_rows: isize, ) -> Self { WasmPlonkVerifierIndex { domain: domain.clone(), @@ -503,6 +603,7 @@ macro_rules! impl_verification_key { evals: evals.clone(), shifts: shifts.clone(), lookup_index: lookup_index.clone(), + zk_rows: zk_rows, } } @@ -539,7 +640,7 @@ macro_rules! impl_verification_key { pub fn to_wasm<'a>( srs: &Arc>, - vi: DlogVerifierIndex<$G>, + vi: DlogVerifierIndex<$G, OpeningProof<$G>>, ) -> WasmPlonkVerifierIndex { WasmPlonkVerifierIndex { domain: WasmDomain { @@ -559,6 +660,12 @@ macro_rules! impl_verification_key { mul_comm: vi.mul_comm.into(), emul_comm: vi.emul_comm.into(), endomul_scalar_comm: vi.endomul_scalar_comm.into(), + xor_comm: vi.xor_comm.map(|v| v.into()), + range_check0_comm: vi.range_check0_comm.map(|v| v.into()), + range_check1_comm: vi.range_check1_comm.map(|v| v.into()), + foreign_field_add_comm: vi.foreign_field_add_comm.map(|v| v.into()), + foreign_field_mul_comm: vi.foreign_field_mul_comm.map(|v| v.into()), + rot_comm: vi.rot_comm.map(|v| v.into()) }, shifts: WasmShifts { @@ -571,6 +678,7 @@ macro_rules! impl_verification_key { s6: vi.shift[6].into(), }, lookup_index: vi.lookup_index.map(Into::into), + zk_rows: vi.zk_rows as isize, } } @@ -609,15 +717,54 @@ macro_rules! 
impl_verification_key { } } */ + fn compute_feature_flags(index: &WasmPlonkVerifierIndex) -> FeatureFlags { + let xor = index.evals.xor_comm.is_some(); + let range_check0 = index.evals.range_check0_comm.is_some(); + let range_check1 = index.evals.range_check1_comm.is_some(); + let foreign_field_add = index.evals.foreign_field_add_comm.is_some(); + let foreign_field_mul = index.evals.foreign_field_mul_comm.is_some(); + let rot = index.evals.rot_comm.is_some(); + + let lookup = index + .lookup_index.as_ref() + .map_or(false, |li| li.lookup_info.features.patterns.lookup); + + // TODO + let runtime_tables = false; + + let patterns = LookupPatterns { + xor, + lookup, + range_check: range_check0 || range_check1 || rot, + foreign_field_mul: foreign_field_mul, + }; + + FeatureFlags { + range_check0, + range_check1, + foreign_field_add, + foreign_field_mul, + xor, + rot, + lookup_features: LookupFeatures { + patterns, + joint_lookup_used: patterns.joint_lookups_used(), + uses_runtime_tables: runtime_tables, + }, + } + } + pub fn of_wasm( - max_poly_size: i32, - public_: i32, - prev_challenges: i32, - log_size_of_group: i32, - srs: &$WasmSrs, - evals: &WasmPlonkVerificationEvals, - shifts: &WasmShifts, - ) -> (DlogVerifierIndex, Arc>) { + index: WasmPlonkVerifierIndex, + ) -> (DlogVerifierIndex>, Arc>) { + let max_poly_size = index.max_poly_size; + let public_ = index.public_; + let prev_challenges = index.prev_challenges; + let log_size_of_group = index.domain.log_size_of_group; + let srs = &index.srs; + let evals = &index.evals; + let shifts = &index.shifts; + /* let urs_copy = Rc::clone(&*urs); let urs_copy_outer = Rc::clone(&*urs); @@ -629,26 +776,7 @@ macro_rules! impl_verification_key { let (endo_q, _endo_r) = poly_commitment::srs::endos::<$GOther>(); let domain = Domain::<$F>::new(1 << log_size_of_group).unwrap(); - let feature_flags = - FeatureFlags { - range_check0: false, - range_check1: false, - foreign_field_add: false, - foreign_field_mul: false, - rot: false, - xor: false, - lookup_features: - LookupFeatures { - patterns: LookupPatterns { - xor: false, - lookup: false, - range_check: false, - foreign_field_mul: false, }, - joint_lookup_used:false, - uses_runtime_tables: false, - }, - }; - + let feature_flags = compute_feature_flags(&index); let (linearization, powers_of_alpha) = expr_linearization(Some(&feature_flags), true); let index = @@ -666,26 +794,25 @@ macro_rules! 
impl_verification_key { emul_comm: (&evals.emul_comm).into(), endomul_scalar_comm: (&evals.endomul_scalar_comm).into(), - // TODO - range_check0_comm: None, - range_check1_comm: None, - foreign_field_add_comm: None, - foreign_field_mul_comm: None, - rot_comm: None, - xor_comm: None, + xor_comm: (&evals.xor_comm).as_ref().map(Into::into), + range_check0_comm: (&evals.range_check0_comm).as_ref().map(Into::into), + range_check1_comm: (&evals.range_check1_comm).as_ref().map(Into::into), + foreign_field_add_comm: (&evals.foreign_field_add_comm).as_ref().map(Into::into), + foreign_field_mul_comm: (&evals.foreign_field_mul_comm).as_ref().map(Into::into), + rot_comm: (&evals.rot_comm).as_ref().map(Into::into), w: { let res = once_cell::sync::OnceCell::new(); - res.set(zk_w3(domain)).unwrap(); + res.set(zk_w(domain, 3)).unwrap(); res }, endo: endo_q, max_poly_size: max_poly_size as usize, public: public_ as usize, prev_challenges: prev_challenges as usize, - zkpm: { + permutation_vanishing_polynomial_m: { let res = once_cell::sync::OnceCell::new(); - res.set(zk_polynomial(domain)).unwrap(); + res.set(permutation_vanishing_polynomial(domain, 3)).unwrap(); res }, shift: [ @@ -698,30 +825,21 @@ macro_rules! impl_verification_key { shifts.s6.into() ], srs: { - let res = once_cell::sync::OnceCell::new(); - res.set(srs.0.clone()).unwrap(); - res + Arc::clone(&srs.0) }, + + zk_rows: index.zk_rows as u64, + linearization, powers_of_alpha, - // TODO - lookup_index: None, + lookup_index: index.lookup_index.map(Into::into), }; (index, srs.0.clone()) } - impl From for DlogVerifierIndex<$G> { + impl From for DlogVerifierIndex<$G, OpeningProof<$G>> { fn from(index: WasmPlonkVerifierIndex) -> Self { - of_wasm( - index.max_poly_size, - index.public_, - index.prev_challenges, - index.domain.log_size_of_group, - &index.srs, - &index.evals, - &index.shifts, - ) - .0 + of_wasm(index).0 } } @@ -729,11 +847,11 @@ macro_rules! impl_verification_key { offset: Option, srs: &$WasmSrs, path: String, - ) -> Result, JsValue> { + ) -> Result>, JsValue> { let path = Path::new(&path); let (endo_q, _endo_r) = poly_commitment::srs::endos::(); - DlogVerifierIndex::<$G>::from_file( - Some(srs.0.clone()), + DlogVerifierIndex::<$G, OpeningProof<$G>>::from_file( + srs.0.clone(), path, offset.map(|x| x as u64), endo_q, @@ -756,7 +874,7 @@ macro_rules! impl_verification_key { index: WasmPlonkVerifierIndex, path: String, ) -> Result<(), JsValue> { - let index: DlogVerifierIndex<$G> = index.into(); + let index: DlogVerifierIndex<$G, OpeningProof<$G>> = index.into(); let path = Path::new(&path); index.to_file(path, append).map_err(|e| { println!("{}", e); @@ -788,7 +906,7 @@ macro_rules! impl_verification_key { pub fn [<$name:snake _serialize>]( index: WasmPlonkVerifierIndex, ) -> String { - let index: DlogVerifierIndex<$G> = index.into(); + let index: DlogVerifierIndex<$G, OpeningProof<$G>> = index.into(); serde_json::to_string(&index).unwrap() } @@ -796,9 +914,12 @@ macro_rules! impl_verification_key { pub fn [<$name:snake _deserialize>]( srs: &$WasmSrs, index: String, - ) -> WasmPlonkVerifierIndex { - let vi: DlogVerifierIndex<$G> = serde_json::from_str(&index).unwrap(); - return to_wasm(srs, vi.into()) + ) -> Result { + let vi: Result>, serde_json::Error> = serde_json::from_str(&index); + match vi { + Ok(vi) => Ok(to_wasm(srs, vi)), + Err(e) => Err(JsError::new(&(e.to_string()))), + } } #[wasm_bindgen] @@ -861,6 +982,12 @@ macro_rules! 
impl_verification_key { mul_comm: comm(), emul_comm: comm(), endomul_scalar_comm: comm(), + xor_comm: None, + range_check0_comm: None, + range_check1_comm: None, + foreign_field_add_comm: None, + foreign_field_mul_comm: None, + rot_comm: None, }, shifts: WasmShifts { @@ -873,6 +1000,7 @@ macro_rules! impl_verification_key { s6: $F::one().into(), }, lookup_index: None, + zk_rows: 3, } } diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/poly_comm.rs b/src/lib/crypto/kimchi_bindings/wasm/src/poly_comm.rs index 3338c0f23f3..aeefce15bd7 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/src/poly_comm.rs +++ b/src/lib/crypto/kimchi_bindings/wasm/src/poly_comm.rs @@ -29,6 +29,10 @@ macro_rules! impl_poly_comm { impl [] { #[wasm_bindgen(constructor)] pub fn new(unshifted: WasmVector<$WasmG>, shifted: Option<$WasmG>) -> Self { + assert!( + shifted.is_none(), + "mina#14628: Shifted commitments are deprecated and must not be used" + ); WasmPolyComm { unshifted, shifted } } @@ -45,12 +49,12 @@ macro_rules! impl_poly_comm { impl From> for WasmPolyComm { fn from(x: PolyComm<$G>) -> Self { - let PolyComm {unshifted, shifted} = x; + let PolyComm { elems } = x; let unshifted: Vec<$WasmG> = - unshifted.into_iter().map(|x| x.into()).collect(); + elems.into_iter().map(|x| x.into()).collect(); WasmPolyComm { unshifted: unshifted.into(), - shifted: shifted.map(|x| x.into()), + shifted: None } } } @@ -58,10 +62,10 @@ macro_rules! impl_poly_comm { impl From<&PolyComm<$G>> for WasmPolyComm { fn from(x: &PolyComm<$G>) -> Self { let unshifted: Vec<$WasmG> = - x.unshifted.iter().map(|x| x.into()).collect(); + x.elems.iter().map(|x| x.into()).collect(); WasmPolyComm { unshifted: unshifted.into(), - shifted: x.shifted.map(|x| x.into()), + shifted: None, } } } @@ -69,18 +73,24 @@ macro_rules! impl_poly_comm { impl From for PolyComm<$G> { fn from(x: WasmPolyComm) -> Self { let WasmPolyComm {unshifted, shifted} = x; + assert!( + shifted.is_none(), + "mina#14628: Shifted commitments are deprecated and must not be used" + ); PolyComm { - unshifted: (*unshifted).iter().map(|x| { (*x).into() }).collect(), - shifted: shifted.map(|x| x.into()), + elems: (*unshifted).iter().map(|x| { (*x).into() }).collect(), } } } impl From<&WasmPolyComm> for PolyComm<$G> { fn from(x: &WasmPolyComm) -> Self { + assert!( + x.shifted.is_none(), + "mina#14628: Shifted commitments are deprecated and must not be used" + ); PolyComm { - unshifted: x.unshifted.iter().map(|x| { (*x).into() }).collect(), - shifted: x.shifted.map(|x| x.into()), + elems: x.unshifted.iter().map(|x| { (*x).into() }).collect(), } } } diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/srs.rs b/src/lib/crypto/kimchi_bindings/wasm/src/srs.rs index 44707dd4ed2..2e8bf8be9ac 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/src/srs.rs +++ b/src/lib/crypto/kimchi_bindings/wasm/src/srs.rs @@ -3,6 +3,7 @@ use crate::wasm_vector::WasmVector; use ark_poly::UVPolynomial; use ark_poly::{univariate::DensePolynomial, EvaluationDomain, Evaluations}; use paste::paste; +use poly_commitment::SRS as ISRS; use poly_commitment::{commitment::b_poly_coefficients, srs::SRS}; use serde::{Deserialize, Serialize}; use std::ops::Deref; @@ -21,7 +22,6 @@ macro_rules! impl_srs { $G: ty, $WasmPolyComm: ty, $field_name: ident) => { - paste! { #[wasm_bindgen] #[derive(Clone)] @@ -75,10 +75,12 @@ macro_rules! 
impl_srs { srs: &[], log2_size: i32, ) { - let ptr: &mut poly_commitment::srs::SRS<$G> = - unsafe { &mut *(std::sync::Arc::as_ptr(&srs) as *mut _) }; - let domain = EvaluationDomain::<$F>::new(1 << (log2_size as usize)).expect("invalid domain size"); - ptr.add_lagrange_basis(domain); + crate::rayon::run_in_pool(|| { + let ptr: &mut poly_commitment::srs::SRS<$G> = + unsafe { &mut *(std::sync::Arc::as_ptr(&srs) as *mut _) }; + let domain = EvaluationDomain::<$F>::new(1 << (log2_size as usize)).expect("invalid domain size"); + ptr.add_lagrange_basis(domain); + }); } #[wasm_bindgen] @@ -157,7 +159,7 @@ macro_rules! impl_srs { let evals = evals.into_iter().map(Into::into).collect(); let p = Evaluations::<$F>::from_vec_and_domain(evals, x_domain).interpolate(); - Ok(srs.commit_non_hiding(&p, None).into()) + Ok(srs.commit_non_hiding(&p, 1).into()) } #[wasm_bindgen] @@ -169,7 +171,7 @@ macro_rules! impl_srs { let chals: Vec<$F> = chals.into_iter().map(Into::into).collect(); let coeffs = b_poly_coefficients(&chals); let p = DensePolynomial::<$F>::from_coefficients_vec(coeffs); - srs.commit_non_hiding(&p, None) + srs.commit_non_hiding(&p, 1) }); Ok(result.into()) } @@ -213,35 +215,171 @@ macro_rules! impl_srs { // pub mod fp { + use std::collections::HashMap; + use super::*; - use crate::arkworks::{WasmGVesta, WasmPastaFp}; + use crate::arkworks::{WasmGVesta as WasmG, WasmPastaFp}; use crate::poly_comm::vesta::WasmFpPolyComm as WasmPolyComm; - use mina_curves::pasta::{Fp, Vesta}; - - impl_srs!( - caml_fp_srs, - WasmPastaFp, - WasmGVesta, - Fp, - Vesta, - WasmPolyComm, - Fp - ); + use mina_curves::pasta::{Fp, Vesta as G}; + use poly_commitment::PolyComm; + + impl_srs!(caml_fp_srs, WasmPastaFp, WasmG, Fp, G, WasmPolyComm, Fp); + + #[wasm_bindgen] + pub fn caml_fp_srs_create_parallel(depth: i32) -> WasmFpSrs { + crate::rayon::run_in_pool(|| Arc::new(SRS::::create_parallel(depth as usize)).into()) + } + + // return the cloned srs in a form that we can store on the js side + #[wasm_bindgen] + pub fn caml_fp_srs_get(srs: &WasmFpSrs) -> WasmVector { + // return a vector which consists of h, then all the gs + let mut h_and_gs: Vec = vec![srs.0.h.clone().into()]; + h_and_gs.extend(srs.0.g.iter().map(|x: &G| WasmG::from(x.clone()))); + h_and_gs.into() + } + + // set the srs from a vector of h and gs + #[wasm_bindgen] + pub fn caml_fp_srs_set(h_and_gs: WasmVector) -> WasmFpSrs { + // return a vector which consists of h, then all the gs + let mut h_and_gs: Vec = h_and_gs.into_iter().map(|x| x.into()).collect(); + let h = h_and_gs.remove(0); + let g = h_and_gs; + let srs = SRS:: { + h, + g, + lagrange_bases: HashMap::new(), + }; + Arc::new(srs).into() + } + + // maybe get lagrange commitment + #[wasm_bindgen] + pub fn caml_fp_srs_maybe_lagrange_commitment( + srs: &WasmFpSrs, + domain_size: i32, + i: i32, + ) -> Option { + let bases = srs.0.lagrange_bases.get(&(domain_size as usize)); + bases.map(|bases| bases[i as usize].clone().into()) + } + + // set entire lagrange basis from input + #[wasm_bindgen] + pub fn caml_fp_srs_set_lagrange_basis( + srs: &WasmFpSrs, + domain_size: i32, + input_bases: WasmVector, + ) { + let bases: Vec> = input_bases.into_iter().map(Into::into).collect(); + + // add to srs + let ptr: &mut poly_commitment::srs::SRS = + unsafe { &mut *(std::sync::Arc::as_ptr(&srs) as *mut _) }; + ptr.lagrange_bases.insert(domain_size as usize, bases); + } + + // compute & add lagrange basis internally, return the entire basis + #[wasm_bindgen] + pub fn caml_fp_srs_get_lagrange_basis( + srs: &WasmFpSrs, + 
domain_size: i32, + ) -> WasmVector { + // compute lagrange basis + crate::rayon::run_in_pool(|| { + let ptr: &mut poly_commitment::srs::SRS = + unsafe { &mut *(std::sync::Arc::as_ptr(&srs) as *mut _) }; + let domain = + EvaluationDomain::::new(domain_size as usize).expect("invalid domain size"); + ptr.add_lagrange_basis(domain); + }); + let bases = &srs.0.lagrange_bases[&(domain_size as usize)]; + bases.into_iter().map(Into::into).collect() + } } pub mod fq { + use std::collections::HashMap; + use super::*; - use crate::arkworks::{WasmGPallas, WasmPastaFq}; + use crate::arkworks::{WasmGPallas as WasmG, WasmPastaFq}; use crate::poly_comm::pallas::WasmFqPolyComm as WasmPolyComm; - use mina_curves::pasta::{Fq, Pallas as GAffine}; - - impl_srs!( - caml_fq_srs, - WasmPastaFq, - WasmGPallas, - Fq, - GAffine, - WasmPolyComm, - Fq - ); + use mina_curves::pasta::{Fq, Pallas as G}; + use poly_commitment::PolyComm; + + impl_srs!(caml_fq_srs, WasmPastaFq, WasmG, Fq, G, WasmPolyComm, Fq); + + #[wasm_bindgen] + pub fn caml_fq_srs_create_parallel(depth: i32) -> WasmFqSrs { + crate::rayon::run_in_pool(|| Arc::new(SRS::::create_parallel(depth as usize)).into()) + } + + // return the cloned srs in a form that we can store on the js side + #[wasm_bindgen] + pub fn caml_fq_srs_get(srs: &WasmFqSrs) -> WasmVector { + // return a vector which consists of h, then all the gs + let mut h_and_gs: Vec = vec![srs.0.h.clone().into()]; + h_and_gs.extend(srs.0.g.iter().map(|x: &G| WasmG::from(x.clone()))); + h_and_gs.into() + } + + // set the srs from a vector of h and gs + #[wasm_bindgen] + pub fn caml_fq_srs_set(h_and_gs: WasmVector) -> WasmFqSrs { + // return a vector which consists of h, then all the gs + let mut h_and_gs: Vec = h_and_gs.into_iter().map(|x| x.into()).collect(); + let h = h_and_gs.remove(0); + let g = h_and_gs; + let srs = SRS:: { + h, + g, + lagrange_bases: HashMap::new(), + }; + Arc::new(srs).into() + } + + // maybe get lagrange commitment + #[wasm_bindgen] + pub fn caml_fq_srs_maybe_lagrange_commitment( + srs: &WasmFqSrs, + domain_size: i32, + i: i32, + ) -> Option { + let bases = srs.0.lagrange_bases.get(&(domain_size as usize)); + bases.map(|bases| bases[i as usize].clone().into()) + } + + // set entire lagrange basis from input + #[wasm_bindgen] + pub fn caml_fq_srs_set_lagrange_basis( + srs: &WasmFqSrs, + domain_size: i32, + input_bases: WasmVector, + ) { + let bases: Vec> = input_bases.into_iter().map(Into::into).collect(); + + // add to srs + let ptr: &mut poly_commitment::srs::SRS = + unsafe { &mut *(std::sync::Arc::as_ptr(&srs) as *mut _) }; + ptr.lagrange_bases.insert(domain_size as usize, bases); + } + + // compute & add lagrange basis internally, return the entire basis + #[wasm_bindgen] + pub fn caml_fq_srs_get_lagrange_basis( + srs: &WasmFqSrs, + domain_size: i32, + ) -> WasmVector { + // compute lagrange basis + crate::rayon::run_in_pool(|| { + let ptr: &mut poly_commitment::srs::SRS = + unsafe { &mut *(std::sync::Arc::as_ptr(&srs) as *mut _) }; + let domain = + EvaluationDomain::::new(domain_size as usize).expect("invalid domain size"); + ptr.add_lagrange_basis(domain); + }); + let bases = &srs.0.lagrange_bases[&(domain_size as usize)]; + bases.into_iter().map(Into::into).collect() + } } diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/de.rs b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/de.rs index 39583717112..d990e856a13 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/de.rs +++ 
diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/de.rs b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/de.rs index 39583717112..d990e856a13 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/de.rs +++ b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/de.rs @@ -273,8 +273,9 @@ impl<'de> de::Deserializer<'de> for Deserializer { } } + #[inline] fn is_human_readable(&self) -> bool { - true + false } } diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/ser.rs b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/ser.rs index 5dbe8b9e9dd..ab4a4f9e95e 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/ser.rs +++ b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_ocaml_serde/ser.rs @@ -142,6 +142,11 @@ impl<'s> ser::Serializer for &'s Serializer { type SerializeStruct = ArraySerializer<'s>; type SerializeStructVariant = ErrorSerializer; + #[inline] + fn is_human_readable(&self) -> bool { + false + } + fn serialize_bool(self, v: bool) -> Result { if v { self.0.serialize_u32(1) diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/wasm_vector.rs b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_vector.rs index f93a1247694..2ee0f6ef27a 100644 --- a/src/lib/crypto/kimchi_bindings/wasm/src/wasm_vector.rs +++ b/src/lib/crypto/kimchi_bindings/wasm/src/wasm_vector.rs @@ -1,7 +1,9 @@ -use wasm_bindgen::convert::{FromWasmAbi, IntoWasmAbi, OptionFromWasmAbi, OptionIntoWasmAbi}; - +use crate::wasm_flat_vector::WasmFlatVector; +use paste::paste; use std::convert::From; use std::ops::Deref; +use wasm_bindgen::convert::{FromWasmAbi, IntoWasmAbi, OptionFromWasmAbi, OptionIntoWasmAbi}; +use wasm_bindgen::prelude::*; #[derive(Clone, Debug)] pub struct WasmVector<T>(Vec<T>); @@ -114,3 +116,51 @@ impl<T: IntoWasmAbi<Abi = u32>> OptionIntoWasmAbi for WasmVector<T> { <Vec<u32> as OptionIntoWasmAbi>::none() } } + +macro_rules! impl_vec_vec_fp { + ( $F:ty, $WasmF:ty ) => { + paste! { + #[wasm_bindgen] + pub struct [<WasmVecVec $F:camel>](#[wasm_bindgen(skip)] pub Vec<Vec<$F>>); + + #[wasm_bindgen] + impl [<WasmVecVec $F:camel>] { + #[wasm_bindgen(constructor)] + pub fn create(n: i32) -> Self { + [<WasmVecVec $F:camel>](Vec::with_capacity(n as usize)) + } + + #[wasm_bindgen] + pub fn push(&mut self, x: WasmFlatVector<$WasmF>) { + self.0.push(x.into_iter().map(Into::into).collect()) + } + + #[wasm_bindgen] + pub fn get(&self, i: i32) -> WasmFlatVector<$WasmF> { + self.0[i as usize].clone().into_iter().map(Into::into).collect() + } + + #[wasm_bindgen] + pub fn set(&mut self, i: i32, x: WasmFlatVector<$WasmF>) { + self.0[i as usize] = x.into_iter().map(Into::into).collect() + } + } + } + }; +} + +pub mod fp { + use super::*; + use crate::arkworks::WasmPastaFp; + use mina_curves::pasta::Fp; + + impl_vec_vec_fp!(Fp, WasmPastaFp); +} + +pub mod fq { + use super::*; + use crate::arkworks::WasmPastaFq; + use mina_curves::pasta::Fq; + + impl_vec_vec_fp!(Fq, WasmPastaFq); +} diff --git a/src/lib/crypto/proof-systems b/src/lib/crypto/proof-systems index fd7c19d4c5d..3ba74ba368d 160000 --- a/src/lib/crypto/proof-systems +++ b/src/lib/crypto/proof-systems @@ -1 +1 @@ -Subproject commit fd7c19d4c5dbb6826d1d5ce6bc24796a44087f70 +Subproject commit 3ba74ba368d088011f4566e7953300d2a06607d5 diff --git a/src/lib/crypto/snarky_tests/examples/field.json b/src/lib/crypto/snarky_tests/examples/field.json index 2a2e0d2c8ae..bc311158322 100644 --- a/src/lib/crypto/snarky_tests/examples/field.json +++ b/src/lib/crypto/snarky_tests/examples/field.json @@ -1 +1 @@
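The fixture hunks from here to the end of the section rewrite the snarky_tests examples: each gate coefficient, previously serialized as a 32-entry little-endian byte array, becomes the same 32 bytes rendered as a lowercase hex string, so [1,0,...,0] turns into "0100...00" and [0,0,0,0,237,48,45,153,...] into "00000000ed302d99...". A small sketch of the correspondence; coeff_hex is illustrative, not part of the patch:

// Hedged sketch: the old byte-array coefficients and the new hex-string
// coefficients carry the same 32 little-endian bytes.
fn coeff_hex(bytes: &[u8; 32]) -> String {
    bytes.iter().map(|b| format!("{:02x}", b)).collect()
}

fn main() {
    // The field element 1, as it appears throughout the rewritten fixtures.
    let mut one = [0u8; 32];
    one[0] = 1;
    assert_eq!(
        coeff_hex(&one),
        "0100000000000000000000000000000000000000000000000000000000000000"
    );
}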
-{"public_input_size":1,"gates":[{"typ":"Generic","wires":[{"row":1,"col":3},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":1,"col":5},{"row":1,"col":1},{"row":1,"col":2},{"row":2,"col":0},{"row":1,"col":4},{"row":1,"col":0},{"row":1,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[255,255,255,255,236,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":2,"col":3},{"row":2,"col":1},{"row":3,"col":0},{"row":0,"col":0},{"row":2,"col":4},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64]]},{"typ":"Generic","wires":[{"row":2,"col":2},{"row":3,"col":1},{"row":3,"col":2},{"row":3,"col":3},{"row":3,"col":4},{"row":3,"col":5},{"row":3,"col":6}],"coeffs":[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64]]}]} 
+{"public_input_size":1,"gates":[{"typ":"Generic","wires":[{"row":1,"col":3},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":1,"col":5},{"row":1,"col":1},{"row":1,"col":2},{"row":2,"col":0},{"row":1,"col":4},{"row":1,"col":0},{"row":1,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","ffffffffec302d991bf94c09fc98462200000000000000000000000000000040","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":2,"col":3},{"row":2,"col":1},{"row":3,"col":0},{"row":0,"col":0},{"row":2,"col":4},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040"]},{"typ":"Generic","wires":[{"row":2,"col":2},{"row":3,"col":1},{"row":3,"col":2},{"row":3,"col":3},{"row":3,"col":4},{"row":3,"col":5},{"row":3,"col":6}],"coeffs":["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040"]}]} diff --git a/src/lib/crypto/snarky_tests/examples/output.json b/src/lib/crypto/snarky_tests/examples/output.json index a6583a92c15..808ec834681 100644 --- a/src/lib/crypto/snarky_tests/examples/output.json +++ b/src/lib/crypto/snarky_tests/examples/output.json @@ -1 +1 @@ 
-{"public_input_size":2,"gates":[{"typ":"Generic","wires":[{"row":1,"col":0},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":2,"col":0},{"row":1,"col":1},{"row":1,"col":2},{"row":1,"col":3},{"row":1,"col":4},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":2,"col":1},{"row":0,"col":0},{"row":2,"col":2},{"row":2,"col":3},{"row":2,"col":4},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":[[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]}]} +{"public_input_size":2,"gates":[{"typ":"Generic","wires":[{"row":1,"col":0},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":2,"col":0},{"row":1,"col":1},{"row":1,"col":2},{"row":1,"col":3},{"row":1,"col":4},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":2,"col":1},{"row":0,"col":0},{"row":2,"col":2},{"row":2,"col":3},{"row":2,"col":4},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":["00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]}]} diff --git a/src/lib/crypto/snarky_tests/examples/range_gt.json b/src/lib/crypto/snarky_tests/examples/range_gt.json index 8e5643ce1ca..6e36180ca78 100644 --- a/src/lib/crypto/snarky_tests/examples/range_gt.json +++ b/src/lib/crypto/snarky_tests/examples/range_gt.json @@ -1 +1 @@ 
-{"public_input_size":1,"gates":[{"typ":"Generic","wires":[{"row":3,"col":0},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":1,"col":1},{"row":2,"col":0},{"row":1,"col":2},{"row":1,"col":4},{"row":3,"col":3},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":[[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":5,"col":3},{"row":2,"col":3},{"row":3,"col":4},{"row":2,"col":4},{"row":6,"col":0},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":[[2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":0,"col":0},{"row":3,"col":1},{"row":4,"col":5},{"row":4,"col":0},{"row":2,"col":2},{"row":4,"col":3},{"row":3,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":1,"col":3},{"row":4,"col":1},{"row":5,"col":0},{"row":3,"col":5},{"row":4,"col":4},{"row":3,"col":2},{"row":4,"col":6}],"coeffs":[[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":4,"col":2},{"row":5,"col":5},{"row":6,"col":3},{"row":1,"col":0},{"row":5,"col":4},{"row":5,"col":1},{"row":5,"col":6}],"coeffs":[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":2,"col":1},{"row":6,"col":5},{"row":7,"col":0},{"row":5,"col":2},{"row":6,"col":4},{"row":6,"col":1},{"row":6,"col":6}],"coeffs":[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":6,"col":2},{"row":7,"col":1},{"row":7,"col":2},{"row":7,"col":3},{"row":7,"col":4},{"row":7,"col":5},{"row":7,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64]]}]} 
+{"public_input_size":1,"gates":[{"typ":"Generic","wires":[{"row":3,"col":0},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":1,"col":1},{"row":2,"col":0},{"row":1,"col":2},{"row":1,"col":4},{"row":3,"col":3},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":["00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":5,"col":3},{"row":2,"col":3},{"row":3,"col":4},{"row":2,"col":4},{"row":6,"col":0},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":["0200000000000000000000000000000000000000000000000000000000000000","0400000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":0,"col":0},{"row":3,"col":1},{"row":4,"col":5},{"row":4,"col":0},{"row":2,"col":2},{"row":4,"col":3},{"row":3,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":1,"col":3},{"row":4,"col":1},{"row":5,"col":0},{"row":3,"col":5},{"row":4,"col":4},{"row":3,"col":2},{"row":4,"col":6}],"coeffs":["00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","010000000000000000
0000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":4,"col":2},{"row":5,"col":5},{"row":6,"col":3},{"row":1,"col":0},{"row":5,"col":4},{"row":5,"col":1},{"row":5,"col":6}],"coeffs":["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":2,"col":1},{"row":6,"col":5},{"row":7,"col":0},{"row":5,"col":2},{"row":6,"col":4},{"row":6,"col":1},{"row":6,"col":6}],"coeffs":["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":6,"col":2},{"row":7,"col":1},{"row":7,"col":2},{"row":7,"col":3},{"row":7,"col":4},{"row":7,"col":5},{"row":7,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040"]}]} diff --git a/src/lib/crypto/snarky_tests/examples/range_gte.json b/src/lib/crypto/snarky_tests/examples/range_gte.json index 345f913541f..2ab65fd0e35 100644 --- a/src/lib/crypto/snarky_tests/examples/range_gte.json +++ b/src/lib/crypto/snarky_tests/examples/range_gte.json @@ -1 +1 @@ 
-{"public_input_size":1,"gates":[{"typ":"Generic","wires":[{"row":3,"col":0},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":1,"col":1},{"row":2,"col":0},{"row":1,"col":2},{"row":1,"col":4},{"row":3,"col":3},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":[[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":5,"col":3},{"row":2,"col":3},{"row":3,"col":4},{"row":2,"col":4},{"row":6,"col":0},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":[[2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":0,"col":0},{"row":3,"col":1},{"row":4,"col":5},{"row":4,"col":0},{"row":2,"col":2},{"row":4,"col":3},{"row":3,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":1,"col":3},{"row":4,"col":1},{"row":5,"col":0},{"row":3,"col":5},{"row":4,"col":4},{"row":3,"col":2},{"row":4,"col":6}],"coeffs":[[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":4,"col":2},{"row":5,"col":5},{"row":6,"col":3},{"row":1,"col":0},{"row":5,"col":4},{"row":5,"col":1},{"row":5,"col":6}],"coeffs":[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":7,"col":0},{"row":6,"col":5},{"row":6,"col":2},{"row":5,"col":2},{"row":6,"col":4},{"row":6,"col":1},{"row":6,"col":6}],"coeffs":[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":2,"col":1},{"row":7,"col":1},{"row":7,"col":2},{"row":7,"col":3},{"row":7,"col":4},{"row":7,"col":5},{"row":7,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64]]}]} 
+{"public_input_size":1,"gates":[{"typ":"Generic","wires":[{"row":3,"col":0},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":1,"col":1},{"row":2,"col":0},{"row":1,"col":2},{"row":1,"col":4},{"row":3,"col":3},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":["00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":5,"col":3},{"row":2,"col":3},{"row":3,"col":4},{"row":2,"col":4},{"row":6,"col":0},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":["0200000000000000000000000000000000000000000000000000000000000000","0400000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":0,"col":0},{"row":3,"col":1},{"row":4,"col":5},{"row":4,"col":0},{"row":2,"col":2},{"row":4,"col":3},{"row":3,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":1,"col":3},{"row":4,"col":1},{"row":5,"col":0},{"row":3,"col":5},{"row":4,"col":4},{"row":3,"col":2},{"row":4,"col":6}],"coeffs":["00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","010000000000000000
0000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":4,"col":2},{"row":5,"col":5},{"row":6,"col":3},{"row":1,"col":0},{"row":5,"col":4},{"row":5,"col":1},{"row":5,"col":6}],"coeffs":["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":7,"col":0},{"row":6,"col":5},{"row":6,"col":2},{"row":5,"col":2},{"row":6,"col":4},{"row":6,"col":1},{"row":6,"col":6}],"coeffs":["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":2,"col":1},{"row":7,"col":1},{"row":7,"col":2},{"row":7,"col":3},{"row":7,"col":4},{"row":7,"col":5},{"row":7,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040"]}]} diff --git a/src/lib/crypto/snarky_tests/examples/range_lt.json b/src/lib/crypto/snarky_tests/examples/range_lt.json index 09eb59cf4e4..5c3b5dc7e08 100644 --- a/src/lib/crypto/snarky_tests/examples/range_lt.json +++ b/src/lib/crypto/snarky_tests/examples/range_lt.json @@ -1 +1 @@ 
-{"public_input_size":1,"gates":[{"typ":"Generic","wires":[{"row":3,"col":0},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":1,"col":1},{"row":2,"col":0},{"row":1,"col":2},{"row":1,"col":4},{"row":3,"col":3},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":[[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":5,"col":3},{"row":2,"col":3},{"row":3,"col":4},{"row":2,"col":4},{"row":6,"col":0},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":[[2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":0,"col":0},{"row":3,"col":1},{"row":4,"col":5},{"row":4,"col":0},{"row":2,"col":2},{"row":4,"col":3},{"row":3,"col":6}],"coeffs":[[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":1,"col":3},{"row":4,"col":1},{"row":5,"col":0},{"row":3,"col":5},{"row":4,"col":4},{"row":3,"col":2},{"row":4,"col":6}],"coeffs":[[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":4,"col":2},{"row":5,"col":5},{"row":6,"col":3},{"row":1,"col":0},{"row":5,"col":4},{"row":5,"col":1},{"row":5,"col":6}],"coeffs":[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":2,"col":1},{"row":6,"col":5},{"row":7,"col":0},{"row":5,"col":2},{"row":6,"col":4},{"row":6,"col":1},{"row":6,"col":6}],"coeffs":[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":6,"col":2},{"row":7,"col":1},{"row":7,"col":2},{"row":7,"col":3},{"row":7,"col":4},{"row":7,"col":5},{"row":7,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64]]}]} 
+{"public_input_size":1,"gates":[{"typ":"Generic","wires":[{"row":3,"col":0},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":1,"col":1},{"row":2,"col":0},{"row":1,"col":2},{"row":1,"col":4},{"row":3,"col":3},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":["00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":5,"col":3},{"row":2,"col":3},{"row":3,"col":4},{"row":2,"col":4},{"row":6,"col":0},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":["0200000000000000000000000000000000000000000000000000000000000000","0400000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":0,"col":0},{"row":3,"col":1},{"row":4,"col":5},{"row":4,"col":0},{"row":2,"col":2},{"row":4,"col":3},{"row":3,"col":6}],"coeffs":["00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0700000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":1,"col":3},{"row":4,"col":1},{"row":5,"col":0},{"row":3,"col":5},{"row":4,"col":4},{"row":3,"col":2},{"row":4,"col":6}],"coeffs":["00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","010000000000000000
0000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":4,"col":2},{"row":5,"col":5},{"row":6,"col":3},{"row":1,"col":0},{"row":5,"col":4},{"row":5,"col":1},{"row":5,"col":6}],"coeffs":["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":2,"col":1},{"row":6,"col":5},{"row":7,"col":0},{"row":5,"col":2},{"row":6,"col":4},{"row":6,"col":1},{"row":6,"col":6}],"coeffs":["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":6,"col":2},{"row":7,"col":1},{"row":7,"col":2},{"row":7,"col":3},{"row":7,"col":4},{"row":7,"col":5},{"row":7,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040"]}]} diff --git a/src/lib/crypto/snarky_tests/examples/range_lte.json b/src/lib/crypto/snarky_tests/examples/range_lte.json index 06e3bcfa880..72b6d32b026 100644 --- a/src/lib/crypto/snarky_tests/examples/range_lte.json +++ b/src/lib/crypto/snarky_tests/examples/range_lte.json @@ -1 +1 @@ 
-{"public_input_size":1,"gates":[{"typ":"Generic","wires":[{"row":3,"col":0},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":1,"col":1},{"row":2,"col":0},{"row":1,"col":2},{"row":1,"col":4},{"row":3,"col":3},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":[[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":5,"col":3},{"row":2,"col":3},{"row":3,"col":4},{"row":2,"col":4},{"row":6,"col":0},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":[[2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":0,"col":0},{"row":3,"col":1},{"row":4,"col":5},{"row":4,"col":0},{"row":2,"col":2},{"row":4,"col":3},{"row":3,"col":6}],"coeffs":[[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":1,"col":3},{"row":4,"col":1},{"row":5,"col":0},{"row":3,"col":5},{"row":4,"col":4},{"row":3,"col":2},{"row":4,"col":6}],"coeffs":[[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":4,"col":2},{"row":5,"col":5},{"row":6,"col":3},{"row":1,"col":0},{"row":5,"col":4},{"row":5,"col":1},{"row":5,"col":6}],"coeffs":[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":7,"col":0},{"row":6,"col":5},{"row":6,"col":2},{"row":5,"col":2},{"row":6,"col":4},{"row":6,"col":1},{"row":6,"col":6}],"coeffs":[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":2,"col":1},{"row":7,"col":1},{"row":7,"col":2},{"row":7,"col":3},{"row":7,"col":4},{"row":7,"col":5},{"row":7,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64]]}]} 
+{"public_input_size":1,"gates":[{"typ":"Generic","wires":[{"row":3,"col":0},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":1,"col":1},{"row":2,"col":0},{"row":1,"col":2},{"row":1,"col":4},{"row":3,"col":3},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":["00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":5,"col":3},{"row":2,"col":3},{"row":3,"col":4},{"row":2,"col":4},{"row":6,"col":0},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":["0200000000000000000000000000000000000000000000000000000000000000","0400000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":0,"col":0},{"row":3,"col":1},{"row":4,"col":5},{"row":4,"col":0},{"row":2,"col":2},{"row":4,"col":3},{"row":3,"col":6}],"coeffs":["00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0700000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":1,"col":3},{"row":4,"col":1},{"row":5,"col":0},{"row":3,"col":5},{"row":4,"col":4},{"row":3,"col":2},{"row":4,"col":6}],"coeffs":["00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","010000000000000000
0000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":4,"col":2},{"row":5,"col":5},{"row":6,"col":3},{"row":1,"col":0},{"row":5,"col":4},{"row":5,"col":1},{"row":5,"col":6}],"coeffs":["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":7,"col":0},{"row":6,"col":5},{"row":6,"col":2},{"row":5,"col":2},{"row":6,"col":4},{"row":6,"col":1},{"row":6,"col":6}],"coeffs":["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":2,"col":1},{"row":7,"col":1},{"row":7,"col":2},{"row":7,"col":3},{"row":7,"col":4},{"row":7,"col":5},{"row":7,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040"]}]} diff --git a/src/lib/crypto/snarky_tests/examples/simple.json b/src/lib/crypto/snarky_tests/examples/simple.json index 90cf3bcdf57..203f475af49 100644 --- a/src/lib/crypto/snarky_tests/examples/simple.json +++ b/src/lib/crypto/snarky_tests/examples/simple.json @@ -1 +1 @@ 
-{"public_input_size":2,"gates":[{"typ":"Generic","wires":[{"row":2,"col":3},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":2,"col":0},{"row":1,"col":1},{"row":1,"col":2},{"row":1,"col":3},{"row":1,"col":4},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":2,"col":1},{"row":4,"col":1},{"row":2,"col":2},{"row":2,"col":4},{"row":4,"col":0},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":[[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":3,"col":1},{"row":4,"col":4},{"row":3,"col":2},{"row":3,"col":4},{"row":4,"col":3},{"row":3,"col":5},{"row":3,"col":6}],"coeffs":[[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":0,"col":0},{"row":1,"col":0},{"row":4,"col":5},{"row":3,"col":3},{"row":3,"col":0},{"row":5,"col":0},{"row":4,"col":6}],"coeffs":[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":4,"col":2},{"row":5,"col":1},{"row":5,"col":2},{"row":5,"col":3},{"row":5,"col":4},{"row":5,"col":5},{"row":5,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64]]}]} +{"public_input_size":2,"gates":[{"typ":"Generic","wires":[{"row":2,"col":3},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":2,"col":0},{"row":1,"col":1},{"row":1,"col":2},{"row":1,"col":3},{"row":1,"col":4},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":2,"col":1},{"row":4,"col":1},{"row":2,"col":2},{"row":2,"col":4},{"row":4,"col":0},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":["00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":3,"col":1},{"row":4,"col":4},{"row":3,"col":2},{"row":3,"col":4},{"row":4,"col":3},{"row":3,"col":5},{"row":3,"col":6}],"coeffs":["00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":0,"col":0},{"row":1,"col":0},{"row":4,"col":5},{"row":3,"col":3},{"row":3,"col":0},{"row":5,"col":0},{"row":4,"col":6}],"coeffs":["0000000000000000000000000000000000000000000000000000000000000000","00000000000000000000000000000000000000000000000000000000000000
00","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":4,"col":2},{"row":5,"col":1},{"row":5,"col":2},{"row":5,"col":3},{"row":5,"col":4},{"row":5,"col":5},{"row":5,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040"]}]} diff --git a/src/lib/crypto/snarky_tests/examples/ternary.json b/src/lib/crypto/snarky_tests/examples/ternary.json index bccd94ee56f..44208f53cb1 100644 --- a/src/lib/crypto/snarky_tests/examples/ternary.json +++ b/src/lib/crypto/snarky_tests/examples/ternary.json @@ -1 +1 @@ -{"public_input_size":1,"gates":[{"typ":"Generic","wires":[{"row":1,"col":0},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":1,"col":3},{"row":1,"col":1},{"row":2,"col":0},{"row":1,"col":4},{"row":0,"col":0},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,237,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]},{"typ":"Generic","wires":[{"row":1,"col":2},{"row":2,"col":1},{"row":2,"col":2},{"row":2,"col":3},{"row":2,"col":4},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":[[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[255,255,255,255,236,48,45,153,27,249,76,9,252,152,70,34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64]]}]} 
+{"public_input_size":1,"gates":[{"typ":"Generic","wires":[{"row":1,"col":0},{"row":0,"col":1},{"row":0,"col":2},{"row":0,"col":3},{"row":0,"col":4},{"row":0,"col":5},{"row":0,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":1,"col":3},{"row":1,"col":1},{"row":2,"col":0},{"row":1,"col":4},{"row":0,"col":0},{"row":1,"col":5},{"row":1,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","00000000ed302d991bf94c09fc98462200000000000000000000000000000040","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]},{"typ":"Generic","wires":[{"row":1,"col":2},{"row":2,"col":1},{"row":2,"col":2},{"row":2,"col":3},{"row":2,"col":4},{"row":2,"col":5},{"row":2,"col":6}],"coeffs":["0100000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","ffffffffec302d991bf94c09fc98462200000000000000000000000000000040"]}]} diff --git a/src/lib/crypto/snarky_tests/snarky_tests.ml b/src/lib/crypto/snarky_tests/snarky_tests.ml index 53b2a3bdd58..10d98acbf74 100644 --- a/src/lib/crypto/snarky_tests/snarky_tests.ml +++ b/src/lib/crypto/snarky_tests/snarky_tests.ml @@ -664,8 +664,6 @@ let api_tests = ; ("compile monadic API", `Quick, MonadicAPI.get_hash_of_circuit) ] -(* run tests *) - let () = let range_checks = List.map ~f:QCheck_alcotest.to_alcotest [ RangeCircuits.test_range_gates ] diff --git a/src/lib/currency/currency.ml b/src/lib/currency/currency.ml index 68382841399..e192f7e3afd 100644 --- a/src/lib/currency/currency.ml +++ b/src/lib/currency/currency.ml @@ -95,9 +95,6 @@ module Make_str (A : Wire_types.Concrete) = struct type t = Unsigned.t [@@deriving sexp, compare, hash] - (* can't be automatically derived *) - let dhall_type = Ppx_dhall_type.Dhall_type.Text - [%%define_locally Unsigned.(to_uint64, of_uint64, of_int, to_int, of_string, to_string)] @@ -947,7 +944,7 @@ module Make_str (A : Wire_types.Concrete) = struct type t = Unsigned_extended.UInt64.Stable.V1.t [@@deriving sexp, compare, hash, equal] - [%%define_from_scope to_yojson, of_yojson, dhall_type] + [%%define_from_scope to_yojson, of_yojson] let to_latest = Fn.id end @@ -978,9 +975,6 @@ module Make_str (A : Wire_types.Concrete) = struct [@@@with_all_version_tags] type t = A.t [@@deriving sexp, compare, hash, equal, yojson] - - (* not automatically derived *) - val dhall_type : Ppx_dhall_type.Dhall_type.t end end] @@ -1094,7 +1088,7 @@ module Make_str (A : Wire_types.Concrete) = struct type t = Unsigned_extended.UInt64.Stable.V1.t [@@deriving sexp, compare, hash, equal, yojson] - [%%define_from_scope 
to_yojson, of_yojson, dhall_type] + [%%define_from_scope to_yojson, of_yojson] let to_latest = Fn.id end @@ -1139,9 +1133,6 @@ module Make_str (A : Wire_types.Concrete) = struct [@@deriving sexp, compare, equal, hash, yojson] let to_latest = Fn.id - - (* can't be automatically derived *) - let dhall_type = Ppx_dhall_type.Dhall_type.Text end end] diff --git a/src/lib/currency/dune b/src/lib/currency/dune index 26bdb0c4d26..1b839334a76 100644 --- a/src/lib/currency/dune +++ b/src/lib/currency/dune @@ -19,7 +19,6 @@ ;; local libraries bignum_bigint bitstring_lib - ppx_dhall_type codable test_util unsigned_extended diff --git a/src/lib/currency/intf.ml b/src/lib/currency/intf.ml index 197a229b463..46e3cf39565 100644 --- a/src/lib/currency/intf.ml +++ b/src/lib/currency/intf.ml @@ -11,9 +11,6 @@ module type Basic = sig type magnitude = t [@@deriving sexp, compare] - (* not automatically derived *) - val dhall_type : Ppx_dhall_type.Dhall_type.t - val max_int : t val length_in_bits : int @@ -310,9 +307,6 @@ module type Full = sig [@@@with_all_version_tags] type t [@@deriving sexp, compare, hash, yojson, equal] - - (* not automatically derived *) - val dhall_type : Ppx_dhall_type.Dhall_type.t end end] @@ -370,9 +364,6 @@ module type Full = sig [@@@with_all_version_tags] type t [@@deriving sexp, compare, hash, equal, yojson] - - (* not automatically derived *) - val dhall_type : Ppx_dhall_type.Dhall_type.t end end] @@ -442,9 +433,6 @@ module type Full = sig module Stable : sig module V1 : sig type t [@@deriving sexp, compare, hash, yojson, equal] - - (* not automatically derived *) - val dhall_type : Ppx_dhall_type.Dhall_type.t end end] diff --git a/src/lib/dummy_values/gen_values/gen_values.ml b/src/lib/dummy_values/gen_values/gen_values.ml index c468d60cfc8..83c6c506ee0 100644 --- a/src/lib/dummy_values/gen_values/gen_values.ml +++ b/src/lib/dummy_values/gen_values/gen_values.ml @@ -17,13 +17,15 @@ let str ~loc = end) in let open E in [%str - let blockchain_proof, transaction_proof = - ( Core_kernel.Binable.of_string - (module Pickles.Proof.Proofs_verified_2.Stable.Latest) - [%e estring blockchain_proof_string] - , Core_kernel.Binable.of_string - (module Pickles.Proof.Proofs_verified_2.Stable.Latest) - [%e estring transaction_proof_string] )] + let blockchain_proof () = + Core_kernel.Binable.of_string + (module Pickles.Proof.Proofs_verified_2.Stable.Latest) + [%e estring blockchain_proof_string] + + let transaction_proof () = + Core_kernel.Binable.of_string + (module Pickles.Proof.Proofs_verified_2.Stable.Latest) + [%e estring transaction_proof_string]] let main () = let fmt = diff --git a/src/lib/genesis_ledger_helper/dune b/src/lib/genesis_ledger_helper/dune index d90811f0051..7c2b6e44580 100644 --- a/src/lib/genesis_ledger_helper/dune +++ b/src/lib/genesis_ledger_helper/dune @@ -44,6 +44,7 @@ logger mina_base.import staged_ledger_diff + file_system ) (instrumentation (backend bisect_ppx)) (preprocess (pps ppx_mina ppx_jane ppx_version ppx_inline_test ppx_let ppx_deriving.std ppx_deriving_yojson ppx_custom_printf))) diff --git a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml index 3c061d639fb..54b1527cf54 100644 --- a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml +++ b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml @@ -112,7 +112,8 @@ module Ledger = struct ; List.to_string balances ~f:(fun (i, balance) -> sprintf "%i %s" i (Currency.Balance.to_string balance) ) ; (* Distinguish ledgers when the hash function is 
different. *) - Snark_params.Tick.Field.to_string Mina_base.Account.empty_digest + Snark_params.Tick.Field.to_string + (Lazy.force Mina_base.Account.empty_digest) ; (* Distinguish ledgers when the account record layout has changed. *) Bin_prot.Writer.to_string Mina_base.Account.Stable.Latest.bin_writer_t Mina_base.Account.empty @@ -234,32 +235,10 @@ module Ledger = struct Tar.filename_without_extension @@ Filename.basename filename in let dirname = genesis_dir ^/ dirname in - (* remove dir if it exists *) - let%bind () = - if%bind file_exists ~follow_symlinks:true dirname then ( - [%log trace] "Genesis ledger dir $path already exists, removing" - ~metadata:[ ("path", `String dirname) ] ; - let rec remove_dir dir = - let%bind files = Sys.ls_dir dir in - let%bind () = - Deferred.List.iter files ~f:(fun file -> - let file = dir ^/ file in - remove file ) - in - Unix.rmdir dir - and remove file = - match%bind Sys.is_directory file with - | `Yes -> - remove_dir file - | _ -> - Unix.unlink file - in - remove dirname ) - else Deferred.unit + let%bind () = File_system.create_dir ~clear_if_exists:true dirname in + let%map.Deferred.Or_error () = + Tar.extract ~root:dirname ~file:filename () in - let%bind () = Unix.mkdir ~p:() dirname in - let open Deferred.Or_error.Let_syntax in - let%map () = Tar.extract ~root:dirname ~file:filename () in let (packed : Genesis_ledger.Packed.t) = match accounts with | Some accounts -> @@ -325,8 +304,9 @@ module Ledger = struct ; ("path", `String tar_path) ; ("dir", `String dirname) ] ; - let open Deferred.Or_error.Let_syntax in - let%map () = Tar.create ~root:dirname ~file:tar_path ~directory:"." () in + let%map.Deferred.Or_error () = + Tar.create ~root:dirname ~file:tar_path ~directory:"." () + in tar_path let padded_accounts_from_runtime_config_opt ~logger ~proof_level diff --git a/src/lib/graphql_basic_scalars/testing.ml b/src/lib/graphql_basic_scalars/testing.ml index 65f9ba7db1b..c9e7e9c4ad4 100644 --- a/src/lib/graphql_basic_scalars/testing.ml +++ b/src/lib/graphql_basic_scalars/testing.ml @@ -56,7 +56,7 @@ let get_test_field = function (Yojson.Basic.to_string json) () -module Make_test +module Produce_test (S : Json_intf_any_typ with type ('a, 'b) typ := ('a, 'b) Graphql.Schema.typ) (G : Test_Intf with type t = S.t) = struct @@ -73,7 +73,16 @@ struct test_query schema () "{ test }" (fun response -> [%test_eq: G.t] value (S.parse @@ get_test_field response) ) - let%test_unit "test" = + let test_query () = Core_kernel.Quickcheck.test G.gen ~sexp_of:G.sexp_of_t ~f:query_server_and_compare end + +module Make_test + (S : Json_intf_any_typ with type ('a, 'b) typ := ('a, 'b) Graphql.Schema.typ) + (G : Test_Intf with type t = S.t) = +struct + include Produce_test (S) (G) + + let%test_unit "test" = test_query () +end diff --git a/src/lib/hash_prefix_states/dune b/src/lib/hash_prefix_states/dune index f93e60a7895..0b851eca719 100644 --- a/src/lib/hash_prefix_states/dune +++ b/src/lib/hash_prefix_states/dune @@ -12,6 +12,7 @@ random_oracle mina_signature_kind hash_prefixes + hash_prefix_create pickles ) (preprocessor_deps ../../config.mlh) diff --git a/src/lib/hash_prefix_states/hash_prefix_create/dune b/src/lib/hash_prefix_states/hash_prefix_create/dune new file mode 100644 index 00000000000..7d88aa2b5d8 --- /dev/null +++ b/src/lib/hash_prefix_states/hash_prefix_create/dune @@ -0,0 +1,10 @@ +(library + (name hash_prefix_create) + (public_name hash_prefix_create) + (libraries hash_prefixes random_oracle) + (instrumentation + (backend bisect_ppx)) + (preprocess + (pps 
ppx_version)) + (virtual_modules hash_prefix_create) + (default_implementation hash_prefix_create.native)) diff --git a/src/lib/hash_prefix_states/hash_prefix_create/hash_prefix_create.mli b/src/lib/hash_prefix_states/hash_prefix_create/hash_prefix_create.mli new file mode 100644 index 00000000000..5854bb9998f --- /dev/null +++ b/src/lib/hash_prefix_states/hash_prefix_create/hash_prefix_create.mli @@ -0,0 +1,3 @@ +val salt : string -> Random_oracle.Digest.t Random_oracle.State.t + +val salt_legacy : string -> Random_oracle.Digest.t Random_oracle.Legacy.State.t diff --git a/src/lib/hash_prefix_states/hash_prefix_create/js/dune b/src/lib/hash_prefix_states/hash_prefix_create/js/dune new file mode 100644 index 00000000000..2c9f199a7b6 --- /dev/null +++ b/src/lib/hash_prefix_states/hash_prefix_create/js/dune @@ -0,0 +1,9 @@ +(library + (public_name hash_prefix_create.js) + (name hash_prefix_create_js) + (libraries pickles random_oracle js_of_ocaml base core_kernel) + (instrumentation + (backend bisect_ppx)) + (preprocess + (pps ppx_version)) + (implements hash_prefix_create)) diff --git a/src/lib/hash_prefix_states/hash_prefix_create/js/hash_prefix_create.ml b/src/lib/hash_prefix_states/hash_prefix_create/js/hash_prefix_create.ml new file mode 100644 index 00000000000..ad5335830d5 --- /dev/null +++ b/src/lib/hash_prefix_states/hash_prefix_create/js/hash_prefix_create.ml @@ -0,0 +1,31 @@ +open Core_kernel +module Js = Js_of_ocaml.Js +module Field = Pickles.Impls.Step.Field.Constant + +external get_ts_bindings : unit -> Js.Unsafe.any Js.Optdef.t = "getTsBindings" + +(* the ?. operator from JS *) +let ( |. ) (value : _ Js.Optdef.t) (key : string) = + Js.( + if phys_equal value undefined then undefined + else Unsafe.get value (string key)) + +let lookup kind prefix = + get_ts_bindings () |. kind |. 
prefix |> Js.Optdef.to_option + +let of_js x = + Js.to_array x |> Array.map ~f:(Fn.compose Field.of_string Js.to_string) + +let salt s = + match lookup "prefixHashes" s with + | Some state -> + of_js state |> Random_oracle.State.of_array + | None -> + Random_oracle.salt s + +let salt_legacy s = + match lookup "prefixHashesLegacy" s with + | Some state -> + of_js state |> Random_oracle.Legacy.State.of_array + | None -> + Random_oracle.Legacy.salt s diff --git a/src/lib/hash_prefix_states/hash_prefix_create/native/dune b/src/lib/hash_prefix_states/hash_prefix_create/native/dune new file mode 100644 index 00000000000..5f4d54d917c --- /dev/null +++ b/src/lib/hash_prefix_states/hash_prefix_create/native/dune @@ -0,0 +1,9 @@ +(library + (public_name hash_prefix_create.native) + (name hash_prefix_create_native) + (libraries random_oracle) + (instrumentation + (backend bisect_ppx)) + (preprocess + (pps ppx_version)) + (implements hash_prefix_create)) diff --git a/src/lib/hash_prefix_states/hash_prefix_create/native/hash_prefix_create.ml b/src/lib/hash_prefix_states/hash_prefix_create/native/hash_prefix_create.ml new file mode 100644 index 00000000000..05e667338e5 --- /dev/null +++ b/src/lib/hash_prefix_states/hash_prefix_create/native/hash_prefix_create.ml @@ -0,0 +1,3 @@ +let salt = Random_oracle.salt + +let salt_legacy = Random_oracle.Legacy.salt diff --git a/src/lib/hash_prefix_states/hash_prefix_states.ml b/src/lib/hash_prefix_states/hash_prefix_states.ml index c04844e5431..4b3190f6e93 100644 --- a/src/lib/hash_prefix_states/hash_prefix_states.ml +++ b/src/lib/hash_prefix_states/hash_prefix_states.ml @@ -1,9 +1,10 @@ open Core_kernel open Hash_prefixes -let salt (s : Hash_prefixes.t) = Random_oracle.salt (s :> string) +let salt (s : Hash_prefixes.t) = Hash_prefix_create.salt (s :> string) -let salt_legacy (s : Hash_prefixes.t) = Random_oracle.Legacy.salt (s :> string) +let salt_legacy (s : Hash_prefixes.t) = + Hash_prefix_create.salt_legacy (s :> string) let receipt_chain_signed_command = salt_legacy receipt_chain_user_command diff --git a/src/lib/integration_test_cloud_engine/dune b/src/lib/integration_test_cloud_engine/dune index 6f50aefc782..2757ed63d07 100644 --- a/src/lib/integration_test_cloud_engine/dune +++ b/src/lib/integration_test_cloud_engine/dune @@ -1,59 +1,70 @@ (library (public_name integration_test_cloud_engine) (name integration_test_cloud_engine) - (inline_tests (flags -verbose -show-counts)) - (instrumentation (backend bisect_ppx)) - (preprocessor_deps ../../graphql-ppx-config.inc ../../../graphql_schema.json) - (preprocess (pps - ppx_here - ppx_mina ppx_version ppx_let ppx_inline_test ppx_pipebang - ppx_custom_printf ppx_deriving_yojson lens.ppx_deriving - ppx_sexp_conv - graphql_ppx -- %{read-lines:../../graphql-ppx-config.inc})) -(libraries - ;; opam libraries - async_unix - async_kernel - core_kernel - core - async - cmdliner - base - uri - sexplib0 - stdio - result - base.caml - integers - re2 - ;; local libraries - key_gen - integration_test_lib - graphql_lib - mina_runtime_config - mina_base - genesis_constants - genesis_ledger_helper - logger - mina_base_import - signature_lib - currency - mina_version - timeout_lib - mina_numbers - mina_state - mina_stdlib - mina_transaction - file_system - pickles - pickles_types - backend - kimchi_pasta - kimchi_backend.pasta.basic - with_hash - data_hash_lib - generated_graphql_queries - mina_graphql - error_json - ) -) + (inline_tests + (flags -verbose -show-counts)) + (instrumentation + (backend bisect_ppx)) + 
(preprocessor_deps + ../../graphql-ppx-config.inc + ../../../graphql_schema.json) + (preprocess + (pps + ppx_here + ppx_mina + ppx_version + ppx_let + ppx_inline_test + ppx_pipebang + ppx_custom_printf + ppx_deriving_yojson + lens.ppx_deriving + ppx_sexp_conv + graphql_ppx + -- + %{read-lines:../../graphql-ppx-config.inc})) + (libraries + ;; opam libraries + async_unix + async_kernel + core_kernel + core + async + cmdliner + base + uri + sexplib0 + stdio + result + base.caml + integers + re2 + ;; local libraries + key_gen + integration_test_lib + graphql_lib + mina_runtime_config + mina_base + genesis_constants + genesis_ledger_helper + logger + mina_base_import + signature_lib + currency + mina_version + timeout_lib + mina_numbers + mina_state + mina_stdlib + mina_transaction + file_system + pickles + pickles_types + backend + kimchi_pasta + kimchi_backend.pasta.basic + with_hash + data_hash_lib + generated_graphql_queries + mina_graphql + error_json)) diff --git a/src/lib/integration_test_cloud_engine/graphql_polling_log_engine.ml b/src/lib/integration_test_cloud_engine/graphql_polling_log_engine.ml deleted file mode 100644 index 42d23e91dcd..00000000000 --- a/src/lib/integration_test_cloud_engine/graphql_polling_log_engine.ml +++ /dev/null @@ -1,142 +0,0 @@ -open Async -open Core -open Integration_test_lib -module Timeout = Timeout_lib.Core_time -module Node = Kubernetes_network.Node - -(** This implements Log_engine_intf for integration tests, by creating a simple system that polls a mina daemon's graphql endpoint for fetching logs*) - -let log_filter_of_event_type ev_existential = - let open Event_type in - let (Event_type ev_type) = ev_existential in - let (module Ty) = event_type_module ev_type in - match Ty.parse with - | From_error_log _ -> - [] (* TODO: Do we need this? *) - | From_daemon_log (struct_id, _) -> - [ Structured_log_events.string_of_id struct_id ] - | From_puppeteer_log _ -> - [] -(* TODO: Do we need this? *) - -let all_event_types_log_filter = - List.bind ~f:log_filter_of_event_type Event_type.all_event_types - -type t = - { logger : Logger.t - ; event_writer : (Node.t * Event_type.event) Pipe.Writer.t - ; event_reader : (Node.t * Event_type.event) Pipe.Reader.t - ; background_job : unit Deferred.Or_error.t - } - -let event_reader { event_reader; _ } = event_reader - -let parse_event_from_log_entry ~logger log_entry = - let open Or_error.Let_syntax in - let open Json_parsing in - Or_error.try_with_join (fun () -> - let payload = Yojson.Safe.from_string log_entry in - let%map event = - let%bind msg = - parse (parser_from_of_yojson Logger.Message.of_yojson) payload - in - let event_id = - Option.map ~f:Structured_log_events.string_of_id msg.event_id - in - [%log spam] "parsing daemon structured event, event_id = $event_id" - ~metadata:[ ("event_id", [%to_yojson: string option] event_id) ] ; - match msg.event_id with - | Some _ -> - Event_type.parse_daemon_event msg - | None -> - (* Currently unreachable, but we could include error logs here if - desired. 
- *) - Event_type.parse_error_log msg - in - event ) - -let rec filtered_log_entries_poll node ~logger ~event_writer - ~last_log_index_seen = - let open Deferred.Let_syntax in - if not (Pipe.is_closed event_writer) then ( - let%bind () = after (Time.Span.of_ms 10000.0) in - match%bind - Integration_test_lib.Graphql_requests.get_filtered_log_entries - (Node.get_ingress_uri node) - ~last_log_index_seen - with - | Ok log_entries -> - Array.iter log_entries ~f:(fun log_entry -> - match parse_event_from_log_entry ~logger log_entry with - | Ok a -> - Pipe.write_without_pushback_if_open event_writer (node, a) - | Error e -> - [%log warn] "Error parsing log $error" - ~metadata:[ ("error", `String (Error.to_string_hum e)) ] ) ; - let last_log_index_seen = - Array.length log_entries + last_log_index_seen - in - filtered_log_entries_poll node ~logger ~event_writer - ~last_log_index_seen - | Error err -> - [%log error] "Encountered an error while polling $node for logs: $err" - ~metadata: - [ ("node", `String (Node.infra_id node)) - ; ("err", Error_json.error_to_yojson err) - ] ; - (* Declare the node to be offline. *) - Pipe.write_without_pushback_if_open event_writer - (node, Event (Node_offline, ())) ; - (* Don't keep looping, the node may be restarting. *) - return (Ok ()) ) - else Deferred.Or_error.error_string "Event writer closed" - -let rec start_filtered_log node ~logger ~log_filter ~event_writer = - let open Deferred.Let_syntax in - if not (Pipe.is_closed event_writer) then - match%bind - Integration_test_lib.Graphql_requests.start_filtered_log ~logger - ~log_filter - (Node.get_ingress_uri node) - with - | Ok () -> - return (Ok ()) - | Error _ -> - start_filtered_log node ~logger ~log_filter ~event_writer - else Deferred.Or_error.error_string "Event writer closed" - -let rec poll_node_for_logs_in_background ~log_filter ~logger ~event_writer - (node : Node.t) = - let open Deferred.Or_error.Let_syntax in - [%log info] "Requesting for $node to start its filtered logs" - ~metadata:[ ("node", `String (Node.infra_id node)) ] ; - let%bind () = start_filtered_log ~logger ~log_filter ~event_writer node in - [%log info] "$node has started its filtered logs. 
Beginning polling" - ~metadata:[ ("node", `String (Node.infra_id node)) ] ; - let%bind () = - filtered_log_entries_poll node ~last_log_index_seen:0 ~logger ~event_writer - in - poll_node_for_logs_in_background ~log_filter ~logger ~event_writer node - -let poll_for_logs_in_background ~log_filter ~logger ~network ~event_writer = - Kubernetes_network.all_nodes network - |> Core.String.Map.data - |> Deferred.Or_error.List.iter ~how:`Parallel - ~f:(poll_node_for_logs_in_background ~log_filter ~logger ~event_writer) - -let create ~logger ~(network : Kubernetes_network.t) = - let open Deferred.Or_error.Let_syntax in - let log_filter = all_event_types_log_filter in - let event_reader, event_writer = Pipe.create () in - let background_job = - poll_for_logs_in_background ~log_filter ~logger ~network ~event_writer - in - return { logger; event_reader; event_writer; background_job } - -let destroy t : unit Deferred.Or_error.t = - let open Deferred.Or_error.Let_syntax in - let { logger; event_reader = _; event_writer; background_job = _ } = t in - Pipe.close event_writer ; - [%log debug] "graphql polling log engine destroyed" ; - return () diff --git a/src/lib/integration_test_cloud_engine/graphql_polling_log_engine.mli b/src/lib/integration_test_cloud_engine/graphql_polling_log_engine.mli deleted file mode 100644 index 1dbef606c1a..00000000000 --- a/src/lib/integration_test_cloud_engine/graphql_polling_log_engine.mli +++ /dev/null @@ -1,3 +0,0 @@ -include - Integration_test_lib.Intf.Engine.Log_engine_intf - with module Network := Kubernetes_network diff --git a/src/lib/integration_test_cloud_engine/integration_test_cloud_engine.ml b/src/lib/integration_test_cloud_engine/integration_test_cloud_engine.ml index 64f71860422..df72e1e1812 100644 --- a/src/lib/integration_test_cloud_engine/integration_test_cloud_engine.ml +++ b/src/lib/integration_test_cloud_engine/integration_test_cloud_engine.ml @@ -3,4 +3,13 @@ let name = "cloud" module Network = Kubernetes_network module Network_config = Mina_automation.Network_config module Network_manager = Mina_automation.Network_manager -module Log_engine = Graphql_polling_log_engine + +module Kubernetes_polling_interval = struct + let start_filtered_logs_interval = Core.Time.Span.of_sec 10.0 +end + +module Log_engine = + Integration_test_lib.Graphql_polling_log_engine + .Make_GraphQL_polling_log_engine + (Kubernetes_network) + (Kubernetes_polling_interval) diff --git a/src/lib/integration_test_cloud_engine/mina_automation.ml b/src/lib/integration_test_cloud_engine/mina_automation.ml index 7b6f35fc1be..fe5a7bc45c9 100644 --- a/src/lib/integration_test_cloud_engine/mina_automation.ml +++ b/src/lib/integration_test_cloud_engine/mina_automation.ml @@ -71,6 +71,7 @@ module Network_config = struct [@to_yojson fun j -> `String (Yojson.Safe.to_string j)] ; block_producer_configs : block_producer_config list ; log_precomputed_blocks : bool + ; start_filtered_logs : string list ; archive_node_count : int ; mina_archive_schema : string ; mina_archive_schema_aux_files : string list @@ -118,6 +119,7 @@ module Network_config = struct ; snark_worker_fee ; num_archive_nodes ; log_precomputed_blocks (* ; num_plain_nodes *) + ; start_filtered_logs ; proof_config ; k ; delta @@ -478,6 +480,7 @@ module Network_config = struct ; mina_archive_image = images.archive_node ; runtime_config = Runtime_config.to_yojson runtime_config ; block_producer_configs + ; start_filtered_logs ; log_precomputed_blocks ; archive_node_count = num_archive_nodes ; mina_archive_schema diff --git 
a/src/lib/integration_test_lib/dune b/src/lib/integration_test_lib/dune index 53fdd9550e8..892be91b836 100644 --- a/src/lib/integration_test_lib/dune +++ b/src/lib/integration_test_lib/dune @@ -62,4 +62,5 @@ transition_handler snark_worker one_or_two + error_json )) diff --git a/src/lib/integration_test_lib/graphql_polling_log_engine.ml b/src/lib/integration_test_lib/graphql_polling_log_engine.ml new file mode 100644 index 00000000000..e344df952f9 --- /dev/null +++ b/src/lib/integration_test_lib/graphql_polling_log_engine.ml @@ -0,0 +1,149 @@ +open Async +open Core +module Timeout = Timeout_lib.Core_time + +(** This implements Log_engine_intf for integration tests, by creating a simple system that polls a mina daemon's graphql endpoint for fetching logs*) + +module Make_GraphQL_polling_log_engine + (Network : Intf.Engine.Network_intf) (Polling_interval : sig + val start_filtered_logs_interval : Time.Span.t + end) = +struct + module Node = Network.Node + + let log_filter_of_event_type ev_existential = + let open Event_type in + let (Event_type ev_type) = ev_existential in + let (module Ty) = event_type_module ev_type in + match Ty.parse with + | From_error_log _ -> + [] (* TODO: Do we need this? *) + | From_daemon_log (struct_id, _) -> + [ Structured_log_events.string_of_id struct_id ] + | From_puppeteer_log _ -> + [] + (* TODO: Do we need this? *) + + let all_event_types_log_filter = + List.bind ~f:log_filter_of_event_type Event_type.all_event_types + + type t = + { logger : Logger.t + ; event_writer : (Node.t * Event_type.event) Pipe.Writer.t + ; event_reader : (Node.t * Event_type.event) Pipe.Reader.t + ; background_job : unit Deferred.Or_error.t + } + + let event_reader { event_reader; _ } = event_reader + + let parse_event_from_log_entry ~logger log_entry = + let open Or_error.Let_syntax in + let open Json_parsing in + Or_error.try_with_join (fun () -> + let payload = Yojson.Safe.from_string log_entry in + let%map event = + let%bind msg = + parse (parser_from_of_yojson Logger.Message.of_yojson) payload + in + let event_id = + Option.map ~f:Structured_log_events.string_of_id msg.event_id + in + [%log spam] "parsing daemon structured event, event_id = $event_id" + ~metadata:[ ("event_id", [%to_yojson: string option] event_id) ] ; + match msg.event_id with + | Some _ -> + Event_type.parse_daemon_event msg + | None -> + (* Currently unreachable, but we could include error logs here if + desired. + *) + Event_type.parse_error_log msg + in + event ) + + let rec filtered_log_entries_poll node ~logger ~event_writer + ~last_log_index_seen = + let open Deferred.Let_syntax in + if not (Pipe.is_closed event_writer) then ( + let%bind () = after (Time.Span.of_ms 10000.0) in + match%bind + Graphql_requests.get_filtered_log_entries + (Node.get_ingress_uri node) + ~last_log_index_seen + with + | Ok log_entries -> + Array.iter log_entries ~f:(fun log_entry -> + match parse_event_from_log_entry ~logger log_entry with + | Ok a -> + Pipe.write_without_pushback_if_open event_writer (node, a) + | Error e -> + [%log warn] "Error parsing log $error" + ~metadata:[ ("error", `String (Error.to_string_hum e)) ] ) ; + let last_log_index_seen = + Array.length log_entries + last_log_index_seen + in + filtered_log_entries_poll node ~logger ~event_writer + ~last_log_index_seen + | Error err -> + [%log error] "Encountered an error while polling $node for logs: $err" + ~metadata: + [ ("node", `String (Node.infra_id node)) + ; ("err", Error_json.error_to_yojson err) + ] ; + (* Declare the node to be offline. 
*) + Pipe.write_without_pushback_if_open event_writer + (node, Event (Node_offline, ())) ; + (* Don't keep looping, the node may be restarting. *) + return (Ok ()) ) + else Deferred.Or_error.error_string "Event writer closed" + + let rec start_filtered_log node ~logger ~log_filter ~event_writer = + let open Deferred.Let_syntax in + if not (Pipe.is_closed event_writer) then + match%bind + Graphql_requests.start_filtered_log ~logger ~log_filter + ~retry_delay_sec: + (Polling_interval.start_filtered_logs_interval |> Time.Span.to_sec) + (Node.get_ingress_uri node) + with + | Ok () -> + return (Ok ()) + | Error _ -> + start_filtered_log node ~logger ~log_filter ~event_writer + else Deferred.Or_error.error_string "Event writer closed" + + let rec poll_node_for_logs_in_background ~log_filter ~logger ~event_writer + (node : Node.t) = + let open Deferred.Or_error.Let_syntax in + [%log info] "Requesting for $node to start its filtered logs" + ~metadata:[ ("node", `String (Node.infra_id node)) ] ; + let%bind () = start_filtered_log ~logger ~log_filter ~event_writer node in + [%log info] "$node has started its filtered logs. Beginning polling" + ~metadata:[ ("node", `String (Node.infra_id node)) ] ; + let%bind () = + filtered_log_entries_poll node ~last_log_index_seen:0 ~logger + ~event_writer + in + poll_node_for_logs_in_background ~log_filter ~logger ~event_writer node + + let poll_for_logs_in_background ~log_filter ~logger ~network ~event_writer = + Network.all_nodes network |> Core.String.Map.data + |> Deferred.Or_error.List.iter ~how:`Parallel + ~f:(poll_node_for_logs_in_background ~log_filter ~logger ~event_writer) + + let create ~logger ~(network : Network.t) = + let open Deferred.Or_error.Let_syntax in + let log_filter = all_event_types_log_filter in + let event_reader, event_writer = Pipe.create () in + let background_job = + poll_for_logs_in_background ~log_filter ~logger ~network ~event_writer + in + return { logger; event_reader; event_writer; background_job } + + let destroy t : unit Deferred.Or_error.t = + let open Deferred.Or_error.Let_syntax in + let { logger; event_reader = _; event_writer; background_job = _ } = t in + Pipe.close event_writer ; + [%log debug] "graphql polling log engine destroyed" ; + return () +end diff --git a/src/lib/integration_test_lib/graphql_requests.ml b/src/lib/integration_test_lib/graphql_requests.ml index 5ca67f8c06b..adde8f2066c 100644 --- a/src/lib/integration_test_lib/graphql_requests.ml +++ b/src/lib/integration_test_lib/graphql_requests.ml @@ -1105,14 +1105,14 @@ let get_metrics ~logger node_uri = ; transaction_pool_size } -let start_filtered_log ~logger ~log_filter node_uri = +let start_filtered_log node_uri ~logger ~log_filter ~retry_delay_sec = let open Deferred.Let_syntax in let query_obj = Graphql.StartFilteredLog.(make @@ makeVariables ~filter:log_filter ()) in let%bind res = - exec_graphql_request ~logger:(Logger.null ()) ~retry_delay_sec:10.0 - ~node_uri ~query_name:"StartFilteredLog" query_obj + exec_graphql_request ~logger:(Logger.null ()) ~retry_delay_sec ~node_uri + ~query_name:"StartFilteredLog" query_obj in match res with | Ok query_result_obj -> diff --git a/src/lib/integration_test_lib/test_config.ml b/src/lib/integration_test_lib/test_config.ml index b9611833752..a86c367d94c 100644 --- a/src/lib/integration_test_lib/test_config.ml +++ b/src/lib/integration_test_lib/test_config.ml @@ -68,6 +68,7 @@ type t = ; snark_worker_fee : string ; num_archive_nodes : int ; log_precomputed_blocks : bool + ; start_filtered_logs : string list (* ; 
num_plain_nodes : int *) (* blockchain constants *) ; proof_config : Runtime_config.Proof_keys.t @@ -94,6 +95,19 @@ let proof_config_default : Runtime_config.Proof_keys.t = ; fork = None } +let log_filter_of_event_type ev_existential = + let open Event_type in + let (Event_type ev_type) = ev_existential in + let (module Ty) = event_type_module ev_type in + match Ty.parse with + | From_error_log _ -> + [] (* TODO: Do we need this? *) + | From_daemon_log (struct_id, _) -> + [ Structured_log_events.string_of_id struct_id ] + | From_puppeteer_log _ -> + [] +(* TODO: Do we need this? *) + let default = { requires_graphql = true @@ -105,6 +119,8 @@ let default = ; snark_worker_fee = "0.025" ; num_archive_nodes = 0 ; log_precomputed_blocks = false (* ; num_plain_nodes = 0 *) + ; start_filtered_logs = + List.bind ~f:log_filter_of_event_type Event_type.all_event_types ; proof_config = proof_config_default ; k = 20 ; slots_per_epoch = 3 * 8 * 20 diff --git a/src/lib/integration_test_local_engine/cli_inputs.ml b/src/lib/integration_test_local_engine/cli_inputs.ml new file mode 100644 index 00000000000..b0382124fa8 --- /dev/null +++ b/src/lib/integration_test_local_engine/cli_inputs.ml @@ -0,0 +1,5 @@ +open Cmdliner + +type t = unit + +let term = Term.const () diff --git a/src/lib/integration_test_local_engine/docker_compose.ml b/src/lib/integration_test_local_engine/docker_compose.ml new file mode 100644 index 00000000000..9faffd82c99 --- /dev/null +++ b/src/lib/integration_test_local_engine/docker_compose.ml @@ -0,0 +1,81 @@ +open Core_kernel +open Integration_test_lib + +module Dockerfile = struct + module Service = struct + module Volume = struct + type t = + { type_ : string [@key "type"]; source : string; target : string } + [@@deriving to_yojson] + + let create source target = { type_ = "bind"; source; target } + end + + module Environment = struct + type t = (string * string) list + + let to_yojson env = `Assoc (List.map env ~f:(fun (k, v) -> (k, `String v))) + end + + module Port = struct + type t = { published : int; target : int } [@@deriving to_yojson] + + let create ~published ~target = { published; target } + end + + type t = + { image : string + ; command : string list + ; entrypoint : string list option + [@to_yojson + fun j -> + match j with + | Some v -> + `List (List.map (fun s -> `String s) v) + | None -> + `Null] + ; ports : Port.t list + ; environment : Environment.t + ; volumes : Volume.t list + } + [@@deriving to_yojson] + + let create ~image ~command ~entrypoint ~ports ~environment ~volumes = + { image; command; entrypoint; ports; environment; volumes } + + let to_yojson { image; command; entrypoint; ports; environment; volumes } = + `Assoc + ( [ ("image", `String image) + ; ("command", `List (List.map ~f:(fun s -> `String s) command)) + ; ("ports", `List (List.map ~f:Port.to_yojson ports)) + ; ("environment", Environment.to_yojson environment) + ; ("volumes", `List (List.map ~f:Volume.to_yojson volumes)) + ] + @ + match entrypoint with + | Some ep -> + [ ("entrypoint", `List (List.map ~f:(fun s -> `String s) ep)) ] + | None -> + [] ) + end + + module StringMap = Map.Make (String) + + type service_map = Service.t StringMap.t + + let merge (m1 : service_map) (m2 : service_map) = + Base.Map.merge_skewed m1 m2 ~combine:(fun ~key:_ left _ -> left) + + let service_map_to_yojson m = + `Assoc (m |> Map.map ~f:Service.to_yojson |> Map.to_alist) + + type t = { version : string; services : service_map } [@@deriving to_yojson] + + let to_string = Fn.compose Yojson.Safe.pretty_to_string 
to_yojson + + let write_config t ~dir ~filename = + Out_channel.with_file ~fail_if_exists:false + (dir ^ "/" ^ filename) + ~f:(fun ch -> t |> to_string |> Out_channel.output_string ch) ; + Util.run_cmd_exn dir "chmod" [ "600"; filename ] +end diff --git a/src/lib/integration_test_local_engine/docker_network.ml b/src/lib/integration_test_local_engine/docker_network.ml new file mode 100644 index 00000000000..4deb1de6234 --- /dev/null +++ b/src/lib/integration_test_local_engine/docker_network.ml @@ -0,0 +1,330 @@ +open Core_kernel +open Async +open Integration_test_lib + +let get_container_id service_id = + let%bind cwd = Unix.getcwd () in + let open Malleable_error.Let_syntax in + let%bind container_ids = + Deferred.bind ~f:Malleable_error.or_hard_error + (Integration_test_lib.Util.run_cmd_or_error cwd "docker" + [ "ps"; "-f"; sprintf "name=%s" service_id; "--quiet" ] ) + in + let container_id_list = String.split container_ids ~on:'\n' in + match container_id_list with + | [] -> + Malleable_error.hard_error_format "No container id found for service %s" + service_id + | raw_container_id :: _ -> + return (String.strip raw_container_id) + +let run_in_container ?(exit_code = 10) container_id ~cmd = + let%bind.Deferred cwd = Unix.getcwd () in + Integration_test_lib.Util.run_cmd_or_hard_error ~exit_code cwd "docker" + ([ "exec"; container_id ] @ cmd) + +module Node = struct + type config = + { network_keypair : Network_keypair.t option + ; service_id : string + ; postgres_connection_uri : string option + ; graphql_port : int + } + + type t = { config : config; mutable should_be_running : bool } + + let id { config; _ } = config.service_id + + let infra_id { config; _ } = config.service_id + + let should_be_running { should_be_running; _ } = should_be_running + + let network_keypair { config; _ } = config.network_keypair + + let get_ingress_uri node = + Uri.make ~scheme:"http" ~host:"127.0.0.1" ~path:"/graphql" + ~port:node.config.graphql_port () + + let get_container_index_from_service_name service_name = + match String.split_on_chars ~on:[ '_' ] service_name with + | _ :: value :: _ -> + value + | _ -> + failwith "get_container_index_from_service_name: bad service name" + + let dump_archive_data ~logger (t : t) ~data_file = + let service_name = t.config.service_id in + match t.config.postgres_connection_uri with + | None -> + failwith + (sprintf "dump_archive_data: %s not an archive container" service_name) + | Some postgres_uri -> + let open Malleable_error.Let_syntax in + let%bind container_id = get_container_id service_name in + [%log info] "Dumping archive data from (node: %s, container: %s)" + service_name container_id ; + let%map data = + run_in_container container_id + ~cmd:[ "pg_dump"; "--create"; "--no-owner"; postgres_uri ] + in + [%log info] "Dumping archive data to file %s" data_file ; + Out_channel.with_file data_file ~f:(fun out_ch -> + Out_channel.output_string out_ch data ) + + let get_logs_in_container container_id = + let%bind.Deferred cwd = Unix.getcwd () in + Integration_test_lib.Util.run_cmd_or_hard_error ~exit_code:13 cwd "docker" + [ "logs"; container_id ] + + let dump_mina_logs ~logger (t : t) ~log_file = + let open Malleable_error.Let_syntax in + let%bind container_id = get_container_id t.config.service_id in + [%log info] "Dumping mina logs from (node: %s, container: %s)" + t.config.service_id container_id ; + let%map logs = get_logs_in_container container_id in + [%log info] "Dumping mina logs to file %s" log_file ; + Out_channel.with_file log_file ~f:(fun out_ch -> + 
Out_channel.output_string out_ch logs ) + + let cp_string_to_container_file container_id ~str ~dest = + let tmp_file, oc = + Caml.Filename.open_temp_file ~temp_dir:Filename.temp_dir_name + "integration_test_cp_string" ".tmp" + in + Out_channel.output_string oc str ; + Out_channel.close oc ; + let%bind cwd = Unix.getcwd () in + let dest_file = sprintf "%s:%s" container_id dest in + Integration_test_lib.Util.run_cmd_or_error cwd "docker" + [ "cp"; tmp_file; dest_file ] + + let run_replayer ?(start_slot_since_genesis = 0) ~logger (t : t) = + let open Malleable_error.Let_syntax in + let%bind container_id = get_container_id t.config.service_id in + [%log info] "Running replayer on (node: %s, container: %s)" + t.config.service_id container_id ; + let%bind accounts = + run_in_container container_id + ~cmd:[ "jq"; "-c"; ".ledger.accounts"; "/root/runtime_config.json" ] + in + let replayer_input = + sprintf + {| { "start_slot_since_genesis": %d, + "genesis_ledger": { "accounts": %s, "add_genesis_winner": true }} |} + start_slot_since_genesis accounts + in + let dest = "replayer-input.json" in + let%bind archive_container_id = get_container_id "archive" in + let%bind () = + Deferred.bind ~f:Malleable_error.return + (cp_string_to_container_file archive_container_id ~str:replayer_input + ~dest ) + >>| ignore + in + let postgres_url = Option.value_exn t.config.postgres_connection_uri in + run_in_container container_id + ~cmd: + [ "mina-replayer" + ; "--archive-uri" + ; postgres_url + ; "--input-file" + ; dest + ; "--output-file" + ; "/dev/null" + ; "--continue-on-error" + ] + + let dump_precomputed_blocks ~logger (t : t) = + let open Malleable_error.Let_syntax in + let container_id = t.config.service_id in + [%log info] + "Dumping precomputed blocks from logs for (node: %s, container: %s)" + t.config.service_id container_id ; + let%bind logs = get_logs_in_container container_id in + (* kubectl logs may include non-log output, like "Using password from environment variable" *) + let log_lines = + String.split logs ~on:'\n' + |> List.filter ~f:(String.is_prefix ~prefix:"{\"timestamp\":") + in + let jsons = List.map log_lines ~f:Yojson.Safe.from_string in + let metadata_jsons = + List.map jsons ~f:(fun json -> + match json with + | `Assoc items -> ( + match List.Assoc.find items ~equal:String.equal "metadata" with + | Some md -> + md + | None -> + failwithf "Log line is missing metadata: %s" + (Yojson.Safe.to_string json) + () ) + | other -> + failwithf "Expected log line to be a JSON record, got: %s" + (Yojson.Safe.to_string other) + () ) + in + let state_hash_and_blocks = + List.fold metadata_jsons ~init:[] ~f:(fun acc json -> + match json with + | `Assoc items -> ( + match + List.Assoc.find items ~equal:String.equal "precomputed_block" + with + | Some block -> ( + match + List.Assoc.find items ~equal:String.equal "state_hash" + with + | Some state_hash -> + (state_hash, block) :: acc + | None -> + failwith + "Log metadata contains a precomputed block, but no \ + state hash" ) + | None -> + acc ) + | other -> + failwithf "Expected log line to be a JSON record, got: %s" + (Yojson.Safe.to_string other) + () ) + in + let%bind.Deferred () = + Deferred.List.iter state_hash_and_blocks + ~f:(fun (state_hash_json, block_json) -> + let double_quoted_state_hash = + Yojson.Safe.to_string state_hash_json + in + let state_hash = + String.sub double_quoted_state_hash ~pos:1 + ~len:(String.length double_quoted_state_hash - 2) + in + let block = Yojson.Safe.pretty_to_string block_json in + let filename = state_hash ^ 
".json" in + match%map.Deferred Sys.file_exists filename with + | `Yes -> + [%log info] + "File already exists for precomputed block with state hash %s" + state_hash + | _ -> + [%log info] + "Dumping precomputed block with state hash %s to file %s" + state_hash filename ; + Out_channel.with_file filename ~f:(fun out_ch -> + Out_channel.output_string out_ch block ) ) + in + Malleable_error.return () + + let start ~fresh_state node : unit Malleable_error.t = + let open Malleable_error.Let_syntax in + let%bind container_id = get_container_id node.config.service_id in + node.should_be_running <- true ; + let%bind () = + if fresh_state then + run_in_container container_id ~cmd:[ "rm"; "-rf"; ".mina-config/*" ] + >>| ignore + else Malleable_error.return () + in + run_in_container ~exit_code:11 container_id ~cmd:[ "/start.sh" ] >>| ignore + + let stop node = + let open Malleable_error.Let_syntax in + let%bind container_id = get_container_id node.config.service_id in + node.should_be_running <- false ; + run_in_container ~exit_code:12 container_id ~cmd:[ "/stop.sh" ] >>| ignore +end + +module Service_to_deploy = struct + type config = + { network_keypair : Network_keypair.t option + ; postgres_connection_uri : string option + ; graphql_port : int + } + + type t = { stack_name : string; service_name : string; config : config } + + let construct_service stack_name service_name config : t = + { stack_name; service_name; config } + + let init_service_to_deploy_config ?(network_keypair = None) + ?(postgres_connection_uri = None) ~graphql_port = + { network_keypair; postgres_connection_uri; graphql_port } + + let get_node_from_service t = + let open Malleable_error.Let_syntax in + let service_id = sprintf "%s_%s" t.stack_name t.service_name in + let%bind container_id = get_container_id service_id in + if String.is_empty container_id then + Malleable_error.hard_error_format "No container id found for service %s" + t.service_name + else + return + { Node.config = + { service_id + ; network_keypair = t.config.network_keypair + ; postgres_connection_uri = t.config.postgres_connection_uri + ; graphql_port = t.config.graphql_port + } + ; should_be_running = false + } +end + +type t = + { namespace : string + ; constants : Test_config.constants + ; seeds : Node.t Core.String.Map.t + ; block_producers : Node.t Core.String.Map.t + ; snark_coordinators : Node.t Core.String.Map.t + ; snark_workers : Node.t Core.String.Map.t + ; archive_nodes : Node.t Core.String.Map.t + ; genesis_keypairs : Network_keypair.t Core.String.Map.t + } + +let constants { constants; _ } = constants + +let constraint_constants { constants; _ } = constants.constraints + +let genesis_constants { constants; _ } = constants.genesis + +let seeds { seeds; _ } = seeds + +let block_producers { block_producers; _ } = block_producers + +let snark_coordinators { snark_coordinators; _ } = snark_coordinators + +let archive_nodes { archive_nodes; _ } = archive_nodes + +let all_mina_nodes { seeds; block_producers; snark_coordinators; _ } = + List.concat + [ Core.String.Map.to_alist seeds + ; Core.String.Map.to_alist block_producers + ; Core.String.Map.to_alist snark_coordinators + ] + |> Core.String.Map.of_alist_exn + +let all_nodes t = + List.concat + [ Core.String.Map.to_alist t.seeds + ; Core.String.Map.to_alist t.block_producers + ; Core.String.Map.to_alist t.snark_coordinators + ; Core.String.Map.to_alist t.snark_workers + ] + |> Core.String.Map.of_alist_exn + +let all_non_seed_nodes t = + List.concat + [ Core.String.Map.to_alist t.block_producers 
+ ; Core.String.Map.to_alist t.snark_coordinators + ; Core.String.Map.to_alist t.snark_workers + ] + |> Core.String.Map.of_alist_exn + +let genesis_keypairs { genesis_keypairs; _ } = genesis_keypairs + +let all_ids t = + let deployments = all_nodes t |> Core.Map.to_alist in + List.fold deployments ~init:[] ~f:(fun acc (_, node) -> + List.cons node.config.service_id acc ) + +let initialize_infra ~logger network = + let _ = logger in + let _ = network in + Malleable_error.return () diff --git a/src/lib/integration_test_local_engine/docker_node_config.ml b/src/lib/integration_test_local_engine/docker_node_config.ml new file mode 100644 index 00000000000..bbdd0cb8773 --- /dev/null +++ b/src/lib/integration_test_local_engine/docker_node_config.ml @@ -0,0 +1,570 @@ +open Core_kernel +open Async +open Integration_test_lib +open Docker_compose + +module PortManager = struct + let mina_internal_rest_port = 3085 + + let mina_internal_client_port = 8301 + + let mina_internal_metrics_port = 10001 + + let mina_internal_server_port = 3086 + + let mina_internal_external_port = 10101 + + let postgres_internal_port = 5432 + + type t = + { mutable available_ports : int list + ; mutable used_ports : int list + ; min_port : int + ; max_port : int + } + + let create ~min_port ~max_port = + let available_ports = List.range min_port max_port in + { available_ports; used_ports = []; min_port; max_port } + + let allocate_port t = + match t.available_ports with + | [] -> + failwith "No available ports" + | port :: rest -> + t.available_ports <- rest ; + t.used_ports <- port :: t.used_ports ; + port + + let allocate_ports_for_node t = + let rest_port_source = allocate_port t in + let client_port_source = allocate_port t in + let metrics_port_source = allocate_port t in + [ { Dockerfile.Service.Port.published = rest_port_source + ; target = mina_internal_rest_port + } + ; { published = client_port_source; target = mina_internal_client_port } + ; { published = metrics_port_source; target = mina_internal_metrics_port } + ] + + let release_port t port = + t.used_ports <- List.filter t.used_ports ~f:(fun p -> p <> port) ; + t.available_ports <- port :: t.available_ports + + let get_latest_used_port t = + match t.used_ports with [] -> failwith "No used ports" | port :: _ -> port +end + +module Base_node_config = struct + type t = + { peer : string option + ; log_level : string + ; log_snark_work_gossip : bool + ; log_txn_pool_gossip : bool + ; generate_genesis_proof : bool + ; client_port : string + ; rest_port : string + ; external_port : string + ; metrics_port : string + ; runtime_config_path : string option + ; libp2p_key_path : string + ; libp2p_secret : string + } + [@@deriving to_yojson] + + let container_runtime_config_path = "/root/runtime_config.json" + + let container_entrypoint_path = "/root/entrypoint.sh" + + let container_keys_path = "/root/keys" + + let container_libp2p_key_path = container_keys_path ^ "/libp2p_key" + + let entrypoint_script = + ( "entrypoint.sh" + , {|#!/bin/bash + # This file is auto-generated by the local integration test framework. + # Path to the libp2p_key file + LIBP2P_KEY_PATH="|} + ^ container_libp2p_key_path + ^ {|" + # Generate keypair and set permissions if libp2p_key does not exist + if [ ! 
-f "$LIBP2P_KEY_PATH" ]; then + mina libp2p generate-keypair --privkey-path $LIBP2P_KEY_PATH + fi + /bin/chmod -R 700 |} + ^ container_keys_path ^ {|/ + # Import any compatible keys in |} + ^ container_keys_path ^ {|/*, excluding certain keys + for key_file in |} + ^ container_keys_path + ^ {|/*; do + # Exclude specific keys (e.g., libp2p keys) + if [[ $(basename "$key_file") != "libp2p_key" ]]; then + mina accounts import -config-directory /root/.mina-config -privkey-path "$key_file" + fi + done + # Execute the puppeteer script + exec /mina_daemon_puppeteer.py "$@" + |} + ) + + let runtime_config_volume : Docker_compose.Dockerfile.Service.Volume.t = + { type_ = "bind" + ; source = "runtime_config.json" + ; target = container_runtime_config_path + } + + let entrypoint_volume : Docker_compose.Dockerfile.Service.Volume.t = + { type_ = "bind" + ; source = "entrypoint.sh" + ; target = container_entrypoint_path + } + + let default ?(runtime_config_path = None) ?(peer = None) = + { runtime_config_path + ; peer + ; log_snark_work_gossip = true + ; log_txn_pool_gossip = true + ; generate_genesis_proof = true + ; log_level = "Debug" + ; client_port = PortManager.mina_internal_client_port |> Int.to_string + ; rest_port = PortManager.mina_internal_rest_port |> Int.to_string + ; metrics_port = PortManager.mina_internal_metrics_port |> Int.to_string + ; external_port = PortManager.mina_internal_external_port |> Int.to_string + ; libp2p_key_path = container_libp2p_key_path + ; libp2p_secret = "" + } + + let to_docker_env_vars t = + [ ("DAEMON_REST_PORT", t.rest_port) + ; ("DAEMON_CLIENT_PORT", t.client_port) + ; ("DAEMON_METRICS_PORT", t.metrics_port) + ; ("DAEMON_EXTERNAL_PORT", t.external_port) + ; ("RAYON_NUM_THREADS", "8") + ; ("MINA_PRIVKEY_PASS", "naughty blue worm") + ; ("MINA_LIBP2P_PASS", "") + ] + + let to_list t = + let base_args = + [ "-log-level" + ; t.log_level + ; "-log-snark-work-gossip" + ; Bool.to_string t.log_snark_work_gossip + ; "-log-txn-pool-gossip" + ; Bool.to_string t.log_txn_pool_gossip + ; "-generate-genesis-proof" + ; Bool.to_string t.generate_genesis_proof + ; "-client-port" + ; t.client_port + ; "-rest-port" + ; t.rest_port + ; "-external-port" + ; t.external_port + ; "-metrics-port" + ; t.metrics_port + ; "--libp2p-keypair" + ; t.libp2p_key_path + ; "-log-json" + ; "--insecure-rest-server" + ; "-external-ip" + ; "0.0.0.0" + ] + in + let peer_args = + match t.peer with Some peer -> [ "-peer"; peer ] | None -> [] + in + let runtime_config_path = + match t.runtime_config_path with + | Some path -> + [ "-config-file"; path ] + | None -> + [] + in + List.concat [ base_args; runtime_config_path; peer_args ] +end + +module Block_producer_config = struct + type config = + { keypair : Network_keypair.t + ; priv_key_path : string + ; enable_flooding : bool + ; enable_peer_exchange : bool + ; base_config : Base_node_config.t + } + [@@deriving to_yojson] + + type t = + { service_name : string + ; config : config + ; docker_config : Dockerfile.Service.t + } + [@@deriving to_yojson] + + let create_cmd config = + let base_args = Base_node_config.to_list config.base_config in + let block_producer_args = + [ "daemon" + ; "-block-producer-key" + ; config.priv_key_path + ; "-enable-flooding" + ; config.enable_flooding |> Bool.to_string + ; "-enable-peer-exchange" + ; config.enable_peer_exchange |> Bool.to_string + ] + in + List.concat [ block_producer_args; base_args ] + + let create_docker_config ~image ~entrypoint ~ports ~volumes ~environment + ~config = + { Dockerfile.Service.image + ; 
command = create_cmd config + ; entrypoint + ; ports + ; environment + ; volumes + } + + let create ~service_name ~image ~ports ~volumes ~config = + let entrypoint = Some [ "/root/entrypoint.sh" ] in + let environment = Base_node_config.to_docker_env_vars config.base_config in + let docker_config = + create_docker_config ~image ~ports ~volumes ~environment ~entrypoint + ~config + in + { service_name; config; docker_config } +end + +module Seed_config = struct + let peer_id = "12D3KooWMg66eGtSEx5UZ9EAqEp3W7JaGd6WTxdRFuqhskRN55dT" + + let libp2p_keypair = + {|{"box_primitive":"xsalsa20poly1305","pw_primitive":"argon2i","nonce":"7Bbvv2wZ6iCeqVyooU9WR81aygshMrLdXKieaHT","pwsalt":"Bh1WborqSwdzBi7m95iZdrCGspSf","pwdiff":[134217728,6],"ciphertext":"8fgvt4eKSzF5HMr1uEZARVHBoMgDKTx17zV7STVQyhyyEz1SqdH4RrU51MFGMPZJXNznLfz8RnSPsjrVqhc1CenfSLLWP5h7tTn86NbGmzkshCNvUiGEoSb2CrSLsvJsdn13ey9ibbZfdeXyDp9y6mKWYVmefAQLWUC1Kydj4f4yFwCJySEttAhB57647ewBRicTjdpv948MjdAVNf1tTxms4VYg4Jb3pLVeGAPaRtW5QHUkA8LwN5fh3fmaFk1mRudMd67UzGdzrVBeEHAp4zCnN7g2iVdWNmwN3"}|} + + let create_libp2p_peer ~peer_name ~external_port = + Printf.sprintf "/dns4/%s/tcp/%d/p2p/%s" peer_name external_port peer_id + + type config = + { archive_address : string option; base_config : Base_node_config.t } + [@@deriving to_yojson] + + type t = + { service_name : string + ; config : config + ; docker_config : Dockerfile.Service.t + } + [@@deriving to_yojson] + + let seed_libp2p_keypair : Docker_compose.Dockerfile.Service.Volume.t = + { type_ = "bind" + ; source = "keys/libp2p_key" + ; target = Base_node_config.container_libp2p_key_path + } + + let create_cmd config = + let base_args = Base_node_config.to_list config.base_config in + let seed_args = + match config.archive_address with + | Some archive_address -> + [ "daemon"; "-seed"; "-archive-address"; archive_address ] + | None -> + [ "daemon"; "-seed" ] + in + List.concat [ seed_args; base_args ] + + let create_docker_config ~image ~entrypoint ~ports ~volumes ~environment + ~config = + { Dockerfile.Service.image + ; command = create_cmd config + ; entrypoint + ; ports + ; environment + ; volumes + } + + let create ~service_name ~image ~ports ~volumes ~config = + let entrypoint = Some [ "/root/entrypoint.sh" ] in + let environment = Base_node_config.to_docker_env_vars config.base_config in + let docker_config = + create_docker_config ~image ~ports ~volumes ~environment ~entrypoint + ~config + in + { service_name; config; docker_config } +end + +module Snark_worker_config = struct + type config = + { daemon_address : string + ; daemon_port : string + ; proof_level : string + ; base_config : Base_node_config.t + } + [@@deriving to_yojson] + + type t = + { service_name : string + ; config : config + ; docker_config : Dockerfile.Service.t + } + [@@deriving to_yojson] + + let create_cmd config = + [ "internal" + ; "snark-worker" + ; "-proof-level" + ; config.proof_level + ; "-daemon-address" + ; config.daemon_address ^ ":" ^ config.daemon_port + ; "--shutdown-on-disconnect" + ; "false" + ] + + let create_docker_config ~image ~entrypoint ~ports ~volumes ~environment + ~config = + { Dockerfile.Service.image + ; command = create_cmd config + ; entrypoint + ; ports + ; environment + ; volumes + } + + let create ~service_name ~image ~ports ~volumes ~config = + let entrypoint = Some [ "/root/entrypoint.sh" ] in + let environment = Base_node_config.to_docker_env_vars config.base_config in + let docker_config = + create_docker_config ~image ~ports ~volumes ~environment ~entrypoint + ~config + in + { 
service_name; config; docker_config } +end + +module Snark_coordinator_config = struct + type config = + { snark_coordinator_key : string + ; snark_worker_fee : string + ; work_selection : string + ; worker_nodes : Snark_worker_config.t list + ; base_config : Base_node_config.t + } + [@@deriving to_yojson] + + type t = + { service_name : string + ; config : config + ; docker_config : Dockerfile.Service.t + } + [@@deriving to_yojson] + + let snark_coordinator_default_env ~snark_coordinator_key ~snark_worker_fee + ~work_selection = + [ ("MINA_SNARK_KEY", snark_coordinator_key) + ; ("MINA_SNARK_FEE", snark_worker_fee) + ; ("WORK_SELECTION", work_selection) + ; ("MINA_CLIENT_TRUSTLIST", "10.0.0.0/8,172.16.0.0/12,192.168.0.0/16") + ] + + let create_cmd config = + let base_args = Base_node_config.to_list config.base_config in + let snark_coordinator_args = + [ "daemon" + ; "-run-snark-coordinator" + ; config.snark_coordinator_key + ; "-snark-worker-fee" + ; config.snark_worker_fee + ; "-work-selection" + ; config.work_selection + ] + in + List.concat [ snark_coordinator_args; base_args ] + + let create_docker_config ~image ~entrypoint ~ports ~volumes ~environment + ~config = + { Dockerfile.Service.image + ; command = create_cmd config + ; entrypoint + ; ports + ; environment + ; volumes + } + + let create ~service_name ~image ~ports ~volumes ~config = + let entrypoint = Some [ "/root/entrypoint.sh" ] in + let environment = + snark_coordinator_default_env + ~snark_coordinator_key:config.snark_coordinator_key + ~snark_worker_fee:config.snark_worker_fee + ~work_selection:config.work_selection + @ Base_node_config.to_docker_env_vars config.base_config + in + let docker_config = + create_docker_config ~image ~ports ~volumes ~environment ~entrypoint + ~config + in + { service_name; config; docker_config } +end + +module Postgres_config = struct + type config = + { host : string + ; username : string + ; password : string + ; database : string + ; port : int + } + [@@deriving to_yojson] + + type t = + { service_name : string + ; config : config + ; docker_config : Dockerfile.Service.t + } + [@@deriving to_yojson] + + let postgres_image = "docker.io/bitnami/postgresql" + + let postgres_script = + ( "postgres_entrypoint.sh" + , {|#!/bin/bash +# This file is auto-generated by the local integration test framework. 
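+# This script is mounted into /docker-entrypoint-initdb.d/, so the postgres
+# container runs it once on first startup, after its data directory is ready.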
+cd /bitnami +# Create the archive database and import the schema +psql -U postgres -d archive -f ./create_schema.sql +|} + ) + + let postgres_create_schema_volume : Dockerfile.Service.Volume.t = + { type_ = "bind" + ; source = "create_schema.sql" + ; target = "/bitnami/create_schema.sql" + } + + let postgres_zkapp_schema_volume : Dockerfile.Service.Volume.t = + { type_ = "bind" + ; source = "zkapp_tables.sql" + ; target = "/bitnami/zkapp_tables.sql" + } + + let postgres_entrypoint_volume : Dockerfile.Service.Volume.t = + { type_ = "bind" + ; source = "postgres_entrypoint.sh" + ; target = "/docker-entrypoint-initdb.d/postgres_entrypoint.sh" + } + + let postgres_default_envs ~username ~password ~database ~port = + [ ("BITNAMI_DEBUG", "false") + ; ("POSTGRES_USER", username) + ; ("POSTGRES_PASSWORD", password) + ; ("POSTGRES_DB", database) + ; ("PGPASSWORD", password) + ; ("POSTGRESQL_PORT_NUMBER", port) + ; ("POSTGRESQL_ENABLE_LDAP", "no") + ; ("POSTGRESQL_ENABLE_TLS", "no") + ; ("POSTGRESQL_LOG_HOSTNAME", "false") + ; ("POSTGRESQL_LOG_CONNECTIONS", "false") + ; ("POSTGRESQL_LOG_DISCONNECTIONS", "false") + ; ("POSTGRESQL_PGAUDIT_LOG_CATALOG", "off") + ; ("POSTGRESQL_CLIENT_MIN_MESSAGES", "error") + ; ("POSTGRESQL_SHARED_PRELOAD_LIBRARIES", "pgaudit") + ; ("POSTGRES_HOST_AUTH_METHOD", "trust") + ] + + let create_connection_uri ~host ~username ~password ~database ~port = + Printf.sprintf "postgres://%s:%s@%s:%d/%s" username password host port + database + + let to_connection_uri t = + create_connection_uri ~host:t.host ~port:t.port ~username:t.username + ~password:t.password ~database:t.database + + let create_docker_config ~image ~entrypoint ~ports ~volumes ~environment = + { Dockerfile.Service.image + ; command = [] + ; entrypoint + ; ports + ; environment + ; volumes + } + + let create ~service_name ~image ~ports ~volumes ~config = + let environment = + postgres_default_envs ~username:config.username ~password:config.password + ~database:config.database + ~port:(Int.to_string config.port) + in + let docker_config = + create_docker_config ~image ~ports ~volumes ~environment ~entrypoint:None + in + { service_name; config; docker_config } +end + +module Archive_node_config = struct + type config = + { postgres_config : Postgres_config.t + ; server_port : int + ; base_config : Base_node_config.t + } + [@@deriving to_yojson] + + type t = + { service_name : string + ; config : config + ; docker_config : Dockerfile.Service.t + } + [@@deriving to_yojson] + + let archive_entrypoint_script = + ( "archive_entrypoint.sh" + , {|#!/bin/bash + # This file is auto-generated by the local integration test framework. + # Sleep for 15 seconds + echo "Sleeping for 15 seconds before starting..." 
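+ # Crude synchronization: give the postgres container time to finish
+ # importing the archive schema before the archive process starts.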
+ sleep 15 + exec "$@"|} + ) + + let archive_entrypoint_volume : Docker_compose.Dockerfile.Service.Volume.t = + { type_ = "bind" + ; source = "archive_entrypoint.sh" + ; target = Base_node_config.container_entrypoint_path + } + + let create_cmd config = + let base_args = + [ "mina-archive" + ; "run" + ; "-postgres-uri" + ; Postgres_config.to_connection_uri config.postgres_config.config + ; "-server-port" + ; Int.to_string config.server_port + ] + in + let runtime_config_path = + match config.base_config.runtime_config_path with + | Some path -> + [ "-config-file"; path ] + | None -> + [] + in + List.concat [ base_args; runtime_config_path ] + + let create_docker_config ~image ~entrypoint ~ports ~volumes ~environment + ~config = + { Dockerfile.Service.image + ; command = create_cmd config + ; entrypoint + ; ports + ; environment + ; volumes + } + + let create ~service_name ~image ~ports ~volumes ~config = + let entrypoint = Some [ "/root/entrypoint.sh" ] in + let environment = Base_node_config.to_docker_env_vars config.base_config in + let docker_config = + create_docker_config ~image ~ports ~volumes ~environment ~entrypoint + ~config + in + { service_name; config; docker_config } +end diff --git a/src/lib/integration_test_local_engine/dune b/src/lib/integration_test_local_engine/dune new file mode 100644 index 00000000000..e4a772945f2 --- /dev/null +++ b/src/lib/integration_test_local_engine/dune @@ -0,0 +1,70 @@ +(library + (public_name integration_test_local_engine) + (name integration_test_local_engine) + (inline_tests + (flags -verbose -show-counts)) + (instrumentation + (backend bisect_ppx)) + (preprocessor_deps + ../../graphql-ppx-config.inc + ../../../graphql_schema.json) + (preprocess + (pps + ppx_here + ppx_mina + ppx_version + ppx_let + ppx_inline_test + ppx_pipebang + ppx_custom_printf + ppx_deriving_yojson + lens.ppx_deriving + ppx_sexp_conv + graphql_ppx + -- + %{read-lines:../../graphql-ppx-config.inc})) + (libraries + ;; opam libraries + async_unix + async_kernel + core_kernel + core + async + cmdliner + base + uri + sexplib0 + stdio + result + base.caml + integers + re2 + ;; local libraries + key_gen + integration_test_lib + graphql_lib + mina_runtime_config + mina_base + genesis_constants + genesis_ledger_helper + logger + mina_base_import + signature_lib + currency + mina_version + timeout_lib + mina_numbers + mina_state + mina_stdlib + mina_transaction + file_system + pickles + pickles_types + backend + kimchi_pasta + kimchi_backend.pasta.basic + with_hash + data_hash_lib + generated_graphql_queries + mina_graphql + error_json)) diff --git a/src/lib/integration_test_local_engine/integration_test_local_engine.ml b/src/lib/integration_test_local_engine/integration_test_local_engine.ml new file mode 100644 index 00000000000..dc672814be8 --- /dev/null +++ b/src/lib/integration_test_local_engine/integration_test_local_engine.ml @@ -0,0 +1,15 @@ +let name = "local" + +module Network = Docker_network +module Network_config = Mina_docker.Network_config +module Network_manager = Mina_docker.Network_manager + +module Docker_polling_interval = struct + let start_filtered_logs_interval = Core.Time.Span.of_sec 0.25 +end + +module Log_engine = + Integration_test_lib.Graphql_polling_log_engine + .Make_GraphQL_polling_log_engine + (Docker_network) + (Docker_polling_interval) diff --git a/src/lib/integration_test_local_engine/integration_test_local_engine.mli b/src/lib/integration_test_local_engine/integration_test_local_engine.mli new file mode 100644 index 00000000000..e1ba61b4e77 --- 
/dev/null +++ b/src/lib/integration_test_local_engine/integration_test_local_engine.mli @@ -0,0 +1 @@ +include Integration_test_lib.Intf.Engine.S diff --git a/src/lib/integration_test_local_engine/mina_docker.ml b/src/lib/integration_test_local_engine/mina_docker.ml new file mode 100644 index 00000000000..ac1c63ec9ab --- /dev/null +++ b/src/lib/integration_test_local_engine/mina_docker.ml @@ -0,0 +1,1063 @@ +open Core +open Async +open Currency +open Signature_lib +open Mina_base +open Integration_test_lib + +let docker_swarm_version = "3.8" + +module Network_config = struct + module Cli_inputs = Cli_inputs + + type docker_config = + { docker_swarm_version : string + ; stack_name : string + ; mina_image : string + ; mina_agent_image : string + ; mina_bots_image : string + ; mina_points_image : string + ; mina_archive_image : string + ; runtime_config : Yojson.Safe.t + ; seed_configs : Docker_node_config.Seed_config.t list + ; block_producer_configs : Docker_node_config.Block_producer_config.t list + ; snark_coordinator_config : + Docker_node_config.Snark_coordinator_config.t option + ; archive_node_configs : Docker_node_config.Archive_node_config.t list + ; mina_archive_schema_aux_files : string list + ; log_precomputed_blocks : bool + ; start_filtered_logs : string list + } + [@@deriving to_yojson] + + type t = + { debug_arg : bool + ; genesis_keypairs : + (Network_keypair.t Core.String.Map.t + [@to_yojson + fun map -> + `Assoc + (Core.Map.fold_right ~init:[] + ~f:(fun ~key:k ~data:v accum -> + (k, Network_keypair.to_yojson v) :: accum ) + map )] ) + ; constants : Test_config.constants + ; docker : docker_config + } + [@@deriving to_yojson] + + let expand ~logger ~test_name ~(cli_inputs : Cli_inputs.t) ~(debug : bool) + ~(test_config : Test_config.t) ~(images : Test_config.Container_images.t) + = + let _ = cli_inputs in + let { genesis_ledger + ; epoch_data + ; block_producers + ; snark_coordinator + ; snark_worker_fee + ; num_archive_nodes + ; log_precomputed_blocks + ; start_filtered_logs + ; proof_config + ; Test_config.k + ; delta + ; slots_per_epoch + ; slots_per_sub_window + ; txpool_max_size + ; slot_tx_end + ; slot_chain_end + ; _ + } = + test_config + in + let git_commit = Mina_version.commit_id_short in + let stack_name = "it-" ^ git_commit ^ "-" ^ test_name in + let key_names_list = + List.map genesis_ledger ~f:(fun acct -> acct.account_name) + in + if List.contains_dup ~compare:String.compare key_names_list then + failwith + "All accounts in genesis ledger must have unique names. Check to make \ + sure you are not using the same account_name more than once" ; + let all_nodes_names_list = + List.map block_producers ~f:(fun acct -> acct.node_name) + @ match snark_coordinator with None -> [] | Some n -> [ n.node_name ] + in + if List.contains_dup ~compare:String.compare all_nodes_names_list then + failwith + "All nodes in testnet must have unique names. 
Check to make sure you \ + are not using the same node_name more than once" ; + let keypairs = + List.take + (List.tl_exn + (Array.to_list (Lazy.force Key_gen.Sample_keypairs.keypairs)) ) + (List.length genesis_ledger) + in + let runtime_timing_of_timing = function + | Account.Timing.Untimed -> + None + | Timed t -> + Some + { Runtime_config.Accounts.Single.Timed.initial_minimum_balance = + t.initial_minimum_balance + ; cliff_time = t.cliff_time + ; cliff_amount = t.cliff_amount + ; vesting_period = t.vesting_period + ; vesting_increment = t.vesting_increment + } + in + let add_accounts accounts_and_keypairs = + List.map accounts_and_keypairs + ~f:(fun + ( { Test_config.Test_account.balance; account_name; timing; _ } + , (pk, sk) ) + -> + let timing = runtime_timing_of_timing timing in + let default = Runtime_config.Accounts.Single.default in + let account = + { default with + pk = Public_key.Compressed.to_string pk + ; sk = Some (Private_key.to_base58_check sk) + ; balance = Balance.of_mina_string_exn balance + ; delegate = None + ; timing + } + in + (account_name, account) ) + in + let genesis_accounts_and_keys = List.zip_exn genesis_ledger keypairs in + let genesis_ledger_accounts = add_accounts genesis_accounts_and_keys in + let constraint_constants = + Genesis_ledger_helper.make_constraint_constants + ~default:Genesis_constants.Constraint_constants.compiled proof_config + in + let ledger_is_prefix ledger1 ledger2 = + List.is_prefix ledger2 ~prefix:ledger1 + ~equal:(fun + ({ account_name = name1; _ } : Test_config.Test_account.t) + ({ account_name = name2; _ } : Test_config.Test_account.t) + -> String.equal name1 name2 ) + in + let runtime_config = + { Runtime_config.daemon = + Some + { txpool_max_size = Some txpool_max_size + ; peer_list_url = None + ; zkapp_proof_update_cost = None + ; zkapp_signed_single_update_cost = None + ; zkapp_signed_pair_update_cost = None + ; zkapp_transaction_cost_limit = None + ; max_event_elements = None + ; max_action_elements = None + ; zkapp_cmd_limit_hardcap = None + ; slot_tx_end + ; slot_chain_end + } + ; genesis = + Some + { k = Some k + ; delta = Some delta + ; slots_per_epoch = Some slots_per_epoch + ; slots_per_sub_window = Some slots_per_sub_window + ; genesis_state_timestamp = + Some Core.Time.(to_string_abs ~zone:Zone.utc (now ())) + ; grace_period_slots = None + } + ; proof = Some proof_config + ; ledger = + Some + { base = + Accounts + (List.map genesis_ledger_accounts ~f:(fun (_name, acct) -> + acct ) ) + ; add_genesis_winner = None + ; num_accounts = None + ; balances = [] + ; hash = None + ; name = None + ; s3_data_hash = None + } + ; epoch_data = + Option.map epoch_data ~f:(fun { staking = staking_ledger; next } -> + let genesis_winner_account : Runtime_config.Accounts.single = + Runtime_config.Accounts.Single.of_account + Mina_state.Consensus_state_hooks.genesis_winner_account + |> Result.ok_or_failwith + in + let ledger_of_epoch_accounts + (epoch_accounts : Test_config.Test_account.t list) = + let epoch_ledger_accounts = + List.map epoch_accounts + ~f:(fun { account_name; balance; timing; _ } -> + let balance = Balance.of_mina_string_exn balance in + let timing = runtime_timing_of_timing timing in + let genesis_account = + match + List.Assoc.find genesis_ledger_accounts account_name + ~equal:String.equal + with + | Some acct -> + acct + | None -> + failwithf + "Epoch ledger account %s not in genesis ledger" + account_name () + in + { genesis_account with balance; timing } ) + in + ( { base = + Accounts (genesis_winner_account :: 
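+ (* Prepend the hardcoded consensus genesis winner account so that
+    every generated epoch ledger contains it. *)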
epoch_ledger_accounts) + ; add_genesis_winner = None (* no effect *) + ; num_accounts = None + ; balances = [] + ; hash = None + ; name = None + ; s3_data_hash = None + } + : Runtime_config.Ledger.t ) + in + let staking = + let ({ epoch_ledger; epoch_seed } + : Test_config.Epoch_data.Data.t ) = + staking_ledger + in + if not (ledger_is_prefix epoch_ledger genesis_ledger) then + failwith "Staking epoch ledger not a prefix of genesis ledger" ; + let ledger = ledger_of_epoch_accounts epoch_ledger in + let seed = epoch_seed in + ({ ledger; seed } : Runtime_config.Epoch_data.Data.t) + in + let next = + Option.map next ~f:(fun { epoch_ledger; epoch_seed } -> + if + not + (ledger_is_prefix staking_ledger.epoch_ledger + epoch_ledger ) + then + failwith + "Staking epoch ledger not a prefix of next epoch ledger" ; + if not (ledger_is_prefix epoch_ledger genesis_ledger) then + failwith + "Next epoch ledger not a prefix of genesis ledger" ; + let ledger = ledger_of_epoch_accounts epoch_ledger in + let seed = epoch_seed in + ({ ledger; seed } : Runtime_config.Epoch_data.Data.t) ) + in + ({ staking; next } : Runtime_config.Epoch_data.t) ) + } + in + let genesis_constants = + Or_error.ok_exn + (Genesis_ledger_helper.make_genesis_constants ~logger + ~default:Genesis_constants.compiled runtime_config ) + in + let constants : Test_config.constants = + { constraints = constraint_constants; genesis = genesis_constants } + in + let mk_net_keypair keypair_name (pk, sk) = + let keypair = + { Keypair.public_key = Public_key.decompress_exn pk; private_key = sk } + in + Network_keypair.create_network_keypair ~keypair_name ~keypair + in + let long_commit_id = + if String.is_substring Mina_version.commit_id ~substring:"[DIRTY]" then + String.sub Mina_version.commit_id ~pos:7 + ~len:(String.length Mina_version.commit_id - 7) + else Mina_version.commit_id + in + let mina_archive_base_url = + "https://raw.githubusercontent.com/MinaProtocol/mina/" ^ long_commit_id + ^ "/src/app/archive/" + in + let mina_archive_schema_aux_files = + [ sprintf "%screate_schema.sql" mina_archive_base_url + ; sprintf "%szkapp_tables.sql" mina_archive_base_url + ] + in + let genesis_keypairs = + List.fold genesis_accounts_and_keys ~init:String.Map.empty + ~f:(fun map ({ account_name; _ }, (pk, sk)) -> + let keypair = mk_net_keypair account_name (pk, sk) in + String.Map.add_exn map ~key:account_name ~data:keypair ) + in + let open Docker_node_config in + let open Docker_compose.Dockerfile in + let port_manager = PortManager.create ~min_port:10000 ~max_port:11000 in + let docker_volumes = + [ Base_node_config.runtime_config_volume + ; Base_node_config.entrypoint_volume + ] + in + let generate_random_id () = + let rand_char () = + let ascii_a = int_of_char 'a' in + let ascii_z = int_of_char 'z' in + char_of_int (ascii_a + Random.int (ascii_z - ascii_a + 1)) + in + String.init 4 ~f:(fun _ -> rand_char ()) + in + let seed_config = + let config : Seed_config.config = + { archive_address = None + ; base_config = + Base_node_config.default ~peer:None + ~runtime_config_path: + (Some Base_node_config.container_runtime_config_path) + } + in + Seed_config.create + ~service_name:(sprintf "seed-%s" (generate_random_id ())) + ~image:images.mina + ~ports:(PortManager.allocate_ports_for_node port_manager) + ~volumes:(docker_volumes @ [ Seed_config.seed_libp2p_keypair ]) + ~config + in + let seed_config_peer = + Some + (Seed_config.create_libp2p_peer ~peer_name:seed_config.service_name + ~external_port:PortManager.mina_internal_external_port ) + in + let 
archive_node_configs = + List.init num_archive_nodes ~f:(fun index -> + let config = + { Postgres_config.host = + sprintf "postgres-%d-%s" (index + 1) (generate_random_id ()) + ; username = "postgres" + ; password = "password" + ; database = "archive" + ; port = PortManager.postgres_internal_port + } + in + let postgres_port = + Service.Port.create + ~published:(PortManager.allocate_port port_manager) + ~target:PortManager.postgres_internal_port + in + let postgres_config = + Postgres_config.create ~service_name:config.host + ~image:Postgres_config.postgres_image ~ports:[ postgres_port ] + ~volumes: + [ Postgres_config.postgres_create_schema_volume + ; Postgres_config.postgres_zkapp_schema_volume + ; Postgres_config.postgres_entrypoint_volume + ] + ~config + in + let archive_server_port = + Service.Port.create + ~published:(PortManager.allocate_port port_manager) + ~target:PortManager.mina_internal_server_port + in + let config : Archive_node_config.config = + { postgres_config + ; server_port = archive_server_port.target + ; base_config = + Base_node_config.default ~peer:None + ~runtime_config_path: + (Some Base_node_config.container_runtime_config_path) + } + in + let archive_rest_port = + Service.Port.create + ~published:(PortManager.allocate_port port_manager) + ~target:PortManager.mina_internal_rest_port + in + Archive_node_config.create + ~service_name: + (sprintf "archive-%d-%s" (index + 1) (generate_random_id ())) + ~image:images.archive_node + ~ports:[ archive_server_port; archive_rest_port ] + ~volumes: + [ Base_node_config.runtime_config_volume + ; Archive_node_config.archive_entrypoint_volume + ] + ~config ) + in + (* Each archive node has its own seed node *) + let seed_configs = + List.mapi archive_node_configs ~f:(fun index archive_config -> + let config : Seed_config.config = + { archive_address = + Some + (sprintf "%s:%d" archive_config.service_name + PortManager.mina_internal_server_port ) + ; base_config = + Base_node_config.default ~peer:seed_config_peer + ~runtime_config_path: + (Some Base_node_config.container_runtime_config_path) + } + in + Seed_config.create + ~service_name: + (sprintf "seed-%d-%s" (index + 1) (generate_random_id ())) + ~image:images.mina + ~ports:(PortManager.allocate_ports_for_node port_manager) + ~volumes:docker_volumes ~config ) + @ [ seed_config ] + in + let block_producer_configs = + List.map block_producers ~f:(fun node -> + let keypair = + match + List.find genesis_accounts_and_keys + ~f:(fun ({ account_name; _ }, _keypair) -> + String.equal account_name node.account_name ) + with + | Some (_acct, keypair) -> + keypair |> mk_net_keypair node.account_name + | None -> + let failstring = + Format.sprintf + "Failing because the account key of all initial block \ producers must be in the genesis ledger. Name of node: \ %s; 
name of Account which does not exist: %s" + node.node_name node.account_name + in + failwith failstring + in + let priv_key_path = + Base_node_config.container_keys_path ^/ node.account_name + in + let volumes = + [ Service.Volume.create ("keys" ^/ node.account_name) priv_key_path + ] + @ docker_volumes + in + let block_producer_config : Block_producer_config.config = + { keypair + ; priv_key_path + ; enable_peer_exchange = true + ; enable_flooding = true + ; base_config = + Base_node_config.default ~peer:seed_config_peer + ~runtime_config_path: + (Some Base_node_config.container_runtime_config_path) + } + in + Block_producer_config.create ~service_name:node.node_name + ~image:images.mina + ~ports:(PortManager.allocate_ports_for_node port_manager) + ~volumes ~config:block_producer_config ) + in + let snark_coordinator_config = + match snark_coordinator with + | None -> + None + | Some snark_coordinator_node -> + let network_kp = + match + String.Map.find genesis_keypairs + snark_coordinator_node.account_name + with + | Some acct -> + acct + | None -> + let failstring = + Format.sprintf + "Failing because the account key of all initial snark \ + coordinators must be in the genesis ledger. name of \ + Node: %s. name of Account which does not exist: %s" + snark_coordinator_node.node_name + snark_coordinator_node.account_name + in + failwith failstring + in + let public_key = + Public_key.Compressed.to_base58_check + (Public_key.compress network_kp.keypair.public_key) + in + let coordinator_ports = + PortManager.allocate_ports_for_node port_manager + in + let daemon_port = + coordinator_ports + |> List.find_exn ~f:(fun p -> + p.target + = Docker_node_config.PortManager.mina_internal_client_port ) + in + let snark_node_service_name = snark_coordinator_node.node_name in + let worker_node_config : Snark_worker_config.config = + { daemon_address = snark_node_service_name + ; daemon_port = Int.to_string daemon_port.target + ; proof_level = "full" + ; base_config = + Base_node_config.default ~peer:None ~runtime_config_path:None + } + in + let worker_nodes = + List.init snark_coordinator_node.worker_nodes ~f:(fun index -> + Docker_node_config.Snark_worker_config.create + ~service_name: + (sprintf "snark-worker-%d-%s" (index + 1) + (generate_random_id ()) ) + ~image:images.mina + ~ports: + (Docker_node_config.PortManager.allocate_ports_for_node + port_manager ) + ~volumes:docker_volumes ~config:worker_node_config ) + in + let snark_coordinator_config : Snark_coordinator_config.config = + { worker_nodes + ; snark_worker_fee + ; snark_coordinator_key = public_key + ; work_selection = "seq" + ; base_config = + Base_node_config.default ~peer:seed_config_peer + ~runtime_config_path: + (Some Base_node_config.container_runtime_config_path) + } + in + Some + (Snark_coordinator_config.create + ~service_name:snark_node_service_name ~image:images.mina + ~ports:coordinator_ports ~volumes:docker_volumes + ~config:snark_coordinator_config ) + in + { debug_arg = debug + ; genesis_keypairs + ; constants + ; docker = + { docker_swarm_version + ; stack_name + ; mina_image = images.mina + ; mina_agent_image = images.user_agent + ; mina_bots_image = images.bots + ; mina_points_image = images.points + ; mina_archive_image = images.archive_node + ; runtime_config = Runtime_config.to_yojson runtime_config + ; log_precomputed_blocks + ; start_filtered_logs + ; block_producer_configs + ; seed_configs + ; mina_archive_schema_aux_files + ; snark_coordinator_config + ; archive_node_configs + } + } + + (* + Composes a 
docker_compose.json file from the network_config specification and writes it to disk. This docker_compose + file contains docker service definitions for each node in the local network. Each node service has a different + configuration, specified as commands, environment variables, and docker bind volumes. + We start by creating a runtime config volume to mount into each node service as a bind volume, and then create each + node service. As we create the definition for each service, we specify the docker command, volume, and environment variables to + be used (which are mostly defaults). + *) + let to_docker network_config = + let open Docker_compose.Dockerfile in + let block_producer_map = + List.map network_config.docker.block_producer_configs ~f:(fun config -> + (config.service_name, config.docker_config) ) + |> StringMap.of_alist_exn + in + let seed_map = + List.map network_config.docker.seed_configs ~f:(fun config -> + (config.service_name, config.docker_config) ) + |> StringMap.of_alist_exn + in + let snark_coordinator_map = + match network_config.docker.snark_coordinator_config with + | Some config -> + StringMap.of_alist_exn [ (config.service_name, config.docker_config) ] + | None -> + StringMap.empty + in + let snark_worker_map = + match network_config.docker.snark_coordinator_config with + | Some snark_coordinator_config -> + List.map snark_coordinator_config.config.worker_nodes + ~f:(fun config -> (config.service_name, config.docker_config)) + |> StringMap.of_alist_exn + | None -> + StringMap.empty + in + let archive_node_map = + List.map network_config.docker.archive_node_configs ~f:(fun config -> + (config.service_name, config.docker_config) ) + |> StringMap.of_alist_exn + in + let postgres_map = + List.map network_config.docker.archive_node_configs + ~f:(fun archive_config -> + let config = archive_config.config.postgres_config in + (config.service_name, config.docker_config) ) + |> StringMap.of_alist_exn + in + let services = + postgres_map |> merge archive_node_map |> merge snark_worker_map + |> merge snark_coordinator_map + |> merge block_producer_map |> merge seed_map + in + { version = docker_swarm_version; services } +end + +module Network_manager = struct + type t = + { logger : Logger.t + ; stack_name : string + ; graphql_enabled : bool + ; docker_dir : string + ; docker_compose_file_path : string + ; constants : Test_config.constants + ; seed_workloads : Docker_network.Service_to_deploy.t Core.String.Map.t + ; block_producer_workloads : + Docker_network.Service_to_deploy.t Core.String.Map.t + ; snark_coordinator_workloads : + Docker_network.Service_to_deploy.t Core.String.Map.t + ; snark_worker_workloads : + Docker_network.Service_to_deploy.t Core.String.Map.t + ; archive_workloads : Docker_network.Service_to_deploy.t Core.String.Map.t + ; services_by_id : Docker_network.Service_to_deploy.t Core.String.Map.t + ; mutable deployed : bool + ; genesis_keypairs : Network_keypair.t Core.String.Map.t + } + + let get_current_running_stacks = + let open Malleable_error.Let_syntax in + let%bind all_stacks_str = + Util.run_cmd_or_hard_error "/" "docker" + [ "stack"; "ls"; "--format"; "{{.Name}}" ] + in + return (String.split ~on:'\n' all_stacks_str) + + let remove_stack_if_exists ~logger (network_config : Network_config.t) = + let open Malleable_error.Let_syntax in + let%bind all_stacks = get_current_running_stacks in + if List.mem all_stacks network_config.docker.stack_name ~equal:String.equal + then + let%bind () = + if network_config.debug_arg then + Deferred.bind 
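+ (* [prompt_continue] yields a plain [Deferred.t]; binding with
+    [Malleable_error.return] lifts the result into [Malleable_error.t]. *)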
~f:Malleable_error.return + (Util.prompt_continue + "Existing stack of the same name detected; pausing startup. \ Enter [y/Y] to continue, remove the existing stack, start \ clean, and run the test; press Ctrl-C to quit: " ) + else + Malleable_error.return + ([%log info] + "Existing stack of the same name detected; removing to start clean" ) + in + Util.run_cmd_or_hard_error "/" "docker" + [ "stack"; "rm"; network_config.docker.stack_name ] + >>| Fn.const () + else return () + + let generate_docker_stack_file ~logger ~docker_dir ~docker_compose_file_path + ~network_config = + let open Deferred.Let_syntax in + let%bind () = + if%bind File_system.dir_exists docker_dir then ( + [%log info] "Old docker stack directory found; removing to start clean" ; + File_system.remove_dir docker_dir ) + else return () + in + [%log info] "Writing docker configuration %s" docker_dir ; + let%bind () = Unix.mkdir docker_dir in + let%bind _ = + Docker_compose.Dockerfile.write_config ~dir:docker_dir + ~filename:docker_compose_file_path + (Network_config.to_docker network_config) + in + return () + + let write_docker_bind_volumes ~logger ~docker_dir + ~(network_config : Network_config.t) = + let open Deferred.Let_syntax in + [%log info] "Writing runtime_config %s" docker_dir ; + let%bind () = + Yojson.Safe.to_file + (String.concat [ docker_dir; "/runtime_config.json" ]) + network_config.docker.runtime_config + |> Deferred.return + in + [%log info] "Writing out the genesis keys to dir %s" docker_dir ; + let kps_base_path = String.concat [ docker_dir; "/keys" ] in + let%bind () = Unix.mkdir kps_base_path in + [%log info] "Writing genesis keys to %s" kps_base_path ; + let%bind () = + Core.String.Map.iter network_config.genesis_keypairs ~f:(fun kp -> + let keypath = String.concat [ kps_base_path; "/"; kp.keypair_name ] in + Out_channel.with_file ~fail_if_exists:true keypath ~f:(fun ch -> + kp.private_key |> Out_channel.output_string ch ) ; + Out_channel.with_file ~fail_if_exists:true (keypath ^ ".pub") + ~f:(fun ch -> kp.public_key |> Out_channel.output_string ch) ; + ignore + (Util.run_cmd_exn kps_base_path "chmod" [ "600"; kp.keypair_name ]) ) + |> Deferred.return + in + [%log info] "Writing seed libp2p keypair to %s" kps_base_path ; + let%bind () = + let keypath = String.concat [ kps_base_path; "/"; "libp2p_key" ] in + Out_channel.with_file ~fail_if_exists:true keypath ~f:(fun ch -> + Docker_node_config.Seed_config.libp2p_keypair + |> Out_channel.output_string ch ) ; + ignore (Util.run_cmd_exn kps_base_path "chmod" [ "600"; "libp2p_key" ]) ; + return () + in + let%bind () = + ignore (Util.run_cmd_exn docker_dir "chmod" [ "700"; "keys" ]) + |> Deferred.return + in + [%log info] + "Writing custom entrypoint script (libp2p key generation and puppeteer \ context)" ; + let entrypoint_filename, entrypoint_script = + Docker_node_config.Base_node_config.entrypoint_script + in + Out_channel.with_file ~fail_if_exists:true + (docker_dir ^/ entrypoint_filename) ~f:(fun ch -> + entrypoint_script |> Out_channel.output_string ch ) ; + [%log info] + "Writing custom archive entrypoint script (wait for postgres to \ initialize)" ; + let archive_filename, archive_script = + Docker_node_config.Archive_node_config.archive_entrypoint_script + in + Out_channel.with_file ~fail_if_exists:true (docker_dir ^/ archive_filename) + ~f:(fun ch -> archive_script |> Out_channel.output_string ch) ; + ignore (Util.run_cmd_exn docker_dir "chmod" [ "+x"; archive_filename ]) ; + let%bind _ = + Deferred.List.iter 
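+ (* Fetches create_schema.sql and zkapp_tables.sql from the raw-GitHub
+    URLs pinned to this commit in [mina_archive_schema_aux_files]. *)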
network_config.docker.mina_archive_schema_aux_files + ~f:(fun schema_url -> + let filename = Filename.basename schema_url in + [%log info] "Downloading %s" schema_url ; + let%bind _ = + Util.run_cmd_or_hard_error docker_dir "curl" + [ "-o"; filename; schema_url ] + in + [%log info] + "Writing custom postgres entrypoint script (import archive node \ + schema)" ; + + Deferred.return () ) + |> Deferred.return + in + ignore (Util.run_cmd_exn docker_dir "chmod" [ "+x"; entrypoint_filename ]) ; + [%log info] "Writing custom postgres entrypoint script (create schema)" ; + let postgres_entrypoint_filename, postgres_entrypoint_script = + Docker_node_config.Postgres_config.postgres_script + in + Out_channel.with_file ~fail_if_exists:true + (docker_dir ^/ postgres_entrypoint_filename) ~f:(fun ch -> + postgres_entrypoint_script |> Out_channel.output_string ch ) ; + ignore + (Util.run_cmd_exn docker_dir "chmod" + [ "+x"; postgres_entrypoint_filename ] ) ; + return () + + let initialize_workloads ~logger (network_config : Network_config.t) = + let find_rest_port ports = + List.find_map_exn ports ~f:(fun port -> + match port with + | Docker_compose.Dockerfile.Service.Port.{ published; target } -> + if target = Docker_node_config.PortManager.mina_internal_rest_port + then Some published + else None ) + in + [%log info] "Initializing seed workloads" ; + let seed_workloads = + List.map network_config.docker.seed_configs ~f:(fun seed_config -> + let graphql_port = find_rest_port seed_config.docker_config.ports in + let node = + Docker_network.Service_to_deploy.construct_service + network_config.docker.stack_name seed_config.service_name + (Docker_network.Service_to_deploy.init_service_to_deploy_config + ~network_keypair:None ~postgres_connection_uri:None + ~graphql_port ) + in + (seed_config.service_name, node) ) + |> Core.String.Map.of_alist_exn + in + [%log info] "Initializing block producer workloads" ; + let block_producer_workloads = + List.map network_config.docker.block_producer_configs ~f:(fun bp_config -> + let graphql_port = find_rest_port bp_config.docker_config.ports in + let node = + Docker_network.Service_to_deploy.construct_service + network_config.docker.stack_name bp_config.service_name + (Docker_network.Service_to_deploy.init_service_to_deploy_config + ~network_keypair:(Some bp_config.config.keypair) + ~postgres_connection_uri:None ~graphql_port ) + in + (bp_config.service_name, node) ) + |> Core.String.Map.of_alist_exn + in + [%log info] "Initializing snark coordinator and worker workloads" ; + let snark_coordinator_workloads, snark_worker_workloads = + match network_config.docker.snark_coordinator_config with + | Some snark_coordinator_config -> + let snark_coordinator_workloads = + if List.length snark_coordinator_config.config.worker_nodes > 0 then + let graphql_port = + find_rest_port snark_coordinator_config.docker_config.ports + in + let coordinator = + Docker_network.Service_to_deploy.construct_service + network_config.docker.stack_name + snark_coordinator_config.service_name + (Docker_network.Service_to_deploy + .init_service_to_deploy_config ~network_keypair:None + ~postgres_connection_uri:None ~graphql_port ) + in + [ (snark_coordinator_config.service_name, coordinator) ] + |> Core.String.Map.of_alist_exn + else Core.String.Map.empty + in + let snark_worker_workloads = + List.map snark_coordinator_config.config.worker_nodes + ~f:(fun snark_worker_config -> + let graphql_port = + find_rest_port snark_worker_config.docker_config.ports + in + let worker = + 
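+ (* Each snark worker runs as its own stack service and reaches the
+    coordinator via the daemon address/port in [Snark_worker_config]. *)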
Docker_network.Service_to_deploy.construct_service + network_config.docker.stack_name + snark_worker_config.service_name + (Docker_network.Service_to_deploy + .init_service_to_deploy_config ~network_keypair:None + ~postgres_connection_uri:None ~graphql_port ) + in + + (snark_worker_config.service_name, worker) ) + |> Core.String.Map.of_alist_exn + in + (snark_coordinator_workloads, snark_worker_workloads) + | None -> + (Core.String.Map.of_alist_exn [], Core.String.Map.of_alist_exn []) + in + [%log info] "Initializing archive node workloads" ; + let archive_workloads = + List.map network_config.docker.archive_node_configs + ~f:(fun archive_config -> + let graphql_port = + find_rest_port archive_config.docker_config.ports + in + let postgres_connection_uri = + Some + (Docker_node_config.Postgres_config.to_connection_uri + archive_config.config.postgres_config.config ) + in + let node = + Docker_network.Service_to_deploy.construct_service + network_config.docker.stack_name archive_config.service_name + (Docker_network.Service_to_deploy.init_service_to_deploy_config + ~network_keypair:None ~postgres_connection_uri ~graphql_port ) + in + (archive_config.service_name, node) ) + |> Core.String.Map.of_alist_exn + in + ( seed_workloads + , block_producer_workloads + , snark_coordinator_workloads + , snark_worker_workloads + , archive_workloads ) + + let poll_until_stack_deployed ~logger = + let poll_interval = Time.Span.of_sec 15.0 in + let max_polls = 20 (* 5 mins *) in + let get_service_statuses () = + let%bind output = + Util.run_cmd_exn "/" "docker" + [ "service"; "ls"; "--format"; "{{.Name}}: {{.Replicas}}" ] + in + return + ( output |> String.split_lines + |> List.map ~f:(fun line -> + match String.split ~on:':' line with + | [ name; replicas ] -> + (String.strip name, String.strip replicas) + | _ -> + failwith "Unexpected format for docker service output" ) ) + in + let rec poll n = + [%log debug] "Checking Docker service statuses, n=%d" n ; + let%bind service_statuses = get_service_statuses () in + let bad_service_statuses = + List.filter service_statuses ~f:(fun (_, status) -> + let parts = String.split ~on:'/' status in + assert (List.length parts = 2) ; + let num, denom = + ( String.strip (List.nth_exn parts 0) + , String.strip (List.nth_exn parts 1) ) + in + not (String.equal num denom) ) + in + let open Malleable_error.Let_syntax in + if List.is_empty bad_service_statuses then return () + else if n > 0 then ( + [%log debug] "Got bad service statuses, polling again ($failed_statuses)" + ~metadata: + [ ( "failed_statuses" + , `Assoc + (List.Assoc.map bad_service_statuses ~f:(fun v -> `String v)) + ) + ] ; + let%bind () = + after poll_interval |> Deferred.bind ~f:Malleable_error.return + in + poll (n - 1) ) + else + let bad_service_statuses_json = + `List + (List.map bad_service_statuses ~f:(fun (service_name, status) -> + `Assoc + [ ("service_name", `String service_name) + ; ("status", `String status) + ] ) ) + in + [%log fatal] + "Not all services could be deployed in time: $bad_service_statuses" + ~metadata:[ ("bad_service_statuses", bad_service_statuses_json) ] ; + Malleable_error.hard_error_string ~exit_code:4 + (Yojson.Safe.to_string bad_service_statuses_json) + in + [%log info] "Waiting for Docker services to be deployed" ; + let res = poll max_polls in + match%bind.Deferred res with + | Error _ -> + [%log error] "Not all Docker services were deployed, cannot proceed!" 
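+ (* [res] still carries the hard error from [poll_until_stack_deployed];
+    it is returned unchanged so the failure propagates to the caller. *)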
; + res + | Ok _ -> + [%log info] "Docker services deployed" ; + res + + let create ~logger (network_config : Network_config.t) = + let open Malleable_error.Let_syntax in + let%bind () = remove_stack_if_exists ~logger network_config in + let ( seed_workloads + , block_producer_workloads + , snark_coordinator_workloads + , snark_worker_workloads + , archive_workloads ) = + initialize_workloads ~logger network_config + in + let services_by_id = + let all_workloads = + Core.String.Map.data seed_workloads + @ Core.String.Map.data snark_coordinator_workloads + @ Core.String.Map.data snark_worker_workloads + @ Core.String.Map.data block_producer_workloads + @ Core.String.Map.data archive_workloads + in + all_workloads + |> List.map ~f:(fun w -> (w.service_name, w)) + |> String.Map.of_alist_exn + in + let open Deferred.Let_syntax in + let docker_dir = network_config.docker.stack_name in + let docker_compose_file_path = + network_config.docker.stack_name ^ ".compose.json" + in + let%bind () = + generate_docker_stack_file ~logger ~docker_dir ~docker_compose_file_path + ~network_config + in + let%bind () = + write_docker_bind_volumes ~logger ~docker_dir ~network_config + in + let t = + { stack_name = network_config.docker.stack_name + ; logger + ; docker_dir + ; docker_compose_file_path + ; constants = network_config.constants + ; graphql_enabled = true + ; seed_workloads + ; block_producer_workloads + ; snark_coordinator_workloads + ; snark_worker_workloads + ; archive_workloads + ; services_by_id + ; deployed = false + ; genesis_keypairs = network_config.genesis_keypairs + } + in + [%log info] "Initializing docker swarm" ; + Malleable_error.return t + + let deploy t = + let logger = t.logger in + if t.deployed then failwith "network already deployed" ; + [%log info] "Deploying stack '%s' from %s" t.stack_name t.docker_dir ; + let open Malleable_error.Let_syntax in + let%bind (_ : string) = + Util.run_cmd_or_hard_error t.docker_dir "docker" + [ "stack"; "deploy"; "-c"; t.docker_compose_file_path; t.stack_name ] + in + t.deployed <- true ; + let%bind () = poll_until_stack_deployed ~logger in + let open Malleable_error.Let_syntax in + let func_for_fold ~(key : string) ~data accum_M = + let%bind mp = accum_M in + let%map node = + Docker_network.Service_to_deploy.get_node_from_service data + in + Core.String.Map.add_exn mp ~key ~data:node + in + let%map seeds = + Core.String.Map.fold t.seed_workloads + ~init:(Malleable_error.return Core.String.Map.empty) + ~f:func_for_fold + and block_producers = + Core.String.Map.fold t.block_producer_workloads + ~init:(Malleable_error.return Core.String.Map.empty) + ~f:func_for_fold + and snark_coordinators = + Core.String.Map.fold t.snark_coordinator_workloads + ~init:(Malleable_error.return Core.String.Map.empty) + ~f:func_for_fold + and snark_workers = + Core.String.Map.fold t.snark_worker_workloads + ~init:(Malleable_error.return Core.String.Map.empty) + ~f:func_for_fold + and archive_nodes = + Core.String.Map.fold t.archive_workloads + ~init:(Malleable_error.return Core.String.Map.empty) + ~f:func_for_fold + in + let network = + { Docker_network.namespace = t.stack_name + ; constants = t.constants + ; seeds + ; block_producers + ; snark_coordinators + ; snark_workers + ; archive_nodes + ; genesis_keypairs = t.genesis_keypairs + } + in + let nodes_to_string = + Fn.compose (String.concat ~sep:", ") (List.map ~f:Docker_network.Node.id) + in + [%log info] "Network deployed" ; + [%log info] "testnet namespace: %s" t.stack_name ; + [%log info] "snark coordinators: %s" 
+ (nodes_to_string (Core.String.Map.data network.snark_coordinators)) ; + [%log info] "snark workers: %s" + (nodes_to_string (Core.String.Map.data network.snark_workers)) ; + [%log info] "block producers: %s" + (nodes_to_string (Core.String.Map.data network.block_producers)) ; + [%log info] "archive nodes: %s" + (nodes_to_string (Core.String.Map.data network.archive_nodes)) ; + network + + let destroy t = + [%log' info t.logger] "Destroying network" ; + if not t.deployed then failwith "network not deployed" ; + let%bind _ = + Util.run_cmd_exn "/" "docker" [ "stack"; "rm"; t.stack_name ] + in + t.deployed <- false ; + Deferred.unit + + let cleanup t = + let%bind () = if t.deployed then destroy t else return () in + [%log' info t.logger] "Cleaning up network configuration" ; + let%bind () = File_system.remove_dir t.docker_dir in + Deferred.unit + + let destroy t = + Deferred.Or_error.try_with ~here:[%here] (fun () -> destroy t) + |> Deferred.bind ~f:Malleable_error.or_hard_error +end diff --git a/src/lib/ledger_proof/ledger_proof.ml b/src/lib/ledger_proof/ledger_proof.ml index cea58436fe0..312dd66e820 100644 --- a/src/lib/ledger_proof/ledger_proof.ml +++ b/src/lib/ledger_proof/ledger_proof.ml @@ -40,5 +40,5 @@ include Prod module For_tests = struct let mk_dummy_proof statement = create ~statement ~sok_digest:Sok_message.Digest.default - ~proof:Proof.transaction_dummy + ~proof:(Lazy.force Proof.transaction_dummy) end diff --git a/src/lib/logger/fake/logger.ml b/src/lib/logger/fake/logger.ml index 7ed9164b36b..f44ba270cde 100644 --- a/src/lib/logger/fake/logger.ml +++ b/src/lib/logger/fake/logger.ml @@ -32,6 +32,8 @@ module Time = struct let of_yojson _ = not_implemented () + let pp _ _ = not_implemented () + let set_pretty_to_string _ = not_implemented () let pretty_to_string _ = not_implemented () diff --git a/src/lib/logger/logger.mli b/src/lib/logger/logger.mli index 3089ccc4cbd..92457c2f520 100644 --- a/src/lib/logger/logger.mli +++ b/src/lib/logger/logger.mli @@ -31,6 +31,8 @@ module Time : sig val of_yojson : Yojson.Safe.t -> (t, string) Result.t + val pp : Format.formatter -> t -> unit + val pretty_to_string : t -> string val set_pretty_to_string : (t -> string) -> unit diff --git a/src/lib/logger/native/logger.ml b/src/lib/logger/native/logger.ml index b6498790027..c3dfffec55b 100644 --- a/src/lib/logger/native/logger.ml +++ b/src/lib/logger/native/logger.ml @@ -33,7 +33,7 @@ module Time = struct let of_yojson json = json |> Yojson.Safe.Util.to_string |> fun s -> Ok (Time.of_string s) - let pretty_to_string timestamp = + let pp ppf timestamp = (* This used to be [Core.Time.format timestamp "%Y-%m-%d %H:%M:%S UTC" ~zone:Time.Zone.utc] @@ -44,14 +44,11 @@ module Time = struct let zone = Time.Zone.utc in let date, time = Time.to_date_ofday ~zone timestamp in let time_parts = Time.Ofday.to_parts time in - let fmt_2_chars () i = - let s = string_of_int i in - if Int.(i < 10) then "0" ^ s else s - in - Stdlib.Format.sprintf "%i-%a-%a %a:%a:%a UTC" (Date.year date) fmt_2_chars + Format.fprintf ppf "%i-%02d-%02d %02d:%02d:%02d UTC" (Date.year date) (Date.month date |> Month.to_int) - fmt_2_chars (Date.day date) fmt_2_chars time_parts.hr fmt_2_chars - time_parts.min fmt_2_chars time_parts.sec + (Date.day date) time_parts.hr time_parts.min time_parts.sec + + let pretty_to_string timestamp = Format.asprintf "%a" pp timestamp let pretty_to_string_ref = ref pretty_to_string @@ -205,14 +202,15 @@ module Processor = struct err ) ; None | Ok (str, extra) -> - let formatted_extra = - extra - |> 
List.map ~f:(fun (k, v) -> "\n\t" ^ k ^ ": " ^ v) - |> String.concat ~sep:"" + let msg = + (* The previously existing \t has been changed to 2 spaces. *) + Format.asprintf "@[%a [%a] %s@,%a@]" Time.pp msg.timestamp + Level.pp msg.level str + (Format.pp_print_list ~pp_sep:Format.pp_print_cut + (fun ppf (k, v) -> Format.fprintf ppf "%s: %s" k v) ) + extra in - let time = Time.pretty_to_string msg.timestamp in - Some - (time ^ " [" ^ Level.show msg.level ^ "] " ^ str ^ formatted_extra) + Some msg end let raw ?(log_level = Level.Spam) () = T ((module Raw), Raw.create ~log_level) diff --git a/src/lib/merkle_ledger/any_ledger.ml b/src/lib/merkle_ledger/any_ledger.ml index 147fa13e03a..e74ac391d90 100644 --- a/src/lib/merkle_ledger/any_ledger.ml +++ b/src/lib/merkle_ledger/any_ledger.ml @@ -13,55 +13,8 @@ * Props to @nholland for showing me this trick. * *) -open Core_kernel - -module type S = sig - type key - - type token_id - - type token_id_set - - type account_id - - type account_id_set - - type account - - type hash - - module Location : Location_intf.S - - (** The type of the witness for a base ledger exposed here so that it can - * be easily accessed from outside this module *) - type witness [@@deriving sexp_of] - - module type Base_intf = - Base_ledger_intf.S - with module Addr = Location.Addr - with module Location = Location - with type key := key - and type token_id := token_id - and type token_id_set := token_id_set - and type account_id := account_id - and type account_id_set := account_id_set - and type hash := hash - and type root_hash := hash - and type account := account - - val cast : (module Base_intf with type t = 'a) -> 'a -> witness - - module M : Base_intf with type t = witness -end - -module type Inputs_intf = sig - include Base_inputs_intf.S - - module Location : Location_intf.S -end - -module Make_base (Inputs : Inputs_intf) : - S +module Make_base (Inputs : Intf.Inputs.Intf) : + Intf.Ledger.ANY with module Location = Inputs.Location with type key := Inputs.Key.t and type token_id := Inputs.Token_id.t @@ -74,7 +27,7 @@ module Make_base (Inputs : Inputs_intf) : module Location = Location module type Base_intf = - Base_ledger_intf.S + Intf.Ledger.S with module Addr = Location.Addr with module Location = Location with type key := Inputs.Key.t diff --git a/src/lib/merkle_ledger/any_ledger.mli b/src/lib/merkle_ledger/any_ledger.mli new file mode 100644 index 00000000000..8a0c50a527a --- /dev/null +++ b/src/lib/merkle_ledger/any_ledger.mli @@ -0,0 +1,10 @@ +module Make_base (Inputs : Intf.Inputs.Intf) : + Intf.Ledger.ANY + with module Location = Inputs.Location + with type key := Inputs.Key.t + and type token_id := Inputs.Token_id.t + and type token_id_set := Inputs.Token_id.Set.t + and type account_id := Inputs.Account_id.t + and type hash := Inputs.Hash.t + and type account_id_set := Inputs.Account_id.Set.t + and type account := Inputs.Account.t diff --git a/src/lib/merkle_ledger/base_inputs_intf.ml b/src/lib/merkle_ledger/base_inputs_intf.ml deleted file mode 100644 index cda6d078c74..00000000000 --- a/src/lib/merkle_ledger/base_inputs_intf.ml +++ /dev/null @@ -1,18 +0,0 @@ -module type S = sig - module Key : Intf.Key - - module Token_id : Intf.Token_id - - module Account_id : - Intf.Account_id with type key := Key.t and type token_id := Token_id.t - - module Balance : Intf.Balance - - module Account : - Intf.Account - with type token_id := Token_id.t - and type account_id := Account_id.t - and type balance := Balance.t - - module Hash : Intf.Hash with type account := 
Account.t -end diff --git a/src/lib/merkle_ledger/base_ledger_intf.ml b/src/lib/merkle_ledger/base_ledger_intf.ml deleted file mode 100644 index 74c13db4376..00000000000 --- a/src/lib/merkle_ledger/base_ledger_intf.ml +++ /dev/null @@ -1,150 +0,0 @@ -open Core - -module type S = sig - (** a Merkle hash associated with the root node *) - type root_hash - - (** a Merkle hash associated any non-root node *) - type hash - - type account - - type key - - type token_id - - type token_id_set - - type account_id - - type account_id_set - - type index = int - - (** no deriving, purposely; signatures that include this one may add deriving *) - type t - - module Addr : module type of Merkle_address - - module Path : Merkle_path.S with type hash := hash - - module Location : sig - type t [@@deriving sexp, compare, hash] - - include Comparable.S with type t := t - end - - include - Syncable_intf.S - with type root_hash := root_hash - and type hash := hash - and type account := account - and type addr := Addr.t - and type path = Path.t - and type t := t - - (** list of accounts in the ledger *) - val to_list : t -> account list Async.Deferred.t - - (** list of accounts via slower sequential mechanism *) - val to_list_sequential : t -> account list - - (** iterate over all indexes and accounts *) - val iteri : t -> f:(index -> account -> unit) -> unit - - (** fold over accounts in the ledger, passing the Merkle address *) - val foldi : - t -> init:'accum -> f:(Addr.t -> 'accum -> account -> 'accum) -> 'accum - - (** the set of [account_id]s are ledger elements to skip during the fold, - because they're in a mask - *) - val foldi_with_ignored_accounts : - t - -> account_id_set - -> init:'accum - -> f:(Addr.t -> 'accum -> account -> 'accum) - -> 'accum - - (** fold over accounts until stop condition reached when calling [f]; calls [finish] for - result - *) - val fold_until : - t - -> init:'accum - -> f:('accum -> account -> ('accum, 'stop) Base.Continue_or_stop.t) - -> finish:('accum -> 'stop) - -> 'stop Async.Deferred.t - - (** set of account ids associated with accounts *) - val accounts : t -> account_id_set Async.Deferred.t - - (** Get the account id that owns a token. *) - val token_owner : t -> token_id -> account_id option - - (** Get the set of all accounts which own a token. *) - val token_owners : t -> account_id_set - - (** Get all of the tokens for which a public key has accounts. *) - val tokens : t -> key -> token_id_set - - val location_of_account : t -> account_id -> Location.t option - - val location_of_account_batch : - t -> account_id list -> (account_id * Location.t option) list - - (** This may return an error if the ledger is full. 
*) - val get_or_create_account : - t - -> account_id - -> account - -> ([ `Added | `Existed ] * Location.t) Or_error.t - - (** the ledger should not be used after calling [close] *) - val close : t -> unit - - (** for account locations in the ledger, the last (rightmost) filled location *) - val last_filled : t -> Location.t option - - val get_uuid : t -> Uuid.t - - (** return Some [directory] for ledgers that use a file system, else None *) - val get_directory : t -> string option - - val get : t -> Location.t -> account option - - val get_batch : t -> Location.t list -> (Location.t * account option) list - - val set : t -> Location.t -> account -> unit - - val set_batch : t -> (Location.t * account) list -> unit - - val get_at_index_exn : t -> int -> account - - val set_at_index_exn : t -> int -> account -> unit - - val index_of_account_exn : t -> account_id -> int - - (** meant to be a fast operation: the root hash is stored, rather - than calculated dynamically - *) - val merkle_root : t -> root_hash - - val merkle_path : t -> Location.t -> Path.t - - val merkle_path_at_index_exn : t -> int -> Path.t - - val merkle_path_batch : t -> Location.t list -> Path.t list - - val wide_merkle_path_batch : - t - -> Location.t list - -> [ `Left of hash * hash | `Right of hash * hash ] list list - - val get_hash_batch_exn : t -> Location.t list -> hash list - - (** Triggers when the ledger has been detached and should no longer be - accessed. - *) - val detached_signal : t -> unit Async_kernel.Deferred.t -end diff --git a/src/lib/merkle_ledger/database.ml b/src/lib/merkle_ledger/database.ml index 863fcbcdf54..7488725511c 100644 --- a/src/lib/merkle_ledger/database.ml +++ b/src/lib/merkle_ledger/database.ml @@ -1,33 +1,10 @@ -open Core - -module type Inputs_intf = sig - include Base_inputs_intf.S - - module Location : Location_intf.S - - module Location_binable : Hashable.S_binable with type t := Location.t - - module Kvdb : Intf.Key_value_database with type config := string - - module Storage_locations : Intf.Storage_locations -end - -module Make (Inputs : Inputs_intf) : - Database_intf.S - with module Location = Inputs.Location - and module Addr = Inputs.Location.Addr - and type key := Inputs.Key.t - and type token_id := Inputs.Token_id.t - and type token_id_set := Inputs.Token_id.Set.t - and type account := Inputs.Account.t - and type root_hash := Inputs.Hash.t - and type hash := Inputs.Hash.t - and type account_id := Inputs.Account_id.t - and type account_id_set := Inputs.Account_id.Set.t = struct +module Make (Inputs : Intf.Inputs.DATABASE) = struct (* The max depth of a merkle tree can never be greater than 253. 
*) open Inputs module Db_error = struct + [@@@warning "-4"] (* due to deriving sexp below *) + type t = Account_location_not_found | Out_of_leaves | Malformed_database [@@deriving sexp] end @@ -67,6 +44,8 @@ module Make (Inputs : Inputs_intf) : let depth t = t.depth let create ?directory_name ~depth () = + let open Core in + (* for ^/ and Unix below *) assert (depth < 0xfe) ; let uuid = Uuid_unix.create () in let directory = @@ -290,25 +269,13 @@ module Make (Inputs : Inputs_intf) : Result.map location_result ~f:(fun location -> set mdb key location ; location ) - let last_location_address mdb = - match - last_location_key () |> get_raw mdb |> Result.of_option ~error:() - |> Result.bind ~f:(Location.parse ~ledger_depth:mdb.depth) - with - | Error () -> - None - | Ok parsed_location -> - Some (Location.to_path_exn parsed_location) - let last_location mdb = - match - last_location_key () |> get_raw mdb |> Result.of_option ~error:() - |> Result.bind ~f:(Location.parse ~ledger_depth:mdb.depth) - with - | Error () -> - None - | Ok parsed_location -> - Some parsed_location + last_location_key () |> get_raw mdb + |> Option.bind ~f:(fun data -> + Location.parse ~ledger_depth:mdb.depth data |> Result.ok ) + + let last_location_address mdb = + Option.map (last_location mdb) ~f:Location.to_path_exn end let get_at_index_exn mdb index = @@ -590,7 +557,7 @@ module Make (Inputs : Inputs_intf) : let get_or_create_account mdb account_id account = match Account_location.get mdb account_id with - | Error Account_location_not_found -> ( + | Error Db_error.Account_location_not_found -> ( match Account_location.allocate mdb account_id with | Ok location -> set mdb location account ; @@ -599,7 +566,7 @@ module Make (Inputs : Inputs_intf) : | Error err -> Error (Error.create "get_or_create_account" err Db_error.sexp_of_t) ) - | Error err -> + | Error ((Db_error.Malformed_database | Db_error.Out_of_leaves) as err) -> Error (Error.create "get_or_create_account" err Db_error.sexp_of_t) | Ok location -> Ok (`Existed, location) diff --git a/src/lib/merkle_ledger/database.mli b/src/lib/merkle_ledger/database.mli new file mode 100644 index 00000000000..b399d3e09f6 --- /dev/null +++ b/src/lib/merkle_ledger/database.mli @@ -0,0 +1,12 @@ +module Make (Inputs : Intf.Inputs.DATABASE) : + Intf.Ledger.DATABASE + with module Location = Inputs.Location + and module Addr = Inputs.Location.Addr + and type key := Inputs.Key.t + and type token_id := Inputs.Token_id.t + and type token_id_set := Inputs.Token_id.Set.t + and type account := Inputs.Account.t + and type root_hash := Inputs.Hash.t + and type hash := Inputs.Hash.t + and type account_id := Inputs.Account_id.t + and type account_id_set := Inputs.Account_id.Set.t diff --git a/src/lib/merkle_ledger/database_intf.ml b/src/lib/merkle_ledger/database_intf.ml deleted file mode 100644 index 40f40195b40..00000000000 --- a/src/lib/merkle_ledger/database_intf.ml +++ /dev/null @@ -1,18 +0,0 @@ -module type S = sig - include Base_ledger_intf.S - - val create : ?directory_name:string -> depth:int -> unit -> t - - (** create_checkpoint would create the checkpoint and open a db connection to that checkpoint *) - val create_checkpoint : t -> directory_name:string -> unit -> t - - (** make_checkpoint would only create the checkpoint *) - val make_checkpoint : t -> directory_name:string -> unit - - val with_ledger : depth:int -> f:(t -> 'a) -> 'a - - module For_tests : sig - val gen_account_location : - ledger_depth:int -> Location.t Core.Quickcheck.Generator.t - end -end diff --git 
a/src/lib/merkle_ledger/dune b/src/lib/merkle_ledger/dune index 70339fc26c5..c85fcfa76c4 100644 --- a/src/lib/merkle_ledger/dune +++ b/src/lib/merkle_ledger/dune @@ -1,36 +1,49 @@ (library (name merkle_ledger) (public_name merkle_ledger) + (flags + ; Deactivated warnings + ; 40: name-out-scope (activate later) + ; + ; 41: ambiguous name (too many of them for now, activate later) + ; + ; 42: disambiguated-name (rely on type disambiguation ,not too bad but closer + ; module openings may both solve the warning *and* help the reader) + ; + ; 44: open-shadow-identifier (operation overloading is common in the codebase) + (:standard -w +a-40..42-44 -warn-error +a-70 -open Core_kernel)) (library_flags -linkall) + (modules_without_implementation + location_intf) (libraries ;; opam libraries - bitstring - async_kernel - core_kernel.uuid - bin_prot.shape - sexplib0 - integers - core.uuid - async - core - extlib - rocks - core_kernel - base.caml - base.base_internalhash_types - async_unix - ;; local libraries - merkle_address - immutable_array - direction - cache_dir - empty_hashes - key_value_database - mina_stdlib - visualization - ppx_version.runtime - bounded_types - ) + async + async_kernel + async_unix + base.base_internalhash_types + base.caml + bin_prot.shape + bitstring + core + core.uuid + core_kernel + core_kernel.uuid + extlib + integers + rocks + sexplib0 + ;; local libraries + bounded_types + cache_dir + direction + empty_hashes + immutable_array + key_value_database + merkle_address + mina_stdlib + ppx_version.runtime + visualization + ) (preprocess (pps ppx_mina ppx_version ppx_jane ppx_compare ppx_deriving.show ppx_deriving_yojson)) (instrumentation (backend bisect_ppx)) diff --git a/src/lib/merkle_ledger/graphviz.ml b/src/lib/merkle_ledger/graphviz.ml index 95204555772..894514cf361 100644 --- a/src/lib/merkle_ledger/graphviz.ml +++ b/src/lib/merkle_ledger/graphviz.ml @@ -1,57 +1,9 @@ -open Core open Async -(** Visualizable_ledger shows a subgraph of a merkle_ledger using Graphviz *) -module type S = sig - type addr - - type ledger - - type t - - (* Visualize will enumerate through all edges of a subtree with a - initial_address. 
It will then interpret all of the edges and nodes into an - intermediate form that will be easy to write into a dot file *) - val visualize : ledger -> initial_address:addr -> t - - (* Write will transform the intermediate form generate by visualize and save - the results into a dot file *) - val write : path:string -> name:string -> t -> unit Deferred.t -end - -module type Inputs_intf = sig - module Key : Intf.Key - - module Token_id : Intf.Token_id - - module Account_id : - Intf.Account_id with type key := Key.t and type token_id := Token_id.t - - module Balance : Intf.Balance - - module Account : - Intf.Account - with type account_id := Account_id.t - and type balance := Balance.t - - module Hash : Intf.Hash with type account := Account.t - - module Location : Location_intf.S - - module Ledger : - Base_ledger_intf.S - with module Addr = Location.Addr - and module Location = Location - and type account_id := Account_id.t - and type account_id_set := Account_id.Set.t - and type hash := Hash.t - and type root_hash := Hash.t - and type account := Account.t -end - -module Make (Inputs : Inputs_intf) : - S with type addr := Inputs.Location.Addr.t and type ledger := Inputs.Ledger.t = -struct +module Make (Inputs : Intf.Graphviz.I) : + Intf.Graphviz.S + with type addr := Inputs.Location.Addr.t + and type ledger := Inputs.Ledger.t = struct open Inputs module Account = struct diff --git a/src/lib/merkle_ledger/graphviz.mli b/src/lib/merkle_ledger/graphviz.mli new file mode 100644 index 00000000000..86a52fa54c5 --- /dev/null +++ b/src/lib/merkle_ledger/graphviz.mli @@ -0,0 +1,6 @@ +(** Visualizable_ledger shows a subgraph of a merkle_ledger using Graphviz *) + +module Make (Inputs : Intf.Graphviz.I) : + Intf.Graphviz.S + with type addr := Inputs.Location.Addr.t + and type ledger := Inputs.Ledger.t diff --git a/src/lib/merkle_ledger/intf.ml b/src/lib/merkle_ledger/intf.ml index df9c2769ff3..39de10c4c5e 100644 --- a/src/lib/merkle_ledger/intf.ml +++ b/src/lib/merkle_ledger/intf.ml @@ -1,4 +1,53 @@ -open Core +module type LOCATION = sig + module Addr : module type of Merkle_address + + module Prefix : sig + val generic : Unsigned.UInt8.t + + val account : Unsigned.UInt8.t + + val hash : ledger_depth:int -> int -> Unsigned.UInt8.t + end + + type t = Generic of Bigstring.t | Account of Addr.t | Hash of Addr.t + [@@deriving sexp, hash, compare] + + val is_generic : t -> bool + + val is_account : t -> bool + + val is_hash : t -> bool + + val height : ledger_depth:int -> t -> int + + val root_hash : t + + val last_direction : Addr.t -> Direction.t + + val build_generic : Bigstring.t -> t + + val parse : ledger_depth:int -> Bigstring.t -> (t, unit) Result.t + + val prefix_bigstring : Unsigned.UInt8.t -> Bigstring.t -> Bigstring.t + + val to_path_exn : t -> Addr.t + + val serialize : ledger_depth:int -> t -> Bigstring.t + + val parent : t -> t + + val next : t -> t Option.t + + val prev : t -> t Option.t + + val sibling : t -> t + + val order_siblings : t -> 'a -> 'a -> 'a * 'a + + val merkle_path_dependencies_exn : t -> (t * Direction.t) list + + include Comparable.S with type t := t +end module type Key = sig type t [@@deriving sexp] @@ -133,11 +182,341 @@ module type Key_value_database = sig -> key_data_pairs:(Bigstring.t * Bigstring.t) list -> unit + (** An association list, sorted by key *) val to_alist : t -> (Bigstring.t * Bigstring.t) list - (* an association list, sorted by key *) + val foldi : + t + -> init:'a + -> f:(int -> 'a -> key:Bigstring.t -> data:Bigstring.t -> 'a) + -> 'a + + val 
fold_until : + t + -> init:'a + -> f: + ( 'a + -> key:Bigstring.t + -> data:Bigstring.t + -> ('a, 'b) Continue_or_stop.t ) + -> finish:('a -> 'b) + -> 'b end module type Storage_locations = sig val key_value_db_dir : string end + +module type SYNCABLE = sig + type root_hash + + type hash + + type account + + type addr + + type t [@@deriving sexp] + + type path + + val depth : t -> int + + val num_accounts : t -> int + + val merkle_path_at_addr_exn : t -> addr -> path + + val get_inner_hash_at_addr_exn : t -> addr -> hash + + val set_inner_hash_at_addr_exn : t -> addr -> hash -> unit + + val set_all_accounts_rooted_at_exn : t -> addr -> account list -> unit + + val set_batch_accounts : t -> (addr * account) list -> unit + + (** Get all of the accounts that are in a subtree of the underlying Merkle + tree rooted at `address`. The accounts are ordered by their addresses. *) + val get_all_accounts_rooted_at_exn : t -> addr -> (addr * account) list + + val merkle_root : t -> root_hash +end + +module Inputs = struct + module type Intf = sig + module Key : Key + + module Token_id : Token_id + + module Account_id : + Account_id with type key := Key.t and type token_id := Token_id.t + + module Balance : Balance + + module Account : + Account + with type token_id := Token_id.t + and type account_id := Account_id.t + and type balance := Balance.t + + module Hash : Hash with type account := Account.t + + module Location : LOCATION + end + + module type DATABASE = sig + include Intf + + module Location_binable : Hashable.S_binable with type t := Location.t + + module Kvdb : Key_value_database with type config := string + + module Storage_locations : Storage_locations + end +end + +module Ledger = struct + module type S = sig + (** a Merkle hash associated with the root node *) + type root_hash + + (** a Merkle hash associated any non-root node *) + type hash + + type account + + type key + + type token_id + + type token_id_set + + type account_id + + type account_id_set + + type index = int + + (** no deriving, purposely; signatures that include this one may add deriving *) + type t + + module Addr : module type of Merkle_address + + module Path : Merkle_path.S with type hash := hash + + module Location : sig + type t [@@deriving sexp, compare, hash] + + include Comparable.S with type t := t + end + + include + SYNCABLE + with type root_hash := root_hash + and type hash := hash + and type account := account + and type addr := Addr.t + and type path = Path.t + and type t := t + + (** list of accounts in the ledger *) + val to_list : t -> account list Async.Deferred.t + + (** list of accounts via slower sequential mechanism *) + val to_list_sequential : t -> account list + + (** iterate over all indexes and accounts *) + val iteri : t -> f:(index -> account -> unit) -> unit + + (** fold over accounts in the ledger, passing the Merkle address *) + val foldi : + t -> init:'accum -> f:(Addr.t -> 'accum -> account -> 'accum) -> 'accum + + (** the set of [account_id]s are ledger elements to skip during the fold, + because they're in a mask + *) + val foldi_with_ignored_accounts : + t + -> account_id_set + -> init:'accum + -> f:(Addr.t -> 'accum -> account -> 'accum) + -> 'accum + + (** fold over accounts until stop condition reached when calling [f]; calls [finish] for + result + *) + val fold_until : + t + -> init:'accum + -> f:('accum -> account -> ('accum, 'stop) Base.Continue_or_stop.t) + -> finish:('accum -> 'stop) + -> 'stop Async.Deferred.t + + (** set of account ids associated with accounts *) + val 
accounts : t -> account_id_set Async.Deferred.t + + (** Get the account id that owns a token. *) + val token_owner : t -> token_id -> account_id option + + (** Get the set of all accounts which own a token. *) + val token_owners : t -> account_id_set + + (** Get all of the tokens for which a public key has accounts. *) + val tokens : t -> key -> token_id_set + + val location_of_account : t -> account_id -> Location.t option + + val location_of_account_batch : + t -> account_id list -> (account_id * Location.t option) list + + (** This may return an error if the ledger is full. *) + val get_or_create_account : + t + -> account_id + -> account + -> ([ `Added | `Existed ] * Location.t) Or_error.t + + (** the ledger should not be used after calling [close] *) + val close : t -> unit + + (** for account locations in the ledger, the last (rightmost) filled location *) + val last_filled : t -> Location.t option + + val get_uuid : t -> Uuid.t + + (** return Some [directory] for ledgers that use a file system, else None *) + val get_directory : t -> string option + + val get : t -> Location.t -> account option + + val get_batch : t -> Location.t list -> (Location.t * account option) list + + val set : t -> Location.t -> account -> unit + + val set_batch : t -> (Location.t * account) list -> unit + + val get_at_index_exn : t -> int -> account + + val set_at_index_exn : t -> int -> account -> unit + + val index_of_account_exn : t -> account_id -> int + + (** meant to be a fast operation: the root hash is stored, rather + than calculated dynamically + *) + val merkle_root : t -> root_hash + + val merkle_path : t -> Location.t -> Path.t + + val merkle_path_at_index_exn : t -> int -> Path.t + + val merkle_path_batch : t -> Location.t list -> Path.t list + + val wide_merkle_path_batch : + t + -> Location.t list + -> [ `Left of hash * hash | `Right of hash * hash ] list list + + val get_hash_batch_exn : t -> Location.t list -> hash list + + (** Triggers when the ledger has been detached and should no longer be + accessed. 
+ *) + val detached_signal : t -> unit Async_kernel.Deferred.t + end + + module type NULL = sig + include S + + val create : depth:int -> unit -> t + end + + module type ANY = sig + type key + + type token_id + + type token_id_set + + type account_id + + type account_id_set + + type account + + type hash + + module Location : LOCATION + + (** The type of the witness for a base ledger exposed here so that it can + * be easily accessed from outside this module *) + type witness [@@deriving sexp_of] + + module type Base_intf = + S + with module Addr = Location.Addr + with module Location = Location + with type key := key + and type token_id := token_id + and type token_id_set := token_id_set + and type account_id := account_id + and type account_id_set := account_id_set + and type hash := hash + and type root_hash := hash + and type account := account + + val cast : (module Base_intf with type t = 'a) -> 'a -> witness + + module M : Base_intf with type t = witness + end + + module type DATABASE = sig + include S + + val create : ?directory_name:string -> depth:int -> unit -> t + + (** create_checkpoint would create the checkpoint and open a db connection to that checkpoint *) + val create_checkpoint : t -> directory_name:string -> unit -> t + + (** make_checkpoint would only create the checkpoint *) + val make_checkpoint : t -> directory_name:string -> unit + + val with_ledger : depth:int -> f:(t -> 'a) -> 'a + + module For_tests : sig + val gen_account_location : + ledger_depth:int -> Location.t Quickcheck.Generator.t + end + end +end + +module Graphviz = struct + module type S = sig + type addr + + type ledger + + type t + + (* Visualize will enumerate through all edges of a subtree with a + initial_address. It will then interpret all of the edges and nodes into an + intermediate form that will be easy to write into a dot file *) + val visualize : ledger -> initial_address:addr -> t + + (* Write will transform the intermediate form generate by visualize and save + the results into a dot file *) + val write : path:string -> name:string -> t -> unit Async.Deferred.t + end + + module type I = sig + include Inputs.Intf + + module Ledger : + Ledger.S + with module Addr = Location.Addr + and module Location = Location + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type hash := Hash.t + and type account := Account.t + end +end diff --git a/src/lib/merkle_ledger/ledger_extras_intf.ml b/src/lib/merkle_ledger/ledger_extras_intf.ml deleted file mode 100644 index 9f2bb5bd930..00000000000 --- a/src/lib/merkle_ledger/ledger_extras_intf.ml +++ /dev/null @@ -1,15 +0,0 @@ -(* ledger_extras_intf.ml -- adds functionality to Base_ledger_intf.S *) - -module type S = sig - include Merkle_ledger_intf.S - - val with_ledger : f:(t -> 'a) -> 'a - - val set_at_addr_exn : t -> Addr.t -> account -> unit - - val account_id_of_index : t -> index -> account_id option - - val account_id_of_index_exn : t -> index -> account_id - - val recompute_tree : t -> unit -end diff --git a/src/lib/merkle_ledger/location.ml b/src/lib/merkle_ledger/location.ml index ee8e1c61e33..cee243a14f4 100644 --- a/src/lib/merkle_ledger/location.ml +++ b/src/lib/merkle_ledger/location.ml @@ -1,4 +1,3 @@ -open Core open Unsigned (* add functions to library module Bigstring so we can derive hash for the type t below *) @@ -6,12 +5,10 @@ module Bigstring = struct [%%versioned_binable module Stable = struct module V1 = struct - type t = Core_kernel.Bigstring.Stable.V1.t [@@deriving sexp, compare] + type t = 
Bigstring.Stable.V1.t [@@deriving sexp, compare] let to_latest = Fn.id - let equal = Bigstring.equal - let hash t = Bigstring.to_string t |> String.hash let hash_fold_t hash_state t = @@ -20,15 +17,14 @@ module Bigstring = struct include Bounded_types.String.Of_stringable (struct type nonrec t = t - let of_string s = Core_kernel.Bigstring.of_string s + let of_string s = Bigstring.of_string s - let to_string s = Core_kernel.Bigstring.to_string s + let to_string s = Bigstring.to_string s end) end end] - [%%define_locally - Bigstring.(get, length, equal, create, to_string, set, blit, sub)] + [%%define_locally Bigstring.(get, length, create, to_string, set, blit, sub)] include Hashable.Make (Stable.Latest) end @@ -53,14 +49,18 @@ module T = struct let hash ~ledger_depth depth = UInt8.of_int (ledger_depth - depth) end + [@@@warning "-4"] (* disabled because of deriving sexp *) + type t = Generic of Bigstring.t | Account of Addr.t | Hash of Addr.t [@@deriving hash, sexp, compare] - let is_generic = function Generic _ -> true | _ -> false + [@@@warning "+4"] + + let is_generic = function Generic _ -> true | Account _ | Hash _ -> false - let is_account = function Account _ -> true | _ -> false + let is_account = function Account _ -> true | Generic _ | Hash _ -> false - let is_hash = function Hash _ -> true | _ -> false + let is_hash = function Hash _ -> true | Account _ | Generic _ -> false let height ~ledger_depth : t -> int = function | Generic _ -> @@ -172,7 +172,7 @@ module T = struct match location with | Hash addr -> loop addr - | _ -> + | Account _ | Generic _ -> failwith "can only get merkle path dependencies of a hash location" type location = t [@@deriving sexp, compare] diff --git a/src/lib/merkle_ledger/location.mli b/src/lib/merkle_ledger/location.mli new file mode 100644 index 00000000000..7fc11b37d21 --- /dev/null +++ b/src/lib/merkle_ledger/location.mli @@ -0,0 +1,18 @@ +module Bigstring : sig + [%%versioned: + module Stable : sig + module V1 : sig + type t = Bigstring.Stable.V1.t [@@deriving sexp, compare] + + include Binable.S with type t := t + + val hash_fold_t : Hash.state -> t -> Hash.state + + val hash : t -> Hash.hash_value + end + end] + + include Hashable.S with type t := t +end + +module T : Location_intf.S diff --git a/src/lib/merkle_ledger/location_intf.ml b/src/lib/merkle_ledger/location_intf.mli similarity index 99% rename from src/lib/merkle_ledger/location_intf.ml rename to src/lib/merkle_ledger/location_intf.mli index 15cbe61d40e..19fb518e2e7 100644 --- a/src/lib/merkle_ledger/location_intf.ml +++ b/src/lib/merkle_ledger/location_intf.mli @@ -1,7 +1,5 @@ (* location_intf.ml -- interface file for Location *) -open Core - module type S = sig module Addr : module type of Merkle_address diff --git a/src/lib/merkle_ledger/merkle_ledger_intf.ml b/src/lib/merkle_ledger/merkle_ledger_intf.ml deleted file mode 100644 index 7b85045befb..00000000000 --- a/src/lib/merkle_ledger/merkle_ledger_intf.ml +++ /dev/null @@ -1,5 +0,0 @@ -module type S = sig - type t [@@deriving bin_io, sexp] - - include Base_ledger_intf.S with type t := t -end diff --git a/src/lib/merkle_ledger/merkle_path.ml b/src/lib/merkle_ledger/merkle_path.ml index a2a5acd294d..c837c8a9149 100644 --- a/src/lib/merkle_ledger/merkle_path.ml +++ b/src/lib/merkle_ledger/merkle_path.ml @@ -1,5 +1,3 @@ -open Core_kernel - module type S = sig type hash diff --git a/src/lib/merkle_ledger/merkle_path.mli b/src/lib/merkle_ledger/merkle_path.mli new file mode 100644 index 00000000000..158b5489511 --- /dev/null +++ 
b/src/lib/merkle_ledger/merkle_path.mli @@ -0,0 +1,23 @@ +module type S = sig + type hash + + type elem = [ `Left of hash | `Right of hash ] [@@deriving sexp, equal] + + val elem_hash : elem -> hash + + type t = elem list [@@deriving sexp, equal] + + val implied_root : t -> hash -> hash + + (** [check_path path leaf_hash root_hash] is used in tests to check that + [leaf_hash] along with [path] actually corresponds to [root_hash]. *) + val check_path : t -> hash -> hash -> bool +end + +module Make (Hash : sig + type t [@@deriving sexp, equal] + + val merge : height:int -> t -> t -> t + + val equal : t -> t -> bool +end) : S with type hash := Hash.t diff --git a/src/lib/merkle_ledger/merkle_path_intf.ml b/src/lib/merkle_ledger/merkle_path_intf.ml deleted file mode 100644 index 4627b1ddd3d..00000000000 --- a/src/lib/merkle_ledger/merkle_path_intf.ml +++ /dev/null @@ -1,11 +0,0 @@ -module type S = sig - type hash - - type elem = [ `Left of hash | `Right of hash ] [@@deriving sexp, equal] - - val elem_hash : elem -> hash - - type t = elem list [@@deriving sexp, equal] - - val implied_root : t -> hash -> hash -end diff --git a/src/lib/merkle_ledger/null_ledger.ml b/src/lib/merkle_ledger/null_ledger.ml index e9e511f15c2..157244e8c10 100644 --- a/src/lib/merkle_ledger/null_ledger.ml +++ b/src/lib/merkle_ledger/null_ledger.ml @@ -1,14 +1,6 @@ -open Core_kernel - -module type Inputs_intf = sig - include Base_inputs_intf.S - - module Location : Location_intf.S -end - -module Make (Inputs : Inputs_intf) : sig +module Make (Inputs : Intf.Inputs.Intf) : sig include - Base_ledger_intf.S + Intf.Ledger.NULL with module Addr = Inputs.Location.Addr with module Location = Inputs.Location with type key := Inputs.Key.t @@ -19,8 +11,6 @@ module Make (Inputs : Inputs_intf) : sig and type hash := Inputs.Hash.t and type root_hash := Inputs.Hash.t and type account := Inputs.Account.t - - val create : depth:int -> unit -> t end = struct open Inputs diff --git a/src/lib/merkle_ledger/null_ledger.mli b/src/lib/merkle_ledger/null_ledger.mli new file mode 100644 index 00000000000..e7a3b77c377 --- /dev/null +++ b/src/lib/merkle_ledger/null_ledger.mli @@ -0,0 +1,14 @@ +module Make (Inputs : Intf.Inputs.Intf) : sig + include + Intf.Ledger.NULL + with module Addr = Inputs.Location.Addr + with module Location = Inputs.Location + with type key := Inputs.Key.t + and type token_id := Inputs.Token_id.t + and type token_id_set := Inputs.Token_id.Set.t + and type account_id := Inputs.Account_id.t + and type account_id_set := Inputs.Account_id.Set.t + and type hash := Inputs.Hash.t + and type root_hash := Inputs.Hash.t + and type account := Inputs.Account.t +end diff --git a/src/lib/merkle_ledger/syncable_intf.ml b/src/lib/merkle_ledger/syncable_intf.ml deleted file mode 100644 index 7665106793d..00000000000 --- a/src/lib/merkle_ledger/syncable_intf.ml +++ /dev/null @@ -1,33 +0,0 @@ -module type S = sig - type root_hash - - type hash - - type account - - type addr - - type t [@@deriving sexp] - - type path - - val depth : t -> int - - val num_accounts : t -> int - - val merkle_path_at_addr_exn : t -> addr -> path - - val get_inner_hash_at_addr_exn : t -> addr -> hash - - val set_inner_hash_at_addr_exn : t -> addr -> hash -> unit - - val set_all_accounts_rooted_at_exn : t -> addr -> account list -> unit - - val set_batch_accounts : t -> (addr * account) list -> unit - - (** Get all of the accounts that are in a subtree of the underlying Merkle - tree rooted at `address`. The accounts are ordered by their addresses. 
*) - val get_all_accounts_rooted_at_exn : t -> addr -> (addr * account) list - - val merkle_root : t -> root_hash -end diff --git a/src/lib/merkle_ledger/util.ml b/src/lib/merkle_ledger/util.ml index f2061c85e73..af51910d7c5 100644 --- a/src/lib/merkle_ledger/util.ml +++ b/src/lib/merkle_ledger/util.ml @@ -1,5 +1,3 @@ -open Core_kernel - module type Inputs_intf = sig module Location : Location_intf.S diff --git a/src/lib/merkle_ledger/util.mli b/src/lib/merkle_ledger/util.mli new file mode 100644 index 00000000000..5473f664eb8 --- /dev/null +++ b/src/lib/merkle_ledger/util.mli @@ -0,0 +1,67 @@ +module type Inputs_intf = sig + module Location : Location_intf.S + + module Location_binable : Hashable.S_binable with type t := Location.t + + module Key : Intf.Key + + module Token_id : Intf.Token_id + + module Account_id : + Intf.Account_id with type key := Key.t and type token_id := Token_id.t + + module Balance : Intf.Balance + + module Account : + Intf.Account + with type balance := Balance.t + and type account_id := Account_id.t + and type token_id := Token_id.t + + module Hash : Intf.Hash with type account := Account.t + + module Base : sig + type t + + val get : t -> Location.t -> Account.t option + + val last_filled : t -> Location.t option + end + + val get_hash : Base.t -> Location.t -> Hash.t + + val location_of_account_addr : Location.Addr.t -> Location.t + + val location_of_hash_addr : Location.Addr.t -> Location.t + + val ledger_depth : Base.t -> int + + val set_raw_hash_batch : Base.t -> (Location.t * Hash.t) list -> unit + + val set_raw_account_batch : Base.t -> (Location.t * Account.t) list -> unit + + val set_location_batch : + last_location:Location.t + -> Base.t + -> (Account_id.t * Location.t) Mina_stdlib.Nonempty_list.t + -> unit +end + +module Make (Inputs : Inputs_intf) : sig + val get_all_accounts_rooted_at_exn : + Inputs.Base.t + -> Inputs.Location.Addr.t + -> (Inputs.Location.Addr.t * Inputs.Account.t) list + + val set_hash_batch : + Inputs.Base.t -> (Inputs.Location.t * Inputs.Hash.t) list -> unit + + val set_batch : + Inputs.Base.t -> (Inputs.Location.t * Inputs.Account.t) list -> unit + + val set_batch_accounts : + Inputs.Base.t -> (Inputs.Location.Addr.t * Inputs.Account.t) list -> unit + + val set_all_accounts_rooted_at_exn : + Inputs.Base.t -> Inputs.Location.Addr.t -> Inputs.Account.t list -> unit +end diff --git a/src/lib/merkle_ledger_tests/test_database.ml b/src/lib/merkle_ledger_tests/test_database.ml index 70c21a828d3..e6ac59a406f 100644 --- a/src/lib/merkle_ledger_tests/test_database.ml +++ b/src/lib/merkle_ledger_tests/test_database.ml @@ -7,7 +7,7 @@ let%test_module "test functor on in memory databases" = module Database = Merkle_ledger.Database module type DB = - Merkle_ledger.Database_intf.S + Intf.Ledger.DATABASE with type key := Key.t and type token_id := Token_id.t and type token_id_set := Token_id.Set.t diff --git a/src/lib/merkle_ledger_tests/test_mask.ml b/src/lib/merkle_ledger_tests/test_mask.ml index eea869a5753..a7455679115 100644 --- a/src/lib/merkle_ledger_tests/test_mask.ml +++ b/src/lib/merkle_ledger_tests/test_mask.ml @@ -667,7 +667,7 @@ module Make_maskable_and_mask_with_depth (Depth : Depth_S) = struct (* underlying Merkle tree *) module Base_db : - Merkle_ledger.Database_intf.S + Merkle_ledger.Intf.Ledger.DATABASE with module Location = Location and module Addr = Location.Addr and type account := Account.t diff --git a/src/lib/merkle_ledger_tests/test_stubs.ml b/src/lib/merkle_ledger_tests/test_stubs.ml index 3c47f97f083..61d26b19d0a 
100644 --- a/src/lib/merkle_ledger_tests/test_stubs.ml +++ b/src/lib/merkle_ledger_tests/test_stubs.ml @@ -128,6 +128,17 @@ struct let remove t ~key = Bigstring_frozen.Table.remove t.table key let make_checkpoint _ _ = () + + let foldi t ~init ~f = + let i = ref (-1) in + let f ~key ~data accum = incr i ; f !i accum ~key ~data in + Bigstring_frozen.Table.fold t.table ~init ~f + + (* Relying on {!val:to_alist} is probably enough for testing purposes. *) + let fold_until t ~init ~f ~finish = + let f accum (key, data) = f accum ~key ~data in + let alist = to_alist t in + List.fold_until alist ~init ~f ~finish end module Storage_locations : Intf.Storage_locations = struct diff --git a/src/lib/merkle_mask/base_merkle_tree_intf.ml b/src/lib/merkle_mask/base_merkle_tree_intf.mli similarity index 65% rename from src/lib/merkle_mask/base_merkle_tree_intf.ml rename to src/lib/merkle_mask/base_merkle_tree_intf.mli index 8c0266524ae..be6db18f7ea 100644 --- a/src/lib/merkle_mask/base_merkle_tree_intf.ml +++ b/src/lib/merkle_mask/base_merkle_tree_intf.mli @@ -1,4 +1,4 @@ (* base_merkle_tree_intf.ml *) (** base module type for masking and masked Merkle trees *) -module type S = Merkle_ledger.Base_ledger_intf.S +module type S = Merkle_ledger.Intf.Ledger.S diff --git a/src/lib/merkle_mask/dune b/src/lib/merkle_mask/dune index bcf75bb1cde..7580ff1671d 100644 --- a/src/lib/merkle_mask/dune +++ b/src/lib/merkle_mask/dune @@ -1,39 +1,55 @@ (library (name merkle_mask) (public_name merkle_mask) + (flags + ; Deactivated warnings + ; 40: name-out-scope (activate later) + ; + ; 41: ambiguous name (too many of them for now, activate later) + ; + ; 42: disambiguated-name (rely on type disambiguation ,not too bad but closer + ; module openings may both solve the warning *and* help the reader) + ; + ; 44: open-shadow-identifier (operation overloading is common in the codebase) + (:standard -w +a-40..42-44 -warn-error +a)) (library_flags -linkall) + (modules_without_implementation + base_merkle_tree_intf + inputs_intf + maskable_merkle_tree_intf + masking_merkle_tree_intf) (libraries ;; opam libraries - base.caml - async_kernel - async - sexplib0 - core_kernel.uuid - integers - bitstring - core_kernel - yojson - core - base.base_internalhash_types - core.uuid - stdio - ;; local libraries - debug_assert - merkle_ledger - visualization - mina_stdlib - direction - empty_hashes - logger - ) + async + async_kernel + base.base_internalhash_types + base.caml + bitstring + core + core.uuid + core_kernel + core_kernel.uuid + integers + sexplib0 + stdio + yojson + ;; local libraries + debug_assert + direction + empty_hashes + logger + merkle_ledger + mina_stdlib + visualization + ) (preprocess (pps - ppx_mina - ppx_compare - ppx_deriving.show - ppx_deriving_yojson - ppx_jane - ppx_version - )) + ppx_compare + ppx_deriving.show + ppx_deriving_yojson + ppx_jane + ppx_mina + ppx_version + )) (instrumentation (backend bisect_ppx)) (synopsis "Implementation of Merkle tree masks")) diff --git a/src/lib/merkle_mask/inputs_intf.ml b/src/lib/merkle_mask/inputs_intf.mli similarity index 100% rename from src/lib/merkle_mask/inputs_intf.ml rename to src/lib/merkle_mask/inputs_intf.mli diff --git a/src/lib/merkle_mask/maskable_merkle_tree.ml b/src/lib/merkle_mask/maskable_merkle_tree.ml index f057a9f6587..c4cd8a888bf 100644 --- a/src/lib/merkle_mask/maskable_merkle_tree.ml +++ b/src/lib/merkle_mask/maskable_merkle_tree.ml @@ -47,7 +47,7 @@ module Make (Inputs : Inputs_intf) = struct Visualization.display_prefix_of_string @@ Uuid.to_string @@ 
Mask.Attached.get_uuid mask - let name mask = sprintf !"\"%s \"" (format_uuid mask) + let name mask = sprintf !"%S" (format_uuid mask) let display_attached_mask mask = let root_hash = Mask.Attached.merkle_root mask in @@ -109,40 +109,6 @@ module Make (Inputs : Inputs_intf) = struct Graphviz.output_graph output_channel graph ) end - module Visualize = struct - module Summary = struct - type t = [ `Uuid of Uuid.t ] * [ `Hash of Hash.t ] [@@deriving sexp_of] - end - - type t = Leaf of Summary.t | Node of Summary.t * t list - [@@deriving sexp_of] - - module type Crawler_intf = sig - type t - - val get_uuid : t -> Uuid.t - - val merkle_root : t -> Hash.t - end - - let rec _crawl : type a. (module Crawler_intf with type t = a) -> a -> t = - fun (module C) c -> - let summary = - let uuid = C.get_uuid c in - ( `Uuid uuid - , `Hash - ( try C.merkle_root c - with _ -> - Core.printf !"CAUGHT %{sexp: Uuid.t}\n%!" uuid ; - Hash.empty_account ) ) - in - match Uuid.Table.find registered_masks (C.get_uuid c) with - | None -> - Leaf summary - | Some masks -> - Node (summary, List.map masks ~f:(_crawl (module Mask.Attached))) - end - let unsafe_preload_accounts_from_parent = Mask.Attached.unsafe_preload_accounts_from_parent diff --git a/src/lib/merkle_mask/maskable_merkle_tree.mli b/src/lib/merkle_mask/maskable_merkle_tree.mli new file mode 100644 index 00000000000..f744398f810 --- /dev/null +++ b/src/lib/merkle_mask/maskable_merkle_tree.mli @@ -0,0 +1,35 @@ +module type Inputs_intf = sig + include Inputs_intf.S + + module Mask : + Masking_merkle_tree_intf.S + with module Location = Location + and type account := Account.t + and type location := Location.t + and type hash := Hash.t + and type key := Key.t + and type token_id := Token_id.t + and type token_id_set := Token_id.Set.t + and type account_id := Account_id.t + and type account_id_set := Account_id.Set.t + and type parent := Base.t + + val mask_to_base : Mask.Attached.t -> Base.t +end + +module Make (I : Inputs_intf) : + Maskable_merkle_tree_intf.S + with module Location = I.Location + and module Addr = I.Location.Addr + and type t := I.Base.t + and type root_hash := I.Hash.t + and type hash := I.Hash.t + and type account := I.Account.t + and type key := I.Key.t + and type token_id := I.Token_id.t + and type token_id_set := I.Token_id.Set.t + and type account_id := I.Account_id.t + and type account_id_set := I.Account_id.Set.t + and type unattached_mask := I.Mask.t + and type attached_mask := I.Mask.Attached.t + and type accumulated_t := I.Mask.accumulated_t diff --git a/src/lib/merkle_mask/maskable_merkle_tree_intf.ml b/src/lib/merkle_mask/maskable_merkle_tree_intf.mli similarity index 100% rename from src/lib/merkle_mask/maskable_merkle_tree_intf.ml rename to src/lib/merkle_mask/maskable_merkle_tree_intf.mli diff --git a/src/lib/merkle_mask/masking_merkle_tree.ml b/src/lib/merkle_mask/masking_merkle_tree.ml index ab56cf72e51..1b0e91ac1f3 100644 --- a/src/lib/merkle_mask/masking_merkle_tree.ml +++ b/src/lib/merkle_mask/masking_merkle_tree.ml @@ -8,17 +8,6 @@ open Core *) module Make (Inputs : Inputs_intf.S) = struct open Inputs - - type account = Account.t - - type hash = Hash.t - - type account_id = Account_id.t - - type account_id_set = Account_id.Set.t - - type location = Location.t - module Location = Location module Addr = Location.Addr @@ -125,12 +114,6 @@ module Make (Inputs : Inputs_intf.S) = struct let get_uuid { uuid; _ } = uuid - let depth t = t.depth - - let with_ledger ~f = - let mask = create () in - f mask - module Attached = struct 
type parent = Base.t [@@deriving sexp] @@ -144,24 +127,10 @@ module Make (Inputs : Inputs_intf.S) = struct type path = Path.t - type root_hash = Hash.t - - exception Location_is_not_account of Location.t - exception Dangling_parent_reference of Uuid.t * (* Location where null was set*) string - let create () = - failwith - "Mask.Attached.create: cannot create an attached mask; use Mask.create \ - and Mask.set_parent" - - let with_ledger ~f:_ = - failwith - "Mask.Attached.with_ledger: cannot create an attached mask; use \ - Mask.create and Mask.set_parent" - let unset_parent ?(trigger_signal = true) ~loc t = assert (Result.is_ok t.parent) ; t.parent <- Error loc ; @@ -617,13 +586,6 @@ module Make (Inputs : Inputs_intf.S) = struct Some hash with _ -> None ) - (* batch operations TODO: rely on availability of batch operations in Base - for speed *) - (* NB: rocksdb does not support batch reads; should we offer this? *) - let get_batch_exn t locations = - assert_is_attached t ; - List.map locations ~f:(fun location -> get t location) - let get_hash_batch_exn t locations = assert_is_attached t ; let maps, ancestor = maps_and_ancestor t in @@ -713,7 +675,9 @@ module Make (Inputs : Inputs_intf.S) = struct else our_loc in Some loc - | _ -> + | (Generic _ | Hash _), Account _ + | Account _, (Generic _ | Hash _) + | (Generic _ | Hash _), (Generic _ | Hash _) -> failwith "last_filled: expected account locations for the parent \ and mask" ) ) @@ -816,7 +780,7 @@ module Make (Inputs : Inputs_intf.S) = struct match location with | Account addr -> Addr.to_int addr + 1 - | _ -> + | Generic _ | Hash _ -> failwith "Expected mask current location to represent an account" ) @@ -1002,11 +966,6 @@ module Make (Inputs : Inputs_intf.S) = struct ( Addr.of_directions @@ List.init ledger_depth ~f:(fun _ -> Direction.Left) ) - let loc_max a b = - let a' = Location.to_path_exn a in - let b' = Location.to_path_exn b in - if Location.Addr.compare a' b' > 0 then a else b - (* NB: updates the mutable current_location field in t *) let get_or_create_account t account_id account = assert_is_attached t ; @@ -1037,10 +996,6 @@ module Make (Inputs : Inputs_intf.S) = struct Ok (`Added, location) ) ) | Some location -> Ok (`Existed, location) - - let sexp_of_location = Location.sexp_of_t - - let location_of_sexp = Location.t_of_sexp end let set_parent ?accumulated:accumulated_opt t parent = @@ -1066,6 +1021,4 @@ module Make (Inputs : Inputs_intf.S) = struct | _ -> () ) ; t - - let addr_to_location addr = Location.Account addr end diff --git a/src/lib/merkle_mask/masking_merkle_tree.mli b/src/lib/merkle_mask/masking_merkle_tree.mli new file mode 100644 index 00000000000..9490a20e917 --- /dev/null +++ b/src/lib/merkle_mask/masking_merkle_tree.mli @@ -0,0 +1,12 @@ +module Make (I : Inputs_intf.S) : + Masking_merkle_tree_intf.S + with module Location = I.Location + and type parent := I.Base.t + and type key := I.Key.t + and type token_id := I.Token_id.t + and type token_id_set := I.Token_id.Set.t + and type hash := I.Hash.t + and type account := I.Account.t + and type account_id := I.Account_id.t + and type account_id_set := I.Account_id.Set.t + and type location := I.Location.t diff --git a/src/lib/merkle_mask/masking_merkle_tree_intf.ml b/src/lib/merkle_mask/masking_merkle_tree_intf.mli similarity index 100% rename from src/lib/merkle_mask/masking_merkle_tree_intf.ml rename to src/lib/merkle_mask/masking_merkle_tree_intf.mli diff --git a/src/lib/mina_base/account.ml b/src/lib/mina_base/account.ml index 6b3f960448d..5f588458c23 
100644 --- a/src/lib/mina_base/account.ml +++ b/src/lib/mina_base/account.ml @@ -627,7 +627,7 @@ let empty = ; zkapp = None } -let empty_digest = digest empty +let empty_digest = lazy (digest empty) let create account_id balance = let public_key = Account_id.public_key account_id in diff --git a/src/lib/mina_base/account_update.ml b/src/lib/mina_base/account_update.ml index 232e91d7b33..8f0cbe5ee99 100644 --- a/src/lib/mina_base/account_update.ml +++ b/src/lib/mina_base/account_update.ml @@ -907,8 +907,8 @@ module Update = struct } ) ) ; Set_or_keep.typ ~dummy:Permissions.empty Permissions.typ ; Set_or_keep.optional_typ - (Data_as_hash.optional_typ ~hash:Zkapp_account.hash_zkapp_uri - ~non_preimage:(Zkapp_account.hash_zkapp_uri_opt None) + (Data_as_hash.lazy_optional_typ ~hash:Zkapp_account.hash_zkapp_uri + ~non_preimage:(lazy (Zkapp_account.hash_zkapp_uri_opt None)) ~dummy_value:"" ) ~to_option:Fn.id ~of_option:Fn.id ; Set_or_keep.typ ~dummy:Account.Token_symbol.default @@ -1629,6 +1629,36 @@ module Body = struct ; may_use_token ; authorization_kind } + + let gen_with_events_and_actions = + let open Quickcheck.Generator.Let_syntax in + let%map public_key = Public_key.Compressed.gen + and token_id = Token_id.gen + and update = Update.gen () + and balance_change = Currency.Amount.Signed.gen + and increment_nonce = Quickcheck.Generator.bool + and events = return [ [| Field.zero |]; [| Field.zero |] ] + and actions = return [ [| Field.zero |]; [| Field.zero |] ] + and call_data = Field.gen + and preconditions = Preconditions.gen + and use_full_commitment = Quickcheck.Generator.bool + and implicit_account_creation_fee = Quickcheck.Generator.bool + and may_use_token = May_use_token.gen + and authorization_kind = Authorization_kind.gen in + { public_key + ; token_id + ; update + ; balance_change + ; increment_nonce + ; events + ; actions + ; call_data + ; preconditions + ; use_full_commitment + ; implicit_account_creation_fee + ; may_use_token + ; authorization_kind + } end module T = struct @@ -1695,6 +1725,12 @@ module T = struct let%map body = Body.gen and authorization = Control.gen_with_dummies in { body; authorization } + let gen_with_events_and_actions : t Quickcheck.Generator.t = + let open Quickcheck.Generator.Let_syntax in + let%map body = Body.gen_with_events_and_actions + and authorization = Control.gen_with_dummies in + { body; authorization } + let quickcheck_generator : t Quickcheck.Generator.t = gen let quickcheck_observer : t Quickcheck.Observer.t = diff --git a/src/lib/mina_base/data_as_hash.ml b/src/lib/mina_base/data_as_hash.ml index 97561357d3b..e0d956857c2 100644 --- a/src/lib/mina_base/data_as_hash.ml +++ b/src/lib/mina_base/data_as_hash.ml @@ -20,6 +20,14 @@ let optional_typ ~hash ~non_preimage ~dummy_value = | None -> (non_preimage, dummy_value) | Some s -> (hash s, s) ) ~back:(fun (_, s) -> Some s) +let lazy_optional_typ ~hash ~non_preimage ~dummy_value = + Typ.transport + Typ.(Field.typ * Internal.ref ()) + ~there:(function + | None -> (Lazy.force non_preimage, dummy_value) | Some s -> (hash s, s) + ) + ~back:(fun (_, s) -> Some s) + let to_input (x, _) = Random_oracle_input.Chunked.field x let if_ b ~then_ ~else_ = diff --git a/src/lib/mina_base/data_as_hash.mli b/src/lib/mina_base/data_as_hash.mli index 332e2c9fcd8..90f15db1ccb 100644 --- a/src/lib/mina_base/data_as_hash.mli +++ b/src/lib/mina_base/data_as_hash.mli @@ -36,6 +36,12 @@ val optional_typ : -> dummy_value:'value -> ('value t, 'value option) Typ.t +val lazy_optional_typ : + hash:('value -> Field.t) + -> 
non_preimage:Field.t lazy_t + -> dummy_value:'value + -> ('value t, 'value option) Typ.t + val to_input : _ t -> Field.Var.t Random_oracle_input.Chunked.t val if_ : Boolean.var -> then_:'value t -> else_:'value t -> 'value t diff --git a/src/lib/mina_base/dune b/src/lib/mina_base/dune index 885c6fe1793..c77f8e6cffc 100644 --- a/src/lib/mina_base/dune +++ b/src/lib/mina_base/dune @@ -41,6 +41,7 @@ rosetta_coding random_oracle hash_prefix_states + hash_prefix_create dummy_values currency mina_numbers diff --git a/src/lib/mina_base/pending_coinbase.ml b/src/lib/mina_base/pending_coinbase.ml index b0396c1deb6..3e95eec97a4 100644 --- a/src/lib/mina_base/pending_coinbase.ml +++ b/src/lib/mina_base/pending_coinbase.ml @@ -183,7 +183,7 @@ module Make_str (A : Wire_types.Concrete) = struct (Input.Chunked.append (Coinbase_data.to_input coinbase) (to_input h)) ) |> of_hash - let empty = Random_oracle.salt "CoinbaseStack" |> Random_oracle.digest + let empty = Hash_prefix_create.salt "CoinbaseStack" |> Random_oracle.digest module Checked = struct type t = var @@ -261,6 +261,10 @@ module Make_str (A : Wire_types.Concrete) = struct end end] + let init (t : t) = t.init + + let curr (t : t) = t.curr + type var = Stack_hash.var Poly.t let gen : t Quickcheck.Generator.t = @@ -396,7 +400,8 @@ module Make_str (A : Wire_types.Concrete) = struct |> of_hash let empty_hash = - Random_oracle.(digest (salt "PendingCoinbaseMerkleTree")) |> of_hash + Hash_prefix_create.salt "PendingCoinbaseMerkleTree" + |> Random_oracle.digest |> of_hash let of_digest = of_hash end @@ -528,6 +533,10 @@ module Make_str (A : Wire_types.Concrete) = struct let to_latest = Fn.id end end] + + let data (t : t) = t.data + + let state (t : t) = t.state end module Hash_versioned = struct @@ -1025,12 +1034,16 @@ module Make_str (A : Wire_types.Concrete) = struct type t = (Merkle_tree.t, Stack_id.t) Poly.t [@@deriving sexp, to_yojson] - let init_hash = Stack.data_hash Stack.empty - let hash_at_level = - let cached = ref [| init_hash |] in + let cached = ref [||] in fun i -> let len = Array.length !cached in + let len = + if len = 0 then ( + cached := [| Stack.data_hash Stack.empty |] ; + 1 ) + else len + in ( if i >= len then let cur_hash = ref (Array.last !cached) in cached := diff --git a/src/lib/mina_base/pending_coinbase_intf.ml b/src/lib/mina_base/pending_coinbase_intf.ml index fec6411105b..2dc6d37ba9f 100644 --- a/src/lib/mina_base/pending_coinbase_intf.ml +++ b/src/lib/mina_base/pending_coinbase_intf.ml @@ -96,6 +96,28 @@ module type S = sig end] end + module Coinbase_stack : sig + [%%versioned: + module Stable : sig + module V1 : sig + type t = Field.t + end + end] + end + + module State_stack : sig + [%%versioned: + module Stable : sig + module V1 : sig + type t + end + end] + + val init : t -> Field.t + + val curr : t -> Field.t + end + module Stack_versioned : sig [%%versioned: module Stable : sig @@ -103,6 +125,10 @@ module type S = sig type nonrec t [@@deriving sexp, compare, equal, yojson, hash] end end] + + val data : t -> Coinbase_stack.t + + val state : t -> State_stack.t end module Stack : sig @@ -168,15 +194,6 @@ module type S = sig end end - module State_stack : sig - [%%versioned: - module Stable : sig - module V1 : sig - type t - end - end] - end - module Update : sig module Action : sig [%%versioned: diff --git a/src/lib/mina_base/proof.ml b/src/lib/mina_base/proof.ml index 3404770ad40..dbf7296c661 100644 --- a/src/lib/mina_base/proof.ml +++ b/src/lib/mina_base/proof.ml @@ -2,9 +2,9 @@ open Core_kernel -let blockchain_dummy 
= Dummy_values.blockchain_proof +let blockchain_dummy = lazy (Dummy_values.blockchain_proof ()) -let transaction_dummy = Dummy_values.transaction_proof +let transaction_dummy = lazy (Dummy_values.transaction_proof ()) [%%versioned module Stable = struct diff --git a/src/lib/mina_base/proof.mli b/src/lib/mina_base/proof.mli index 469a7d6efc3..813a2b1883f 100644 --- a/src/lib/mina_base/proof.mli +++ b/src/lib/mina_base/proof.mli @@ -2,9 +2,9 @@ open Pickles_types type t = (Nat.N2.n, Nat.N2.n) Pickles.Proof.t [@@deriving sexp, compare, yojson] -val blockchain_dummy : t +val blockchain_dummy : t lazy_t -val transaction_dummy : t +val transaction_dummy : t lazy_t [%%versioned: module Stable : sig diff --git a/src/lib/mina_base/stack_frame.ml b/src/lib/mina_base/stack_frame.ml index bc7d1e0f490..2f55534a873 100644 --- a/src/lib/mina_base/stack_frame.ml +++ b/src/lib/mina_base/stack_frame.ml @@ -61,7 +61,7 @@ module Make_str (A : Wire_types.Concrete) = struct [%%versioned module Stable = struct module V1 = struct - type t = Kimchi_backend.Pasta.Basic.Fp.Stable.V1.t + type t = Zkapp_basic.F.Stable.V1.t [@@deriving sexp, compare, equal, hash, yojson] let to_latest = Fn.id diff --git a/src/lib/mina_base/test/zkapp_command_test.ml b/src/lib/mina_base/test/zkapp_command_test.ml index b21e243cac6..2deb78a2c70 100644 --- a/src/lib/mina_base/test/zkapp_command_test.ml +++ b/src/lib/mina_base/test/zkapp_command_test.ml @@ -119,9 +119,10 @@ end = struct let full = deriver @@ Fd.o () let json_roundtrip_dummy () = + let dummy = Lazy.force dummy in [%test_eq: t] dummy (dummy |> Fd.to_json full |> Fd.of_json full) let full_circuit () = Run_in_thread.block_on_async_exn - @@ fun () -> Fields_derivers_zkapps.Test.Loop.run full dummy + @@ fun () -> Fields_derivers_zkapps.Test.Loop.run full (Lazy.force dummy) end diff --git a/src/lib/mina_base/zkapp_account.ml b/src/lib/mina_base/zkapp_account.ml index 306f390f933..78fe1a128a7 100644 --- a/src/lib/mina_base/zkapp_account.ml +++ b/src/lib/mina_base/zkapp_account.ml @@ -34,7 +34,8 @@ end) = struct type t = Event.t list [@@deriving compare, sexp] - let empty_hash = Random_oracle.(salt Inputs.salt_phrase |> digest) + let empty_hash = + Hash_prefix_create.salt Inputs.salt_phrase |> Random_oracle.digest let push_hash acc hash = Random_oracle.hash ~init:Inputs.hash_prefix [| acc; hash |] @@ -126,7 +127,7 @@ module Actions = struct let empty_state_element = let salt_phrase = "MinaZkappActionStateEmptyElt" in - Random_oracle.(salt salt_phrase |> digest) + Hash_prefix_create.salt salt_phrase |> Random_oracle.digest let push_events (acc : Field.t) (events : t) : Field.t = push_hash acc (hash events) @@ -345,8 +346,8 @@ let typ : (Checked.t, t) Typ.t = let open Poly in Typ.of_hlistable [ Zkapp_state.typ Field.typ - ; Flagged_option.option_typ - ~default:{ With_hash.data = None; hash = dummy_vk_hash () } + ; Flagged_option.lazy_option_typ + ~default:(lazy { With_hash.data = None; hash = dummy_vk_hash () }) (Data_as_hash.typ ~hash:With_hash.hash) |> Typ.transport ~there:(Option.map ~f:(With_hash.map ~f:Option.some)) diff --git a/src/lib/mina_base/zkapp_basic.ml b/src/lib/mina_base/zkapp_basic.ml index 1b14b041006..37d96cfb5a2 100644 --- a/src/lib/mina_base/zkapp_basic.ml +++ b/src/lib/mina_base/zkapp_basic.ml @@ -90,6 +90,11 @@ module Flagged_option = struct let option_typ ~default t = Typ.transport (typ t) ~there:(of_option ~default) ~back:to_option + let lazy_option_typ ~default t = + Typ.transport (typ t) + ~there:(fun t -> of_option t ~default:(Lazy.force default)) + 
~back:to_option + [%%endif] end diff --git a/src/lib/mina_base/zkapp_command.ml b/src/lib/mina_base/zkapp_command.ml index aed1d465147..a38a437df01 100644 --- a/src/lib/mina_base/zkapp_command.ml +++ b/src/lib/mina_base/zkapp_command.ml @@ -1576,20 +1576,21 @@ let arg_query_string x = Fields_derivers_zkapps.Test.Loop.json_to_string_gql @@ to_json x let dummy = - let account_update : Account_update.t = - { body = Account_update.Body.dummy - ; authorization = Control.dummy_of_tag Signature - } - in - let fee_payer : Account_update.Fee_payer.t = - { body = Account_update.Body.Fee_payer.dummy - ; authorization = Signature.dummy - } - in - { fee_payer - ; account_updates = Call_forest.cons account_update [] - ; memo = Signed_command_memo.empty - } + lazy + (let account_update : Account_update.t = + { body = Account_update.Body.dummy + ; authorization = Control.dummy_of_tag Signature + } + in + let fee_payer : Account_update.Fee_payer.t = + { body = Account_update.Body.Fee_payer.dummy + ; authorization = Signature.dummy + } + in + { fee_payer + ; account_updates = Call_forest.cons account_update [] + ; memo = Signed_command_memo.empty + } ) module Make_update_group (Input : sig type global_state diff --git a/src/lib/mina_block/mina_block.ml b/src/lib/mina_block/mina_block.ml index 22fc412bd12..9a5bd100730 100644 --- a/src/lib/mina_block/mina_block.ml +++ b/src/lib/mina_block/mina_block.ml @@ -24,7 +24,8 @@ let genesis ~precomputed_values : Block.with_hash * Validation.fully_valid = let block_with_hash = let body = Staged_ledger_diff.Body.create Staged_ledger_diff.empty_diff in let header = - Header.create ~protocol_state ~protocol_state_proof:Proof.blockchain_dummy + Header.create ~protocol_state + ~protocol_state_proof:(Lazy.force Proof.blockchain_dummy) ~delta_block_chain_proof: (Protocol_state.previous_state_hash protocol_state, []) () diff --git a/src/lib/mina_caqti/README.md b/src/lib/mina_caqti/README.md new file mode 100644 index 00000000000..ed3beb29610 --- /dev/null +++ b/src/lib/mina_caqti/README.md @@ -0,0 +1,77 @@ +Mina_caqti +========== + +This library is designed to assist in querying relational databases +using the Caqti library. It is used extensively for querying the +archive database in the `Processor` and `Load_data` modules in +`Archive_lib`. + +Constructing SQL queries +------------------------ + +Instead of writing out SQL queries as text, the +functions here can construct those queries from table information. + +For example, the `Token` module in the archive processor contains: +```ocaml + let table_name = "tokens" + + let find_by_id (module Conn : CONNECTION) id = + Conn.find + (Caqti_request.find Caqti_type.int typ + (Mina_caqti.select_cols_from_id ~table_name ~cols:Fields.names) ) + id +``` +The list `Fields.names` is generated from the `deriving fields` annotation on +the type `Token.t`. The call to `select_cols_from_id` constructs the query +``` +SELECT value,owner_public_key_id,owner_token_id FROM tokens WHERE id = ? +``` + +There are other SQL-building functions in the library, like +`select_cols`, `insert_into_cols`, and `select_insert_into_cols`, which +are documented in the source code. + +Custom array types +------------------ + +You can use the custom array types here to provide a `Caqti_type.t` for OCaml array types not +already built into Caqti. For example, `array_int_typ` is used to +give a type for the OCaml type `int array`. Such Caqti types can be +used for the input or result type of queries, or to provide type +annotations on columns in queries.
In some cases, PostgreSQL may not +be able to decode data without such annotations. There's an example of +using an annotation in +`Archive_lib.Processor.Zkapp_field_array.add_if_doesn't_exist`. + +Encoding values as NULLs +------------------------ + +In the descriptions of the functions that follow, note that the +values returned are in the `Deferred` monad, because they are the +result of database queries. +- For the `add...` functions, the result +is actually a `Deferred.Result.t`, because queries can fail. +- For +the `get...` functions, a failure raises an exception. + +There are some zkApps-related functions that are useful for storing +`Set_or_keep.t` and `Or_ignore.t` values. The function +`add_if_zkapp_set` runs a query if the data is `Set`, returning its +result (if it succeeds), and returns `None` if the data is `Keep`. +Similarly, `add_if_zkapp_check` runs a query if the data is `Check`, +returning its result (if it succeeds), and returns `None` if the data +is `Ignore`. The functions `get_zkapp_set_or_keep` and +`get_zkapp_or_ignore` operate in the opposite direction, converting a +queried value that is not NULL to a value constructed with `Set` or +`Check`, and converting a NULL to `Keep` or `Ignore`. The use of NULL +to encode these zkApp-related values is referred to as the `NULL +convention` in the database schema file `zkapp_tables.sql`. + +The functions `add_if_some` and `get_opt_item` are similar to these +zkApps-related functions, except that the constructors involved are +`Some` and `None` for option types. That is, `add_if_some` runs its +query argument if the data has `Some` as its constructor, returning +the result, and otherwise returns `None`. The function `get_opt_item` +returns a `Some`-constructed value if the item is not NULL in the +database, and `None` otherwise.
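+
+As a sketch of the NULL convention in use (the table, column, and SQL
+below are hypothetical, chosen only for illustration):
+```ocaml
+(* Runs the INSERT only when [vk] is [Set _]; for [Keep] it returns
+   [None], which the caller can store as a NULL column. *)
+let add_verification_key (module Conn : Caqti_async.CONNECTION)
+    (vk : string Mina_base.Zkapp_basic.Set_or_keep.t) =
+  Mina_caqti.add_if_zkapp_set
+    (fun vk ->
+      Conn.find
+        (Caqti_request.find Caqti_type.string Caqti_type.int
+           "INSERT INTO zkapp_verification_keys (verification_key) \
+            VALUES (?) RETURNING id" )
+        vk )
+    vk
+```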
diff --git a/src/lib/mina_caqti/mina_caqti.ml b/src/lib/mina_caqti/mina_caqti.ml index b1e49d9bcb8..8f625cb74d9 100644 --- a/src/lib/mina_caqti/mina_caqti.ml +++ b/src/lib/mina_caqti/mina_caqti.ml @@ -5,6 +5,7 @@ open Core_kernel open Caqti_async open Mina_base +(* custom Caqti types for generating type annotations on queries *) type _ Caqti_type.field += | Array_nullable_int : int option array Caqti_type.field @@ -225,10 +226,12 @@ let add_if_some (f : 'arg -> ('res, 'err) Deferred.Result.t) : 'arg option -> ('res option, 'err) Deferred.Result.t = Fn.compose deferred_result_lift_opt @@ Option.map ~f +(* if zkApp-related item is Set, run `f` *) let add_if_zkapp_set (f : 'arg -> ('res, 'err) Deferred.Result.t) : 'arg Zkapp_basic.Set_or_keep.t -> ('res option, 'err) Deferred.Result.t = Fn.compose (add_if_some f) Zkapp_basic.Set_or_keep.to_option +(* if zkApp-related item is Check, run `f` *) let add_if_zkapp_check (f : 'arg -> ('res, 'err) Deferred.Result.t) : 'arg Zkapp_basic.Or_ignore.t -> ('res option, 'err) Deferred.Result.t = Fn.compose (add_if_some f) Zkapp_basic.Or_ignore.to_option @@ -278,6 +281,9 @@ let insert_into_cols ~(returning : string) ~(table_name : string) (String.concat ~sep:", " cols) values returning +(* run `select_cols` and return the result, if found + if not found, run `insert_into_cols` and return the result +*) let select_insert_into_cols ~(select : string * 'select Caqti_type.t) ~(table_name : string) ?tannot ~(cols : string list * 'cols Caqti_type.t) (module Conn : CONNECTION) (value : 'cols) = @@ -348,11 +354,13 @@ let make_get_opt ~of_option ~f item_opt = in of_option res_opt +(** convert options to Set or Keep for zkApps-related results *) let get_zkapp_set_or_keep (item_opt : 'arg option) ~(f : 'arg -> ('res, _) Deferred.Result.t) : 'res Zkapp_basic.Set_or_keep.t Deferred.t = make_get_opt ~of_option:Zkapp_basic.Set_or_keep.of_option ~f item_opt +(** convert options to Check or Ignore for zkApps-related results *) let get_zkapp_or_ignore (item_opt : 'arg option) ~(f : 'arg -> ('res, _) Deferred.Result.t) : 'res Zkapp_basic.Or_ignore.t Deferred.t = diff --git a/src/lib/mina_graphql/mina_graphql.ml b/src/lib/mina_graphql/mina_graphql.ml index 01c5e9ac0cc..30a66886863 100644 --- a/src/lib/mina_graphql/mina_graphql.ml +++ b/src/lib/mina_graphql/mina_graphql.ml @@ -2056,7 +2056,7 @@ module Queries = struct expensive proof generation step if we don't have one available. 
*) - Proof.blockchain_dummy ) + Lazy.force Proof.blockchain_dummy ) } ; hash } ) diff --git a/src/lib/mina_graphql/types.ml b/src/lib/mina_graphql/types.ml index 211426e38fb..8531e726879 100644 --- a/src/lib/mina_graphql/types.ml +++ b/src/lib/mina_graphql/types.ml @@ -21,6 +21,8 @@ let token_id = Scalars.TokenId.typ () let json = Scalars.JSON.typ () +let field_elem = Mina_base_unix.Graphql_scalars.FieldElem.typ () + let epoch_seed = Scalars.EpochSeed.typ () let balance = Scalars.Balance.typ () @@ -204,7 +206,6 @@ let block_producer_timing : let merkle_path_element : (_, [ `Left of Zkapp_basic.F.t | `Right of Zkapp_basic.F.t ] option) typ = - let field_elem = Mina_base_unix.Graphql_scalars.FieldElem.typ () in obj "MerklePathElement" ~fields:(fun _ -> [ field "left" ~typ:field_elem ~args:Arg.[] @@ -510,12 +511,47 @@ let sign = let signed_fee = obj "SignedFee" ~doc:"Signed fee" ~fields:(fun _ -> + [ field "sign" ~typ:(non_null sign) ~doc:"+/-" + ~args:Arg.[] + ~resolve:(fun _ fee -> Currency.Fee.Signed.sgn fee) + ; field "feeMagnitude" ~typ:(non_null fee) ~doc:"Fee" + ~args:Arg.[] + ~resolve:(fun _ fee -> Currency.Fee.Signed.magnitude fee) + ] ) + +let signed_amount = + obj "SignedAmount" ~doc:"Signed amount" ~fields:(fun _ -> [ field "sign" ~typ:(non_null sign) ~doc:"+/-" ~args:Arg.[] ~resolve:(fun _ fee -> Currency.Amount.Signed.sgn fee) - ; field "feeMagnitude" ~typ:(non_null amount) ~doc:"Fee" + ; field "amountMagnitude" ~typ:(non_null amount) ~doc:"Amount" + ~args:Arg.[] + ~resolve:(fun _ amount -> Currency.Amount.Signed.magnitude amount) + ] ) + +let fee_excess : (Mina_lib.t, Fee_excess.t option) typ = + let module M = Fee_excess.Poly in + obj "FeeExcess" ~doc:"Fee excess divided into left, right components" + ~fields:(fun _ -> + [ field "feeTokenLeft" + ~args:Arg.[] + ~doc:"Token id for left component of fee excess" + ~typ:(non_null token_id) + ~resolve:(fun _ ({ fee_token_l; _ } : _ M.t) -> fee_token_l) + ; field "feeExcessLeft" + ~args:Arg.[] + ~doc:"Fee for left component of fee excess" ~typ:(non_null signed_fee) + ~resolve:(fun _ ({ fee_excess_l; _ } : _ M.t) -> fee_excess_l) + ; field "feeTokenRight" + ~args:Arg.[] + ~doc:"Token id for right component of fee excess" + ~typ:(non_null token_id) + ~resolve:(fun _ ({ fee_token_r; _ } : _ M.t) -> fee_token_r) + ; field "feeExcessRight" ~args:Arg.[] - ~resolve:(fun _ fee -> Currency.Amount.Signed.magnitude fee) + ~doc:"Fee for right component of fee excess" + ~typ:(non_null signed_fee) + ~resolve:(fun _ ({ fee_excess_r; _ } : _ M.t) -> fee_excess_r) ] ) let work_statement = @@ -543,18 +579,13 @@ let work_statement = ~args:Arg.[] ~resolve:(fun _ { Transaction_snark.Statement.Poly.target; _ } -> target.second_pass_ledger ) - ; field "feeExcess" ~typ:(non_null signed_fee) + ; field "feeExcess" ~typ:(non_null fee_excess) ~doc: "Total transaction fee that is not accounted for in the transition \ from source ledger to target ledger" ~args:Arg.[] - ~resolve:(fun _ - ({ fee_excess = { fee_excess_l; _ }; _ } : - Transaction_snark.Statement.t ) -> - (* TODO: Expose full fee excess data. 
*) - { fee_excess_l with - magnitude = Currency.Amount.of_fee fee_excess_l.magnitude - } ) + ~resolve:(fun _ ({ fee_excess; _ } : Transaction_snark.Statement.t) -> + fee_excess ) ; field "supplyIncrease" ~typ:(non_null amount) ~doc:"Increase in total supply" ~args:Arg.[] @@ -562,7 +593,7 @@ let work_statement = ~resolve:(fun _ ({ supply_increase; _ } : Transaction_snark.Statement.t) -> supply_increase.magnitude ) - ; field "supplyChange" ~typ:(non_null signed_fee) + ; field "supplyChange" ~typ:(non_null signed_amount) ~doc:"Increase/Decrease in total supply" ~args:Arg.[] ~resolve:(fun _ @@ -585,6 +616,155 @@ let pending_work = ~resolve:(fun _ w -> One_or_two.to_list w) ] ) +let state_stack = + let module M = Pending_coinbase.State_stack in + obj "StateStack" ~fields:(fun _ -> + [ field "initial" + ~args:Arg.[] + ~doc:"Initial hash" ~typ:(non_null field_elem) + ~resolve:(fun _ t -> M.init t) + ; field "current" + ~args:Arg.[] + ~doc:"Current hash" ~typ:(non_null field_elem) + ~resolve:(fun _ t -> M.curr t) + ] ) + +let pending_coinbase_stack = + let module M = Pending_coinbase.Stack_versioned in + obj "PendingCoinbaseStack" ~fields:(fun _ -> + [ field "dataStack" + ~args:Arg.[] + ~doc:"Data component of pending coinbase stack" + ~typ:(non_null field_elem) + ~resolve:(fun _ t -> M.data t) + ; field "stateStack" + ~args:Arg.[] + ~doc:"State component of pending coinbase stack" + ~typ:(non_null state_stack) + ~resolve:(fun _ t -> M.state t) + ] ) + +let local_state : (Mina_lib.t, Mina_state.Local_state.t option) typ = + let module M = Mina_transaction_logic.Zkapp_command_logic.Local_state in + obj "LocalState" ~fields:(fun _ -> + [ field "stackFrame" + ~args:Arg.[] + ~doc:"Stack frame component of local state" ~typ:(non_null field_elem) + ~resolve:(fun _ t -> + (M.stack_frame t : Stack_frame.Digest.t :> Zkapp_basic.F.Stable.V1.t) + ) + ; field "callStack" + ~args:Arg.[] + ~doc:"Call stack component of local state" ~typ:(non_null field_elem) + ~resolve:(fun _ t -> + (M.call_stack t : Call_stack_digest.t :> Zkapp_basic.F.Stable.V1.t) + ) + ; field "transactionCommitment" + ~args:Arg.[] + ~doc:"Transaction commitment component of local state" + ~typ:(non_null field_elem) + ~resolve:(fun _ t -> M.transaction_commitment t) + ; field "fullTransactionCommitment" + ~args:Arg.[] + ~doc:"Full transaction commitment component of local state" + ~typ:(non_null field_elem) + ~resolve:(fun _ t -> M.full_transaction_commitment t) + ; field "excess" + ~args:Arg.[] + ~doc:"Excess component of local state" ~typ:(non_null signed_amount) + ~resolve:(fun _ t -> M.excess t) + ; field "supplyIncrease" + ~args:Arg.[] + ~doc:"Supply increase component of local state" + ~typ:(non_null signed_amount) + ~resolve:(fun _ t -> M.supply_increase t) + ; field "ledger" + ~args:Arg.[] + ~doc:"Ledger component of local state" ~typ:(non_null ledger_hash) + ~resolve:(fun _ t -> M.ledger t) + ; field "success" + ~args:Arg.[] + ~doc:"Success component of local state" ~typ:(non_null bool) + ~resolve:(fun _ t -> M.success t) + ; field "accountUpdateIndex" + ~args:Arg.[] + ~doc:"Account update index component of local state" + ~typ:(non_null @@ Graphql_basic_scalars.UInt32.typ ()) + ~resolve:(fun _ t -> M.account_update_index t) + ; field "failureStatusTable" + ~args:Arg.[] + ~doc:"Failure status table component of local state" + ~typ:(non_null (list (non_null (list (non_null string))))) + ~resolve:(fun _ t -> + List.map (M.failure_status_tbl t) + ~f:(List.map ~f:Mina_base.Transaction_status.Failure.to_string) ) + ; field "willSucceed" 
+ ~args:Arg.[] + ~doc:"Will-succeed component of local state" ~typ:(non_null bool) + ~resolve:(fun _ t -> M.will_succeed t) + ] ) + +let registers : (Mina_lib.t, Mina_state.Registers.Value.t option) typ = + let module M = Mina_state.Registers in + obj "Registers" ~fields:(fun _ -> + [ field "firstPassLedger" + ~args:Arg.[] + ~doc:"First pass ledger hash" ~typ:(non_null ledger_hash) + ~resolve:(fun _ ({ first_pass_ledger; _ } : _ M.t) -> + first_pass_ledger ) + ; field "secondPassLedger" + ~args:Arg.[] + ~doc:"Second pass ledger hash" ~typ:(non_null ledger_hash) + ~resolve:(fun _ ({ second_pass_ledger; _ } : _ M.t) -> + second_pass_ledger ) + ; field "pendingCoinbaseStack" + ~args:Arg.[] + ~doc:"Pending coinbase stack" + ~typ:(non_null pending_coinbase_stack) + ~resolve:(fun _ ({ pending_coinbase_stack; _ } : _ M.t) -> + pending_coinbase_stack ) + ; field "localState" + ~args:Arg.[] + ~doc:"Local state" ~typ:(non_null local_state) + ~resolve:(fun _ ({ local_state; _ } : _ M.t) -> local_state) + ] ) + +let snarked_ledger_state : + (Mina_lib.t, Mina_state.Snarked_ledger_state.t option) typ = + let module M = Mina_state.Snarked_ledger_state.Poly in + obj "SnarkedLedgerState" ~fields:(fun _ -> + [ field "sourceRegisters" + ~args:Arg.[] + ~typ:(non_null registers) + ~resolve:(fun _ ({ source; _ } : _ M.t) -> source) + ; field "targetRegisters" + ~args:Arg.[] + ~typ:(non_null registers) + ~resolve:(fun _ ({ target; _ } : _ M.t) -> target) + ; field "connectingLedgerLeft" + ~args:Arg.[] + ~typ:(non_null ledger_hash) + ~resolve:(fun _ ({ connecting_ledger_left; _ } : _ M.t) -> + connecting_ledger_left ) + ; field "connectingLedgerRight" + ~args:Arg.[] + ~typ:(non_null ledger_hash) + ~resolve:(fun _ ({ connecting_ledger_right; _ } : _ M.t) -> + connecting_ledger_right ) + ; field "supplyIncrease" + ~args:Arg.[] + ~typ:(non_null signed_amount) + ~resolve:(fun _ ({ supply_increase; _ } : _ M.t) -> supply_increase) + ; field "feeExcess" + ~args:Arg.[] + ~typ:(non_null fee_excess) + ~resolve:(fun _ ({ fee_excess; _ } : _ M.t) -> fee_excess) + ; field "sokDigest" + ~args:Arg.[] + ~doc:"Placeholder for SOK digest" ~typ:string + ~resolve:(fun _ ({ sok_digest = _; _ } : _ M.t) -> None) + ] ) + let blockchain_state : ( Mina_lib.t , (Mina_state.Blockchain_state.Value.t * State_hash.t) option ) @@ -667,6 +847,13 @@ let blockchain_state : None | Some b -> Some (Transition_frontier.Breadcrumb.just_emitted_a_proof b) ) + ; field "ledgerProofStatement" + ~typ:(non_null snarked_ledger_state) + ~args:Arg.[] + ~resolve:(fun _ t -> + let blockchain_state, _ = t in + Mina_state.Blockchain_state.ledger_proof_statement blockchain_state + ) ; field "bodyReference" ~typ:(non_null @@ Graphql_lib.Scalars.BodyReference.typ ()) ~doc: @@ -1244,8 +1431,8 @@ module AccountObj = struct ~doc: "The list of accounts which are delegating to you in the last \ epoch (note that the info is recorded in the one before last \ - epoch epoch so it might not be up to date with the current \ - account status)" + epoch so it might not be up to date with the current account \ + status)" ~args:Arg.[] ~resolve:(fun { ctx = mina; _ } { account; _ } -> let open Option.Let_syntax in @@ -1617,7 +1804,15 @@ module User_command = struct | Applied | Enqueued -> None | Included_but_failed failures -> - List.concat failures |> List.hd ) + let rec first_failure = function + | (failure :: _) :: _ -> + Some failure + | [] :: others -> + first_failure others + | [] -> + None + in + first_failure failures ) ] let payment = diff --git 
a/src/lib/mina_ledger/ledger.ml b/src/lib/mina_ledger/ledger.ml index f4fd61a37ff..470452519b0 100644 --- a/src/lib/mina_ledger/ledger.ml +++ b/src/lib/mina_ledger/ledger.ml @@ -57,7 +57,8 @@ module Ledger_inner = struct let hash_account = Fn.compose Ledger_hash.of_digest Account.digest - let empty_account = Ledger_hash.of_digest Account.empty_digest + let empty_account = + Ledger_hash.of_digest (Lazy.force Account.empty_digest) end end] end @@ -105,7 +106,7 @@ module Ledger_inner = struct end module Db : - Merkle_ledger.Database_intf.S + Merkle_ledger.Intf.Ledger.DATABASE with module Location = Location_at_depth with module Addr = Location_at_depth.Addr with type root_hash := Ledger_hash.t @@ -121,7 +122,7 @@ module Ledger_inner = struct module Null = Null_ledger.Make (Inputs) module Any_ledger : - Merkle_ledger.Any_ledger.S + Merkle_ledger.Intf.Ledger.ANY with module Location = Location_at_depth with type account := Account.t and type key := Public_key.Compressed.t diff --git a/src/lib/mina_ledger/ledger.mli b/src/lib/mina_ledger/ledger.mli index ecba767a7cb..9ad1c37855b 100644 --- a/src/lib/mina_ledger/ledger.mli +++ b/src/lib/mina_ledger/ledger.mli @@ -5,7 +5,7 @@ open Mina_base module Location : Merkle_ledger.Location_intf.S module Db : - Merkle_ledger.Database_intf.S + Merkle_ledger.Intf.Ledger.DATABASE with module Location = Location with module Addr = Location.Addr with type root_hash := Ledger_hash.t @@ -18,7 +18,7 @@ module Db : and type account_id_set := Account_id.Set.t module Any_ledger : - Merkle_ledger.Any_ledger.S + Merkle_ledger.Intf.Ledger.ANY with module Location = Location with type account := Account.t and type key := Public_key.Compressed.t diff --git a/src/lib/mina_ledger/ledger_transfer.ml b/src/lib/mina_ledger/ledger_transfer.ml index 22efdb917a8..9efd4643d35 100644 --- a/src/lib/mina_ledger/ledger_transfer.ml +++ b/src/lib/mina_ledger/ledger_transfer.ml @@ -2,7 +2,7 @@ open Core_kernel open Mina_base module type Base_ledger_intf = - Merkle_ledger.Base_ledger_intf.S + Merkle_ledger.Intf.Ledger.S with type account := Account.t and type key := Signature_lib.Public_key.Compressed.t and type token_id := Token_id.t diff --git a/src/lib/mina_ledger/sync_ledger.ml b/src/lib/mina_ledger/sync_ledger.ml index 0f404fc3d56..397cdbf3434 100644 --- a/src/lib/mina_ledger/sync_ledger.ml +++ b/src/lib/mina_ledger/sync_ledger.ml @@ -10,7 +10,7 @@ module Hash = struct let hash_account = Fn.compose Ledger_hash.of_digest Account.digest - let empty_account = Ledger_hash.of_digest Account.empty_digest + let empty_account = Ledger_hash.of_digest (Lazy.force Account.empty_digest) end module Root_hash = struct diff --git a/src/lib/mina_lib/config.ml b/src/lib/mina_lib/config.ml index 22d622d98b9..2512042fe9d 100644 --- a/src/lib/mina_lib/config.ml +++ b/src/lib/mina_lib/config.ml @@ -52,6 +52,7 @@ type t = ; start_time : Time.t ; precomputed_blocks_path : string option ; log_precomputed_blocks : bool + ; start_filtered_logs : string list ; upload_blocks_to_gcloud : bool ; block_reward_threshold : Currency.Amount.t option [@default None] ; node_status_url : string option [@default None] diff --git a/src/lib/mina_lib/mina_lib.ml b/src/lib/mina_lib/mina_lib.ml index 2f192f3835a..51120886167 100644 --- a/src/lib/mina_lib/mina_lib.ml +++ b/src/lib/mina_lib/mina_lib.ml @@ -833,85 +833,8 @@ let staged_ledger_ledger_proof t = let validated_transitions t = t.pipes.validated_transitions_reader -module Root_diff = struct - [%%versioned - module Stable = struct - module V2 = struct - type t = - { 
commands : User_command.Stable.V2.t With_status.Stable.V2.t list - ; root_length : int - } - - let to_latest = Fn.id - end - end] -end - let initialization_finish_signal t = t.initialization_finish_signal -(* TODO: this is a bad pattern for two reasons: - * - uses an abstraction leak to patch new functionality instead of making a new extension - * - every call to this function will create a new, unique pipe with it's own thread for transfering - * items from the identity extension with no route for termination - *) -let root_diff t = - let root_diff_reader, root_diff_writer = - Strict_pipe.create ~name:"root diff" - (Buffered (`Capacity 30, `Overflow Crash)) - in - O1trace.background_thread "read_root_diffs" (fun () -> - let open Root_diff.Stable.Latest in - let length_of_breadcrumb b = - Transition_frontier.Breadcrumb.consensus_state b - |> Consensus.Data.Consensus_state.blockchain_length - |> Mina_numbers.Length.to_uint32 |> Unsigned.UInt32.to_int - in - Broadcast_pipe.Reader.iter t.components.transition_frontier ~f:(function - | None -> - Deferred.unit - | Some frontier -> - let root = Transition_frontier.root frontier in - Strict_pipe.Writer.write root_diff_writer - { commands = - List.map - ( Transition_frontier.Breadcrumb.validated_transition root - |> Mina_block.Validated.valid_commands ) - ~f:(With_status.map ~f:User_command.forget_check) - ; root_length = length_of_breadcrumb root - } ; - Broadcast_pipe.Reader.iter - Transition_frontier.( - Extensions.(get_view_pipe (extensions frontier) Identity)) - ~f: - (Deferred.List.iter ~f:(function - | Transition_frontier.Diff.Full.With_mutant.E (New_node _, _) - -> - Deferred.unit - | Transition_frontier.Diff.Full.With_mutant.E - (Best_tip_changed _, _) -> - Deferred.unit - | Transition_frontier.Diff.Full.With_mutant.E - (Root_transitioned { new_root; _ }, _) -> - let root_hash = - (Transition_frontier.Root_data.Limited.hashes new_root) - .state_hash - in - let new_root_breadcrumb = - Transition_frontier.(find_exn frontier root_hash) - in - Strict_pipe.Writer.write root_diff_writer - { commands = - Transition_frontier.Breadcrumb.validated_transition - new_root_breadcrumb - |> Mina_block.Validated.valid_commands - |> List.map - ~f: - (With_status.map ~f:User_command.forget_check) - ; root_length = length_of_breadcrumb new_root_breadcrumb - } ; - Deferred.unit ) ) ) ) ; - root_diff_reader - let dump_tf t = peek_frontier t.components.transition_frontier |> Or_error.map ~f:Transition_frontier.visualize_to_string @@ -1505,6 +1428,32 @@ module type Itn_settable = sig val set_itn_logger_data : t -> daemon_port:int -> unit Deferred.Or_error.t end +let start_filtered_log + in_memory_reverse_structured_log_messages_for_integration_test + (structured_log_ids : string list) = + let handle str = + let idx, old_messages, started = + !in_memory_reverse_structured_log_messages_for_integration_test + in + in_memory_reverse_structured_log_messages_for_integration_test := + (idx + 1, str :: old_messages, started) + in + let _, _, started = + !in_memory_reverse_structured_log_messages_for_integration_test + in + if started then Or_error.error_string "Already initialized" + else ( + in_memory_reverse_structured_log_messages_for_integration_test := + (0, [], true) ; + let event_set = + Structured_log_events.Set.of_list + @@ List.map ~f:Structured_log_events.id_of_string structured_log_ids + in + Logger.Consumer_registry.register ~id:Logger.Logger_id.mina + ~processor:(Logger.Processor.raw_structured_log_events event_set) + ~transport:(Logger.Transport.raw 
handle) ; + Ok () ) + let create ?wallets (config : Config.t) = let module Context = (val context config) in let catchup_mode = if config.super_catchup then `Super else `Normal in @@ -1528,6 +1477,15 @@ let create ?wallets (config : Config.t) = else Deferred.unit in O1trace.thread "mina_lib" (fun () -> + let in_memory_reverse_structured_log_messages_for_integration_test = + ref (0, [], false) + in + if not (List.is_empty config.start_filtered_logs) then + (* Start the filtered logs, if requested. *) + Or_error.ok_exn + @@ start_filtered_log + in_memory_reverse_structured_log_messages_for_integration_test + config.start_filtered_logs ; let%bind prover = Monitor.try_with ~here:[%here] ~rest: @@ -2320,8 +2278,7 @@ let create ?wallets (config : Config.t) = ; sync_status ; precomputed_block_writer ; block_production_status = ref `Free - ; in_memory_reverse_structured_log_messages_for_integration_test = - ref (0, [], false) + ; in_memory_reverse_structured_log_messages_for_integration_test ; vrf_evaluation_state = Block_producer.Vrf_evaluation_state.create () } ) ) @@ -2334,28 +2291,9 @@ let runtime_config { config = { precomputed_values; _ }; _ } = let start_filtered_log ({ in_memory_reverse_structured_log_messages_for_integration_test; _ } : t) (structured_log_ids : string list) = - let handle str = - let idx, old_messages, started = - !in_memory_reverse_structured_log_messages_for_integration_test - in - in_memory_reverse_structured_log_messages_for_integration_test := - (idx + 1, str :: old_messages, started) - in - let _, _, started = - !in_memory_reverse_structured_log_messages_for_integration_test - in - if started then Or_error.error_string "Already initialized" - else ( - in_memory_reverse_structured_log_messages_for_integration_test := - (0, [], true) ; - let event_set = - Structured_log_events.Set.of_list - @@ List.map ~f:Structured_log_events.id_of_string structured_log_ids - in - Logger.Consumer_registry.register ~id:Logger.Logger_id.mina - ~processor:(Logger.Processor.raw_structured_log_events event_set) - ~transport:(Logger.Transport.raw handle) ; - Ok () ) + start_filtered_log + in_memory_reverse_structured_log_messages_for_integration_test + structured_log_ids let get_filtered_log_entries ({ in_memory_reverse_structured_log_messages_for_integration_test; _ } : t) diff --git a/src/lib/mina_lib/mina_lib.mli b/src/lib/mina_lib/mina_lib.mli index 81529666991..44e90e2876a 100644 --- a/src/lib/mina_lib/mina_lib.mli +++ b/src/lib/mina_lib/mina_lib.mli @@ -155,20 +155,6 @@ val client_port : t -> int val validated_transitions : t -> Mina_block.Validated.t Strict_pipe.Reader.t -module Root_diff : sig - [%%versioned: - module Stable : sig - module V2 : sig - type t = - { commands : User_command.Stable.V2.t With_status.Stable.V2.t list - ; root_length : int - } - end - end] -end - -val root_diff : t -> Root_diff.t Strict_pipe.Reader.t - val initialization_finish_signal : t -> unit Ivar.t val dump_tf : t -> string Or_error.t diff --git a/src/lib/mina_numbers/dune b/src/lib/mina_numbers/dune index 3c5db6b65fe..0ad727203f6 100644 --- a/src/lib/mina_numbers/dune +++ b/src/lib/mina_numbers/dune @@ -21,7 +21,6 @@ bignum_bigint pickles codable - ppx_dhall_type snarky.backendless fold_lib tuple_lib diff --git a/src/lib/mina_numbers/global_slot.ml b/src/lib/mina_numbers/global_slot.ml index 76d8d481ec3..87e64edaa73 100644 --- a/src/lib/mina_numbers/global_slot.ml +++ b/src/lib/mina_numbers/global_slot.ml @@ -75,8 +75,6 @@ module Make (M : S) = struct let%map.Quickcheck u32 = T.gen_incl u32_1 u32_2 
in of_uint32 u32 - let dhall_type = Ppx_dhall_type.Dhall_type.Text - let zero = of_uint32 T.zero let one = of_uint32 T.one diff --git a/src/lib/mina_numbers/global_slot_intf.ml b/src/lib/mina_numbers/global_slot_intf.ml index a57b52e4d13..444ee1073a9 100644 --- a/src/lib/mina_numbers/global_slot_intf.ml +++ b/src/lib/mina_numbers/global_slot_intf.ml @@ -18,8 +18,6 @@ module type S_base = sig val gen_incl : t -> t -> t Core_kernel.Quickcheck.Generator.t - val dhall_type : Ppx_dhall_type.Dhall_type.t - val zero : t val one : t diff --git a/src/lib/mina_numbers/global_slot_span.ml b/src/lib/mina_numbers/global_slot_span.ml index 67f1167d184..2704436d290 100644 --- a/src/lib/mina_numbers/global_slot_span.ml +++ b/src/lib/mina_numbers/global_slot_span.ml @@ -82,8 +82,6 @@ module Make_str (_ : Wire_types.Concrete) = struct let%map.Quickcheck u32 = T.gen_incl u32_1 u32_2 in of_uint32 u32 - let dhall_type = Ppx_dhall_type.Dhall_type.Text - let zero = of_uint32 T.zero let one = of_uint32 T.one diff --git a/src/lib/mina_numbers/intf.ml b/src/lib/mina_numbers/intf.ml index df506258430..d6788d0ea11 100644 --- a/src/lib/mina_numbers/intf.ml +++ b/src/lib/mina_numbers/intf.ml @@ -14,9 +14,6 @@ module type S_unchecked = sig include Hashable.S with type t := t - (* not automatically derived *) - val dhall_type : Ppx_dhall_type.Dhall_type.t - val max_value : t val length_in_bits : int diff --git a/src/lib/mina_numbers/nat.ml b/src/lib/mina_numbers/nat.ml index d0129786252..aa68a9be130 100644 --- a/src/lib/mina_numbers/nat.ml +++ b/src/lib/mina_numbers/nat.ml @@ -196,9 +196,6 @@ end) struct type t = N.t [@@deriving sexp, compare, hash, yojson] - (* can't be automatically derived *) - let dhall_type = Ppx_dhall_type.Dhall_type.Text - let max_value = N.max_int include Comparable.Make (N) diff --git a/src/lib/mina_stdlib/list.mli b/src/lib/mina_stdlib/list.mli index 7345148cd6c..46a5a504e42 100644 --- a/src/lib/mina_stdlib/list.mli +++ b/src/lib/mina_stdlib/list.mli @@ -7,6 +7,10 @@ module Length : sig *) val equal : 'a t + (** [unequal l len] returns [true] if [List.length l <> len], [false] otherwise. + *) + val unequal : 'a t + (** [gte l len] returns [true] if [List.length l >= len], [false] otherwise. *) @@ -29,24 +33,24 @@ module Length : sig (** {2 Infix comparison operators} *) - (** [Compare] contains infix aliases for functions of {!module:Length} *) + (** [Compare] contains infix aliases for functions of {!module:Length}. *) module Compare : sig - (** [( = )] is [equal] *) + (** [( = )] is {!val:equal}. *) val ( = ) : 'a t - (** [( <> )] is [unequal] *) + (** [( <> )] is {!val:unequal}. *) val ( <> ) : 'a t - (** [( >= )] is [gte] *) + (** [( >= )] is {!val:gte}. *) val ( >= ) : 'a t - (** [l > len] is [gt] *) + (** [l > len] is {!val:gt}. *) val ( > ) : 'a t - (** [( <= )] is [lte] *) + (** [( <= )] is {!val:lte}. *) val ( <= ) : 'a t - (** [l < len] is [lt] *) + (** [l < len] is {!val:lt}. *) val ( < ) : 'a t end end diff --git a/src/lib/mina_wire_types/pickles/pickles.ml b/src/lib/mina_wire_types/pickles/pickles.ml index b10729c1d5b..3708c754de9 100644 --- a/src/lib/mina_wire_types/pickles/pickles.ml +++ b/src/lib/mina_wire_types/pickles/pickles.ml @@ -99,6 +99,10 @@ module M = struct ( Snark_params.Tick.Field.t , Snark_params.Tick.Field.t array ) Pickles_types.Plonk_types.All_evals.t + (* A job half-done may be worse than not done at all. + TODO: Migrate Plonk_types here, and actually include the + *wire* type, not this in-memory version. 
+ *) + ; proof : Wrap_wire_proof.V1.t } end diff --git a/src/lib/mina_wire_types/pickles/pickles_limb_vector.ml b/src/lib/mina_wire_types/pickles/pickles_limb_vector.ml index 7f17a3bbb7a..be1297d0848 100644 --- a/src/lib/mina_wire_types/pickles/pickles_limb_vector.ml +++ b/src/lib/mina_wire_types/pickles/pickles_limb_vector.ml @@ -3,7 +3,7 @@ module Constant = struct type t = Int64.t end - module Make (N : Pickles_types.Vector.Nat_intf) = struct + module Make (N : Pickles_types.Nat.Intf) = struct module A = Pickles_types.Vector.With_length (N) type t = Hex64.t A.t diff --git a/src/lib/network_pool/batcher.ml b/src/lib/network_pool/batcher.ml index 349de2d1f9f..06af016fbf6 100644 --- a/src/lib/network_pool/batcher.ml +++ b/src/lib/network_pool/batcher.ml @@ -533,7 +533,7 @@ module Snark_pool = struct let message = Mina_base.Sok_message.create ~fee ~prover in ( One_or_two.map statements ~f:(fun statement -> Ledger_proof.create ~statement ~sok_digest - ~proof:Proof.transaction_dummy ) + ~proof:(Lazy.force Proof.transaction_dummy) ) , message ) in Envelope.Incoming.gen data_gen diff --git a/src/lib/network_pool/snark_pool.ml b/src/lib/network_pool/snark_pool.ml index aa61df58af6..0351a8aa9c7 100644 --- a/src/lib/network_pool/snark_pool.ml +++ b/src/lib/network_pool/snark_pool.ml @@ -675,7 +675,7 @@ let%test_module "random set test" = , One_or_two.map work ~f:(fun statement -> Ledger_proof.create ~statement ~sok_digest:invalid_sok_digest - ~proof:Proof.transaction_dummy ) + ~proof:(Lazy.force Proof.transaction_dummy) ) , fee , some_other_pk ) :: acc ) diff --git a/src/lib/pickles/README.md b/src/lib/pickles/README.md new file mode 100644 index 00000000000..e12164e98f0 --- /dev/null +++ b/src/lib/pickles/README.md @@ -0,0 +1,96 @@ +# Pickles - a modular typed inductive proof system + +Pickles is a framework that provides an interface for writing SNARK circuits as +inductive rules, backed by a co-recursive kimchi verifier operating over the +Pallas and Vesta curves. + +It is currently implemented to work with the polynomial IOP +[PLONK](https://eprint.iacr.org/2019/953.pdf) and the recursion layer +[Halo](https://eprint.iacr.org/2019/1021.pdf), generally called +[Halo2](https://electriccoin.co/blog/explaining-halo-2/). +It also provides an abstraction to deal with lookup arguments and custom gates compatible with PLONK. + +Pickles aims to be modular, using the OCaml module system. Ideally, an inductive proof +system could be parametrized by any finite field and 2-cycle of curves (called +`Tick` and `Tock`). However, at the moment, it is hardcoded to the +Pasta curves (Pallas and Vesta). The code refers to the algebraic parameters as `Impls`, +which are passed to generic functions as first-class modules. + +A particularity of Pickles is that it uses the power of the OCaml type system to +encode invariants like vector sizes, heterogeneous lists and others at +the type level, so that statements which would otherwise fail at runtime are +rejected at compile time. Some of these type-level encodings can be found in +[`Pickles_types`](../pickles_types/). +Some types are parametrized by naturals defined at the type level. + +## Coding guidelines + +Functions related to computations are parametrized by at least two types, +suffixed respectively with `var` and `val`, which represent in-circuit values +and out-of-circuit values. The type `[('var, 'val) t]` describes a mapping from +the OCaml type `['val]` to a type representing the value using PlonK variables +(`['var]`).
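+
+For instance, a minimal sketch (illustrative only, not code from this
+library): the in-circuit (`'var`) representation of the out-of-circuit
+(`'val`) OCaml type `bool` is `Boolean.var`, and a `Typ.t` ties the two
+together:
+```ocaml
+(* ('var, 'val) Typ.t relates a circuit representation to an OCaml value *)
+let bool_typ : (Impls.Step.Boolean.var, bool) Impls.Step.Typ.t =
+  Impls.Step.Boolean.typ
+```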
+More generally, each in-circuit encoded value has a corresponding `'a Typ.t` value, which +ties together the in-circuit values, the out-of-circuit values and the related circuit +constraints. +A nested module `Constant` must be defined to encode the out-of-circuit values +and operations. +The reader can find more information in the [Snarky +documentation](https://github.com/o1-labs/snarky/blob/master/src/base/snark_intf.ml#L140-L153). + +When a type encodes a list at the type level, an `s` is appended to +its name. + +## File structure + +This is a non-exhaustive classification of the files in the +library. Refer to each file's header for a description of its contents: + +- Fiat-Shamir (FS) transformation: + - [`Make_sponge`](make_sponge.mli) + - [`Opt_sponge`](opt_sponge.mli) + - [`Ro`](ro.mli) + - [`Scalar_challenge`](scalar_challenge.mli) + - [`Sponge_inputs`](sponge_inputs.mli) + - [`Tick_field_sponge`](tick_field_sponge.mli) + - [`Tock_field_sponge`](tock_field_sponge.mli) +- Polynomial commitment scheme: + - [`Commitment_lengths`](commitment_lengths.mli) + - [`Evaluation_length`](evaluation_length.mli) +- Main protocol: + - [`Impls`](impls.mli) +- Optimisations: + - [`Endo`](endo.mli) +- Miscellaneous: + - [`Cache`](cache.mli) + - [`Cache_handle`](cache_handle.mli) + - [`Common`](common.mli) + - [`Dirty`](dirty.mli) + - [`Import`](import.mli) +- Inductive proof system: + - [`Compile`](compile.mli) + - [`Dummy`](dummy.mli) + - [`Fix_domains`](fix_domains.mli) + - [`Full_signature`](full_signature.mli) + - [`Inductive_rule`](inductive_rule.mli) + - [`Per_proof_witness`](per_proof_witness.mli) + - [`Pickles`](pickles.mli) + - [`Pickles_intf`](pickles_intf.mli) + - [`Proof`](proof.mli) + - [`Reduced_messages_for_next_proof_over_same_field`](reduced_messages_for_next_proof_over_same_field.mli) + - [`Requests`](requests.mli) + - [`Side_loaded_verification_key`](side_loaded_verification_key.mli) + - [`Step`](step.mli) + - [`Step_branch_data`](step_branch_data.mli) + - [`Step_main`](step_main.mli) + - [`Step_verifier`](step_verifier.mli) + - [`Tag`](tag.mli) +- Algebraic objects: + - [`Plonk_curve_ops`](plonk_curve_ops.mli) + +## Resources + +Some public YouTube videos describing the initial ideas behind +Pickles. They may be outdated where the IOP has changed: +- [zkStudyClub: Izaak Meckler o1Labs - Pickles](https://www.youtube.com/watch?v=kmCXdjv5oP0) +- [ZK-GLOBAL 0x05 - Izaak Meckler - Meet Pickles SNARK](https://www.youtube.com/watch?v=nOnGOxyh7jY) diff --git a/src/lib/pickles/backend/backend.mli b/src/lib/pickles/backend/backend.mli index 70d8453f7b0..011bcdb640c 100644 --- a/src/lib/pickles/backend/backend.mli +++ b/src/lib/pickles/backend/backend.mli @@ -1,3 +1,8 @@ +(** Pickles backend, i.e. curves used by the inductive proof system. + At the time of writing, Pallas and Vesta curves (so-called Pasta) are + hardcoded. The 2-cycle is called [Tick] and [Tock].
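+    In this codebase, [Tick] (Vesta-based) is used for step proofs and
+    [Tock] (Pallas-based) for wrap proofs.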
+*) + module Tick : sig include module type of Kimchi_backend.Pasta.Vesta_based_plonk diff --git a/src/lib/pickles/cache.ml b/src/lib/pickles/cache.ml index 398a8c6cbaf..987513e73a9 100644 --- a/src/lib/pickles/cache.ml +++ b/src/lib/pickles/cache.ml @@ -10,7 +10,7 @@ module Step = struct * Backend.Tick.R1CS_constraint_system.t let to_string : t -> _ = function - | _id, header, n, h -> + | _id, header, n, _h -> sprintf !"step-%s-%s-%d-%s" header.kind.type_ header.kind.identifier n header.identifying_hash end @@ -20,12 +20,21 @@ module Step = struct [@@deriving sexp] let to_string : t -> _ = function - | _id, header, n, h -> + | _id, header, n, _h -> sprintf !"vk-step-%s-%s-%d-%s" header.kind.type_ header.kind.identifier n header.identifying_hash end + [@@warning "-4"] end + type storable = + (Key.Proving.t, Backend.Tick.Keypair.t) Key_cache.Sync.Disk_storable.t + + type vk_storable = + ( Key.Verification.t + , Kimchi_bindings.Protocol.VerifierIndex.Fp.t ) + Key_cache.Sync.Disk_storable.t + let storable = Key_cache.Sync.Disk_storable.simple Key.Proving.to_string (fun (_, header, _, cs) ~path -> @@ -82,9 +91,8 @@ module Step = struct (Kimchi_bindings.Protocol.VerifierIndex.Fp.write (Some true) x) header path ) ) - let read_or_generate ~prev_challenges cache k_p k_v typ return_typ main = - let s_p = storable in - let s_v = vk_storable in + let read_or_generate ~prev_challenges cache ?(s_p = storable) k_p + ?(s_v = vk_storable) k_v typ return_typ main = let open Impls.Step in let pk = lazy @@ -93,8 +101,7 @@ module Step = struct Key_cache.Sync.read cache s_p (Lazy.force k_p) ) with | Ok (pk, dirty) -> - Common.time "step keypair create" (fun () -> - (Keypair.create ~pk ~vk:(Backend.Tick.Keypair.vk pk), dirty) ) + Common.time "step keypair create" (fun () -> (pk, dirty)) | Error _e -> let r = Common.time "stepkeygen" (fun () -> @@ -105,7 +112,7 @@ module Step = struct ignore ( Key_cache.Sync.write cache s_p (Lazy.force k_p) (Keypair.pk r) : unit Or_error.t ) ; - (r, `Generated_something) ) + (Keypair.pk r, `Generated_something) ) in let vk = lazy @@ -118,7 +125,7 @@ module Step = struct (vk, `Cache_hit) | Error _e -> let pk, c = Lazy.force pk in - let vk = Keypair.vk pk in + let vk = Backend.Tick.Keypair.vk pk in ignore (Key_cache.Sync.write cache s_v k_v vk : unit Or_error.t) ; (vk, c) ) in @@ -135,10 +142,11 @@ module Wrap = struct [%equal: unit * Md5.t] ((* TODO: *) ignore x1, y1) (ignore x2, y2) let to_string : t -> _ = function - | _id, header, h -> + | _id, header, _h -> sprintf !"vk-wrap-%s-%s-%s" header.kind.type_ header.kind.identifier header.identifying_hash end + [@@warning "-4"] module Proving = struct type t = @@ -147,12 +155,18 @@ module Wrap = struct * Backend.Tock.R1CS_constraint_system.t let to_string : t -> _ = function - | _id, header, h -> + | _id, header, _h -> sprintf !"wrap-%s-%s-%s" header.kind.type_ header.kind.identifier header.identifying_hash end end + type storable = + (Key.Proving.t, Backend.Tock.Keypair.t) Key_cache.Sync.Disk_storable.t + + type vk_storable = + (Key.Verification.t, Verification_key.t) Key_cache.Sync.Disk_storable.t + let storable = Key_cache.Sync.Disk_storable.simple Key.Proving.to_string (fun (_, header, cs) ~path -> @@ -180,10 +194,42 @@ module Wrap = struct (Kimchi_bindings.Protocol.Index.Fq.write (Some true) t.index) header path ) ) - let read_or_generate ~prev_challenges cache k_p k_v typ return_typ main = + let vk_storable = + Key_cache.Sync.Disk_storable.simple Key.Verification.to_string + (fun (_, header, _cs) ~path -> + 
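+          (* read the stored key back with its snark keys header, then check,
+             field by field, that the on-disk header matches the expected one *)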
Or_error.try_with_join (fun () -> + let open Or_error.Let_syntax in + let%map header_read, index = + Snark_keys_header.read_with_header + ~read_data:(fun ~offset:_ path -> + Binable.of_string + (module Verification_key.Stable.Latest) + (In_channel.read_all path) ) + path + in + [%test_eq: int] header.header_version header_read.header_version ; + [%test_eq: Snark_keys_header.Kind.t] header.kind header_read.kind ; + [%test_eq: Snark_keys_header.Constraint_constants.t] + header.constraint_constants header_read.constraint_constants ; + [%test_eq: string] header.constraint_system_hash + header_read.constraint_system_hash ; + index ) ) + (fun (_, header, _) t path -> + Or_error.try_with (fun () -> + Snark_keys_header.write_with_header + ~expected_max_size_log2:33 (* 8 GB should be enough *) + ~append_data:(fun path -> + Out_channel.with_file ~append:true path ~f:(fun file -> + Out_channel.output_string file + (Binable.to_string + (module Verification_key.Stable.Latest) + t ) ) ) + header path ) ) + + let read_or_generate ~prev_challenges cache ?(s_p = storable) k_p + ?(s_v = vk_storable) k_v typ return_typ main = let module Vk = Verification_key in let open Impls.Wrap in - let s_p = storable in let pk = lazy (let k = Lazy.force k_p in @@ -192,7 +238,7 @@ module Wrap = struct Key_cache.Sync.read cache s_p k ) with | Ok (pk, d) -> - (Keypair.create ~pk ~vk:(Backend.Tock.Keypair.vk pk), d) + (pk, d) | Error _e -> let r = Common.time "wrapkeygen" (fun () -> @@ -202,52 +248,17 @@ module Wrap = struct ignore ( Key_cache.Sync.write cache s_p k (Keypair.pk r) : unit Or_error.t ) ; - (r, `Generated_something) ) + (Keypair.pk r, `Generated_something) ) in let vk = lazy (let k_v = Lazy.force k_v in - let s_v = - Key_cache.Sync.Disk_storable.simple Key.Verification.to_string - (fun (_, header, cs) ~path -> - Or_error.try_with_join (fun () -> - let open Or_error.Let_syntax in - let%map header_read, index = - Snark_keys_header.read_with_header - ~read_data:(fun ~offset path -> - Binable.of_string - (module Vk.Stable.Latest) - (In_channel.read_all path) ) - path - in - [%test_eq: int] header.header_version - header_read.header_version ; - [%test_eq: Snark_keys_header.Kind.t] header.kind - header_read.kind ; - [%test_eq: Snark_keys_header.Constraint_constants.t] - header.constraint_constants - header_read.constraint_constants ; - [%test_eq: string] header.constraint_system_hash - header_read.constraint_system_hash ; - index ) ) - (fun (_, header, _) t path -> - Or_error.try_with (fun () -> - Snark_keys_header.write_with_header - ~expected_max_size_log2:33 (* 8 GB should be enough *) - ~append_data:(fun path -> - Out_channel.with_file ~append:true path ~f:(fun file -> - Out_channel.output_string file - (Binable.to_string (module Vk.Stable.Latest) t) ) - ) - header path ) ) - in match Key_cache.Sync.read cache s_v k_v with | Ok (vk, d) -> (vk, d) - | Error e -> - let kp, _dirty = Lazy.force pk in - let vk = Keypair.vk kp in - let pk = Keypair.pk kp in + | Error _e -> + let pk, _dirty = Lazy.force pk in + let vk = Backend.Tock.Keypair.vk pk in let vk : Vk.t = { index = vk ; commitments = diff --git a/src/lib/pickles/cache.mli b/src/lib/pickles/cache.mli index c52e7ff2c52..75d3893597b 100644 --- a/src/lib/pickles/cache.mli +++ b/src/lib/pickles/cache.mli @@ -8,6 +8,8 @@ module Step : sig * Snark_keys_header.t * int * Backend.Tick.R1CS_constraint_system.t + + val to_string : t -> string end module Verification : sig @@ -17,18 +19,34 @@ module Step : sig * Snark_keys_header.t * int * Core_kernel.Md5.t + + val 
to_string : t -> string end end + type storable = + (Key.Proving.t, Backend.Tick.Keypair.t) Key_cache.Sync.Disk_storable.t + + type vk_storable = + ( Key.Verification.t + , Kimchi_bindings.Protocol.VerifierIndex.Fp.t ) + Key_cache.Sync.Disk_storable.t + + val storable : storable + + val vk_storable : vk_storable + val read_or_generate : prev_challenges:int -> Key_cache.Spec.t list + -> ?s_p:storable -> Key.Proving.t lazy_t + -> ?s_v:vk_storable -> Key.Verification.t lazy_t -> ('a, 'b) Impls.Step.Typ.t -> ('c, 'd) Impls.Step.Typ.t -> ('a -> unit -> 'c) - -> ( Impls.Step.Keypair.t + -> ( Impls.Step.Proving_key.t * [> `Cache_hit | `Generated_something | `Locally_generated ] ) lazy_t * ( Kimchi_bindings.Protocol.VerifierIndex.Fp.t @@ -43,6 +61,8 @@ module Wrap : sig Core_kernel.Type_equal.Id.Uid.t * Snark_keys_header.t * Backend.Tock.R1CS_constraint_system.t + + val to_string : t -> string end module Verification : sig @@ -59,15 +79,27 @@ module Wrap : sig end end + type storable = + (Key.Proving.t, Backend.Tock.Keypair.t) Key_cache.Sync.Disk_storable.t + + type vk_storable = + (Key.Verification.t, Verification_key.t) Key_cache.Sync.Disk_storable.t + + val storable : storable + + val vk_storable : vk_storable + val read_or_generate : prev_challenges:Core_kernel.Int.t -> Key_cache.Spec.t list - -> Key.Proving.t Core_kernel.Lazy.t - -> Key.Verification.t Core_kernel.Lazy.t + -> ?s_p:storable + -> Key.Proving.t lazy_t + -> ?s_v:vk_storable + -> Key.Verification.t lazy_t -> ('a, 'b) Impls.Wrap.Typ.t -> ('c, 'd) Impls.Wrap.Typ.t -> ('a -> unit -> 'c) - -> ( Impls.Wrap.Keypair.t + -> ( Impls.Wrap.Proving_key.t * [> `Cache_hit | `Generated_something | `Locally_generated ] ) lazy_t * ( Verification_key.Stable.V2.t diff --git a/src/lib/pickles/cache_handle.mli b/src/lib/pickles/cache_handle.mli index 4e562bd5a48..a9bedc2f26c 100644 --- a/src/lib/pickles/cache_handle.mli +++ b/src/lib/pickles/cache_handle.mli @@ -1,4 +1,4 @@ -(* Cache handle *) +(** Cache handle. It is currently used to cache proving and verifying keys for Pickles. *) type t = Dirty.t lazy_t diff --git a/src/lib/pickles/commitment_lengths.ml b/src/lib/pickles/commitment_lengths.ml index 3099a63ed86..269861c171c 100644 --- a/src/lib/pickles/commitment_lengths.ml +++ b/src/lib/pickles/commitment_lengths.ml @@ -1,12 +1,8 @@ -open Core_kernel open Pickles_types -open Import -open Plonk_types -let create (type a) ~(of_int : int -> a) : - (a Columns_vec.t, a, a) Messages.Poly.t = - let one = of_int 1 in - { w = Vector.init Plonk_types.Columns.n ~f:(fun _ -> one) - ; z = one - ; t = of_int 7 +let default ~num_chunks = + { Plonk_types.Messages.Poly.w = + Vector.init Plonk_types.Columns.n ~f:(fun _ -> num_chunks) + ; z = num_chunks + ; t = 7 * num_chunks } diff --git a/src/lib/pickles/commitment_lengths.mli b/src/lib/pickles/commitment_lengths.mli index 0c1a1f9e016..fe8a346510c 100644 --- a/src/lib/pickles/commitment_lengths.mli +++ b/src/lib/pickles/commitment_lengths.mli @@ -1,9 +1,23 @@ -(* Commitment_lengths *) +(** This module provides functions to keep track of the number of commitments + the proving system requires for each type of polynomial. -(** [create] *) -val create : - of_int:(int -> 'a) - -> ( 'a Pickles_types.Plonk_types.Columns_vec.t - , 'a - , 'a ) + Refer to the {{ https://eprint.iacr.org/2019/953.pdf } PlonK paper } for a basic + understanding of the different polynomials involved in a proof. +*) + +(** [default ~num_chunks] returns the number of commitments required for each + class of polynomial, as a [Messages.Poly.t]: + - [w]: one entry per wire column, each with [num_chunks] commitments + - [z]: the permutation polynomial, with [num_chunks] commitments + - [t]: the quotient polynomial, with [7 * num_chunks] commitments +*) +val default : + num_chunks:int + -> ( int Pickles_types.Plonk_types.Columns_vec.t + , int + , int ) Pickles_types.Plonk_types.Messages.Poly.t diff --git a/src/lib/pickles/common.ml b/src/lib/pickles/common.ml index 368b3c57cc0..c4149ed19ab 100644 --- a/src/lib/pickles/common.ml +++ b/src/lib/pickles/common.ml @@ -10,7 +10,7 @@ module Max_degree = struct let wrap_log2 = Nat.to_int Backend.Tock.Rounds.n - let wrap = 1 lsl wrap_log2 + let _wrap = 1 lsl wrap_log2 end let tick_shifts, tock_shifts = @@ -28,7 +28,7 @@ let wrap_domains ~proofs_verified = let h = match proofs_verified with 0 -> 13 | 1 -> 14 | 2 -> 15 | _ -> assert false in - { Domains.h = Pow_2_roots_of_unity h } + { Domains.h = Domain.Pow_2_roots_of_unity h } let actual_wrap_domain_size ~log_2_domain_size = let d = @@ -42,15 +42,15 @@ | _ -> assert false in - Pickles_base.Proofs_verified.of_int d + Pickles_base.Proofs_verified.of_int_exn d let hash_messages_for_next_step_proof ~app_state (t : _ Types.Step.Proof_state.Messages_for_next_step_proof.t) = let g (x, y) = [ x; y ] in - let open Backend in Tick_field_sponge.digest Tick_field_sponge.params (Types.Step.Proof_state.Messages_for_next_step_proof.to_field_elements t ~g - ~comm:(fun (x : Tock.Curve.Affine.t) -> Array.of_list (g x)) + ~comm:(fun (x : Tock.Curve.Affine.t array) -> + Array.concat_map x ~f:(fun x -> Array.of_list (g x)) ) ~app_state ) let dlog_pcs_batch (type nat proofs_verified total) @@ -89,51 +89,47 @@ let group_map m ~a ~b = stage (fun x -> Group_map.to_group m ~params x) module Shifts = struct - let tock1 : Tock.Field.t Shifted_value.Type1.Shift.t = - Shifted_value.Type1.Shift.create (module Tock.Field) + let tock2 : Backend.Tock.Field.t Shifted_value.Type2.Shift.t = + Shifted_value.Type2.Shift.create (module Backend.Tock.Field) - let tock2 : Tock.Field.t Shifted_value.Type2.Shift.t = - Shifted_value.Type2.Shift.create (module Tock.Field) - - let tick1 : Tick.Field.t Shifted_value.Type1.Shift.t = - Shifted_value.Type1.Shift.create (module Tick.Field) - - let tick2 : Tick.Field.t Shifted_value.Type2.Shift.t = - Shifted_value.Type2.Shift.create (module Tick.Field) + let tick1 : Backend.Tick.Field.t Shifted_value.Type1.Shift.t = + Shifted_value.Type1.Shift.create (module Backend.Tick.Field) end module Lookup_parameters = struct let tick_zero : _ Composition_types.Zero_values.t = - { value = - { challenge = Challenge.Constant.zero - ; scalar = - Shifted_value.Type2.Shifted_value Impls.Wrap.Field.Constant.zero - } - ; var = - { challenge = Impls.Step.Field.zero - ; scalar = - Shifted_value.Type2.Shifted_value - (Impls.Step.Field.zero, Impls.Step.Boolean.false_) - } - } + Composition_types.Zero_values.
+ { value = + { challenge = Challenge.Constant.zero + ; scalar = + Shifted_value.Type2.Shifted_value Impls.Wrap.Field.Constant.zero + } + ; var = + { challenge = Impls.Step.Field.zero + ; scalar = + Shifted_value.Type2.Shifted_value + (Impls.Step.Field.zero, Impls.Step.Boolean.false_) + } + } let tock_zero : _ Composition_types.Zero_values.t = - { value = - { challenge = Challenge.Constant.zero - ; scalar = - Shifted_value.Type2.Shifted_value Impls.Wrap.Field.Constant.zero - } - ; var = - { challenge = Impls.Wrap.Field.zero - ; scalar = Shifted_value.Type2.Shifted_value Impls.Wrap.Field.zero - } - } + Composition_types.Zero_values. + { value = + { challenge = Challenge.Constant.zero + ; scalar = + Shifted_value.Type2.Shifted_value Impls.Wrap.Field.Constant.zero + } + ; var = + { challenge = Impls.Wrap.Field.zero + ; scalar = Shifted_value.Type2.Shifted_value Impls.Wrap.Field.zero + } + } end let finite_exn : 'a Kimchi_types.or_infinity -> 'a * 'a = function - | Finite (x, y) -> + | Kimchi_types.Finite (x, y) -> (x, y) - | Infinity -> + | Kimchi_types.Infinity -> invalid_arg "finite_exn" let or_infinite_conv : ('a * 'a) Or_infinity.t -> 'a Kimchi_types.or_infinity = @@ -148,9 +144,7 @@ module Ipa = struct (* TODO: Make all this completely generic over backend *) - let compute_challenge (type f) ~endo_to_field - (module Field : Kimchi_backend.Field.S with type t = f) c = - endo_to_field c + let compute_challenge ~endo_to_field _ c = endo_to_field c let compute_challenges ~endo_to_field field chals = Vector.map chals ~f:(fun prechallenge -> @@ -211,23 +205,24 @@ module Ipa = struct end end -let tock_unpadded_public_input_of_statement prev_statement = +let tock_unpadded_public_input_of_statement ~feature_flags prev_statement = let input = - let (T (typ, _conv, _conv_inv)) = Impls.Wrap.input () in + let (T (typ, _conv, _conv_inv)) = Impls.Wrap.input ~feature_flags () in Impls.Wrap.generate_public_input typ prev_statement in List.init (Backend.Tock.Field.Vector.length input) ~f:(Backend.Tock.Field.Vector.get input) -let tock_public_input_of_statement s = tock_unpadded_public_input_of_statement s +let tock_public_input_of_statement ~feature_flags s = + tock_unpadded_public_input_of_statement ~feature_flags s -let tick_public_input_of_statement ~max_proofs_verified ~feature_flags +let tick_public_input_of_statement ~max_proofs_verified (prev_statement : _ Types.Step.Statement.t) = let input = let (T (input, _conv, _conv_inv)) = Impls.Step.input ~proofs_verified:max_proofs_verified - ~wrap_rounds:Tock.Rounds.n ~feature_flags + ~wrap_rounds:Tock.Rounds.n in Impls.Step.generate_public_input input prev_statement in @@ -235,23 +230,24 @@ let tick_public_input_of_statement ~max_proofs_verified ~feature_flags (Backend.Tick.Field.Vector.length input) ~f:(Backend.Tick.Field.Vector.get input) -let ft_comm ~add:( + ) ~scale ~endoscale ~negate - ~verification_key:(m : _ Plonk_verification_key_evals.t) ~alpha +let ft_comm ~add:( + ) ~scale ~negate + ~verification_key:(m : _ array Plonk_verification_key_evals.t) ~(plonk : _ Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.t) ~t_comm = - let ( * ) x g = scale g x in - let _, [ sigma_comm_last ] = - Vector.split m.sigma_comm (snd (Plonk_types.Permuts_minus_1.add Nat.N1.n)) - in - let f_comm = List.reduce_exn ~f:( + ) [ plonk.perm * sigma_comm_last ] in - let chunked_t_comm = - let n = Array.length t_comm in - let res = ref t_comm.(n - 1) in + let reduce_chunks comm = + let n = Array.length comm in + let res = ref comm.(n - 1) in for i = n - 2 downto 0 do - 
res := t_comm.(i) + scale !res plonk.zeta_to_srs_length + res := comm.(i) + scale !res plonk.zeta_to_srs_length done ; !res in + let _, [ sigma_comm_last ] = + Vector.split m.sigma_comm (snd (Plonk_types.Permuts_minus_1.add Nat.N1.n)) + in + let sigma_comm_last = reduce_chunks sigma_comm_last in + let f_comm = List.reduce_exn ~f:( + ) [ scale sigma_comm_last plonk.perm ] in + let chunked_t_comm = reduce_chunks t_comm in f_comm + chunked_t_comm + negate (scale chunked_t_comm plonk.zeta_to_domain_size) @@ -261,11 +257,11 @@ let combined_evaluation (type f) let open Impl in let open Field in let mul_and_add ~(acc : Field.t) ~(xi : Field.t) - (fx : (Field.t, Boolean.var) Plonk_types.Opt.t) : Field.t = + (fx : (Field.t, Boolean.var) Opt.t) : Field.t = match fx with - | None -> + | Nothing -> acc - | Some fx -> + | Just fx -> fx + (xi * acc) | Maybe (b, fx) -> Field.if_ b ~then_:(fx + (xi * acc)) ~else_:acc @@ -273,9 +269,9 @@ let combined_evaluation (type f) with_label __LOC__ (fun () -> Pcs_batch.combine_split_evaluations ~mul_and_add ~init:(function - | Some x -> + | Just x -> x - | None -> + | Nothing -> Field.zero | Maybe (b, x) -> (b :> Field.t) * x ) diff --git a/src/lib/pickles/common.mli b/src/lib/pickles/common.mli index 7a64f0b7641..63d94881fc1 100644 --- a/src/lib/pickles/common.mli +++ b/src/lib/pickles/common.mli @@ -1,5 +1,3 @@ -open Pickles_types - val wrap_domains : proofs_verified:int -> Import.Domains.Stable.V2.t val actual_wrap_domain_size : @@ -15,7 +13,7 @@ val actual_wrap_domain_size : val when_profiling : 'a -> 'a -> 'a (** [time label f] times function [f] and prints the measured time to [stdout] - prepended with [label], when profiling is set (see {!val:when_profiling}). + prepended with [label], when profiling is set (see {!val:when_profiling}). Otherwise, it just runs [f]. 
*) @@ -38,22 +36,20 @@ val bits_to_bytes : bool list -> string val finite_exn : 'a Kimchi_types.or_infinity -> 'a * 'a val ft_comm : - add:('a -> 'a -> 'a) - -> scale:('a -> 'b -> 'a) - -> endoscale:('a -> 'c -> 'a) - -> negate:('a -> 'a) - -> verification_key:'a Pickles_types.Plonk_verification_key_evals.t - -> alpha:'c + add:('comm -> 'comm -> 'comm) + -> scale:('comm -> 'scalar -> 'comm) + -> negate:('comm -> 'comm) + -> verification_key:'comm array Pickles_types.Plonk_verification_key_evals.t -> plonk: ( 'd , 'e - , 'b + , 'scalar , 'g , 'f , 'bool ) Import.Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.t - -> t_comm:'a array - -> 'a + -> t_comm:'comm array + -> 'comm val dlog_pcs_batch : 'total Pickles_types.Nat.t @@ -65,7 +61,7 @@ val combined_evaluation : -> xi:'f Snarky_backendless.Cvar.t -> ( 'f Snarky_backendless.Cvar.t , 'f Snarky_backendless.Cvar.t Snarky_backendless.Snark_intf.Boolean0.t ) - Pickles_types.Plonk_types.Opt.t + Pickles_types.Opt.t array list -> 'f Snarky_backendless.Cvar.t @@ -98,6 +94,7 @@ module Lookup_parameters : sig val tock_zero : (Impls.Wrap.Field.t, Impls.Wrap.Field.t) zero_value end +(** Inner Product Argument *) module Ipa : sig type 'a challenge := ( Import.Challenge.Constant.t Import.Scalar_challenge.t @@ -146,21 +143,27 @@ end val hash_messages_for_next_step_proof : app_state:('a -> Kimchi_pasta.Basic.Fp.Stable.Latest.t Core_kernel.Array.t) - -> ( Backend.Tock.Curve.Affine.t + -> ( Backend.Tock.Curve.Affine.t array + (* the type for the verification key *) , 'a + (* the state of the application *) , ( Kimchi_pasta.Basic.Fp.Stable.Latest.t * Kimchi_pasta.Basic.Fp.Stable.Latest.t - , 'b ) + , 'n ) + Pickles_types.Vector.t + (* challenge polynomial commitments. We use the full parameter type to + restrict the size of the vector to be the same than the one for the next + parameter which are the bulletproof challenges *) + , ( (Kimchi_pasta.Basic.Fp.Stable.Latest.t, 'm) Pickles_types.Vector.t + , 'n + (* size of the vector *) ) Pickles_types.Vector.t - , ( (Kimchi_pasta.Basic.Fp.Stable.Latest.t, 'c) Pickles_types.Vector.t - , 'b ) - Pickles_types.Vector.t ) + (* bulletproof challenges *) ) Import.Types.Step.Proof_state.Messages_for_next_step_proof.t -> (int64, Composition_types.Digest.Limbs.n) Pickles_types.Vector.t val tick_public_input_of_statement : max_proofs_verified:'a Pickles_types.Nat.t - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t -> ( ( ( Impls.Step.Challenge.Constant.t , Impls.Step.Challenge.Constant.t Composition_types.Scalar_challenge.t , Impls.Step.Other_field.Constant.t Pickles_types.Shifted_value.Type2.t @@ -191,15 +194,14 @@ val tick_public_input_of_statement : -> Backend.Tick.Field.Vector.elt list val tock_public_input_of_statement : - ( Limb_vector.Challenge.Constant.t + feature_flags: + Pickles_types.Opt.Flag.t Pickles_types.Plonk_types.Features.Full.t + -> ( Limb_vector.Challenge.Constant.t , Limb_vector.Challenge.Constant.t Composition_types.Scalar_challenge.t , Impls.Wrap.Other_field.Constant.t Pickles_types.Shifted_value.Type1.t , Impls.Wrap.Other_field.Constant.t Pickles_types.Shifted_value.Type1.t option , Limb_vector.Challenge.Constant.t Composition_types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit - .Lookup - .t option , bool , ( Limb_vector.Constant.Hex64.t @@ -222,15 +224,14 @@ val tock_public_input_of_statement : -> Backend.Tock.Field.Vector.elt list val tock_unpadded_public_input_of_statement : - ( Limb_vector.Challenge.Constant.t + feature_flags: + 
Pickles_types.Opt.Flag.t Pickles_types.Plonk_types.Features.Full.t + -> ( Limb_vector.Challenge.Constant.t , Limb_vector.Challenge.Constant.t Composition_types.Scalar_challenge.t , Impls.Wrap.Other_field.Constant.t Pickles_types.Shifted_value.Type1.t , Impls.Wrap.Other_field.Constant.t Pickles_types.Shifted_value.Type1.t option , Limb_vector.Challenge.Constant.t Composition_types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit - .Lookup - .t option , bool , ( Limb_vector.Constant.Hex64.t diff --git a/src/lib/pickles/compile.ml b/src/lib/pickles/compile.ml index 62e13d5777e..bc585564495 100644 --- a/src/lib/pickles/compile.ml +++ b/src/lib/pickles/compile.ml @@ -7,7 +7,6 @@ module type Statement_var_intf = Intf.Statement_var module type Statement_value_intf = Intf.Statement_value module SC = Scalar_challenge -open Tuple_lib open Core_kernel open Async_kernel open Import @@ -16,44 +15,17 @@ open Poly_types open Hlist open Backend -exception Return_digest of Md5.t - let profile_constraints = false let verify_promise = Verify.verify -let pad_local_max_proofs_verifieds - (type prev_varss prev_valuess env max_proofs_verified branches) - (max_proofs_verified : max_proofs_verified Nat.t) - (length : (prev_varss, branches) Hlist.Length.t) - (local_max_proofs_verifieds : - (prev_varss, prev_valuess, env) H2_1.T(H2_1.T(E03(Int))).t ) : - ((int, max_proofs_verified) Vector.t, branches) Vector.t = - let module Vec = struct - type t = (int, max_proofs_verified) Vector.t - end in - let module M = - H2_1.Map - (H2_1.T - (E03 (Int))) (E03 (Vec)) - (struct - module HI = H2_1.T (E03 (Int)) - - let f : type a b e. (a, b, e) H2_1.T(E03(Int)).t -> Vec.t = - fun xs -> - let (T (_proofs_verified, pi)) = HI.length xs in - let module V = H2_1.To_vector (Int) in - let v = V.f pi xs in - Vector.extend_front_exn v max_proofs_verified 0 - end) - in - let module V = H2_1.To_vector (Vec) in - V.f length (M.f local_max_proofs_verifieds) - open Kimchi_backend module Proof_ = P.Base module Proof = P +type chunking_data = Verify.Instance.chunking_data = + { num_chunks : int; domain_size : int; zk_rows : int } + let pad_messages_for_next_wrap_proof (type local_max_proofs_verifieds max_local_max_proofs_verifieds max_proofs_verified ) @@ -146,7 +118,9 @@ type ('max_proofs_verified, 'branches, 'prev_varss) wrap_main_generic = , 'max_local_max_proofs_verifieds ) Full_signature.t -> ('prev_varss, 'branches) Hlist.Length.t - -> ( Wrap_main_inputs.Inner_curve.Constant.t Wrap_verifier.index' + -> ( ( Wrap_main_inputs.Inner_curve.Constant.t array + , Wrap_main_inputs.Inner_curve.Constant.t array option ) + Wrap_verifier.index' , 'branches ) Vector.t Lazy.t @@ -159,14 +133,10 @@ type ('max_proofs_verified, 'branches, 'prev_varss) wrap_main_generic = , Wrap_verifier.Other_field.Packed.t Shifted_value.Type1.t , ( Wrap_verifier.Other_field.Packed.t Shifted_value.Type1.t , Impls.Wrap.Boolean.var ) - Plonk_types.Opt.t + Opt.t , ( Impls.Wrap.Impl.Field.t Composition_types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk - .In_circuit - .Lookup - .t , Impls.Wrap.Boolean.var ) - Pickles_types__Plonk_types.Opt.t + Pickles_types__Opt.t , Impls.Wrap.Boolean.var ) Composition_types.Wrap.Proof_state.Deferred_values.Plonk .In_circuit @@ -196,9 +166,6 @@ type ('max_proofs_verified, 'branches, 'prev_varss) wrap_main_generic = , bool ) Import.Types.Opt.t , ( Import.Challenge.Constant.t Import.Types.Scalar_challenge.t - 
Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit - .Lookup - .t , bool ) Import.Types.Opt.t , bool @@ -228,9 +195,6 @@ type ('max_proofs_verified, 'branches, 'prev_varss) wrap_main_generic = , bool ) Import.Types.Opt.t , ( Import.Challenge.Constant.t Import.Types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit - .Lookup - .t , bool ) Import.Types.Opt.t , bool @@ -264,6 +228,22 @@ type ('max_proofs_verified, 'branches, 'prev_varss) wrap_main_generic = *) } +module Storables = struct + type t = + { step_storable : Cache.Step.storable + ; step_vk_storable : Cache.Step.vk_storable + ; wrap_storable : Cache.Wrap.storable + ; wrap_vk_storable : Cache.Wrap.vk_storable + } + + let default = + { step_storable = Cache.Step.storable + ; step_vk_storable = Cache.Step.vk_storable + ; wrap_storable = Cache.Wrap.storable + ; wrap_vk_storable = Cache.Wrap.vk_storable + } +end + module Make (Arg_var : Statement_var_intf) (Arg_value : Statement_value_intf) @@ -312,19 +292,15 @@ struct let padded = V.f branches (M.f choices) |> Vector.transpose in (padded, Maxes.m padded) - module Lazy_ (A : T0) = struct - type t = A.t Lazy.t - end - module Lazy_keys = struct type t = - (Impls.Step.Keypair.t * Dirty.t) Lazy.t + (Impls.Step.Proving_key.t * Dirty.t) Lazy.t * (Kimchi_bindings.Protocol.VerifierIndex.Fp.t * Dirty.t) Lazy.t (* TODO Think this is right.. *) end - let log_step main typ name index = + let log_step main _typ name index = let module Constraints = Snarky_log.Constraints (Impls.Step.Internal_Basic) in let log = let weight = @@ -381,19 +357,21 @@ struct type var value prev_varss prev_valuess widthss heightss max_proofs_verified branches. self:(var, value, max_proofs_verified, branches) Tag.t -> cache:Key_cache.Spec.t list + -> storables:Storables.t -> proof_cache:Proof_cache.t option -> ?disk_keys: (Cache.Step.Key.Verification.t, branches) Vector.t * Cache.Wrap.Key.Verification.t - -> ?return_early_digest_exception:bool -> ?override_wrap_domain:Pickles_base.Proofs_verified.t -> ?override_wrap_main: (max_proofs_verified, branches, prev_varss) wrap_main_generic + -> ?num_chunks:int -> branches:(module Nat.Intf with type n = branches) -> max_proofs_verified: (module Nat.Add.Intf with type n = max_proofs_verified) -> name:string - -> constraint_constants:Snark_keys_header.Constraint_constants.t + -> ?constraint_constants:Snark_keys_header.Constraint_constants.t + -> ?commits:Snark_keys_header.Commits.With_date.t -> public_input: ( var , value @@ -419,20 +397,44 @@ struct * _ * _ * _ = - fun ~self ~cache ~proof_cache ?disk_keys - ?(return_early_digest_exception = false) ?override_wrap_domain - ?override_wrap_main ~branches:(module Branches) ~max_proofs_verified - ~name ~constraint_constants ~public_input ~auxiliary_typ ~choices () -> + fun ~self ~cache + ~storables: + { step_storable; step_vk_storable; wrap_storable; wrap_vk_storable } + ~proof_cache ?disk_keys ?override_wrap_domain ?override_wrap_main + ?(num_chunks = 1) ~branches:(module Branches) ~max_proofs_verified ~name + ?constraint_constants ?commits ~public_input ~auxiliary_typ ~choices () -> let snark_keys_header kind constraint_system_hash = + let constraint_constants : Snark_keys_header.Constraint_constants.t = + match constraint_constants with + | Some constraint_constants -> + constraint_constants + | None -> + { sub_windows_per_window = 0 + ; ledger_depth = 0 + ; work_delay = 0 + ; block_window_duration_ms = 0 + ; transaction_capacity = Log_2 0 + ; pending_coinbase_depth = 0 + ; 
coinbase_amount = Unsigned.UInt64.of_int 0 + ; supercharged_coinbase_factor = 0 + ; account_creation_fee = Unsigned.UInt64.of_int 0 + ; fork = None + } + in + let (commits, commit_date) : Snark_keys_header.Commits.t * string = + match commits with + | Some { commits; commit_date } -> + (commits, commit_date) + | None -> + ( { mina = "[NOT SPECIFIED]"; marlin = "[NOT SPECIFIED]" } + , "[UNKNOWN]" ) + in { Snark_keys_header.header_version = Snark_keys_header.header_version ; kind ; constraint_constants - ; commits = - { mina = Mina_version.commit_id - ; marlin = Mina_version.marlin_commit_id - } + ; commits ; length = (* This is a dummy, it gets filled in on read/write. *) 0 - ; commit_date = Mina_version.commit_date + ; commit_date ; constraint_system_hash ; identifying_hash = (* TODO: Proper identifying hash. *) @@ -459,25 +461,26 @@ struct let feature_flags = let rec go : type a b c d. - (a, b, c, d) H4.T(IR).t - -> Plonk_types.Opt.Flag.t Plonk_types.Features.t = + (a, b, c, d) H4.T(IR).t -> Opt.Flag.t Plonk_types.Features.Full.t = fun rules -> match rules with | [] -> - Plonk_types.Features.none + Plonk_types.Features.Full.none | [ r ] -> Plonk_types.Features.map r.feature_flags ~f:(function | true -> - Plonk_types.Opt.Flag.Yes + Opt.Flag.Yes | false -> - Plonk_types.Opt.Flag.No ) + Opt.Flag.No ) + |> Plonk_types.Features.to_full ~or_:Opt.Flag.( ||| ) | r :: rules -> let feature_flags = go rules in - Plonk_types.Features.map2 r.feature_flags feature_flags - ~f:(fun enabled flag -> + Plonk_types.Features.Full.map2 + (Plonk_types.Features.to_full ~or_:( || ) r.feature_flags) + feature_flags ~f:(fun enabled flag -> match (enabled, flag) with | true, Yes -> - Plonk_types.Opt.Flag.Yes + Opt.Flag.Yes | false, No -> No | _, Maybe | true, No | false, Yes -> @@ -494,7 +497,7 @@ struct (Auxiliary_value) in M.f full_signature prev_varss_n prev_varss_length ~max_proofs_verified - ~feature_flags + ~feature_flags ~num_chunks | Some override -> Common.wrap_domains ~proofs_verified:(Pickles_base.Proofs_verified.to_int override) @@ -544,7 +547,7 @@ struct Timer.clock __LOC__ ; let res = Common.time "make step data" (fun () -> - Step_branch_data.create ~index:!i ~feature_flags + Step_branch_data.create ~index:!i ~feature_flags ~num_chunks ~actual_feature_flags:rule.feature_flags ~max_proofs_verified:Max_proofs_verified.n ~branches:Branches.n ~self ~public_input ~auxiliary_typ @@ -580,7 +583,7 @@ struct (struct let etyp = Impls.Step.input ~proofs_verified:Max_proofs_verified.n - ~wrap_rounds:Tock.Rounds.n ~feature_flags + ~wrap_rounds:Tock.Rounds.n let f (T b : _ Branch_data.t) = let (T (typ, _conv, conv_inv)) = etyp in @@ -590,13 +593,6 @@ struct in let () = if true then log_step main typ name b.index in let open Impls.Step in - (* HACK: TODO docs *) - if return_early_digest_exception then - raise - (Return_digest - ( constraint_system ~input_typ:Typ.unit ~return_typ:typ main - |> R1CS_constraint_system.digest ) ) ; - let k_p = lazy (let cs = @@ -635,7 +631,7 @@ struct Common.time "step read or generate" (fun () -> Cache.Step.read_or_generate ~prev_challenges:(Nat.to_int (fst b.proofs_verified)) - cache k_p k_v + cache ~s_p:step_storable k_p ~s_v:step_vk_storable k_v (Snarky_backendless.Typ.unit ()) typ main ) in @@ -651,14 +647,14 @@ struct let module V = H4.To_vector (Lazy_keys) in lazy (Vector.map (V.f prev_varss_length step_keypairs) ~f:(fun (_, vk) -> - Tick.Keypair.vk_commitments (fst (Lazy.force vk)) ) ) + Tick.Keypair.full_vk_commitments (fst (Lazy.force vk)) ) ) in Timer.clock __LOC__ ; let 
wrap_requests, wrap_main = match override_wrap_main with | None -> let srs = Tick.Keypair.load_urs () in - Wrap_main.wrap_main ~feature_flags ~srs full_signature + Wrap_main.wrap_main ~num_chunks ~feature_flags ~srs full_signature prev_varss_length step_vks proofs_verifieds step_domains max_proofs_verified | Some { wrap_main; tweak_statement = _ } -> @@ -676,7 +672,7 @@ struct Timer.clock __LOC__ ; let (wrap_pk, wrap_vk), disk_key = let open Impls.Wrap in - let (T (typ, conv, _conv_inv)) = input () in + let (T (typ, conv, _conv_inv)) = input ~feature_flags () in let main x () : unit = wrap_main (conv x) in let () = if true then log_wrap main typ name self.id in let self_id = Type_equal.Id.uid self.id in @@ -711,7 +707,8 @@ struct let r = Common.time "wrap read or generate " (fun () -> Cache.Wrap.read_or_generate (* Due to Wrap_hack *) - ~prev_challenges:2 cache disk_key_prover disk_key_verifier typ + ~prev_challenges:2 cache ~s_p:wrap_storable disk_key_prover + ~s_v:wrap_vk_storable disk_key_verifier typ (Snarky_backendless.Typ.unit ()) main ) in @@ -750,7 +747,6 @@ struct Impls.Step.Typ.(input_typ * output_typ) in let provers = - let module Z = H4.Zip (Branch_data) (E04 (Impls.Step.Keypair)) in let f : type prev_vars prev_values local_widths local_heights. (prev_vars, prev_values, local_widths, local_heights) Branch_data.t @@ -764,14 +760,17 @@ struct * (Max_proofs_verified.n, Max_proofs_verified.n) Proof.t ) Promise.t = fun (T b as branch_data) (step_pk, step_vk) -> - let (module Requests) = b.requests in let _, prev_vars_length = b.proofs_verified in let step handler next_state = let wrap_vk = Lazy.force wrap_vk in S.f ?handler branch_data next_state ~prevs_length:prev_vars_length - ~self ~step_domains ~self_dlog_plonk_index:wrap_vk.commitments + ~self ~step_domains + ~self_dlog_plonk_index: + ((* TODO *) Plonk_verification_key_evals.map + ~f:(fun x -> [| x |]) + wrap_vk.commitments ) ~public_input ~auxiliary_typ ~feature_flags - (Impls.Step.Keypair.pk (fst (Lazy.force step_pk))) + (fst (Lazy.force step_pk)) wrap_vk.index in let step_vk = fst (Lazy.force step_vk) in @@ -813,9 +812,13 @@ struct Wrap.wrap ~proof_cache ~max_proofs_verified:Max_proofs_verified.n ~feature_flags ~actual_feature_flags:b.feature_flags full_signature.maxes wrap_requests ?tweak_statement - ~dlog_plonk_index:wrap_vk.commitments wrap_main ~typ ~step_vk - ~step_plonk_indices:(Lazy.force step_vks) ~actual_wrap_domains - (Impls.Wrap.Keypair.pk (fst (Lazy.force wrap_pk))) + ~dlog_plonk_index: + ((* TODO *) Plonk_verification_key_evals.map + ~f:(fun x -> [| x |]) + wrap_vk.commitments ) + wrap_main ~typ ~step_vk ~step_plonk_indices:(Lazy.force step_vks) + ~actual_wrap_domains + (fst (Lazy.force wrap_pk)) proof in ( return_value @@ -834,7 +837,7 @@ struct wrap in let rec go : - type xs1 xs2 xs3 xs4 xs5 xs6. + type xs1 xs2 xs3 xs4. 
(xs1, xs2, xs3, xs4) H4.T(Branch_data).t -> (xs1, xs2, xs3, xs4) H4.T(E04(Lazy_keys)).t -> ( xs2 @@ -861,11 +864,22 @@ struct ; proofs_verifieds ; max_proofs_verified ; public_input = typ - ; wrap_key = Lazy.map wrap_vk ~f:Verification_key.commitments + ; wrap_key = + Lazy.map wrap_vk ~f:(fun x -> + Plonk_verification_key_evals.map (Verification_key.commitments x) + ~f:(fun x -> [| x |]) ) ; wrap_vk = Lazy.map wrap_vk ~f:Verification_key.index ; wrap_domains ; step_domains ; feature_flags + ; num_chunks + ; zk_rows = + ( match num_chunks with + | 1 -> + 3 + | num_chunks -> + let permuts = 7 in + ((2 * (permuts + 1) * num_chunks) - 1 + permuts) / permuts ) } in Timer.clock __LOC__ ; @@ -889,9 +903,12 @@ module Side_loaded = struct ~log_2_domain_size:(Lazy.force d.wrap_vk).domain.log_size_of_group in { wrap_vk = Some (Lazy.force d.wrap_vk) - ; wrap_index = Lazy.force d.wrap_key + ; wrap_index = + Plonk_verification_key_evals.map (Lazy.force d.wrap_key) ~f:(fun x -> + x.(0) ) ; max_proofs_verified = - Pickles_base.Proofs_verified.of_nat (Nat.Add.n d.max_proofs_verified) + Pickles_base.Proofs_verified.of_nat_exn + (Nat.Add.n d.max_proofs_verified) ; actual_wrap_domain_size } @@ -907,7 +924,10 @@ module Side_loaded = struct { max_proofs_verified ; public_input = typ ; branches = Verification_key.Max_branches.n - ; feature_flags + ; feature_flags = + Plonk_types.Features.to_full ~or_:Opt.Flag.( ||| ) feature_flags + ; num_chunks = 1 + ; zk_rows = 3 } module Proof = struct @@ -950,7 +970,7 @@ module Side_loaded = struct { constraints = 0 } } in - Verify.Instance.T (max_proofs_verified, m, vk, x, p) ) + Verify.Instance.T (max_proofs_verified, m, None, vk, x, p) ) |> Verify.verify_heterogenous ) let verify ~typ ts = verify_promise ~typ ts |> Promise.to_deferred @@ -963,17 +983,18 @@ module Side_loaded = struct end let compile_with_wrap_main_override_promise : - type var value a_var a_value ret_var ret_value auxiliary_var auxiliary_value prev_varss prev_valuess prev_ret_varss prev_ret_valuess widthss heightss max_proofs_verified branches. + type var value a_var a_value ret_var ret_value auxiliary_var auxiliary_value prev_varss prev_valuess widthss heightss max_proofs_verified branches. ?self:(var, value, max_proofs_verified, branches) Tag.t -> ?cache:Key_cache.Spec.t list + -> ?storables:Storables.t -> ?proof_cache:Proof_cache.t -> ?disk_keys: (Cache.Step.Key.Verification.t, branches) Vector.t * Cache.Wrap.Key.Verification.t - -> ?return_early_digest_exception:bool -> ?override_wrap_domain:Pickles_base.Proofs_verified.t -> ?override_wrap_main: (max_proofs_verified, branches, prev_varss) wrap_main_generic + -> ?num_chunks:int -> public_input: ( var , value @@ -987,7 +1008,8 @@ let compile_with_wrap_main_override_promise : -> max_proofs_verified: (module Nat.Add.Intf with type n = max_proofs_verified) -> name:string - -> constraint_constants:Snark_keys_header.Constraint_constants.t + -> ?constraint_constants:Snark_keys_header.Constraint_constants.t + -> ?commits:Snark_keys_header.Commits.With_date.t -> choices: ( self:(var, value, max_proofs_verified, branches) Tag.t -> ( prev_varss @@ -1019,10 +1041,10 @@ let compile_with_wrap_main_override_promise : (* This function is an adapter between the user-facing Pickles.compile API and the underlying Make(_).compile function which builds the circuits. 
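   As a worked check of the [zk_rows] computation above (the same formula
   restated, not additional API): the general branch is a ceiling division,
   zk_rows = ceil((2 * (permuts + 1) * num_chunks - 1) / permuts), written
   with integer arithmetic:
   {[
     let zk_rows ~num_chunks =
       let permuts = 7 in
       ((2 * (permuts + 1) * num_chunks) - 1 + permuts) / permuts

     (* zk_rows ~num_chunks:1 = 3 (matching the special case above),
        zk_rows ~num_chunks:2 = 5,
        zk_rows ~num_chunks:3 = 7. *)
   ]}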
*) - fun ?self ?(cache = []) ?proof_cache ?disk_keys - ?(return_early_digest_exception = false) ?override_wrap_domain - ?override_wrap_main ~public_input ~auxiliary_typ ~branches - ~max_proofs_verified ~name ~constraint_constants ~choices () -> + fun ?self ?(cache = []) ?(storables = Storables.default) ?proof_cache + ?disk_keys ?override_wrap_domain ?override_wrap_main ?num_chunks + ~public_input ~auxiliary_typ ~branches ~max_proofs_verified ~name + ?constraint_constants ?commits ~choices () -> let self = match self with | None -> @@ -1069,7 +1091,7 @@ let compile_with_wrap_main_override_promise : (Auxiliary_value) in let rec conv_irs : - type v1ss v2ss v3ss v4ss wss hss. + type v1ss v2ss wss hss. ( v1ss , v2ss , wss @@ -1088,10 +1110,10 @@ let compile_with_wrap_main_override_promise : r :: conv_irs rs in let provers, wrap_vk, wrap_disk_key, cache_handle = - M.compile ~return_early_digest_exception ~self ~proof_cache ~cache - ?disk_keys ?override_wrap_domain ?override_wrap_main ~branches + M.compile ~self ~proof_cache ~cache ~storables ?disk_keys + ?override_wrap_domain ?override_wrap_main ?num_chunks ~branches ~max_proofs_verified ~name ~public_input ~auxiliary_typ - ~constraint_constants + ?constraint_constants ?commits ~choices:(fun ~self -> conv_irs (choices ~self)) () in @@ -1113,17 +1135,31 @@ let compile_with_wrap_main_override_promise : let (Typ typ) = typ in fun x -> fst (typ.value_to_fields x) end in + let chunking_data = + match num_chunks with + | None -> + None + | Some num_chunks -> + let compiled = Types_map.lookup_compiled self.id in + let { h = Pow_2_roots_of_unity domain_size } = + compiled.step_domains + |> Vector.reduce_exn + ~f:(fun + { h = Pow_2_roots_of_unity d1 } + { h = Pow_2_roots_of_unity d2 } + -> { h = Pow_2_roots_of_unity (Int.max d1 d2) } ) + in + Some + { Verify.Instance.num_chunks + ; domain_size + ; zk_rows = compiled.zk_rows + } + in let module P = struct type statement = value - type return_type = ret_value - module Max_local_max_proofs_verified = Max_proofs_verified - module Max_proofs_verified_vec = Nvector (struct - include Max_proofs_verified - end) - include Proof.Make (struct @@ -1138,7 +1174,7 @@ let compile_with_wrap_main_override_promise : let verification_key = wrap_vk let verify_promise ts = - verify_promise + verify_promise ?chunking_data ( module struct include Max_proofs_verified end ) @@ -1147,8 +1183,6 @@ let compile_with_wrap_main_override_promise : ts let verify ts = verify_promise ts |> Promise.to_deferred - - let statement (T p : t) = p.statement.messages_for_next_step_proof.app_state end in (self, cache_handle, (module P), provers) @@ -1169,7 +1203,6 @@ let wrap_main_dummy_override _ _ _ _ _ _ _ = let module SC' = SC in let open Impls.Wrap in let open Wrap_main_inputs in - let open Wrap_main in (* Create some variables to be used in constraints below. *) let x = exists Field.typ ~compute:(fun () -> Field.Constant.of_int 3) in let y = exists Field.typ ~compute:(fun () -> Field.Constant.of_int 0) in @@ -1197,7 +1230,7 @@ let wrap_main_dummy_override _ _ _ _ _ _ _ = (* Pad the circuit so that its domain size matches the one that would have been used by the true wrap_main. 
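      Each [assert_r1cs x y z] in the loop below adds one rank-1 constraint
      (enforcing x * y = z), so the 64001 iterations force the constraint
      system to need an evaluation domain of at least 2^16 = 65536 rows, the
      next power of two above 64001. That is presumably the domain size used
      by the true wrap_main; this reading of the constant is the editoral
      assumption here, not something stated in the code.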
*) - for i = 0 to 64000 do + for _ = 0 to 64000 do assert_r1cs x y z done in @@ -1212,9 +1245,6 @@ module Make_adversarial_test (M : sig , bool ) Import.Types.Opt.t , ( Import.Challenge.Constant.t Import.Types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit - .Lookup - .t , bool ) Import.Types.Opt.t , bool @@ -1244,9 +1274,6 @@ module Make_adversarial_test (M : sig , bool ) Import.Types.Opt.t , ( Import.Challenge.Constant.t Import.Types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit - .Lookup - .t , bool ) Import.Types.Opt.t , bool @@ -1277,33 +1304,6 @@ end) = struct open Impls.Step - module Statement = struct - type t = unit - - let to_field_elements () = [||] - end - - module A = Statement - module A_value = Statement - - let typ = Typ.unit - - module Branches = Nat.N1 - module Max_proofs_verified = Nat.N2 - - let constraint_constants : Snark_keys_header.Constraint_constants.t = - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } - let rule self : _ Inductive_rule.t = { identifier = "main" ; prevs = [ self; self ] @@ -1340,19 +1340,6 @@ struct ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N2) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self -> [ rule self ]) module Proof = (val p) @@ -1374,8 +1361,6 @@ struct module Recurse_on_bad_proof = struct open Impls.Step - let dummy_proof = P.dummy Nat.N2.n Nat.N2.n Nat.N2.n ~domain_log2:15 - type _ Snarky_backendless.Request.t += | Proof : Proof.t Snarky_backendless.Request.t @@ -1387,14 +1372,14 @@ struct | _ -> respond Unhandled - let tag, _, p, ([ step ] : _ H3_2.T(Prover).t) = + let _tag, _, p, ([ step ] : _ H3_2.T(Prover).t) = Common.time "compile" (fun () -> compile_with_wrap_main_override_promise () ~public_input:(Input Typ.unit) ~auxiliary_typ:Typ.unit ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N2) - ~name:"recurse-on-bad" ~constraint_constants - ~choices:(fun ~self -> + ~name:"recurse-on-bad" + ~choices:(fun ~self:_ -> [ { identifier = "main" ; feature_flags = Plonk_types.Features.none_bool ; prevs = [ tag; tag ] diff --git a/src/lib/pickles/compile.mli b/src/lib/pickles/compile.mli index 3153b0391b8..485e3591211 100644 --- a/src/lib/pickles/compile.mli +++ b/src/lib/pickles/compile.mli @@ -1,10 +1,10 @@ +(** Compile the inductive rules *) + open Core_kernel open Async_kernel open Pickles_types open Hlist -exception Return_digest of Md5.t - val pad_messages_for_next_wrap_proof : (module Pickles_types.Hlist.Maxes.S with type length = 'max_proofs_verified @@ -42,8 +42,12 @@ module type Proof_intf = sig val verify_promise : (statement * t) list -> unit Or_error.t Promise.t end +type chunking_data = Verify.Instance.chunking_data = + { num_chunks : int; domain_size : int; zk_rows : int } + val verify_promise : - (module Nat.Intf with type n = 'n) + ?chunking_data:chunking_data + -> (module Nat.Intf 
with type n = 'n) -> (module Statement_value_intf with type t = 'a) -> Verification_key.t -> ('a * ('n, 'n) Proof.t) list @@ -117,7 +121,7 @@ module Side_loaded : sig val create : name:string -> max_proofs_verified:(module Nat.Add.Intf with type n = 'n1) - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t + -> feature_flags:Opt.Flag.t Plonk_types.Features.t -> typ:('var, 'value) Impls.Step.Typ.t -> ('var, 'value, 'n1, Verification_key.Max_branches.n) Tag.t @@ -152,7 +156,9 @@ type ('max_proofs_verified, 'branches, 'prev_varss) wrap_main_generic = , 'max_local_max_proofs_verifieds ) Full_signature.t -> ('prev_varss, 'branches) Hlist.Length.t - -> ( Wrap_main_inputs.Inner_curve.Constant.t Wrap_verifier.index' + -> ( ( Wrap_main_inputs.Inner_curve.Constant.t array + , Wrap_main_inputs.Inner_curve.Constant.t array option ) + Wrap_verifier.index' , 'branches ) Vector.t Lazy.t @@ -165,14 +171,10 @@ type ('max_proofs_verified, 'branches, 'prev_varss) wrap_main_generic = , Wrap_verifier.Other_field.Packed.t Shifted_value.Type1.t , ( Wrap_verifier.Other_field.Packed.t Shifted_value.Type1.t , Impls.Wrap.Boolean.var ) - Plonk_types.Opt.t + Opt.t , ( Impls.Wrap.Impl.Field.t Composition_types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk - .In_circuit - .Lookup - .t , Impls.Wrap.Boolean.var ) - Pickles_types__Plonk_types.Opt.t + Pickles_types__Opt.t , Impls.Wrap.Boolean.var ) Composition_types.Wrap.Proof_state.Deferred_values.Plonk .In_circuit @@ -202,9 +204,6 @@ type ('max_proofs_verified, 'branches, 'prev_varss) wrap_main_generic = , bool ) Import.Types.Opt.t , ( Import.Challenge.Constant.t Import.Types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit - .Lookup - .t , bool ) Import.Types.Opt.t , bool @@ -234,9 +233,6 @@ type ('max_proofs_verified, 'branches, 'prev_varss) wrap_main_generic = , bool ) Import.Types.Opt.t , ( Import.Challenge.Constant.t Import.Types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit - .Lookup - .t , bool ) Import.Types.Opt.t , bool @@ -270,20 +266,32 @@ type ('max_proofs_verified, 'branches, 'prev_varss) wrap_main_generic = *) } +module Storables : sig + type t = + { step_storable : Cache.Step.storable + ; step_vk_storable : Cache.Step.vk_storable + ; wrap_storable : Cache.Wrap.storable + ; wrap_vk_storable : Cache.Wrap.vk_storable + } + + val default : t +end + (** This compiles a series of inductive rules defining a set into a proof system for proving membership in that set, with a prover corresponding to each inductive rule. 
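    A usage sketch, modeled on the [Recurse_on_bad_proof] call site in
    compile.ml ([rule] stands for a function producing an [Inductive_rule.t];
    the name and the single rule are illustrative):
    {[
      let _tag, _cache_handle, (module P), ([ prove ] : _ H3_2.T(Prover).t) =
        compile_with_wrap_main_override_promise ()
          ~public_input:(Input Typ.unit) ~auxiliary_typ:Typ.unit
          ~branches:(module Nat.N1)
          ~max_proofs_verified:(module Nat.N2)
          ~name:"example"
          ~choices:(fun ~self -> [ rule self ])
    ]}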
*) val compile_with_wrap_main_override_promise : ?self:('var, 'value, 'max_proofs_verified, 'branches) Tag.t -> ?cache:Key_cache.Spec.t list + -> ?storables:Storables.t -> ?proof_cache:Proof_cache.t -> ?disk_keys: (Cache.Step.Key.Verification.t, 'branches) Vector.t * Cache.Wrap.Key.Verification.t - -> ?return_early_digest_exception:bool -> ?override_wrap_domain:Pickles_base.Proofs_verified.t -> ?override_wrap_main: ('max_proofs_verified, 'branches, 'prev_varss) wrap_main_generic + -> ?num_chunks:int -> public_input: ( 'var , 'value @@ -296,7 +304,8 @@ val compile_with_wrap_main_override_promise : -> branches:(module Nat.Intf with type n = 'branches) -> max_proofs_verified:(module Nat.Add.Intf with type n = 'max_proofs_verified) -> name:string - -> constraint_constants:Snark_keys_header.Constraint_constants.t + -> ?constraint_constants:Snark_keys_header.Constraint_constants.t + -> ?commits:Snark_keys_header.Commits.With_date.t -> choices: ( self:('var, 'value, 'max_proofs_verified, 'branches) Tag.t -> ( 'prev_varss @@ -333,7 +342,9 @@ val wrap_main_dummy_override : , 'max_local_max_proofs_verifieds ) Full_signature.t -> ('prev_varss, 'branches) Hlist.Length.t - -> ( Wrap_main_inputs.Inner_curve.Constant.t Wrap_verifier.index' + -> ( ( Wrap_main_inputs.Inner_curve.Constant.t + , Wrap_main_inputs.Inner_curve.Constant.t option ) + Wrap_verifier.index' , 'branches ) Vector.t Lazy.t @@ -346,14 +357,10 @@ val wrap_main_dummy_override : , Wrap_verifier.Other_field.Packed.t Shifted_value.Type1.t , ( Wrap_verifier.Other_field.Packed.t Shifted_value.Type1.t , Impls.Wrap.Boolean.var ) - Plonk_types.Opt.t + Opt.t , ( Impls.Wrap.Impl.Field.t Composition_types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk - .In_circuit - .Lookup - .t , Impls.Wrap.Boolean.var ) - Pickles_types__Plonk_types.Opt.t + Pickles_types__Opt.t , Impls.Wrap.Boolean.var ) Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit .t @@ -372,7 +379,7 @@ val wrap_main_dummy_override : -> unit ) module Make_adversarial_test : functor - (M : sig + (_ : sig val tweak_statement : ( Import.Challenge.Constant.t , Import.Challenge.Constant.t Import.Types.Scalar_challenge.t @@ -381,10 +388,6 @@ module Make_adversarial_test : functor , bool ) Import.Types.Opt.t , ( Import.Challenge.Constant.t Import.Types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk - .In_circuit - .Lookup - .t , bool ) Import.Types.Opt.t , bool @@ -414,10 +417,6 @@ module Make_adversarial_test : functor , bool ) Import.Types.Opt.t , ( Import.Challenge.Constant.t Import.Types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk - .In_circuit - .Lookup - .t , bool ) Import.Types.Opt.t , bool diff --git a/src/lib/pickles/composition_types/branch_data.ml b/src/lib/pickles/composition_types/branch_data.ml index f237e3f57c6..f8445947736 100644 --- a/src/lib/pickles/composition_types/branch_data.ml +++ b/src/lib/pickles/composition_types/branch_data.ml @@ -1,6 +1,3 @@ -open Core_kernel -open Pickles_types - (** See documentation of the {!Mina_wire_types} library *) module Wire_types = Mina_wire_types.Pickles_composition_types.Branch_data diff --git a/src/lib/pickles/composition_types/composition_types.ml b/src/lib/pickles/composition_types/composition_types.ml index 4fd0af10803..7eda368186b 100644 --- a/src/lib/pickles/composition_types/composition_types.ml +++ b/src/lib/pickles/composition_types/composition_types.ml @@ -4,7 +4,7 @@ module Bulletproof_challenge = Bulletproof_challenge 
module Branch_data = Branch_data module Digest = Digest module Spec = Spec -module Opt = Plonk_types.Opt +module Opt = Opt open Core_kernel type 'f impl = 'f Spec.impl @@ -73,28 +73,22 @@ module Wrap = struct ; zeta = scalar t.zeta ; joint_combiner = Option.map ~f:scalar t.joint_combiner } + + module In_circuit = struct + type ('challenge, 'scalar_challenge, 'bool) t = + { alpha : 'scalar_challenge + ; beta : 'challenge + ; gamma : 'challenge + ; zeta : 'scalar_challenge + ; joint_combiner : ('scalar_challenge, 'bool) Opt.t + ; feature_flags : 'bool Plonk_types.Features.t + } + end end open Pickles_types module In_circuit = struct - module Lookup = struct - type 'scalar_challenge t = { joint_combiner : 'scalar_challenge } - [@@deriving sexp, compare, yojson, hlist, hash, equal, fields] - - let to_struct l = Hlist.HlistId.[ l.joint_combiner ] - - let of_struct Hlist.HlistId.[ joint_combiner ] = { joint_combiner } - - let map ~f { joint_combiner } = - { joint_combiner = f joint_combiner } - - let typ (type f fp) scalar_challenge = - Snarky_backendless.Typ.of_hlistable ~var_to_hlist:to_hlist - ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist - ~value_of_hlist:of_hlist [ scalar_challenge ] - end - (** All scalar values deferred by a verifier circuit. We expose them so the next guy (who can do scalar arithmetic) can check that they were computed correctly from the evaluations in the proof and the challenges. @@ -103,7 +97,7 @@ module Wrap = struct , 'scalar_challenge , 'fp , 'fp_opt - , 'lookup_opt + , 'scalar_challenge_opt , 'bool ) t = { alpha : 'scalar_challenge @@ -118,7 +112,7 @@ module Wrap = struct ; perm : 'fp (** scalar used on one of the permutation polynomial commitments. *) ; feature_flags : 'bool Plonk_types.Features.t - ; lookup : 'lookup_opt + ; joint_combiner : 'scalar_challenge_opt } [@@deriving sexp, compare, yojson, hlist, hash, equal, fields] @@ -127,8 +121,8 @@ module Wrap = struct alpha = scalar t.alpha ; beta = f t.beta ; gamma = f t.gamma + ; joint_combiner = Opt.map ~f:scalar t.joint_combiner ; zeta = scalar t.zeta - ; lookup = Opt.map ~f:(Lookup.map ~f:scalar) t.lookup } let map_fields t ~f = @@ -140,30 +134,11 @@ module Wrap = struct let typ (type f fp) (module Impl : Snarky_backendless.Snark_intf.Run - with type field = f ) ~dummy_scalar ~dummy_scalar_challenge - ~challenge ~scalar_challenge ~bool ~feature_flags + with type field = f ) ~dummy_scalar_challenge ~challenge + ~scalar_challenge ~bool + ~feature_flags: + ({ Plonk_types.Features.Full.uses_lookups; _ } as feature_flags) (fp : (fp, _, f) Snarky_backendless.Typ.t) = - let uses_lookup = - let { Plonk_types.Features.range_check0 - ; range_check1 - ; foreign_field_add = _ (* Doesn't use lookup *) - ; foreign_field_mul - ; xor - ; rot - ; lookup - ; runtime_tables = _ (* Fixme *) - } = - feature_flags - in - Array.reduce_exn ~f:Opt.Flag.( ||| ) - [| range_check0 - ; range_check1 - ; foreign_field_mul - ; xor - ; rot - ; lookup - |] - in Snarky_backendless.Typ.of_hlistable [ Scalar_challenge.typ scalar_challenge ; challenge @@ -172,10 +147,12 @@ module Wrap = struct ; fp ; fp ; fp - ; Plonk_types.Features.typ ~feature_flags bool - ; Plonk_types.Opt.typ Impl.Boolean.typ uses_lookup - ~dummy:{ joint_combiner = dummy_scalar_challenge } - (Lookup.typ (Scalar_challenge.typ scalar_challenge)) + ; Plonk_types.Features.typ + ~feature_flags:(Plonk_types.Features.of_full feature_flags) + bool + ; Opt.typ Impl.Boolean.typ uses_lookups + ~dummy:dummy_scalar_challenge + (Scalar_challenge.typ scalar_challenge) ] 
~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist @@ -189,17 +166,13 @@ module Wrap = struct , fp_opt , lookup_opt , 'bool ) - In_circuit.t ) - ~(to_option : - lookup_opt -> scalar_challenge In_circuit.Lookup.t option ) : - (challenge, scalar_challenge, 'bool) Minimal.t = + In_circuit.t ) ~(to_option : lookup_opt -> scalar_challenge option) + : (challenge, scalar_challenge, 'bool) Minimal.t = { alpha = t.alpha ; beta = t.beta ; zeta = t.zeta ; gamma = t.gamma - ; joint_combiner = - Option.map (to_option t.lookup) ~f:(fun l -> - l.In_circuit.Lookup.joint_combiner ) + ; joint_combiner = to_option t.joint_combiner ; feature_flags = t.feature_flags } end @@ -317,7 +290,7 @@ module Wrap = struct ; xi ; bulletproof_challenges ; branch_data - } ~f ~scalar = + } ~f:_ ~scalar = { xi = scalar xi ; combined_inner_product ; b @@ -355,12 +328,11 @@ module Wrap = struct let typ (type f fp) ((module Impl) as impl : (module Snarky_backendless.Snark_intf.Run with type field = f) ) - ~dummy_scalar ~dummy_scalar_challenge ~challenge ~scalar_challenge - ~feature_flags (fp : (fp, _, f) Snarky_backendless.Typ.t) index = + ~dummy_scalar_challenge ~challenge ~scalar_challenge ~feature_flags + (fp : (fp, _, f) Snarky_backendless.Typ.t) index = Snarky_backendless.Typ.of_hlistable - [ Plonk.In_circuit.typ impl ~dummy_scalar ~dummy_scalar_challenge - ~challenge ~scalar_challenge ~bool:Impl.Boolean.typ - ~feature_flags fp + [ Plonk.In_circuit.typ impl ~dummy_scalar_challenge ~challenge + ~scalar_challenge ~bool:Impl.Boolean.typ ~feature_flags fp ; fp ; fp ; Scalar_challenge.typ scalar_challenge @@ -375,9 +347,9 @@ module Wrap = struct let to_minimal ({ plonk - ; combined_inner_product - ; b - ; xi + ; combined_inner_product = _ + ; b = _ + ; xi = _ ; bulletproof_challenges ; branch_data } : @@ -534,13 +506,12 @@ module Wrap = struct let typ (type f fp) (impl : (module Snarky_backendless.Snark_intf.Run with type field = f)) - ~dummy_scalar ~dummy_scalar_challenge ~challenge ~scalar_challenge - ~feature_flags (fp : (fp, _, f) Snarky_backendless.Typ.t) + ~dummy_scalar_challenge ~challenge ~scalar_challenge ~feature_flags + (fp : (fp, _, f) Snarky_backendless.Typ.t) messages_for_next_wrap_proof digest index = Snarky_backendless.Typ.of_hlistable - [ Deferred_values.In_circuit.typ impl ~dummy_scalar - ~dummy_scalar_challenge ~challenge ~scalar_challenge - ~feature_flags fp index + [ Deferred_values.In_circuit.typ impl ~dummy_scalar_challenge + ~challenge ~scalar_challenge ~feature_flags fp index ; digest ; messages_for_next_wrap_proof ] @@ -608,7 +579,7 @@ module Wrap = struct open Snarky_backendless.H_list - let to_hlist + let[@warning "-45"] to_hlist { app_state ; dlog_plonk_index ; challenge_polynomial_commitments @@ -620,7 +591,7 @@ module Wrap = struct ; old_bulletproof_challenges ] - let of_hlist + let[@warning "-45"] of_hlist ([ app_state ; dlog_plonk_index ; challenge_polynomial_commitments @@ -793,11 +764,11 @@ module Wrap = struct in let maybe_constant flag = match flag with - | Plonk_types.Opt.Flag.Yes -> + | Opt.Flag.Yes -> constant true - | Plonk_types.Opt.Flag.No -> + | Opt.Flag.No -> constant false - | Plonk_types.Opt.Flag.Maybe -> + | Opt.Flag.Maybe -> Spec.T.B Bool in Spec.T.Struct @@ -823,7 +794,7 @@ module Wrap = struct ] (** Convert a statement (as structured data) into the flat data-based representation. 
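      (The flat representation is a tuple of fixed-length vectors of field
      elements, challenges and digests; this is the shape in which the
      statement is ultimately encoded as the circuit's public input, cf.
      [tock_public_input_of_statement].)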
*) - let to_data + let[@warning "-45"] to_data ({ proof_state = { deferred_values = { xi @@ -840,7 +811,7 @@ module Wrap = struct ; zeta_to_domain_size ; perm ; feature_flags - ; lookup + ; joint_combiner } } ; sponge_digest_before_evaluations @@ -850,7 +821,7 @@ module Wrap = struct ; messages_for_next_step_proof (* messages_for_next_step_proof is represented as a digest inside the circuit *) } : - _ t ) ~option_map ~to_opt = + _ t ) ~option_map = let open Vector in let fp = [ combined_inner_product @@ -877,12 +848,11 @@ module Wrap = struct ; bulletproof_challenges ; index ; Plonk_types.Features.to_data feature_flags - ; option_map lookup - ~f:Proof_state.Deferred_values.Plonk.In_circuit.Lookup.to_struct + ; option_map joint_combiner ~f:(fun x -> Hlist.HlistId.[ x ]) ] (** Construct a statement (as structured data) from the flat data-based representation. *) - let of_data + let[@warning "-45"] of_data Hlist.HlistId. [ fp ; challenge @@ -891,8 +861,8 @@ module Wrap = struct ; bulletproof_challenges ; index ; feature_flags - ; lookup - ] ~feature_flags:flags ~option_map ~of_opt : _ t = + ; joint_combiner + ] ~option_map : _ t = let open Vector in let [ combined_inner_product ; b @@ -928,11 +898,9 @@ module Wrap = struct ; zeta_to_domain_size ; perm ; feature_flags - ; lookup = - option_map lookup - ~f: - Proof_state.Deferred_values.Plonk.In_circuit.Lookup - .of_struct + ; joint_combiner = + option_map joint_combiner ~f:(fun Hlist.HlistId.[ x ] -> + x ) } } ; sponge_digest_before_evaluations @@ -990,8 +958,6 @@ module Step = struct ; zeta : 'scalar_challenge } [@@deriving sexp, compare, yojson, hlist, hash, equal] - - let to_latest = Fn.id end end] @@ -1000,7 +966,13 @@ module Step = struct { alpha; beta; gamma; zeta; joint_combiner = None; feature_flags } let of_wrap - ({ alpha; beta; gamma; zeta; joint_combiner = _; feature_flags } : + ({ alpha + ; beta + ; gamma + ; zeta + ; joint_combiner = _ + ; feature_flags = _ + } : _ Wrap.Proof_state.Deferred_values.Plonk.Minimal.t ) = { alpha; beta; gamma; zeta } end @@ -1057,7 +1029,7 @@ module Step = struct ; lookup = false_ ; runtime_tables = false_ } - ; lookup = opt_none + ; joint_combiner = opt_none } let of_wrap ~assert_none ~assert_false @@ -1069,7 +1041,7 @@ module Step = struct ; zeta_to_domain_size ; perm ; feature_flags - ; lookup + ; joint_combiner } : _ Wrap.Proof_state.Deferred_values.Plonk.In_circuit.t ) = let () = @@ -1093,7 +1065,7 @@ module Step = struct assert_false lookup ; assert_false runtime_tables in - assert_none lookup ; + assert_none joint_combiner ; { alpha ; beta ; gamma @@ -1118,10 +1090,7 @@ module Step = struct ; perm = f t.perm } - let typ (type f fp) - (module Impl : Snarky_backendless.Snark_intf.Run - with type field = f ) ~dummy_scalar ~dummy_scalar_challenge - ~challenge ~scalar_challenge ~bool ~feature_flags + let typ (type f fp) _ ~challenge ~scalar_challenge (fp : (fp, _, f) Snarky_backendless.Typ.t) = Snarky_backendless.Typ.of_hlistable [ Scalar_challenge.typ scalar_challenge @@ -1275,7 +1244,7 @@ module Step = struct ; Vector (B Bool, Nat.N1.n) ] - let to_data + let[@warning "-45"] to_data ({ deferred_values = { xi ; bulletproof_challenges @@ -1317,7 +1286,7 @@ module Step = struct ; bool ] - let of_data + let[@warning "-45"] of_data Hlist.HlistId. [ Vector. 
[ combined_inner_product @@ -1352,7 +1321,7 @@ module Step = struct } end - let typ impl fq ~assert_16_bits ~zero = + let typ impl fq ~assert_16_bits = let open In_circuit in Spec.typ impl fq ~assert_16_bits (spec Backend.Tock.Rounds.n) |> Snarky_backendless.Typ.transport ~there:to_data ~back:of_data @@ -1375,30 +1344,27 @@ module Step = struct include struct open Hlist.HlistId - let to_data { unfinalized_proofs; messages_for_next_step_proof } = + let _to_data { unfinalized_proofs; messages_for_next_step_proof } = [ Vector.map unfinalized_proofs ~f:Per_proof.In_circuit.to_data ; messages_for_next_step_proof ] - let of_data [ unfinalized_proofs; messages_for_next_step_proof ] = + let _of_data [ unfinalized_proofs; messages_for_next_step_proof ] = { unfinalized_proofs = Vector.map unfinalized_proofs ~f:Per_proof.In_circuit.of_data ; messages_for_next_step_proof } - end + end [@@warning "-45"] - let typ (type n f) + let[@warning "-60"] typ (type n f) ( (module Impl : Snarky_backendless.Snark_intf.Run with type field = f) - as impl ) zero ~assert_16_bits - (proofs_verified : - (Plonk_types.Opt.Flag.t Plonk_types.Features.t, n) Vector.t ) fq : + as impl ) ~assert_16_bits + (proofs_verified : (Opt.Flag.t Plonk_types.Features.t, n) Vector.t) fq : ( ((_, _) Vector.t, _) t , ((_, _) Vector.t, _) t , _ ) Snarky_backendless.Typ.t = - let per_proof feature_flags = - Per_proof.typ impl fq ~assert_16_bits ~zero - in + let per_proof _ = Per_proof.typ impl fq ~assert_16_bits in let unfinalized_proofs = Vector.typ' (Vector.map proofs_verified ~f:per_proof) in @@ -1424,7 +1390,7 @@ module Step = struct } [@@deriving sexp, compare, yojson] - let to_data + let[@warning "-45"] to_data { proof_state = { unfinalized_proofs; messages_for_next_step_proof } ; messages_for_next_wrap_proof } = @@ -1435,7 +1401,7 @@ module Step = struct ; messages_for_next_wrap_proof ] - let of_data + let[@warning "-45"] of_data Hlist.HlistId. [ unfinalized_proofs ; messages_for_next_step_proof diff --git a/src/lib/pickles/composition_types/composition_types.mli b/src/lib/pickles/composition_types/composition_types.mli index 6c333317005..d62962c7701 100644 --- a/src/lib/pickles/composition_types/composition_types.mli +++ b/src/lib/pickles/composition_types/composition_types.mli @@ -1,5 +1,5 @@ open Pickles_types -module Opt = Plonk_types.Opt +module Opt = Opt type ('a, 'b) opt := ('a, 'b) Opt.t @@ -80,28 +80,20 @@ module Wrap : sig -> f:('challenge -> 'challenge2) -> scalar:('scalar_challenge -> 'scalar_challenge2) -> ('challenge2, 'scalar_challenge2, 'bool) t - end - - module In_circuit : sig - module Lookup : sig - type 'scalar_challenge t = { joint_combiner : 'scalar_challenge } - [@@deriving sexp, compare, yojson, hlist, hash, equal, fields] - - val to_struct : 'a t -> ('a * unit) Hlist.HlistId.t - - val of_struct : ('a * unit) Hlist.HlistId.t -> 'a t - val typ : - ( 'a - , 'b - , 'f - , ( unit - , 'f ) - Snarky_backendless.Checked_runner.Simple.Types.Checked.t ) - Snarky_backendless.Types.Typ.t - -> ('a t, 'b t, 'f) Snarky_backendless.Typ.t + module In_circuit : sig + type ('challenge, 'scalar_challenge, 'bool) t = + { alpha : 'scalar_challenge + ; beta : 'challenge + ; gamma : 'challenge + ; zeta : 'scalar_challenge + ; joint_combiner : ('scalar_challenge, 'bool) Opt.t + ; feature_flags : 'bool Plonk_types.Features.t + } end + end + module In_circuit : sig (** All scalar values deferred by a verifier circuit. 
The value in [perm] is a scalar which will have been used to scale @@ -110,13 +102,13 @@ module Wrap : sig Then, we expose them so the next guy (who can do scalar arithmetic) can check that they were computed correctly from the - evaluations in the proof and the challenges. + evaluations in the proof and the challenges. *) type ( 'challenge , 'scalar_challenge , 'fp , 'fp_opt - , 'lookup_opt + , 'scalar_challenge_opt , 'bool ) t = { alpha : 'scalar_challenge @@ -128,24 +120,23 @@ module Wrap : sig ; perm : 'fp (** scalar used on one of the permutation polynomial commitments. *) ; feature_flags : 'bool Plonk_types.Features.t - ; lookup : 'lookup_opt + ; joint_combiner : 'scalar_challenge_opt } [@@deriving sexp, compare, yojson, hlist, hash, equal, fields] val map_challenges : - ('a, 'b, 'c, 'fp_opt, ('b Lookup.t, 'e) Opt.t, 'bool) t + ('a, 'b, 'c, 'fp_opt, ('b, 'e) Opt.t, 'bool) t -> f:('a -> 'f) -> scalar:('b -> 'g) - -> ('f, 'g, 'c, 'fp_opt, ('g Lookup.t, 'e) Opt.t, 'bool) t + -> ('f, 'g, 'c, 'fp_opt, ('g, 'e) Opt.t, 'bool) t val map_fields : - ('a, 'b, 'c, ('c, 'e) Opt.t, ('d Lookup.t, 'e) Opt.t, 'bool) t + ('a, 'b, 'c, ('c, 'e) Opt.t, ('d, 'e) Opt.t, 'bool) t -> f:('c -> 'f) - -> ('a, 'b, 'f, ('f, 'e) Opt.t, ('d Lookup.t, 'e) Opt.t, 'bool) t + -> ('a, 'b, 'f, ('f, 'e) Opt.t, ('d, 'e) Opt.t, 'bool) t val typ : 'f Spec.impl - -> dummy_scalar:'a -> dummy_scalar_challenge:'b Scalar_challenge.t -> challenge: ( 'c @@ -163,20 +154,20 @@ module Wrap : sig , bool , 'f ) Snarky_backendless.Typ.t - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t + -> feature_flags:Opt.Flag.t Plonk_types.Features.Full.t -> ('fp, 'a, 'f) Snarky_backendless.Typ.t -> ( ( 'c , 'e Scalar_challenge.t , 'fp - , ('fp, 'boolean) Plonk_types.Opt.t - , ('e Scalar_challenge.t Lookup.t, 'boolean) Plonk_types.Opt.t + , ('fp, 'boolean) Opt.t + , ('e Scalar_challenge.t, 'boolean) Opt.t , 'boolean ) t , ( 'd , 'b Scalar_challenge.t , 'a , 'a option - , 'b Scalar_challenge.t Lookup.t option + , 'b Scalar_challenge.t option , bool ) t , 'f ) @@ -191,8 +182,7 @@ module Wrap : sig , 'lookup_opt , 'bool ) In_circuit.t - -> to_option: - ('lookup_opt -> 'scalar_challenge In_circuit.Lookup.t option) + -> to_option:('lookup_opt -> 'scalar_challenge option) -> ('challenge, 'scalar_challenge, 'bool) Minimal.t end @@ -263,19 +253,6 @@ module Wrap : sig } [@@deriving sexp, compare, yojson, hlist, hash, equal] - type ( 'plonk - , 'scalar_challenge - , 'fp - , 'bulletproof_challenges - , 'branch_data ) - w := - ( 'plonk - , 'scalar_challenge - , 'fp - , 'bulletproof_challenges - , 'branch_data ) - t - val map_challenges : ('a, 'b, 'fp, 'c, 'd) t -> f:'e @@ -361,7 +338,6 @@ module Wrap : sig val typ : (module Snarky_backendless.Snark_intf.Run with type field = 'f) - -> dummy_scalar:'a -> dummy_scalar_challenge:'b Scalar_challenge.t -> challenge: ( 'c @@ -372,7 +348,7 @@ module Wrap : sig Snarky_backendless.Checked_runner.Simple.Types.Checked.t ) snarky_typ -> scalar_challenge:('e, 'b, 'f) Snarky_backendless.Typ.t - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t + -> feature_flags:Opt.Flag.t Plonk_types.Features.Full.t -> ('fp, 'a, 'f) Snarky_backendless.Typ.t -> ( 'g , 'h @@ -387,11 +363,11 @@ module Wrap : sig , ( 'fp , 'f Snarky_backendless.Cvar.t Snarky_backendless__Snark_intf.Boolean0.t ) - Plonk_types.Opt.t - , ( 'e Scalar_challenge.t Plonk.In_circuit.Lookup.t + Opt.t + , ( 'e Scalar_challenge.t , 'f Snarky_backendless.Cvar.t Snarky_backendless__Snark_intf.Boolean0.t ) - Plonk_types.Opt.t + Opt.t , 'f 
Snarky_backendless.Cvar.t Snarky_backendless__Snark_intf.Boolean0.t ) Plonk.In_circuit.t @@ -406,7 +382,7 @@ module Wrap : sig , 'b Scalar_challenge.t , 'a , 'a option - , 'b Scalar_challenge.t Plonk.In_circuit.Lookup.t option + , 'b Scalar_challenge.t option , bool ) Plonk.In_circuit.t , 'b Scalar_challenge.t @@ -422,7 +398,7 @@ module Wrap : sig val to_minimal : ('a, 'b, 'c, _, 'd, 'e, 'f, 'bool) In_circuit.t - -> to_option:('d -> 'b Plonk.In_circuit.Lookup.t option) + -> to_option:('d -> 'b option) -> ('a, 'b, 'c, 'bool, 'e, 'f) Minimal.t end @@ -613,7 +589,6 @@ module Wrap : sig val typ : (module Snarky_backendless.Snark_intf.Run with type field = 'f) - -> dummy_scalar:'a -> dummy_scalar_challenge:'b Scalar_challenge.t -> challenge: ( 'c @@ -624,7 +599,7 @@ module Wrap : sig Snarky_backendless.Checked_runner.Simple.Types.Checked.t ) snarky_typ -> scalar_challenge:('e, 'b, 'f) Snarky_backendless.Typ.t - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t + -> feature_flags:Opt.Flag.t Plonk_types.Features.Full.t -> ('fp, 'a, 'f) Snarky_backendless.Typ.t -> ( 'g , 'h @@ -647,11 +622,8 @@ module Wrap : sig -> ( ( ( 'c , 'e Scalar_challenge.t , 'fp - , ('fp, 'boolean) Plonk_types.Opt.t - , ( 'e Scalar_challenge.t - Deferred_values.Plonk.In_circuit.Lookup.t - , 'boolean ) - Plonk_types.Opt.t + , ('fp, 'boolean) Opt.t + , ('e Scalar_challenge.t, 'boolean) Opt.t , ('f Snarky_backendless.Cvar.t Snarky_backendless__Snark_intf.Boolean0.t as @@ -670,8 +642,7 @@ module Wrap : sig , 'b Scalar_challenge.t , 'a , 'a option - , 'b Scalar_challenge.t Deferred_values.Plonk.In_circuit.Lookup.t - option + , 'b Scalar_challenge.t option , bool ) Deferred_values.Plonk.In_circuit.t , 'b Scalar_challenge.t @@ -689,7 +660,7 @@ module Wrap : sig val to_minimal : ('a, 'b, 'c, _, 'd, 'bool, 'e, 'f, 'g, 'h) In_circuit.t - -> to_option:('d -> 'b Deferred_values.Plonk.In_circuit.Lookup.t option) + -> to_option:('d -> 'b option) -> ('a, 'b, 'c, 'bool, 'e, 'f, 'g, 'h) Minimal.t end @@ -750,7 +721,7 @@ module Wrap : sig module Lookup_parameters : sig type ('chal, 'chal_var, 'fp, 'fp_var) t = { zero : ('chal, 'chal_var, 'fp, 'fp_var) Zero_values.t - ; use : Plonk_types.Opt.Flag.t + ; use : Opt.Flag.t } val opt_spec : @@ -950,6 +921,7 @@ module Wrap : sig type 'a vec8 := ('a * ('a * ('a * ('a * ('a * ('a * ('a * ('a * unit)))))))) Hlist.HlistId.t + [@@warning "-34"] type ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'fp_opt, 'bool) flat_repr := ( ('a, Nat.N5.n) Vector.t @@ -969,7 +941,7 @@ module Wrap : sig , 'field1 Hlist0.Id.t , 'field2 Hlist0.Id.t ) Lookup_parameters.t - -> Plonk_types.Opt.Flag.t Plonk_types.Features.t + -> Opt.Flag.t Plonk_types.Features.t -> ( ( 'field1 , 'challenge1 , 'challenge1 Scalar_challenge.t @@ -1015,37 +987,20 @@ module Wrap : sig val to_data : ('a, 'b, 'c, 'fp_opt, 'd, 'bool, 'e, 'e, 'e, 'f, 'g) t -> option_map: - ( 'd - -> f: - ( 'h Proof_state.Deferred_values.Plonk.In_circuit.Lookup.t - -> ('h * unit) Hlist.HlistId.t ) - -> 'j Hlist0.Id.t ) - -> to_opt:('fp_opt -> 'fp_opt2) + ('d -> f:('h -> ('h * unit) Hlist.HlistId.t) -> 'j Hlist0.Id.t) -> ('c, 'a, 'b, 'e, 'f, 'g, 'j, 'fp_opt2, 'bool) flat_repr (** Construct a statement (as structured data) from the flat data-based representation. 
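      (This is the inverse of [to_data] above; the pair is used to transport
      the structured statement type across its flat in-circuit encoding, as
      in [Snarky_backendless.Typ.transport ~there:to_data ~back:of_data].)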
*) val of_data : ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'fp option, 'bool) flat_repr - -> feature_flags: - Pickles_types.Plonk_types.Opt.Flag.t - Pickles_types.Plonk_types.Features.t -> option_map: - ( 'g Hlist0.Id.t - -> f: - ( ('h * unit) Hlist.HlistId.t - -> 'h Proof_state.Deferred_values.Plonk.In_circuit.Lookup.t - ) - -> 'j ) - -> of_opt:(('fp, 'bool) Pickles_types.Plonk_types.Opt.t -> 'fp_opt2) + ('g Hlist0.Id.t -> f:(('h * unit) Hlist.HlistId.t -> 'h) -> 'j) -> ('b, 'c, 'a, 'fp_opt2, 'j, 'bool, 'd, 'd, 'd, 'e, 'f) t end val to_minimal : ('a, 'b, 'c, _, 'd, 'bool, 'e, 'f, 'g, 'h, 'i) In_circuit.t - -> to_option: - ( 'd - -> 'b Proof_state.Deferred_values.Plonk.In_circuit.Lookup.t option - ) + -> to_option:('d -> 'b option) -> ('a, 'b, 'c, 'bool, 'e, 'f, 'g, 'h, 'i) Minimal.t end end @@ -1162,8 +1117,6 @@ module Step : sig val typ : 'f Spec.impl - -> dummy_scalar:'a - -> dummy_scalar_challenge:'b Scalar_challenge.t -> challenge: ( 'c , 'd @@ -1173,14 +1126,6 @@ module Step : sig Snarky_backendless.Checked_runner.Simple.Types.Checked.t ) Snarky_backendless.Types.Typ.t -> scalar_challenge:('e, 'b, 'f) Snarky_backendless.Typ.t - -> bool: - ( ('f Snarky_backendless.Cvar.t Snarky_backendless.Boolean.t - as - 'boolean ) - , bool - , 'f ) - Snarky_backendless.Typ.t - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t -> ('fp, 'a, 'f) Snarky_backendless.Typ.t -> ( ('c, 'e Scalar_challenge.t, 'fp) t , ('d, 'b Scalar_challenge.t, 'a) t @@ -1315,10 +1260,6 @@ module Step : sig t_ [@@deriving sexp, compare, yojson] - type 'a vec8 := - ('a * ('a * ('a * ('a * ('a * ('a * ('a * ('a * unit)))))))) - Hlist.HlistId.t - type ( 'field , 'digest , 'challenge @@ -1358,7 +1299,7 @@ module Step : sig , 'field2 Snarky_backendless__Snark_intf.Boolean0.t , ( ('challenge2 Scalar_challenge.t * unit) Hlist.HlistId.t , 'field2 Snarky_backendless__Snark_intf.Boolean0.t ) - Plonk_types.Opt.t + Opt.t , 'field2 option , 'num_bulletproof_challenges ) flat_repr @@ -1403,12 +1344,6 @@ module Step : sig ) snarky_typ -> assert_16_bits:('a Snarky_backendless.Cvar.t -> unit) - -> zero: - ( Limb_vector.Challenge.Constant.t - , 'a Limb_vector.Challenge.t - , 'c Hlist0.Id.t - , 'b Hlist0.Id.t ) - Zero_values.t -> ( ( 'a Limb_vector.Challenge.t , 'a Limb_vector.Challenge.t Scalar_challenge.t , 'b @@ -1454,19 +1389,10 @@ module Step : sig , 'c ) Spec.T.t - type 'a vec8 := - ('a * ('a * ('a * ('a * ('a * ('a * ('a * ('a * ('a * unit))))))))) - Hlist.HlistId.t - val typ : 'f Spec.impl - -> ( Limb_vector.Challenge.Constant.t - , 'f Limb_vector.Challenge.t - , 'a Hlist0.Id.t - , 'b Hlist0.Id.t ) - Zero_values.t -> assert_16_bits:('f Snarky_backendless.Cvar.t -> unit) - -> (Plonk_types.Opt.Flag.t Plonk_types.Features.t, 'n) Vector.t + -> (Opt.Flag.t Plonk_types.Features.t, 'n) Vector.t -> ( 'b , 'a , 'f @@ -1519,10 +1445,6 @@ module Step : sig } [@@deriving sexp, compare, yojson] - type 'a vec8 := - ('a * ('a * ('a * ('a * ('a * ('a * ('a * ('a * unit)))))))) - Hlist.HlistId.t - val to_data : ( ( ( 'a , 'b diff --git a/src/lib/pickles/composition_types/digest.mli b/src/lib/pickles/composition_types/digest.mli index 8fc486747f0..9959851548e 100644 --- a/src/lib/pickles/composition_types/digest.mli +++ b/src/lib/pickles/composition_types/digest.mli @@ -1,7 +1,10 @@ +(** Vectors of 4 limbs of int64 *) + module Limbs = Pickles_types.Nat.N4 type nat4 := Limbs.n +(** Alias for fixed typed-size vector of size 4 *) type 'a v := ('a, nat4) Pickles_types.Vector.vec type vector := int64 v diff --git a/src/lib/pickles/composition_types/dune 
b/src/lib/pickles/composition_types/dune index a97beb0fe5e..0cc6eb6215f 100644 --- a/src/lib/pickles/composition_types/dune +++ b/src/lib/pickles/composition_types/dune @@ -1,26 +1,43 @@ (library (name composition_types) (public_name pickles.composition_types) - (flags -warn-error -27) + ; Deactivated warnings + ; 40: name-out-scope (activate later) + ; + ; 41: ambiguous name (too many of them for now, activate later) + ; + ; 42: disambiguated-name (rely on type disambiguation; not too bad, but closer + ; module openings may both solve the warning *and* help the reader) + ; + ; 44: open-shadow-identifier (operation overloading is common in the codebase) + ; + ; Warning 70 (no interface) is activated but not considered as an error since + ; we use the `_intf` module naming pattern (see + ; https://www.craigfe.io/posts/the-intf-trick for a rationale) + (flags + (:standard + -w +a-40..42-44 + -warn-error +a-70-27) + -open Core_kernel) (instrumentation (backend bisect_ppx)) (preprocess (pps ppx_version ppx_mina ppx_jane ppx_deriving.std ppx_deriving_yojson h_list.ppx )) (libraries ;; opam libraries - sexplib0 - bin_prot.shape - core_kernel - base.caml - ;; local libraries - mina_wire_types - kimchi_backend.pasta - kimchi_backend.pasta.basic - snarky.backendless - pickles_types - pickles.limb_vector - kimchi_backend - pickles_base - pickles.backend - kimchi_backend.common - ppx_version.runtime + sexplib0 + bin_prot.shape + core_kernel + base.caml + ;; local libraries + mina_wire_types + kimchi_backend.pasta + kimchi_backend.pasta.basic + snarky.backendless + pickles_types + pickles.limb_vector + kimchi_backend + pickles_base + pickles.backend + kimchi_backend.common + ppx_version.runtime + ) ) -) diff --git a/src/lib/pickles/composition_types/spec.ml b/src/lib/pickles/composition_types/spec.ml index 220fe679cba..536beb78052 100644 --- a/src/lib/pickles/composition_types/spec.ml +++ b/src/lib/pickles/composition_types/spec.ml @@ -58,15 +58,15 @@ module rec T : sig -> ('xs1 Hlist.HlistId.t, 'xs2 Hlist.HlistId.t, 'env) t | Opt : { inner : ('a1, 'a2, (< bool1 : bool ; bool2 : 'bool ; .. > as 'env)) t - ; flag : Plonk_types.Opt.Flag.t + ; flag : Opt.Flag.t ; dummy1 : 'a1 ; dummy2 : 'a2 ; bool : (module Bool_intf with type var = 'bool) } - -> ('a1 option, ('a2, 'bool) Plonk_types.Opt.t, 'env) t | Opt_unflagged : { inner : ('a1, 'a2, (< bool1 : bool ; bool2 : 'bool ; ..
> as 'env)) t - ; flag : Plonk_types.Opt.Flag.t + ; flag : Opt.Flag.t ; dummy1 : 'a1 ; dummy2 : 'a2 } @@ -120,13 +120,13 @@ let rec pack : Option.map t_constant_opt ~f:(fun t_const -> t_const.(i)) in pack ~zero ~one p spec t_constant_opt t ) - | Opt { inner; flag; dummy1; dummy2 } -> ( + | Opt { inner; dummy1; dummy2; flag = _; bool = _ } -> ( match t with - | None -> + | Nothing -> let t_constant_opt = Option.map t_constant_opt ~f:(fun _ -> dummy1) in Array.append [| zero |] (pack ~zero ~one p inner t_constant_opt dummy2) - | Some x -> + | Just x -> let t_constant_opt = Option.map ~f:(fun x -> Option.value_exn x) t_constant_opt in @@ -139,7 +139,7 @@ let rec pack : Array.append (p.pack Bool b_constant_opt b) (pack ~zero ~one p inner x_constant_opt x) ) - | Opt_unflagged { inner; flag; dummy1; dummy2 } -> ( + | Opt_unflagged { inner; dummy1; dummy2; flag = _ } -> ( match t with | None -> let t_constant_opt = Option.map t_constant_opt ~f:(fun _ -> dummy1) in @@ -165,7 +165,7 @@ let rec typ : -> (var, value, f) Snarky_backendless.Typ.t = let open Snarky_backendless.Typ in fun t spec -> - match spec with + match[@warning "-45"] spec with | B spec -> t.typ spec | Scalar chal -> @@ -191,11 +191,11 @@ let rec typ : let bool = typ t (B Bool) in let open B in (* Always use the same "maybe" layout which is a boolean and then the value *) - Plonk_types.Opt.constant_layout_typ bool flag ~dummy:dummy1 - ~dummy_var:dummy2 ~true_ ~false_ (typ t inner) + Opt.constant_layout_typ bool flag ~dummy:dummy1 ~dummy_var:dummy2 ~true_ + ~false_ (typ t inner) | Opt_unflagged { inner; flag; dummy1; dummy2 } -> ( match flag with - | Plonk_types.Opt.Flag.No -> + | Opt.Flag.No -> let open Snarky_backendless.Typ in unit () |> Snarky_backendless.Typ.transport @@ -203,8 +203,8 @@ let rec typ : ~back:(fun () -> None) |> Snarky_backendless.Typ.transport_var ~there:(function Some _ -> assert false | None -> ()) - ~back:(fun x -> None) - | Plonk_types.Opt.Flag.(Yes | Maybe) -> + ~back:(fun _ -> None) + | Opt.Flag.(Yes | Maybe) -> typ t inner |> Snarky_backendless.Typ.transport ~there:(function Some x -> x | None -> dummy1) @@ -226,10 +226,6 @@ let rec typ : |> transport ~there:(fun y -> assert_eq x y) ~back:(fun () -> x) |> transport_var ~there:(fun _ -> ()) ~back:(fun () -> constant_var) -type 'env exists = T : ('t1, 't2, 'env) T.t -> 'env exists - -type generic_spec = { spec : 'env. 
'env exists } - module ETyp = struct type ('var, 'value, 'f) t = | T : @@ -248,7 +244,7 @@ let rec etyp : (f, env) etyp -> (value, var, env) T.t -> (var, value, f) ETyp.t = let open Snarky_backendless.Typ in fun e spec -> - match spec with + match[@warning "-45"] spec with | B spec -> e.etyp spec | Scalar chal -> @@ -282,24 +278,23 @@ let rec etyp : | Opt { inner; flag; dummy1; dummy2; bool = (module B) } -> let (T (bool, f_bool, f_bool')) = etyp e (B Bool) in let (T (a, f_a, f_a')) = etyp e inner in - let opt_map ~f1 ~f2 (x : _ Plonk_types.Opt.t) : _ Plonk_types.Opt.t = + let opt_map ~f1 ~f2 (x : _ Opt.t) : _ Opt.t = match x with - | None -> - None - | Some x -> - Some (f1 x) + | Nothing -> + Opt.nothing + | Just x -> + Opt.just (f1 x) | Maybe (b, x) -> Maybe (f2 b, f1 x) in let f = opt_map ~f1:f_a ~f2:f_bool in let f' = opt_map ~f1:f_a' ~f2:f_bool' in T - ( Plonk_types.Opt.constant_layout_typ ~dummy:dummy1 - ~dummy_var:(f_a' dummy2) ~true_:(f_bool' B.true_) - ~false_:(f_bool' B.false_) bool flag a + ( Opt.constant_layout_typ ~dummy:dummy1 ~dummy_var:(f_a' dummy2) + ~true_:(f_bool' B.true_) ~false_:(f_bool' B.false_) bool flag a , f , f' ) - | Opt_unflagged { inner; flag; dummy1; dummy2 } -> + | Opt_unflagged { inner; dummy1; dummy2; flag = _ } -> let (T (typ, f, f_inv)) = etyp e inner in let f x = Some (f x) in let f_inv = function None -> f_inv dummy2 | Some x -> f_inv x in @@ -309,7 +304,7 @@ let rec etyp : ~there:(Option.value ~default:dummy1) ~back:(fun x -> Some x) in T (typ, f, f_inv) - | Constant (x, assert_eq, spec) -> + | Constant (x, _assert_eq, spec) -> let (T (Typ typ, f, f')) = etyp e spec in let constant_var = let fields, aux = typ.value_to_fields x in diff --git a/src/lib/pickles/composition_types/spec.mli b/src/lib/pickles/composition_types/spec.mli index ccc2d086c72..e12511c5274 100644 --- a/src/lib/pickles/composition_types/spec.mli +++ b/src/lib/pickles/composition_types/spec.mli @@ -1,5 +1,6 @@ type 'f impl = (module Snarky_backendless.Snark_intf.Run with type field = 'f) +(** Basic types *) type (_, _, _) basic = | Unit : (unit, unit, < .. >) basic | Field @@ -37,6 +38,7 @@ module type Bool_intf = sig val false_ : var end +(** Compound types. These are built from Basic types described above *) module rec T : sig type (_, _, _) t = | B : ('a, 'b, 'env) basic -> ('a, 'b, 'env) t @@ -61,18 +63,18 @@ module rec T : sig t | Opt : { inner : ('a1, 'a2, 'env) t - ; flag : Pickles_types.Plonk_types.Opt.Flag.t + ; flag : Pickles_types.Opt.Flag.t ; dummy1 : 'a1 ; dummy2 : 'a2 ; bool : (module Bool_intf with type var = 'bool) } -> ( 'a1 option - , ('a2, 'bool) Pickles_types.Plonk_types.Opt.t + , ('a2, 'bool) Pickles_types.Opt.t , (< bool1 : bool ; bool2 : 'bool ; .. > as 'env) ) t | Opt_unflagged : { inner : ('a1, 'a2, (< bool1 : bool ; bool2 : 'bool ; .. > as 'env)) t - ; flag : Pickles_types.Plonk_types.Opt.Flag.t + ; flag : Pickles_types.Opt.Flag.t ; dummy1 : 'a1 ; dummy2 : 'a2 } diff --git a/src/lib/pickles/dirty.mli b/src/lib/pickles/dirty.mli index 444e9f79083..0dac886d7e4 100644 --- a/src/lib/pickles/dirty.mli +++ b/src/lib/pickles/dirty.mli @@ -1,4 +1,4 @@ -(* Origin of dirtiness *) +(** Origin of dirtiness, i.e. 
data that is not saved on disk yet *) type t = [ `Cache_hit | `Generated_something | `Locally_generated ] diff --git a/src/lib/pickles/dummy.ml b/src/lib/pickles/dummy.ml index 47cea717510..75102fa1e0c 100644 --- a/src/lib/pickles/dummy.ml +++ b/src/lib/pickles/dummy.ml @@ -4,25 +4,27 @@ open Backend open Composition_types open Common -let wrap_domains = Common.wrap_domains +let _wrap_domains = Common.wrap_domains let evals = - let open Plonk_types in - let e = - Evals.map (Evaluation_lengths.create ~of_int:Fn.id) ~f:(fun n -> - let a () = Array.create ~len:n (Ro.tock ()) in - (a (), a ()) ) - in - let ex = - { All_evals.With_public_input.evals = e - ; public_input = (Ro.tock (), Ro.tock ()) - } - in - { All_evals.ft_eval1 = Ro.tock (); evals = ex } + lazy + (let open Plonk_types in + let e = + Evals.map Evaluation_lengths.default ~f:(fun n -> + let a () = Array.create ~len:n (Ro.tock ()) in + (a (), a ()) ) + in + let ex = + { All_evals.With_public_input.evals = e + ; public_input = ([| Ro.tock () |], [| Ro.tock () |]) + } + in + { All_evals.ft_eval1 = Ro.tock (); evals = ex }) let evals_combined = - Plonk_types.All_evals.map evals ~f1:Fn.id - ~f2:(Array.reduce_exn ~f:Backend.Tock.Field.( + )) + lazy + (Plonk_types.All_evals.map (Lazy.force evals) ~f1:Fn.id + ~f2:(Array.reduce_exn ~f:Backend.Tock.Field.( + )) ) module Ipa = struct module Wrap = struct @@ -32,8 +34,9 @@ module Ipa = struct { Bulletproof_challenge.prechallenge } ) let challenges_computed = - Vector.map challenges ~f:(fun { prechallenge } : Tock.Field.t -> - Ipa.Wrap.compute_challenge prechallenge ) + lazy + (Vector.map challenges ~f:(fun { prechallenge } : Tock.Field.t -> + Ipa.Wrap.compute_challenge prechallenge ) ) let sg = lazy (time "dummy wrap sg" (fun () -> Ipa.Wrap.compute_sg challenges)) @@ -46,8 +49,9 @@ module Ipa = struct { Bulletproof_challenge.prechallenge } ) let challenges_computed = - Vector.map challenges ~f:(fun { prechallenge } : Tick.Field.t -> - Ipa.Step.compute_challenge prechallenge ) + lazy + (Vector.map challenges ~f:(fun { prechallenge } : Tick.Field.t -> + Ipa.Step.compute_challenge prechallenge ) ) let sg = lazy (time "dummy wrap sg" (fun () -> Ipa.Step.compute_sg challenges)) diff --git a/src/lib/pickles/dummy.mli b/src/lib/pickles/dummy.mli index 00a3adbfe49..080a8a85a3d 100644 --- a/src/lib/pickles/dummy.mli +++ b/src/lib/pickles/dummy.mli @@ -1,4 +1,6 @@ -(** [Ipa] *) +(** Dummy values used to pad lists inside proofs to a standard length, giving + heterogeneous proofs a uniform shape *) + module Ipa : sig module Wrap : sig val challenges : @@ -12,6 +14,7 @@ module Ipa : sig ( Backend.Tock.Field.t , Pickles_types.Nat.z Backend.Tock.Rounds.plus_n ) Pickles_types.Vector.t + lazy_t val sg : (Pasta_bindings.Fp.t * Pasta_bindings.Fp.t) lazy_t end @@ -28,6 +31,7 @@ module Ipa : sig ( Backend.Tick.Field.t , Pickles_types.Nat.z Backend.Tick.Rounds.plus_n ) Pickles_types.Vector.t + lazy_t val sg : (Pasta_bindings.Fq.t * Pasta_bindings.Fq.t) lazy_t end @@ -43,9 +47,11 @@ val evals : ( Backend.Tock.Field.t , Backend.Tock.Field.t array ) Pickles_types.Plonk_types.All_evals.t + lazy_t (** [evals_combined] is a constant *) val evals_combined : ( Backend.Tock.Field.t , Backend.Tock.Field.t ) Pickles_types.Plonk_types.All_evals.t + lazy_t diff --git a/src/lib/pickles/dune b/src/lib/pickles/dune index fb11dc3b4ed..a7ef1c949ce 100644 --- a/src/lib/pickles/dune +++ b/src/lib/pickles/dune @@ -3,9 +3,14 @@ (name pickles) (public_name pickles) (modules_without_implementation - full_signature - type) - (flags -warn-error
+34+33+27) + full_signature + type + intf + pickles_intf) + (flags + (:standard -w +a-40..42-44 + -warn-error +a-27) + -open Core_kernel) (instrumentation (backend bisect_ppx)) (preprocess @@ -50,7 +55,6 @@ pickles.limb_vector pickles_base kimchi_backend - mina_version base58_check codable random_oracle_input diff --git a/src/lib/pickles/endo.mli b/src/lib/pickles/endo.mli index 6193df2b983..7d458308d93 100644 --- a/src/lib/pickles/endo.mli +++ b/src/lib/pickles/endo.mli @@ -1,4 +1,14 @@ -(* Endo coefficients *) +(** Curves used by the inductive proof system support optimisations of the + multi-scalar multiplications using a {{ + https://link.springer.com/content/pdf/10.1007/3-540-44647-8_11.pdf } GLV + decomposition }. This is the case, for instance, for + Pallas and Vesta, the curves used by Halo2 and by kimchi. + + This module provides a generic interface and instantiates it with the Tick and + Tock curves used in Pickles. + For a more detailed description, the reader is invited to have a look at the + {{ https://eprint.iacr.org/2019/1021.pdf } Halo paper }. +*) (** [Step_inner_curve] contains the endo coefficients used by the step proof system *) module Step_inner_curve : sig diff --git a/src/lib/pickles/evaluation_lengths.ml b/src/lib/pickles/evaluation_lengths.ml index 55b9400d7bf..cd2b98ae0e5 100644 --- a/src/lib/pickles/evaluation_lengths.ml +++ b/src/lib/pickles/evaluation_lengths.ml @@ -1,7 +1,5 @@ -open Core_kernel - -let create ~of_int = - let one = of_int 1 in +let default = + let one = 1 in let open Pickles_types in let open Plonk_types in Evals. diff --git a/src/lib/pickles/evaluation_lengths.mli b/src/lib/pickles/evaluation_lengths.mli index 37b67ce650e..d1fd5ba38c3 100644 --- a/src/lib/pickles/evaluation_lengths.mli +++ b/src/lib/pickles/evaluation_lengths.mli @@ -1,5 +1,25 @@ -(* Evaluation lengths *) +(** This module provides functions to keep track of the evaluations of the + different polynomials involved in the polynomial IOP scheme. + Refer to the {{ https://eprint.iacr.org/2019/953.pdf } PlonK paper } for a + basic understanding of the different polynomials involved in a proof. +*) + +(** Versioned types for the evaluations. It is parametrized by the field in + which the evaluations happen. + + For the different versions, refer to the module + {!module:Pickles_types.Plonk_types.Evals}. When a new custom gate is added, the + corresponding selectors must be added. + + As for the return type of {!Commitment_lengths.create}, an encoding of the + vector size is provided at the type level. + *) type 'a t := 'a Pickles_types.Plonk_types.Evals.t +(** [default] is a value of type [int t]. + + Its field values are set to [1] when they are non-optional, contain only + [1]-values when they are containers (like {!type:Pickles_types.Vector.t}), + and are set to {!Option.None} when optional.
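+
+    A small sketch of this contract (assuming here that [w], the vector of
+    witness-column lengths, is one of the record's
+    {!type:Pickles_types.Vector.t} fields):
+{[
+  let lengths = Evaluation_lengths.default in
+  (* every witness column is opened at exactly one point *)
+  assert (
+    List.for_all (Pickles_types.Vector.to_list lengths.w) ~f:(fun n -> n = 1) )
+]}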
*) +val default : int t diff --git a/src/lib/pickles/fix_domains.ml b/src/lib/pickles/fix_domains.ml index 66cd9b420a1..32348ea46e9 100644 --- a/src/lib/pickles/fix_domains.ml +++ b/src/lib/pickles/fix_domains.ml @@ -3,18 +3,68 @@ open Import let zk_rows = 3 -let rough_domains : Domains.t = +let rough_domains = let d = Domain.Pow_2_roots_of_unity 20 in - { h = d } + { Domains.h = d } -let domains (type field) - (module Impl : Snarky_backendless.Snark_intf.Run with type field = field) +let domains (type field gates) ?feature_flags + (module Impl : Snarky_backendless.Snark_intf.Run + with type field = field + and type R1CS_constraint_system.t = ( field , gates ) Kimchi_backend_common .Plonk_constraint_system .t ) (Spec.ETyp.T (typ, conv, _conv_inv)) (Spec.ETyp.T (return_typ, _ret_conv, ret_conv_inv)) main = let main x () = ret_conv_inv (main (conv x)) in - let domains2 sys : Domains.t = + let domains2 sys = let open Domain in + (* Compute the domain required for the lookup tables *) + let lookup_table_length_log2 = + match feature_flags with + | None -> + 0 + | Some feature_flags -> + let { Pickles_types.Plonk_types.Features.range_check0 + ; range_check1 + ; foreign_field_add = + _ + (* Does not use lookup tables, therefore we do + not need it in the computation *) + ; foreign_field_mul + ; xor + ; rot + ; lookup + ; runtime_tables + } = + feature_flags + in + let combined_lookup_table_length = + let range_check_table_used = + range_check0 || range_check1 || foreign_field_mul || rot + in + let xor_table_used = xor in + (if range_check_table_used then Int.pow 2 12 else 0) + + (if xor_table_used then Int.pow 2 8 else 0) + + ( if lookup then ( + Kimchi_backend_common.Plonk_constraint_system + .finalize_fixed_lookup_tables sys ; + Kimchi_backend_common.Plonk_constraint_system + .get_concatenated_fixed_lookup_table_size sys ) + else 0 ) + + + if runtime_tables then ( + Kimchi_backend_common.Plonk_constraint_system + .finalize_runtime_lookup_tables sys ; + Kimchi_backend_common.Plonk_constraint_system + .get_concatenated_runtime_lookup_table_size sys ) + else 0 + in + + Int.ceil_log2 (combined_lookup_table_length + zk_rows + 1) + in let public_input_size = Set_once.get_exn (Impl.R1CS_constraint_system.get_public_input_size sys) @@ -23,6 +73,8 @@ let domains (type field) let rows = zk_rows + public_input_size + Impl.R1CS_constraint_system.get_rows_len sys in - { h = Pow_2_roots_of_unity Int.(ceil_log2 rows) } + { Domains.h = + Pow_2_roots_of_unity Int.(max lookup_table_length_log2 (ceil_log2 rows)) + } in domains2 (Impl.constraint_system ~input_typ:typ ~return_typ main) diff --git a/src/lib/pickles/fix_domains.mli b/src/lib/pickles/fix_domains.mli index 5a4a91d5f59..0ec9373180e 100644 --- a/src/lib/pickles/fix_domains.mli +++ b/src/lib/pickles/fix_domains.mli @@ -1,5 +1,17 @@ +(** Determines the domain size used for 'wrap proofs'. This can be determined by + the fixpoint function provided by {!val:Wrap_domains.f_debug}, but for + efficiency this is disabled in production and the hard-coded results are used.
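+
+    A rough sketch of the row-count computation from [fix_domains.ml] above
+    (made-up numbers; the real code also accounts for lookup tables):
+{[
+  let zk_rows = 3 and public_input_size = 1 and circuit_rows = 20 in
+  let rows = zk_rows + public_input_size + circuit_rows in
+  (* 24 rows need the power-of-two domain of size 2^5 = 32 *)
+  Domain.Pow_2_roots_of_unity (Int.ceil_log2 rows)
+]}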
+*) + val domains : - (module Snarky_backendless.Snark_intf.Run with type field = 'field) + ?feature_flags:bool Pickles_types.Plonk_types.Features.t + -> (module Snarky_backendless.Snark_intf.Run + with type field = 'field + and type R1CS_constraint_system.t = ( 'field + , 'gates ) + Kimchi_backend_common + .Plonk_constraint_system + .t ) -> ('a, 'b, 'field) Import.Spec.ETyp.t -> ('c, 'd, 'field) Import.Spec.ETyp.t -> ('a -> 'c) diff --git a/src/lib/pickles/full_signature.mli b/src/lib/pickles/full_signature.mli index 0bf3fce70a1..ae40da12b0a 100644 --- a/src/lib/pickles/full_signature.mli +++ b/src/lib/pickles/full_signature.mli @@ -1,3 +1,7 @@ +(** A type capturing the padding that's required to homogenize proofs consumed + by each of the inductive rules' ancestor proofs. +*) + type ('max_width, 'branches, 'maxes) t = { padded : ( (int, 'branches) Pickles_types.Vector.t diff --git a/src/lib/pickles/impls.ml b/src/lib/pickles/impls.ml index bc7fabb8c68..f928132ead8 100644 --- a/src/lib/pickles/impls.ml +++ b/src/lib/pickles/impls.ml @@ -49,7 +49,6 @@ module Step = struct module Other_field = struct (* Tick.Field.t = p < q = Tock.Field.t *) - let size_in_bits = Tock.Field.size_in_bits module Constant = Tock.Field @@ -57,37 +56,24 @@ module Step = struct Field.t * Boolean.var let forbidden_shifted_values = - let size_in_bits = Constant.size_in_bits in - let other_mod = Wrap_impl.Bigint.to_bignum_bigint Constant.size in - let values = forbidden_shifted_values ~size_in_bits ~modulus:other_mod in - let f x = - let open Option.Let_syntax in - let hi = test_bit x (Field.size_in_bits - 1) in - let lo = B.shift_right x 1 in - let%map lo = - let modulus = Impl.Field.size in - if B.compare modulus lo <= 0 then None - else Some Impl.Bigint.(to_field (of_bignum_bigint lo)) - in - (lo, hi) - in - values |> List.filter_map ~f - - let%test_unit "preserve circuit behavior for Step" = - let expected_list = - [ ("45560315531506369815346746415080538112", false) - ; ("45560315531506369815346746415080538113", false) - ; ( "14474011154664524427946373126085988481727088556502330059655218120611762012161" - , true ) - ; ( "14474011154664524427946373126085988481727088556502330059655218120611762012161" - , true ) - ] - in - let str_list = - List.map forbidden_shifted_values ~f:(fun (a, b) -> - (Tick.Field.to_string a, b) ) - in - assert ([%equal: (string * bool) list] str_list expected_list) + lazy + (let size_in_bits = Constant.size_in_bits in + let other_mod = Wrap_impl.Bigint.to_bignum_bigint Constant.size in + let values = + forbidden_shifted_values ~size_in_bits ~modulus:other_mod + in + let f x = + let open Option.Let_syntax in + let hi = test_bit x (Field.size_in_bits - 1) in + let lo = B.shift_right x 1 in + let%map lo = + let modulus = Impl.Field.size in + if B.compare modulus lo <= 0 then None + else Some Impl.Bigint.(to_field (of_bignum_bigint lo)) + in + (lo, hi) + in + values |> List.filter_map ~f ) let typ_unchecked : (t, Constant.t) Typ.t = Typ.transport @@ -112,20 +98,21 @@ module Step = struct in let (Typ typ_unchecked) = typ_unchecked in let%bind () = typ_unchecked.check t in - Checked.List.map forbidden_shifted_values ~f:(equal t) + Checked.List.map (Lazy.force forbidden_shifted_values) ~f:(equal t) >>= Boolean.any >>| Boolean.not >>= Boolean.Assert.is_true let typ : _ Snarky_backendless.Typ.t = let (Typ typ_unchecked) = typ_unchecked in Typ { typ_unchecked with check } - let to_bits (x, b) = Field.unpack x ~length:(Field.size_in_bits - 1) @ [ b ] + let _to_bits (x, b) = + Field.unpack x 
~length:(Field.size_in_bits - 1) @ [ b ] end module Digest = Digest.Make (Impl) module Challenge = Challenge.Make (Impl) - let input ~proofs_verified ~wrap_rounds ~feature_flags = + let input ~proofs_verified ~wrap_rounds = let open Types.Step.Statement in let spec = spec proofs_verified wrap_rounds in let (T (typ, f, f_inv)) = @@ -171,26 +158,18 @@ module Wrap = struct type t = Field.t let forbidden_shifted_values = - let other_mod = Step.Impl.Bigint.to_bignum_bigint Constant.size in - let size_in_bits = Constant.size_in_bits in - let values = forbidden_shifted_values ~size_in_bits ~modulus:other_mod in - let f x = - let modulus = Impl.Field.size in - if B.compare modulus x <= 0 then None - else Some Impl.Bigint.(to_field (of_bignum_bigint x)) - in - values |> List.filter_map ~f - - let%test_unit "preserve circuit behavior for Wrap" = - let expected_list = - [ "91120631062839412180561524743370440705" - ; "91120631062839412180561524743370440706" - ] - in - let str_list = - List.map forbidden_shifted_values ~f:Wrap_field.to_string - in - assert ([%equal: string list] str_list expected_list) + lazy + (let other_mod = Step.Impl.Bigint.to_bignum_bigint Constant.size in + let size_in_bits = Constant.size_in_bits in + let values = + forbidden_shifted_values ~size_in_bits ~modulus:other_mod + in + let f x = + let modulus = Impl.Field.size in + if B.compare modulus x <= 0 then None + else Some Impl.Bigint.(to_field (of_bignum_bigint x)) + in + values |> List.filter_map ~f ) let typ_unchecked, check = (* Tick -> Tock *) @@ -204,7 +183,7 @@ module Wrap = struct let open Let_syntax in let equal x1 x2 = Field.Checked.equal x1 (Field.Var.constant x2) in let%bind () = t0.check t in - Checked.List.map forbidden_shifted_values ~f:(equal t) + Checked.List.map (Lazy.force forbidden_shifted_values) ~f:(equal t) >>= Boolean.any >>| Boolean.not >>= Boolean.Assert.is_true in (typ_unchecked, check) @@ -213,12 +192,15 @@ module Wrap = struct let (Typ typ_unchecked) = typ_unchecked in Typ { typ_unchecked with check } - let to_bits x = Field.unpack x ~length:Field.size_in_bits + let _to_bits x = Field.unpack x ~length:Field.size_in_bits end - let input () = + let input + ~feature_flags: + ({ Plonk_types.Features.Full.uses_lookups; _ } as feature_flags) () = + let feature_flags = Plonk_types.Features.of_full feature_flags in let lookup = - { Types.Wrap.Lookup_parameters.use = No + { Types.Wrap.Lookup_parameters.use = uses_lookups ; zero = { value = { challenge = Limb_vector.Challenge.Constant.zero @@ -245,24 +227,15 @@ module Wrap = struct Impl.run_checked (Other_field.check x) ; t ) , Fn.id ) ) - (* Wrap circuit: no features needed. 
*) - (In_circuit.spec (module Impl) lookup Plonk_types.Features.none) + (In_circuit.spec (module Impl) lookup feature_flags) in - let feature_flags = Plonk_types.Features.none in let typ = Typ.transport typ - ~there:(In_circuit.to_data ~option_map:Option.map ~to_opt:Fn.id) - ~back: - (In_circuit.of_data ~feature_flags ~option_map:Option.map - ~of_opt:Plonk_types.Opt.to_option ) + ~there:(In_circuit.to_data ~option_map:Option.map) + ~back:(In_circuit.of_data ~option_map:Option.map) in Spec.ETyp.T ( typ - , (fun x -> - In_circuit.of_data ~feature_flags ~option_map:Plonk_types.Opt.map - (f x) ~of_opt:Fn.id ) - , fun x -> - f_inv - (In_circuit.to_data ~option_map:Plonk_types.Opt.map x - ~to_opt:Plonk_types.Opt.to_option_unsafe ) ) + , (fun x -> In_circuit.of_data ~option_map:Opt.map (f x)) + , fun x -> f_inv (In_circuit.to_data ~option_map:Opt.map x) ) end diff --git a/src/lib/pickles/impls.mli b/src/lib/pickles/impls.mli index 24362a4607e..573ad8516b8 100644 --- a/src/lib/pickles/impls.mli +++ b/src/lib/pickles/impls.mli @@ -31,6 +31,8 @@ module Step : sig module Constant = Backend.Tock.Field + val forbidden_shifted_values : (Impl.field * bool) list lazy_t + val typ_unchecked : (t, Constant.t) Typ.t val typ : (t, Constant.t, Internal_Basic.field) Snarky_backendless.Typ.t @@ -39,7 +41,6 @@ module Step : sig val input : proofs_verified:'a Pickles_types.Nat.t -> wrap_rounds:'b Pickles_types.Nat.t - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t -> ( ( ( ( Impl.Field.t , Impl.Field.t Composition_types.Scalar_challenge.t , Other_field.t Pickles_types.Shifted_value.Type2.t @@ -120,6 +121,8 @@ module Wrap : sig module Constant = Backend.Tick.Field + val forbidden_shifted_values : Impl.field list lazy_t + val typ_unchecked : (Impl.Field.t, Backend.Tick.Field.t) Impl.Typ.t val typ : @@ -130,21 +133,19 @@ end val input : - unit + feature_flags:Opt.Flag.t Plonk_types.Features.Full.t + -> unit -> ( ( Impl.Field.t , Impl.Field.t Composition_types.Scalar_challenge.t , Impl.Field.t Pickles_types.Shifted_value.Type1.t , ( Impl.Field.t Pickles_types.Shifted_value.Type1.t , Impl.field Snarky_backendless.Cvar.t Snarky_backendless.Snark_intf.Boolean0.t ) - Pickles_types.Plonk_types.Opt.t + Pickles_types.Opt.t , ( Impl.Field.t Composition_types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit - .Lookup - .t , Impl.field Snarky_backendless.Cvar.t Snarky_backendless.Snark_intf.Boolean0.t ) - Pickles_types.Plonk_types.Opt.t + Pickles_types.Opt.t , Impl.Boolean.var , Impl.field Snarky_backendless.Cvar.t , Impl.field Snarky_backendless.Cvar.t @@ -162,9 +163,6 @@ , Other_field.Constant.t Pickles_types.Shifted_value.Type1.t , Other_field.Constant.t Pickles_types.Shifted_value.Type1.t option , Limb_vector.Challenge.Constant.t Composition_types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit - .Lookup - .t option , bool , ( Limb_vector.Constant.Hex64.t diff --git a/src/lib/pickles/import.mli b/src/lib/pickles/import.mli index 67db7840b0c..8cc7a2ba8b0 100644 --- a/src/lib/pickles/import.mli +++ b/src/lib/pickles/import.mli @@ -1,4 +1,6 @@ -(* Definition of local namespace for imported module *) +(** Provides common aliases, giving a single entrypoint to open in Pickles + libraries +*) (** [B] is [Bignum.Bigint] *) module B = Bigint diff --git a/src/lib/pickles/inductive_rule.ml b/src/lib/pickles/inductive_rule.ml index 230a36619c7..b1ad99e9e30 100644 --- a/src/lib/pickles/inductive_rule.ml
+++ b/src/lib/pickles/inductive_rule.ml @@ -1,4 +1,3 @@ -open Core_kernel open Pickles_types.Poly_types open Pickles_types.Hlist diff --git a/src/lib/pickles/intf.ml b/src/lib/pickles/intf.mli similarity index 66% rename from src/lib/pickles/intf.ml rename to src/lib/pickles/intf.mli index 670f8738977..2acb04df8fd 100644 --- a/src/lib/pickles/intf.ml +++ b/src/lib/pickles/intf.mli @@ -2,7 +2,7 @@ open Core_kernel open Pickles_types module Sponge_lib = Sponge -module Snarkable = struct +module Snarkable : sig module type S1 = sig type _ t @@ -122,7 +122,7 @@ module Snarkable = struct end end -module Evals = struct +module Evals : sig module type S = sig type n @@ -134,16 +134,18 @@ module Evals = struct end end -module Group (Impl : Snarky_backendless.Snark_intf.Run) = struct - open Impl - +(** Generic interface over a concrete implementation [Impl] of an elliptic + curve in Weierstrass form with coefficients [a] and [b]. In affine + coordinates, the curve has the equation [y² = x³ + ax + b]. *) +module Group (Impl : Snarky_backendless.Snark_intf.Run) : sig module type S = sig type t + (** Parameters of the elliptic curve *) module Params : sig - val a : Field.Constant.t + val a : Impl.Field.Constant.t - val b : Field.Constant.t + val b : Impl.Field.Constant.t end module Constant : sig @@ -153,6 +155,7 @@ module Group (Impl : Snarky_backendless.Snark_intf.Run) = struct val negate : t -> t + (** The scalar field of the elliptic curve *) module Scalar : sig include Plonk_checks.Field_intf @@ -163,52 +166,73 @@ module Group (Impl : Snarky_backendless.Snark_intf.Run) = struct val scale : t -> Scalar.t -> t - val to_affine_exn : t -> field * field + (** [to_affine_exn p] returns the affine coordinates [(x, y)] of the point + [p] *) + val to_affine_exn : t -> Impl.field * Impl.field - val of_affine : field * field -> t + (** [of_affine (x, y)] builds a point on the curve + TODO: check it is on the curve? Check it is in the prime subgroup? + *) + val of_affine : Impl.field * Impl.field -> t end - val typ_unchecked : (t, Constant.t, field) Snarky_backendless.Typ.t + (** Represent a point, but not necessarily on the curve and in the prime + subgroup *) + val typ_unchecked : (t, Constant.t, Impl.field) Snarky_backendless.Typ.t - val typ : (t, Constant.t, field) Snarky_backendless.Typ.t + (** Represent a point on the curve and in the prime subgroup *) + val typ : (t, Constant.t, Impl.field) Snarky_backendless.Typ.t + (** Add two points on the curve. + TODO: is the addition complete?
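+
+      A usage sketch, for hypothetical in-circuit points [p] and [q] of type
+      [t] (completeness of the addition is the TODO above):
+{[
+  let r = p + q in
+  let r2 = double r in
+  to_field_elements r2
+]}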
+ *) val ( + ) : t -> t -> t + (** Double the point *) val double : t -> t - val scale : t -> Boolean.var list -> t + (** [scale g xs] computes the scalar multiplication of [g] with [xs], where + the scalar [xs] is given as a list of bits *) + val scale : t -> Impl.Boolean.var list -> t - val if_ : Boolean.var -> then_:t -> else_:t -> t + val if_ : Impl.Boolean.var -> then_:t -> else_:t -> t + (** [negate x] computes the opposite of [x] *) val negate : t -> t - val to_field_elements : t -> Field.t list + (** Return the affine coordinates of the point [t] *) + val to_field_elements : t -> Impl.Field.t list + (** MSM with precomputed scaled values *) module Scaling_precomputation : sig + (** Precomputed table *) type t + (** [create p] builds a table of scaled values of [p] which can be used to + compute MSM *) val create : Constant.t -> t end val constant : Constant.t -> t + (** MSM using a precomputed table *) val multiscale_known : - (Boolean.var list * Scaling_precomputation.t) array -> t + (Impl.Boolean.var list * Scaling_precomputation.t) array -> t end end -module Sponge (Impl : Snarky_backendless.Snark_intf.Run) = struct - open Impl - +(** Hash functions that will be used for the Fiat-Shamir transformation *) +module Sponge (Impl : Snarky_backendless.Snark_intf.Run) : sig module type S = Sponge.Intf.Sponge - with module Field := Field + with module Field := Impl.Field and module State := Sponge.State - and type input := Field.t - and type digest := Field.t - and type t = Field.t Sponge.t + and type input := Impl.Field.t + and type digest := Impl.Field.t + and type t = Impl.Field.t Sponge.t end +(** Basic interface representing inputs of a computation *) module type Inputs_base = sig module Impl : Snarky_backendless.Snark_intf.Run @@ -217,6 +241,7 @@ module type Inputs_base = sig include Group(Impl).S with type t = Field.t * Field.t + (** A generator on the curve and in the prime subgroup *) val one : t val if_ : Boolean.var -> then_:t -> else_:t -> t @@ -235,25 +260,38 @@ module type Inputs_base = sig val size : Import.B.t + (** The size in bits for the canonical representation of a field + element *) val size_in_bits : int + (** [to_bits x] returns the little endian representation of the canonical + representation of the field element [x] *) val to_bits : t -> bool list + (** [of_bits bs] builds an element of the field using the little endian + representation given by [bs] *) val of_bits : bool list -> t + (** [is_square y] returns [true] if there exists an element [x] in the same + field such that [x^2 = y], i.e. [y] is a quadratic residue modulo [p] *) val is_square : t -> bool val print : t -> unit end module Generators : sig + (** Fixed generator of the group.
It must be a point on the curve and in the + prime subgroup *) val h : Inner_curve.Constant.t Lazy.t end + (** Parameters for the sponge that will be used as a random oracle for the + Fiat-Shamir transformation *) val sponge_params : Impl.Field.t Sponge_lib.Params.t end -module Wrap_main_inputs = struct +(** Interface for inputs for the outer computations *) +module Wrap_main_inputs : sig module type S = sig include Inputs_base @@ -267,7 +305,8 @@ module Wrap_main_inputs = struct end end -module Step_main_inputs = struct +(** Interface for inputs for the inner computations *) +module Step_main_inputs : sig module type S = sig include Inputs_base @@ -286,6 +325,7 @@ module Step_main_inputs = struct end end +(** Represent a statement to be proven *) module type Statement = sig type field diff --git a/src/lib/pickles/limb_vector/constant.ml b/src/lib/pickles/limb_vector/constant.ml index 9bda8a2bf05..02d35e7d470 100644 --- a/src/lib/pickles/limb_vector/constant.ml +++ b/src/lib/pickles/limb_vector/constant.ml @@ -41,10 +41,6 @@ module Hex64 = struct let hi, lo = String.(f (sub h ~pos:0 ~len:8), f (sub h ~pos:8 ~len:8)) in (hi lsl 32) lor lo - let%test_unit "int64 hex" = - Quickcheck.test (Int64.gen_incl zero max_value) ~f:(fun x -> - assert (equal x (of_hex (to_hex x))) ) - let sexp_of_t = Fn.compose String.sexp_of_t to_hex let t_of_sexp = Fn.compose of_hex String.t_of_sexp @@ -65,7 +61,7 @@ module Hex64 = struct end] end -module Make (N : Vector.Nat_intf) = struct +module Make (N : Pickles_types.Nat.Intf) = struct module A = Vector.With_length (N) let length = 64 * Nat.to_int N.n diff --git a/src/lib/pickles/limb_vector/constant.mli b/src/lib/pickles/limb_vector/constant.mli index f5e101d5036..bf77f0d078b 100644 --- a/src/lib/pickles/limb_vector/constant.mli +++ b/src/lib/pickles/limb_vector/constant.mli @@ -7,6 +7,22 @@ module Hex64 : sig type t = int64 + (** [to_hex t] converts [t] to its hex-string representation. + + This is a "pure" hexadecimal representation, i.e., it does NOT include any + prefix like '0x' or '#x'. + *) + val to_hex : t -> string + + (** [of_hex s] converts a "pure" hexadecimal string representation into + {!type:t}. + + [s] should not contain any prefix information.
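+
+      A roundtrip sketch ([t] is [int64], so [Int64.equal] applies; this
+      mirrors the property test in [test/test_constant.ml] below):
+{[
+  let x = 0xdeadbeefL in
+  assert (Int64.equal x (of_hex (to_hex x)))
+]}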
+ + @raise Invalid_argument if the string is not convertible + *) + val of_hex : string -> t + module Stable : sig module V1 : sig type nonrec t = t @@ -17,7 +33,7 @@ module Hex64 : sig end end -module Make (N : Pickles_types.Vector.Nat_intf) : sig +module Make (N : Pickles_types.Nat.Intf) : sig module A : module type of Pickles_types.Vector.With_length (N) val length : int diff --git a/src/lib/pickles/limb_vector/dune b/src/lib/pickles/limb_vector/dune index 3ec0cb8758c..03478d6fd82 100644 --- a/src/lib/pickles/limb_vector/dune +++ b/src/lib/pickles/limb_vector/dune @@ -1,14 +1,17 @@ (library (name limb_vector) (public_name pickles.limb_vector) - (flags -warn-error -27) + (flags + (:standard + -w +a-40..42-44 + -warn-error +a) + -open Core_kernel) (modules_without_implementation limb_vector) (instrumentation (backend bisect_ppx)) (preprocess (pps ppx_version ppx_mina ppx_jane ppx_deriving.std ppx_deriving_yojson )) (libraries ;; opam libraries bin_prot.shape - ppx_inline_test.config sexplib0 core_kernel base.caml diff --git a/src/lib/pickles/limb_vector/make.ml b/src/lib/pickles/limb_vector/make.ml index bb1564e35dd..78ed0d854ab 100644 --- a/src/lib/pickles/limb_vector/make.ml +++ b/src/lib/pickles/limb_vector/make.ml @@ -1,7 +1,7 @@ open Core_kernel open Pickles_types -module T (Impl : Snarky_backendless.Snark_intf.Run) (N : Vector.Nat_intf) = +module T (Impl : Snarky_backendless.Snark_intf.Run) (N : Pickles_types.Nat.Intf) = struct open Impl diff --git a/src/lib/pickles/limb_vector/make.mli b/src/lib/pickles/limb_vector/make.mli index c621dad8850..875b0a85452 100644 --- a/src/lib/pickles/limb_vector/make.mli +++ b/src/lib/pickles/limb_vector/make.mli @@ -1,8 +1,8 @@ -module T - (Impl : Snarky_backendless.Snark_intf.Run) - (N : Pickles_types.Vector.Nat_intf) : sig +(** Type representation of a vector of [N.t] limbs *) +module T (Impl : Snarky_backendless.Snark_intf.Run) (N : Pickles_types.Nat.Intf) : sig type t = Impl.Field.t + (** Returns the length of the vector as an integer *) val length : int module Constant : module type of Constant.Make (N) diff --git a/src/lib/pickles/limb_vector/test/dune b/src/lib/pickles/limb_vector/test/dune new file mode 100644 index 00000000000..a02db51996c --- /dev/null +++ b/src/lib/pickles/limb_vector/test/dune @@ -0,0 +1,11 @@ +(tests + (names test_constant) + (flags + (:standard -warn-error +a) + -open Core_kernel + -open Limb_vector + ) + (package pickles) + (preprocess (pps ppx_jane)) + (libraries alcotest core_kernel pickles pickles.limb_vector) + (action (run %{test}))) diff --git a/src/lib/pickles/limb_vector/test/test_constant.ml b/src/lib/pickles/limb_vector/test/test_constant.ml new file mode 100644 index 00000000000..84e07e2f56c --- /dev/null +++ b/src/lib/pickles/limb_vector/test/test_constant.ml @@ -0,0 +1,28 @@ +(* Testing + ------- + + Component: Pickles / Limb_vector + Subject: Test Constant (Hex64) + Invocation: dune exec src/lib/pickles/limb_vector/test/test_constant.exe +*) + +let test_hex_conversion () = + let open Constant.Hex64 in + Quickcheck.test (Int64.gen_incl zero max_value) ~f:(fun x -> + assert (equal x (of_hex (to_hex x))) ) + +let test_hex_failure () = + match Constant.Hex64.of_hex "ghi" with + | exception Invalid_argument _ -> + () + | _ -> + assert false + +let () = + let open Alcotest in + run "Limb_vector" + [ ( "Constant:Hex64" + , [ test_case "hex roundtrip" `Quick test_hex_conversion + ; test_case "hex conversion failure" `Quick test_hex_failure + ] ) + ] diff --git a/src/lib/pickles/make_sponge.mli 
b/src/lib/pickles/make_sponge.mli index 715ae0e5844..8d4b053317a 100644 --- a/src/lib/pickles/make_sponge.mli +++ b/src/lib/pickles/make_sponge.mli @@ -1,8 +1,22 @@ +(** This module provides interfaces and functors to instantiate a hash function + built from a permutation and the {{ + https://en.wikipedia.org/wiki/Sponge_function} sponge construction }. + + The interfaces have been created to be used with permutations consisting of + partial and full rounds, like {{ https://eprint.iacr.org/2019/458.pdf} + Poseidon }. +*) + module Rounds : sig + (** Number of full rounds for the permutation *) val rounds_full : int + (** If this is set to [true], round constants will be added before running a + round of the permutation. If set to [false], applies the standard + construction S-BOX -> MDS -> ARK. *) val initial_ark : bool + (** Number of partial rounds for the permutation *) val rounds_partial : int end @@ -16,8 +30,11 @@ module type S = sig type field := Field.t + (** [to_the_alpha x] returns [x^alpha] where [alpha] is a security parameter + of the permutation used for the S-BOX *) val to_the_alpha : field -> field + (** Exponent used in the S-BOX *) val alpha : int module Operations : Sponge.Intf.Operations with type Field.t = field @@ -39,23 +56,45 @@ module type S = sig module Field : sig type f := F.t + (** Parameters for the permutation *) type params := f Sponge.Params.t + (** Represents the state of the permutation. The capacity is always [1]. *) type state := f Sponge.State.t - type t = f Sponge.t (* TODO: Make this type abstract *) - + (** Represents the state of the sponge construction. It includes information like: + - the permutation parameters (linear layer, number of rounds, constants, etc) + - the sponge configuration + - an internal ID to differentiate each new instance. It is currently used only + for debugging purposes + *) + type t = f Sponge.t + (* TODO: Make this type abstract *) + + (** [create ?init params] creates a new sponge state and initializes the + permutation state with a fresh copy of [init]. If [init] is [None], the + initial permutation state is set to [F.zero]. *) val create : ?init:state -> params -> t + (** [make state params sponge_state] returns a new sponge state. The + permutation state is initialized to a fresh copy of [state]. [params] are + the parameters for the internal permutation. *) val make : state:state -> params:params -> sponge_state:Sponge.sponge_state -> t + (** [absorb state x] "absorbs" the field element [x] into the sponge state [state]. + The sponge state [state] is modified *) val absorb : t -> f -> unit + (** [squeeze state] squeezes the sponge state [state].
+ The sponge state [state] is modified *) val squeeze : t -> f + (** [copy state] returns a fresh copy of the sponge state [state] *) val copy : t -> t + (** [state sponge_state] returns a fresh copy of the permutation state + contained in the sponge state [sponge_state] *) val state : t -> state end @@ -75,6 +114,9 @@ module type S = sig val squeeze_field : t -> field end + (** [digest sponge_params elmts] is equivalent to absorbing each element of + the array [elmts] one by one, followed by a call to squeeze with a sponge + construction instantiated with the parameters [sponge_params] *) val digest : field Sponge.Params.t -> field array @@ -88,15 +130,15 @@ module Make (Field : Kimchi_backend.Field.S) : module Test (Impl : Snarky_backendless.Snark_intf.Run) - (S_constant : Sponge.Intf.Sponge - with module Field := Impl.Field.Constant - and module State := Sponge.State - and type input := Impl.field - and type digest := Impl.field) - (S_checked : Sponge.Intf.Sponge - with module Field := Impl.Field - and module State := Sponge.State - and type input := Impl.Field.t - and type digest := Impl.Field.t) : sig + (_ : Sponge.Intf.Sponge + with module Field := Impl.Field.Constant + and module State := Sponge.State + and type input := Impl.field + and type digest := Impl.field) + (_ : Sponge.Intf.Sponge + with module Field := Impl.Field + and module State := Sponge.State + and type input := Impl.Field.t + and type digest := Impl.Field.t) : sig val test : Impl.Field.Constant.t Sponge.Params.t -> unit end diff --git a/src/lib/pickles/one_hot_vector/dune b/src/lib/pickles/one_hot_vector/dune index f2bc6d65b56..55bf72717f0 100644 --- a/src/lib/pickles/one_hot_vector/dune +++ b/src/lib/pickles/one_hot_vector/dune @@ -1,6 +1,11 @@ (library (name one_hot_vector) (public_name pickles.one_hot_vector) + (flags + (:standard + -w +a-40..42-44 + -warn-error +a) + -open Core_kernel) (instrumentation (backend bisect_ppx)) (preprocess (pps ppx_version ppx_jane)) (libraries diff --git a/src/lib/pickles/one_hot_vector/one_hot_vector.mli b/src/lib/pickles/one_hot_vector/one_hot_vector.mli index 7591d259399..8fd9f3ce3d4 100644 --- a/src/lib/pickles/one_hot_vector/one_hot_vector.mli +++ b/src/lib/pickles/one_hot_vector/one_hot_vector.mli @@ -1,26 +1,51 @@ +(** Represents a typed-size one-hot vector, which is a list of bits where exactly + one bit is one. As usual in Pickles, we encode the length at the type + level and a type parameter is used to represent the finite field the vector bits + are encoded in. Booleans are used to represent [0] and [1]. + + More information can be found on the {{ + https://en.wikipedia.org/wiki/One-hot } Wikipedia article }. +*) + open Pickles_types module Constant : sig type t = int end +(** Represents a one-hot vector of length ['n]. The type parameter ['f] is used + to encode the field the vector lives in. For instance, if we want to + represent the one-hot vector [0; 0; 1; 0; 0] in the finite field [F13], we + would use the type [(F13.t, Nat.N5) t]. To activate the third bit, we would + use the function [of_index] provided below. +*) type ('f, 'n) t = private ('f Snarky_backendless.Cvar.t Snarky_backendless.Boolean.t, 'n) Vector.t +(** Concrete instance of a one-hot vector using an implementation [Impl] + TODO: why don't we merge this functor with {!Make}?
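+
+    For instance, with the {!Make} functor below (a sketch, for some snarky
+    implementation [Impl]):
+{[
+  module V = Make (Impl)
+
+  (* the one-hot vector [0; 0; 1; 0; 0]: only bit 2 is set *)
+  let v : Pickles_types.Nat.N5.n V.t =
+    V.of_index (Impl.Field.of_int 2) ~length:Pickles_types.Nat.N5.n
+]}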
*) module T (Impl : Snarky_backendless.Snark_intf.Run) : sig type nonrec 'n t = (Impl.field, 'n) t end +(** Concrete instance of a one-hot vector with some helpers *) module Make (Impl : Snarky_backendless.Snark_intf.Run) : sig - open Impl module Constant = Constant type 'n t = 'n T(Impl).t - val of_index : Field.t -> length:'n Nat.t -> 'n t + (** [of_index f_idx t_n] creates a one-hot vector of size [t_n] in which only the + index [f_idx] is set to [true]. + For instance, if we suppose [F13] is the finite field of order [13], the + one-hot vector [0; 0; 1; 0; 0] in [F13] can be created + using [of_index (F13.of_int 2) Nat.N5] *) + val of_index : Impl.Field.t -> length:'n Nat.t -> 'n t + (** [of_vector_unsafe v] creates a one-hot vector from an [n]-long vector. + However, the function does not check that exactly one bit is set to [1]. + Use {!of_index} for the safe version *) val of_vector_unsafe : (Impl.Boolean.var, 'n) Vector.t -> 'n t - val typ : 'n Nat.t -> ('n t, Constant.t) Typ.t + val typ : 'n Nat.t -> ('n t, Constant.t) Impl.Typ.t end diff --git a/src/lib/pickles/opt_sponge.ml b/src/lib/pickles/opt_sponge.ml index cf1f93790f1..9a026b7eb12 100644 --- a/src/lib/pickles/opt_sponge.ml +++ b/src/lib/pickles/opt_sponge.ml @@ -21,7 +21,7 @@ type 'f sponge_state = type 'f t = { mutable state : 'f array ; params : 'f Sponge.Params.t - ; needs_final_permute_if_empty : bool + ; mutable needs_final_permute_if_empty : bool ; mutable sponge_state : 'f sponge_state } @@ -34,7 +34,7 @@ struct type nonrec t = Field.t t - let state { state; _ } = Array.copy state + let _state { state; _ } = Array.copy state let copy { state; params; sponge_state; needs_final_permute_if_empty } = { state = Array.copy state @@ -45,15 +45,15 @@ struct let initial_state = Array.init m ~f:(fun _ -> Field.zero) - let of_sponge { Sponge.state; params; sponge_state } = + let of_sponge { Sponge.state; params; sponge_state; id = _ } = match sponge_state with - | Squeezed n -> + | Sponge.Squeezed n -> { sponge_state = Squeezed n ; state = Array.copy state ; needs_final_permute_if_empty = true ; params } - | Absorbed n -> ( + | Sponge.Absorbed n -> ( let abs i = { sponge_state = Absorbing { next_index = i; xs = [] } ; state = Array.copy state @@ -104,102 +104,105 @@ struct assert_r1cs x (i_equals_j :> Field.t) Field.(a_j' - a.(j)) ; a.(j) <- a_j' ) + let cond_permute ~params ~permute state = + let permuted = P.block_cipher params (Array.copy state) in + for i = 0 to m - 1 do + state.(i) <- Field.if_ permute ~then_:permuted.(i) ~else_:state.(i) + done + + let consume_pairs ~params ~state ~pos:start_pos pairs = + Array.fold ~init:start_pos pairs ~f:(fun p ((b, x), (b', y)) -> + (* Semantically, we want to do this.
+ match b, b' with + | 1, 1 -> + if p = 0 + then state := perm {state with .0 += x, .1 += y } + else state := {perm {state with .1 += x} with .0 += y} + | 1, 0 -> + if p = 0 + then state := {state with .0 += x} + else state := perm {state with .1 += x} + | 0, 1 -> + if p = 0 + then state := {state with .0 += y } + else state := perm {state with .1 += y} + | 0, 0 -> + state + *) + let p' = Boolean.( lxor ) p b in + let pos_after = Boolean.( lxor ) p' b' in + let y = Field.(y * (b' :> t)) in + let add_in_y_after_perm = + (* post + add in + (1, 1, 1) + + do not add in + (1, 1, 0) + (0, 1, 0) + (0, 1, 1) + + (1, 0, 0) + (1, 0, 1) + (0, 0, 0) + (0, 0, 1) + *) + (* Only one case where we add in y after the permutation is applied *) + Boolean.all [ b; b'; p ] + in + let add_in_y_before_perm = Boolean.not add_in_y_after_perm in + add_in state p Field.(x * (b :> t)) ; + add_in state p' Field.(y * (add_in_y_before_perm :> t)) ; + let permute = + (* (b, b', p) + true: + (0, 1, 1) + (1, 0, 1) + (1, 1, 0) + (1, 1, 1) + + false: + (0, 0, 0) + (0, 0, 1) + (0, 1, 0) + (1, 0, 0) + *) + (* (b && b') || (p && (b || b')) *) + Boolean.(any [ all [ b; b' ]; all [ p; b ||| b' ] ]) + in + cond_permute ~params ~permute state ; + add_in state p' Field.(y * (add_in_y_after_perm :> t)) ; + pos_after ) + let consume ~needs_final_permute_if_empty ~params ~start_pos input state = assert (Array.length state = m) ; let n = Array.length input in - let pos = ref start_pos in - let cond_permute permute = - let permuted = P.block_cipher params (Array.copy state) in - for i = 0 to m - 1 do - state.(i) <- Field.if_ permute ~then_:permuted.(i) ~else_:state.(i) - done + let num_pairs = n / 2 in + let remaining = n - (2 * num_pairs) in + let pairs = + Array.init num_pairs ~f:(fun i -> (input.(2 * i), input.((2 * i) + 1))) in - let pairs = n / 2 in - let remaining = n - (2 * pairs) in - for i = 0 to pairs - 1 do - (* Semantically, we want to do this. 
- match b, b' with - | 1, 1 -> - if p = 0 - then state := perm {state with .0 += x, .1 += y } - else state := {perm {state with .1 += x} with .0 += y} - | 1, 0 -> - if p = 0 - then state := {state with .0 += x} - else state := perm {state with .1 += x} - | 0, 1 -> - if p = 0 - then state := {state with .0 += y } - else state := perm {state with .1 += y} - | 0, 0 -> - state - *) - let b, x = input.(2 * i) in - let b', y = input.((2 * i) + 1) in - let p = !pos in - let p' = Boolean.( lxor ) p b in - pos := Boolean.( lxor ) p' b' ; - let y = Field.(y * (b' :> t)) in - let add_in_y_after_perm = - (* post - add in - (1, 1, 1) - - do not add in - (1, 1, 0) - (0, 1, 0) - (0, 1, 1) - - (1, 0, 0) - (1, 0, 1) - (0, 0, 0) - (0, 0, 1) - *) - (* Only one case where we add in y after the permutation is applied *) - Boolean.all [ b; b'; p ] - in - let add_in_y_before_perm = Boolean.not add_in_y_after_perm in - add_in state p Field.(x * (b :> t)) ; - add_in state p' Field.(y * (add_in_y_before_perm :> t)) ; - let permute = - (* (b, b', p) - true: - (0, 1, 1) - (1, 0, 1) - (1, 1, 0) - (1, 1, 1) - - false: - (0, 0, 0) - (0, 0, 1) - (0, 1, 0) - (1, 0, 0) - *) - (* (b && b') || (p && (b || b')) *) - Boolean.(any [ all [ b; b' ]; all [ p; b ||| b' ] ]) - in - cond_permute permute ; - add_in state p' Field.(y * (add_in_y_after_perm :> t)) - done ; + let pos = consume_pairs ~params ~state ~pos:start_pos pairs in let empty_imput = Boolean.not (Boolean.Array.any (Array.map input ~f:fst)) in let should_permute = match remaining with | 0 -> - if needs_final_permute_if_empty then Boolean.(empty_imput ||| !pos) - else !pos + if needs_final_permute_if_empty then Boolean.(empty_imput ||| pos) + else pos | 1 -> let b, x = input.(n - 1) in - let p = !pos in - pos := Boolean.( lxor ) p b ; + let p = pos in + let pos_after = Boolean.( lxor ) p b in + ignore (pos_after : Boolean.var) ; add_in state p Field.(x * (b :> t)) ; if needs_final_permute_if_empty then Boolean.any [ p; b; empty_imput ] else Boolean.any [ p; b ] | _ -> assert false in - cond_permute should_permute + cond_permute ~params ~permute:should_permute state let absorb (t : t) x = match t.sponge_state with @@ -221,77 +224,65 @@ struct | Absorbing { next_index; xs } -> consume ~needs_final_permute_if_empty:t.needs_final_permute_if_empty ~start_pos:next_index ~params:t.params (Array.of_list_rev xs) t.state ; + t.needs_final_permute_if_empty <- true ; t.sponge_state <- Squeezed 1 ; t.state.(0) - let%test_module "opt_sponge" = - ( module struct - module S = Sponge.Make_sponge (P) - - let%test_unit "correctness" = - let params : _ Sponge.Params.t = - let a () = - Array.init 3 ~f:(fun _ -> Field.(constant (Constant.random ()))) - in - { mds = Array.init 3 ~f:(fun _ -> a ()) - ; round_constants = Array.init 40 ~f:(fun _ -> a ()) - } + let consume_all_pending (t : t) = + match t.sponge_state with + | Squeezed _ -> + failwith "Nothing pending" + | Absorbing { next_index; xs } -> + let input = Array.of_list_rev xs in + assert (Array.length t.state = m) ; + let n = Array.length input in + let num_pairs = n / 2 in + let remaining = n - (2 * num_pairs) in + let pairs = + Array.init num_pairs ~f:(fun i -> + (input.(2 * i), input.((2 * i) + 1)) ) + in + let pos = + consume_pairs ~params:t.params ~state:t.state ~pos:next_index pairs in - let gen = - let open Quickcheck.Generator.Let_syntax in - let%bind n = Quickcheck.Generator.small_positive_int - and n_pre = Quickcheck.Generator.small_positive_int in - let%map xs = List.gen_with_length n Field.Constant.gen - and bs = 
List.gen_with_length n Bool.quickcheck_generator - and pre = List.gen_with_length n_pre Field.Constant.gen in - (pre, List.zip_exn bs xs) + let pos_after = + if remaining = 1 then ( + let b, x = input.(n - 1) in + let p = pos in + let pos_after = Boolean.( lxor ) p b in + add_in t.state p Field.(x * (b :> t)) ; + pos_after ) + else pos in - Quickcheck.test gen ~trials:10 ~f:(fun (pre, ps) -> - let filtered = - List.filter_map ps ~f:(fun (b, x) -> if b then Some x else None) - in - let init () = - let pre = - exists - (Typ.list ~length:(List.length pre) Field.typ) - ~compute:(fun () -> pre) - in - let s = S.create params in - List.iter pre ~f:(S.absorb s) ; - s - in - let filtered_res = - let n = List.length filtered in - Impl.Internal_Basic.Test.checked_to_unchecked - (Typ.list ~length:n Field.typ) - Field.typ - (fun xs -> - make_checked (fun () -> - let s = init () in - List.iter xs ~f:(S.absorb s) ; - S.squeeze s ) ) - filtered - in - let opt_res = - let n = List.length ps in - Impl.Internal_Basic.Test.checked_to_unchecked - (Typ.list ~length:n (Typ.tuple2 Boolean.typ Field.typ)) - Field.typ - (fun xs -> - make_checked (fun () -> - let s = - if List.length pre = 0 then create params - else of_sponge (init ()) - in - List.iter xs ~f:(absorb s) ; - squeeze s ) ) - ps - in - if not (Field.Constant.equal filtered_res opt_res) then - failwithf - !"hash(%{sexp:Field.Constant.t list}) = %{sexp:Field.Constant.t}\n\ - hash(%{sexp:(bool * Field.Constant.t) list}) = \ - %{sexp:Field.Constant.t}" - filtered filtered_res ps opt_res () ) - end ) + (* TODO: We should propagate the emptiness state of the pairs, + otherwise this will break in some edge cases. + *) + t.sponge_state <- Absorbing { next_index = pos_after; xs = [] } + + let recombine ~original_sponge b (t : t) = + match[@warning "-4"] (original_sponge.sponge_state, t.sponge_state) with + | Squeezed orig_i, Squeezed curr_i -> + if orig_i <> curr_i then failwithf "Squeezed %i vs %i" orig_i curr_i () ; + Array.iteri original_sponge.state ~f:(fun i x -> + t.state.(i) <- Field.if_ b ~then_:t.state.(i) ~else_:x ) + | ( Absorbing { next_index = next_index_orig; xs = xs_orig } + , Absorbing { next_index = next_index_curr; xs = xs_curr } ) -> + (* TODO: Should test for full equality here, if we want to catch all + sponge misuses. + OTOH, if you're using this sponge then you'd better know what it's + doing.. 
+ *) + if List.length xs_orig <> List.length xs_curr then + failwithf "Pending absorptions %i vs %i" (List.length xs_orig) + (List.length xs_curr) () ; + Array.iteri original_sponge.state ~f:(fun i x -> + t.state.(i) <- Field.if_ b ~then_:t.state.(i) ~else_:x ) ; + t.sponge_state <- + Absorbing + { next_index = + Boolean.if_ b ~then_:next_index_curr ~else_:next_index_orig + ; xs = xs_curr + } + | _, _ -> + failwith "Incompatible states" end diff --git a/src/lib/pickles/opt_sponge.mli b/src/lib/pickles/opt_sponge.mli index c7aeed6af15..f93844ee485 100644 --- a/src/lib/pickles/opt_sponge.mli +++ b/src/lib/pickles/opt_sponge.mli @@ -8,21 +8,57 @@ type 'f sponge_state = type 'f t = { mutable state : 'f array ; params : 'f Sponge.Params.t - ; needs_final_permute_if_empty : bool + ; mutable needs_final_permute_if_empty : bool ; mutable sponge_state : 'f sponge_state } module Make (Impl : Snarky_backendless.Snark_intf.Run) - (P : Sponge.Intf.Permutation with type Field.t = Impl.Field.t) : sig + (_ : Sponge.Intf.Permutation with type Field.t = Impl.Field.t) : sig type nonrec t = Impl.Field.t t val create : ?init:Impl.Field.t array -> Impl.Field.t Sponge.Params.t -> t + (** Create a new sponge with state copied from the given sponge. + In particular, this copies the underlying state array, so that any + mutations to the copy will not affect the original. + *) + val copy : t -> t + val of_sponge : Impl.Field.t Sponge.t -> t val absorb : t -> Impl.Field.t Snarky_backendless.Boolean.t * Impl.Field.t -> unit val squeeze : t -> Impl.Field.t + + (** Updates the sponge state by forcing absorption of all 'pending' field + elements passed to [absorb]. + This method runs logic equivalent to that in the [squeeze] method, but + without transitioning the state. + This method can be used with [copy] to create a fork of a sponge where + one of the branches calls the [absorb] method and the other does not. + *) + val consume_all_pending : t -> unit + + (** Recombines a forked copy of a sponge with the original. + When the boolean value is true, the sponge state will be preserved; + otherwise it will be overwritten by the state of the original sponge. + + When an optional [squeeze] has ocurred, both the original and forked + sponges have called [consume_all_pending] before the squeeze, and must + subsequently absorb the same value or values, to bring their internal + states back into alignment. 1 value is sufficient, but it is slightly + more efficient to absorb 2. + + This enables optional absorption for sponges. 
For example: +{[ + let original_sponge = Opt_sponge.copy sponge in + let squeezed = Opt_sponge.squeeze sponge in + Opt_sponge.absorb sponge x_opt ; + Opt_sponge.absorb original_sponge x_opt ; + Opt_sponge.recombine ~original_sponge b sponge +]} + *) + val recombine : original_sponge:t -> Impl.Boolean.var -> t -> unit end diff --git a/src/lib/pickles/per_proof_witness.ml b/src/lib/pickles/per_proof_witness.ml index 4a932943fd6..b69f7bcf590 100644 --- a/src/lib/pickles/per_proof_witness.ml +++ b/src/lib/pickles/per_proof_witness.ml @@ -62,11 +62,8 @@ type ('app_state, 'max_proofs_verified, 'num_branches) t = , Impl.Field.t Shifted_value.Type1.t , ( Impl.Field.t Pickles_types.Shifted_value.Type1.t , Impl.Boolean.var ) - Plonk_types.Opt.t - , ( scalar_challenge - Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.Lookup.t - , Impl.Boolean.var ) - Plonk_types.Opt.t + Opt.t + , (scalar_challenge, Impl.Boolean.var) Opt.t , Impl.Boolean.var , unit , Digest.Make(Impl).t @@ -104,13 +101,11 @@ module No_app_state = struct end module Constant = struct - open Kimchi_backend - type challenge = Challenge.Constant.t type scalar_challenge = challenge Scalar_challenge.t - type ('statement, 'max_proofs_verified, _) t = + type ('statement, 'max_proofs_verified) t = { app_state : 'statement ; wrap_proof : Wrap_proof.Constant.t ; proof_state : @@ -118,9 +113,7 @@ module Constant = struct , scalar_challenge , Tick.Field.t Shifted_value.Type1.t , Tick.Field.t Shifted_value.Type1.t option - , scalar_challenge - Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.Lookup.t - option + , scalar_challenge option , bool , unit , Digest.Constant.t @@ -137,15 +130,13 @@ module Constant = struct [@@deriving hlist] module No_app_state = struct - type nonrec (_, 'max_proofs_verified, 'num_branches) t = - (unit, 'max_proofs_verified, 'num_branches) t + type nonrec (_, 'max_proofs_verified, _) t = (unit, 'max_proofs_verified) t end end -let typ (type n avar aval m) ~feature_flags +let typ (type n avar aval) ~feature_flags ~num_chunks (statement : (avar, aval) Impls.Step.Typ.t) (max_proofs_verified : n Nat.t) - (branches : m Nat.t) : - ((avar, n, m) t, (aval, n, m) Constant.t) Impls.Step.Typ.t = + = let module Sc = Scalar_challenge in let open Impls.Step in let open Step_main_inputs in @@ -158,7 +149,6 @@ let typ (type n avar aval m) ~feature_flags ; Types.Wrap.Proof_state.In_circuit.typ (module Impl) ~challenge:Challenge.typ ~scalar_challenge:Challenge.typ ~feature_flags - ~dummy_scalar:(Shifted_value.Type1.Shifted_value Field.Constant.zero) ~dummy_scalar_challenge:(Sc.create Limb_vector.Challenge.Constant.zero) (Shifted_value.Type1.typ Field.typ) (Snarky_backendless.Typ.unit ()) @@ -166,7 +156,7 @@ let typ (type n avar aval m) ~feature_flags (Branch_data.typ (module Impl) ~assert_16_bits:(Step_verifier.assert_n_bits ~n:16) ) - ; Plonk_types.All_evals.typ + ; Plonk_types.All_evals.typ ~num_chunks (module Impl) (* Assume we have lookup iff we have runtime tables *) feature_flags diff --git a/src/lib/pickles/per_proof_witness.mli b/src/lib/pickles/per_proof_witness.mli index e4302655bdb..61ec4cdeb05 100644 --- a/src/lib/pickles/per_proof_witness.mli +++ b/src/lib/pickles/per_proof_witness.mli @@ -1,3 +1,5 @@ +(** The information required to recursively verify a Pickles proof. 
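+
+    Concretely, it packages the application state, the wrap proof itself, and
+    the proof state carrying the deferred values needed to finish verifying
+    that proof (see the record fields below).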
*) + open Pickles_types module Impl = Impls.Step @@ -26,12 +28,8 @@ type ('app_state, 'max_proofs_verified, 'num_branches) t = , Impl.Field.t Pickles_types.Shifted_value.Type1.t , ( Impl.Field.t Pickles_types.Shifted_value.Type1.t , Impl.Boolean.var ) - Pickles_types.Plonk_types.Opt.t - , ( scalar_challenge - Import.Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.Lookup - .t - , Impl.Boolean.var ) - Pickles_types.Plonk_types.Opt.t + Pickles_types.Opt.t + , (scalar_challenge, Impl.Boolean.var) Pickles_types.Opt.t , Impl.Boolean.var , unit , Import.Digest.Make(Impl).t @@ -78,7 +76,7 @@ module Constant : sig type scalar_challenge = challenge Import.Scalar_challenge.t - type ('statement, 'max_proofs_verified, _) t = + type ('statement, 'max_proofs_verified) t = { app_state : 'statement ; wrap_proof : Wrap_proof.Constant.t ; proof_state : @@ -86,10 +84,7 @@ module Constant : sig , scalar_challenge , Backend.Tick.Field.t Pickles_types.Shifted_value.Type1.t , Backend.Tick.Field.t Pickles_types.Shifted_value.Type1.t option - , scalar_challenge - Import.Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.Lookup - .t - option + , scalar_challenge option , bool , unit , Import.Digest.Constant.t @@ -112,14 +107,13 @@ module Constant : sig } module No_app_state : sig - type nonrec (_, 'max_proofs_verified, 'num_branches) t = - (unit, 'max_proofs_verified, 'num_branches) t + type nonrec (_, 'max_proofs_verified, _) t = (unit, 'max_proofs_verified) t end end val typ : - feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t + feature_flags:Opt.Flag.t Plonk_types.Features.Full.t + -> num_chunks:int -> ('avar, 'aval) Impl.Typ.t -> 'n Pickles_types.Nat.t - -> 'm Pickles_types.Nat.t - -> (('avar, 'n, 'm) t, ('aval, 'n, 'm) Constant.t) Impl.Typ.t + -> (('avar, 'n, _) t, ('aval, 'n) Constant.t) Impl.Typ.t diff --git a/src/lib/pickles/pickles.ml b/src/lib/pickles/pickles.ml index 70f99f21074..d353a8e6ea9 100644 --- a/src/lib/pickles/pickles.ml +++ b/src/lib/pickles/pickles.ml @@ -28,7 +28,6 @@ module Make_str (_ : Wire_types.Concrete) = struct open Async_kernel open Import open Pickles_types - open Poly_types open Hlist open Common open Backend @@ -45,8 +44,12 @@ module Make_str (_ : Wire_types.Concrete) = struct module Step_main_inputs = Step_main_inputs module Step_verifier = Step_verifier module Proof_cache = Proof_cache + module Cache = Cache + module Storables = Compile.Storables + module Ro = Ro - exception Return_digest = Compile.Return_digest + type chunking_data = Verify.Instance.chunking_data = + { num_chunks : int; domain_size : int; zk_rows : int } let verify_promise = Verify.verify @@ -77,6 +80,21 @@ module Make_str (_ : Wire_types.Concrete) = struct where f is a snarky function defined over an Impl with Field.t = Fp and each Ai is itself an inductive rule (possibly equal to A itself). + a1, ..., an can be seen as previous statements, i.e. + + prev_statement_1 ∈ A1, ..., prev_statement_n ∈ An + f [ prev_statement_1; ...; prev_statement_n ] new_statement = true + ------------------------------------------------------------------- + new_statement ∈ A + + In the case of a blockchain, the description of the sets A1, ..., An, A can + be blockchain state, and f would be a function updating the state: + + prev_blockchain_state ∈ A + update_blockchain_state [prev_blockchain_state] new_blockchain_state = true + --------------------------------------------------------------------------- + new_blockchain_state ∈ A + We pursue the "step" then "wrap" approach for proof composition. 
The main source of complexity is that we must "wrap" proofs whose verifiers are @@ -89,7 +107,7 @@ module Make_str (_ : Wire_types.Concrete) = struct Schematically, from the circuit point-of-view, we can say a proof is - a sequence of F_0 elements xs_0 - - a sequence of F_1 elelements xs_1 + - a sequence of F_1 elements xs_1 and a verifier is a pair of "snarky functions" - check_0 : F_0 list -> F_1 list -> unit which uses the Impl with Field.t = F_0 - check_1 : F_0 list -> F_1 list -> unit which uses the Impl with Field.t = F_1 @@ -210,9 +228,11 @@ module Make_str (_ : Wire_types.Concrete) = struct ~log_2_domain_size:(Lazy.force d.wrap_vk).domain.log_size_of_group in { wrap_vk = Some (Lazy.force d.wrap_vk) - ; wrap_index = Lazy.force d.wrap_key + ; wrap_index = + Plonk_verification_key_evals.map (Lazy.force d.wrap_key) + ~f:(fun x -> x.(0)) ; max_proofs_verified = - Pickles_base.Proofs_verified.of_nat + Pickles_base.Proofs_verified.of_nat_exn (Nat.Add.n d.max_proofs_verified) ; actual_wrap_domain_size } @@ -230,7 +250,10 @@ module Make_str (_ : Wire_types.Concrete) = struct { max_proofs_verified ; public_input = typ ; branches = Verification_key.Max_branches.n - ; feature_flags + ; feature_flags = + Plonk_types.(Features.to_full ~or_:Opt.Flag.( ||| ) feature_flags) + ; num_chunks = 1 + ; zk_rows = 3 } module Proof = struct @@ -274,7 +297,7 @@ module Make_str (_ : Wire_types.Concrete) = struct { constraints = 0 } } in - Verify.Instance.T (max_proofs_verified, m, vk, x, p) ) + Verify.Instance.T (max_proofs_verified, m, None, vk, x, p) ) |> Verify.verify_heterogenous ) let verify ~typ ts = verify_promise ~typ ts |> Promise.to_deferred @@ -289,25 +312,24 @@ module Make_str (_ : Wire_types.Concrete) = struct let compile_with_wrap_main_override_promise = Compile.compile_with_wrap_main_override_promise - let compile_promise ?self ?cache ?proof_cache ?disk_keys - ?return_early_digest_exception ?override_wrap_domain ~public_input - ~auxiliary_typ ~branches ~max_proofs_verified ~name ~constraint_constants - ~choices () = - compile_with_wrap_main_override_promise ?self ?cache ?proof_cache ?disk_keys - ?return_early_digest_exception ?override_wrap_domain ~public_input - ~auxiliary_typ ~branches ~max_proofs_verified ~name ~constraint_constants + let compile_promise ?self ?cache ?storables ?proof_cache ?disk_keys + ?override_wrap_domain ?num_chunks ~public_input ~auxiliary_typ ~branches + ~max_proofs_verified ~name ?constraint_constants ?commits ~choices () = + compile_with_wrap_main_override_promise ?self ?cache ?storables ?proof_cache + ?disk_keys ?override_wrap_domain ?num_chunks ~public_input ~auxiliary_typ + ~branches ~max_proofs_verified ~name ?constraint_constants ?commits ~choices () - let compile ?self ?cache ?proof_cache ?disk_keys ?override_wrap_domain - ~public_input ~auxiliary_typ ~branches ~max_proofs_verified ~name - ~constraint_constants ~choices () = + let compile ?self ?cache ?storables ?proof_cache ?disk_keys + ?override_wrap_domain ?num_chunks ~public_input ~auxiliary_typ ~branches + ~max_proofs_verified ~name ?constraint_constants ?commits ~choices () = let self, cache_handle, proof_module, provers = - compile_promise ?self ?cache ?proof_cache ?disk_keys ?override_wrap_domain - ~public_input ~auxiliary_typ ~branches ~max_proofs_verified ~name - ~constraint_constants ~choices () + compile_promise ?self ?cache ?storables ?proof_cache ?disk_keys + ?override_wrap_domain ?num_chunks ~public_input ~auxiliary_typ ~branches + ~max_proofs_verified ~name ?constraint_constants ?commits 
~choices () in let rec adjust_provers : - type a1 a2 a3 a4 s1 s2_inner. + type a1 a2 a3 s1 s2_inner. (a1, a2, a3, s1, s2_inner Promise.t) H3_2.T(Prover).t -> (a1, a2, a3, s1, s2_inner Deferred.t) H3_2.T(Prover).t = function | [] -> @@ -330,60 +352,10 @@ module Make_str (_ : Wire_types.Concrete) = struct let () = Backtrace.elide := false - (* - let%test_unit "test deserialization and verification for side-loaded keys" = - Side_loaded.srs_precomputation () ; - let pi = - match - "((statement((proof_state((deferred_values((plonk((alpha((inner(528cdbf16708a53a 1f90a7eea2e06f6a))))(beta(617e5a7fd6be366a de197122a4415175))(gamma(3ca35d4452181c99 10f08540ba61b0ec))(zeta((inner(9b9cb3eb89f99860 fc3f2a556b3da3b8))))))(combined_inner_product(Shifted_value 0x0823E56793B5592614ADBA4D0E5FE7182C36059EFA67B62FF348C298025ED3B1))(b(Shifted_value 0x15E6E583088F3839A0A24BA096058D311F80E63DC7C5F96961EDF0E8431B3A89))(xi((inner(5c78b5106dc9196b dd923608f3a2d7c7))))(bulletproof_challenges(((prechallenge((inner(0277f6aad9d835a5 d7ce644af1e0a623)))))((prechallenge((inner(7155c8ca270890a9 88210ef505d7463a)))))((prechallenge((inner(66dd09cf8c767ca8 49a1f3f0d02c27d1)))))((prechallenge((inner(b3af5b7fff77d3dd 7e3d53b2d699d210)))))((prechallenge((inner(1a7034722f39c680 dac0b920710a3baf)))))((prechallenge((inner(31a391963a1eda22 1768f963fda30dbd)))))((prechallenge((inner(ca6977bc2cd08f02 8cca080a3eea91de)))))((prechallenge((inner(ca1c4459c6db90e0 5dc97444224692b8)))))((prechallenge((inner(5a8691fe938d7766 ffa7b76d50545300)))))((prechallenge((inner(e28a6bd7885e2dce cffc710fd023fcf2)))))((prechallenge((inner(67c9cacdbec10152 dbbbb17446514cdc)))))((prechallenge((inner(b962690d4c6147fe 047ed2b6432ae9a8)))))((prechallenge((inner(247a3c026fd42a1f 30ffd3eb2e92f6e0)))))((prechallenge((inner(fb040a5f7ae1682a 67e88c03b46429fb)))))((prechallenge((inner(da7aaeb99142d490 e6df1ef2a27bd5dd)))))((prechallenge((inner(c9590ba2d65e774c 6513be976dbbd014)))))))(branch_data((proofs_verified N0)(domain_log2"\n")))))(sponge_digest_before_evaluations(345bca89a18be6eb 32b32bea9853ce51 e4b78bd09bbb68a5 0c6791fb08e0ca57))(me_only((challenge_polynomial_commitment(0x0F69B65A5855F3A398D0DFD0C131B9612C9F2061D2F06E676F612C48D827E1E6 0x0CCAF3F03F9D2C3C3D44E09A12106911FA699DF934F706E62131BAD639F30155))(old_bulletproof_challenges((((prechallenge((inner(3382b3c9ace6bf6f 79974358f9761863)))))((prechallenge((inner(dd3a2b06e9888797 dd7ae6402944a1c7)))))((prechallenge((inner(c6e8e530f49c9fcb 07ddbb65cda09cdd)))))((prechallenge((inner(532c59a287691a13 a921bcb02a656f7b)))))((prechallenge((inner(e29c77b18f10078b f85c5f00df6b0cee)))))((prechallenge((inner(1dbda72d07b09c87 4d1b97e2e95f26a0)))))((prechallenge((inner(9c75747c56805f11 a1fe6369facef1e8)))))((prechallenge((inner(5c2b8adfdbe9604d 5a8c718cf210f79b)))))((prechallenge((inner(22c0b35c51e06b48 a6888b7340a96ded)))))((prechallenge((inner(9007d7b55e76646e c1c68b39db4e8e12)))))((prechallenge((inner(4445e35e373f2bc9 9d40c715fc8ccde5)))))((prechallenge((inner(429882844bbcaa4e 97a927d7d0afb7bc)))))((prechallenge((inner(99ca3d5bfffd6e77 efe66a55155c4294)))))((prechallenge((inner(4b7db27121979954 951fa2e06193c840)))))((prechallenge((inner(2cd1ccbeb20747b3 5bd1de3cf264021d))))))(((prechallenge((inner(3382b3c9ace6bf6f 79974358f9761863)))))((prechallenge((inner(dd3a2b06e9888797 dd7ae6402944a1c7)))))((prechallenge((inner(c6e8e530f49c9fcb 07ddbb65cda09cdd)))))((prechallenge((inner(532c59a287691a13 a921bcb02a656f7b)))))((prechallenge((inner(e29c77b18f10078b f85c5f00df6b0cee)))))((prechallenge((inner(1dbda72d07b09c87 
4d1b97e2e95f26a0)))))((prechallenge((inner(9c75747c56805f11 a1fe6369facef1e8)))))((prechallenge((inner(5c2b8adfdbe9604d 5a8c718cf210f79b)))))((prechallenge((inner(22c0b35c51e06b48 a6888b7340a96ded)))))((prechallenge((inner(9007d7b55e76646e c1c68b39db4e8e12)))))((prechallenge((inner(4445e35e373f2bc9 9d40c715fc8ccde5)))))((prechallenge((inner(429882844bbcaa4e 97a927d7d0afb7bc)))))((prechallenge((inner(99ca3d5bfffd6e77 efe66a55155c4294)))))((prechallenge((inner(4b7db27121979954 951fa2e06193c840)))))((prechallenge((inner(2cd1ccbeb20747b3 5bd1de3cf264021d))))))))))))(pass_through((app_state())(challenge_polynomial_commitments())(old_bulletproof_challenges())))))(prev_evals((evals(((public_input 0x1D5050ABC193FD8288FE8B09DA9A2AA8C4A95E796C33DFB712E8CCAD7367266A)(evals((w((0x2C33C173BDE9340BE941ECD209AB6E99ECA8BDCA1CA8BDA8DAC3E40C3315F696)(0x02AE92968344D1F59603BA015C29D7801897F6B59E5ED43A3BEE316D6A876C3B)(0x3D4FDD240284F096B0D9CE405C2016E7CAE49931D0571F27DA7A3DDB202FC437)(0x1D8A9A17ABDFF5975832B2EA4AEB94BADE6346A554EB24A51B3ECFF560D33748)(0x36F806F043D8F33F7FD89730FB69E5DAF323E8637D23D95694665AB1B29AD194)(0x21CE677E9416783DBA7310E2813D200104A0328DCA5CF2C0E632BFD719951CBD)(0x0A3464D5ABBDF1C1FA6C3CF5C5328CBED7D1402AD4990AF2D07CF6958700E079)(0x3067923AF93C85BC677715F78EFEE2B765F4712D92A18DD691B2D617245D2837)(0x1D75E1CCE4165FD19BBF1D834F0366E3330A916F252801ACC2A9FCCFE19BB03F)(0x29793C3D311353444C4FCF2BF62299893F69FCEF0ADF7345310DB7E736C21758)(0x3F90E2448CAB2639788EEF0EDBD4F87463082EAE0C52F710E0A57B4238577C09)(0x3E19E9E45C6CFCF0F7036D3A598E26BC4C250AB45049E19A81EF3F9C68E7B09E)(0x31CF2FC45C59E45E5B16A9FA357972AEF1F744388C81C886B28BDBC55815CE45)(0x24B310A418CB5815513DCC5B4DBF4B2C64BD94A2D4764294EEBDF4C7DE1B1B08)(0x3E748B8BF7F3F63235B650DB873BD652BC58DB1C67C94AF03B218DB58CA0EA86)))(z(0x3FA6741D84E11434D7918A459AD1B698B8F3616E2A901B3B17E9E0BA8C229A95))(s((0x2160285A788031C45B0E041C0C3E1322E1330C718B709999E677E4C82C1A8DEC)(0x2CC1EE1154F527B33A011A5F816AFC3C219892D0CC3A25501A90183A2221B847)(0x299366D7BDB50CD278BDB43FFCC1AF66CFD6C821203F980AC22A9E0178612FCC)(0x04204759E7D8E84C1122BCF65008ABAC017DE7DAE44B7E479D30773966EB6B0A)(0x08D5ABDB38CEEA6D50FC378FCCEA65116C298EE0307D827FF67447502C5C5A23)(0x0AB1B1605C07FB055414308FD9D3872D1184AC436BF62BE06BF68A429EB806C8)))(generic_selector(0x2073E57ECA097CDB4349F56A96DD870EF4232F547624BFDD7AFFDF8407826000))(poseidon_selector(0x1412641F378B7B4AA2DF1B29573E3BBA2E092E74D48CC8CA03BFBD88755F5DD5)))))((public_input 
0x0EFC0CC4E8604CB4B337B237BB46916110F5604041669E38EB1712C78A86539D)(evals((w((0x30C812455D840F09B1A10D73E607F1CD23F0977E205946DD7252149C3C8EB2EB)(0x0301082FC85A85A5C5E48483CB231F64BE4E42ADB7AB73B973034F92C2008243)(0x1AC263C293B548E7862234481865A6C4255180F33D5FCB1E30360DC5AA4A8164)(0x2679B03901AA2A286DF1E2A90BC72A3AF57C13DD65B9BB11104DB9189ADBB970)(0x39F0CFE5133D4CC3B598F1F6EA16004661DF7BA6D1316C38E124C65EF4F21C95)(0x16457DFD6BF323BE13162779EB0F48CAD3AD8D4970E9E6433F275B2226CF99D9)(0x2AF436FE0FAF0CB905DD8202DDC42C09D1565CE415FD44F33178D94B1BF76127)(0x26A914F7D55AC312918D41FDA516342E929034C06D1970794C1156FF8690B0E6)(0x0BDDDB276B9CDF4B2C9B4C6B43F2F302D46E2A0104724D7779B714CC1C13D10C)(0x057C045F4DA7202317E4A47952BEF19D109475749FC8BF0ED9244FD6BDB20CC3)(0x3AD9805BE86345B3FE98367D2ADAAAF6A3B2A511B7011D354CC074BB0F0B618C)(0x0864BB2DF60F29BEBC8D55DEC2B6F199DF53CB650BD797D8C81AA7D39F7A494C)(0x375F21536B66E816DCFCE829495A7B429CA1EB658123DE8858B765DB26D1DC68)(0x34D1B59A33369350867EE0E538C68D693E19BD5F8F05FBDE52828A6AE39666CA)(0x381AD285334A7884690F3AB8412291FCB0D3357169C0F176D2A6DB8D2B3FC02B)))(z(0x2FB41536E4655C119BE5F0DED90391A8171C191B3A9764F765FBB6EBF2AABAC9))(s((0x3F5522A1D8A0ABFA8876B4185E9CA1F885663F55579C39F7352F981CB304CCEF)(0x2E0700D6F8A02C04B1DFE630896B59615F21C4B3B541E26EE63DBCFDFE59D658)(0x10F7327C833EB35B4499AD4A1EF0BCB66861822381DEB0CC6799E7182892BD26)(0x29AB8F4C7E256D2D7703E368F9101BED0215E08CEC87AA5499CFA7D1E9E11657)(0x16523DDF438ACF2C072DC7F00C41F1E3A5214761C77D2533970A9382B5B48D30)(0x0D684A460B348082F5EFB03F7A635B5358E52235821D36251D67644CE694ABC4)))(generic_selector(0x2B204B8595299D22CC83DE6E2A78D4AF39AAE8527FB4B297A35051F376AE10C6))(poseidon_selector(0x370C7DAC58DB1DAB1147DAA8BBF7EE1F1E2C2EAB64BEED885CA14FC86D786459)))))))(ft_eval1 0x0459DE9EA74B8CB38B54454FA0F59D375307B1210F7403526153D5CC1288DE63)))(proof((messages((w_comm(((0x3E2CF8FDB7F25CC3D523E88735CC8B0068A436A107D926977B4408955AFB5A7D 0x32CEE955EC5BFCF269A050C50C9ED868664F26AEDB4FCC96A2EB22C4E9031ACC))((0x2029F54CE3FE1255005DC6E0D56F45ED46D9294A2021AD7C4D9ECB9A2FC35DDC 0x20098E9EB437414F861C8BB5FDF3111EB3C6707DC156FFEE3F3B712FB67F4A2E))((0x1110AE3F05A3DF2FE4149EB7125B7CF315D01D6BFBDC4E1EBEA05ADD633470FD 0x30BAEF091C165B8FCFAFAA96C0FB9EB59A6FD98176897423043623AFB8DCB084))((0x3395D2993CCBB9C0A22BE321D70F5F01F39B83D78D7D3684DE7EFEF71C9EED94 0x3A99A078DA706F3C43B6C081DE5A09A69D2D308BA50B961CAC6A664E3D4E8E3E))((0x258C56FA32B555BFC3288F6EEAA11344E4430C51F3ED6A59F35F749F9FAF084E 0x1D47AC341EF7AA76F15F0239A494A541E018C113ACD62E87FAA7764E23251944))((0x2C04311B81ED29240DE9DA622C894323236DD623846E83C08309D1C552B06503 0x2438036EE7EF2EAEB9216A843692A2FA45F8B595107D9EA6C05523C8274DCDFE))((0x19C1DE13982583A22FAD04553082499C88055C0D707C09DC77650EBC14718F6C 0x2611B1FC721B8B73B109886E5A2960ABBC5A471472F2DE27F0B70989B0E640BF))((0x136550315A4440E22DB32906E3C7C955B96C735E4058F1AFF8BDCF75BE2324C8 0x34AB87A590CB4B9674F28A75F6CF92757E84E1649F32CABCBE0B76AED1A60E8D))((0x2EE8D5BEA4D460321B9BD1B58BD5F9EF76DF3D0DEBB015190D317C61C73584AC 0x3D330403E54BD189C55448170D59D6F9D3EF48C8095281F45588B92B6107535F))((0x370E23375707B4E74486415A153CB1F0111C2B950C87178FA85891CCAB0D3D8A 0x0E75C598E63688217BEFBB5DCA020433CE158D4F8070C63982275F8261A3CE95))((0x2EFA160350CC4282EE06AF463EC8CA6980AF07983A42B6275E42FC4AA6E685C8 0x0EECA9EDB51265182CBEC10EF3B0AAF81EFB53E9B919940194C2726B9A785D1C))((0x27FE69FF4A716E2DF1389CFCD4C425B050C00931CDD123C0C5BEA7DFFDD3D603 
0x122E05931206355AAB60DBAE077D490887DD1CAA599BAC05458BC3F41428CBB6))((0x3663E1C1C27C6F163AB552E83B21FDDC5EBAA3B735EFFFE38BAE99B01D71D037 0x2C46C91336CE381F3900BD2A80C2B36A6BC90C5D53A579E02240BBABB2018E60))((0x26667E23A0085FDDA970D4CDC78D6A4D9C9F003061F40F5AE8F81986C0D6D260 0x2B05A9F120DAAA355F54E8D0B96A78A674898FB1830A4EB71356137C8984BDA5))((0x105D2491EEAE03D1AA4AD8908412F3ED0B988A43C4F33C81581C3A60FEE9721F 0x2DBAAD56BFA1DCDDE5CFE40480C8E8E57E0093FEB153D9D4F983407B3EA91412))))(z_comm((0x029EE7F64D3FFF1F6920D6F009304C2C8F9ABF2B769ACD69F7F78201A09F10BB 0x301449483BF3A688552192934E10391D7BE97E54BEB26F7A3F3B1A2443CA07EC)))(t_comm((0x27ED056E28864693AB1653F62ADF5C6F47DCCD070EF16A2E911283224015921E 0x1077284DD15F99143EFACBA85D3DD63608F222CD6D7CF7A793DFC64390B7DBD8)(0x07A10F95A4F55597F66C3C92BBF9D69A23C6EE86CE2C864FC0A35FB199980B89 0x2BC564EC06B8B7052F469C3EC74ADD32C1C713EFA19F26102E7C73520F90ED2C)(0x3F30E96C3D5A23170F94895565422C6D54B8C8594D154CB495BD808941848C21 0x17F853D3C5869042C600C71720610A21DD057D689A34CF08E6A7054B1BDDD70C)(0x0C27FA8D2829BCBDD90E245677394DF7151F7C4E94D95832962D7187FEB33432 0x0442C73BC7C37791DA9CE0BE6332F69166EF6E6F651E23D859207B1FADF9E1A9)(0x039B920067F59B3458F8CFA660BC585B705826906B88893B88CADE19930604C4 0x33AAA622113A14BB1408538B38CA5157BCC835546BC081BA2D39E5A636F7854B)(0x0E76AEE47485073ADB66E8827B7F11C99B74F5D360AF12C326DEBFF457ABB298 0x15D7F59BD6BD0E49B36BAE1A8E17073FAD3442B8268D50D327E87CD4374C9E2E)(0x24B17C42758CD977DA31A5D619D0B0CC885A074F13DF1B0D9036A5BE962FAA66 0x33ABF75964D4318F21AA7F3C889EA88C495E1322B29C81646C90190626AF93A0)))))(openings((proof((lr(((0x018E82B85F43380E32CEDAD571886DCDB651FD16C54AFACC8A5F0FCA1A35D77A 0x07558C8DE9362826F52ED1FC9F3FAC3E60BE6BF9A693F1A960CB2F54BF9AD308)(0x2DD34ADF732340CE166A3989C2863E00AA20EE8DD3681A6FC47948DDC2291919 0x39EFB3592924CF49F45D5B471ACD66BD6A9D72C7F034EC75703740737E068FF9))((0x05DD7845B0D19212ACDF666DD90F309999BF28719B2A1F70B228AF5D3E59A633 0x207799AB420155C6FFECDB3538B0EF2259EEF776A33A781AC4F3EF6BCEE60700)(0x3AAFC4E24A25D2AFF714F0008F2465496C62EB6C1F7562E605C38EC59DBDBC67 0x378F5BACCE5C4BD6FEF8630F68C439F8FE986F218A562B1EC054E07FC5824B59))((0x38E608E6C866AD1C61BC6F250A0AD7761B71C6E5E0F7A065F01B7B2F4F485D18 0x2F1CFCEE96584F592CDE05B0B3F936A8D1FB603A2984EECB1DB042BA6D81A6D9)(0x07AD6181A8E32C3898B06BF092E28D1C8E928293125609033979AEDDB9116BCE 0x35287F7AA2300ECA1CC58AE8141AB97411E00F61C65F5B1A98A58EF5918C363B))((0x3461FACE1BEB85F605E72FAF9A3C804CC3BF82FC2094583528F0C7EBA74DFB48 0x2212015E8CA2965FE0E8A4A06838CEDDED1EA531A139F5CFD1588DB5736381C3)(0x0DE143977BA8B3FC93D25434EEDA4921E8BDE5AD59E1181E6B456B4309057F08 0x24B094D4AC456EC3F55D46830F4E82BF07312A1FAA97D9138BF41F16F7E23A9A))((0x21E5645330DC73F6F6819176F8E90A0827117664A93B4D96E19DE8B2819689F2 0x1AC631D608FDEB1EEFFB6C18A720E40CF1408B0BE266A62BE8B7D0B46DAF0FD3)(0x00D73BE9C319319E4C12A8F9610C476D16F0878F032DE6D6664E77DAAA446387 0x12814F8638826EA6099E0691770FFE50F817CFB3C45C1F065EB0F85D6EE7BA8B))((0x27D05D5CE92F8375D15C7E28A4F6A02E1C240BBA18978329DCA072436CDB3B7B 0x1C994843BE3797E9A6F2AC6FCCAB1C9B1745E819143F2918A383D3D336C5846C)(0x1D8ABC594EDE311A74A3CEE7DE36E4065851C0ED03A4148F1A13AF8A4E1CE8B2 0x2C3207B67EE005C7FC5B1C072E980ADF9695F015AE26BF16AE32E83C06FCC611))((0x135DC0F98465E36AEFC4AFAF082F4594434B4A4374309CBD334750983A7811A4 0x11057C0DF6BD2CC7A505A6B39969070656CB39E4EC479DCFE42E01E70BA39114)(0x1E254D9B7E6BEDFE142264E1B93B1CA92B943264E48C8E276AABBC063E79C02B 
0x2A617229F4D194F3BE3D15D38B777EA4ABBA28F3641B269F7A251FBFC511B25A))((0x1E9E3FA46A50EC7A42F370E9A429C21984FCF730FAAC8913EC6E50B9DBA0390C 0x19A7CD7A84C3E998ABFCAB1D1AB8DF1E9F57D5878ECB12636A8C0D008E466934)(0x3F2C2B737CD73658ACE3CC9242DD9A52E39836B138BCDB71658B1052C7FE9C83 0x218E8EAB1F657EFEF1A281FE61A6B1CDD93033130FC664403EB1610AE20EFB3B))((0x063E8B50A90E7AFAA45B4AE2BB4F48537F14CFE82BEF31A110093999F0AB5333 0x10281C8C0E0174FA2121F435F35D9E8050637AA3F58E2A342DEB9C91798C47AC)(0x0D43AB0853C6C202A2CE3C39E9D1CDA61449A8A16A91012FFE58AFCBF675D3D6 0x3B5DADAAAE57CF6FB972C521FED1AC03B960851C0D44B6122EBB72A2258A4604))((0x18AE3885AC8AF0E6BD9C0E7785D83477ED6F5FE8A239AE2526141931D81EAB56 0x29FBB084D8FBE703D008E9CD70B702B3113B49F859C2A19B4406AD130D3731A2)(0x04AF99E720254B22E8DF368AE6FC273AC75A4639A6F3007369FD405532964CBE 0x124525E37EC615B1F57D54002836E35380548276C61D6B2539EA51C9015EED9C))((0x32A4ECA72864EEFFCF2D83B843B9BE4ADBCD45B972624811C894F916E4C81A30 0x3E6F57AB9CF53618664A7AD9862F65BF164EFFB42B7497B64A8844339318C365)(0x2F7EECC63F3EDF519A83E20D64E88213179264F93A2438A22A16335EB2853E6A 0x1D03C4087516EE01C13982505997CF5E13A8E4C228B4346DEFDCB1101E656494))((0x394C3F476F8DFAE68E5B4610E73239F7ACD8C5AE12E6F094B2D199D39308D87D 0x1A38D41C68C7BD3C6176D24F774641136D6C929811D86AE72E54598BB7DB27F4)(0x160CB44B2FAF93B0375D40E77D560091F066C8616B692FF842F90B6FEBC9BAB2 0x16C4E5ADA6534B5EA0406918AD2D64BC414EAFFBC723F27B359C524FF5FCE39C))((0x3FB19114E947FFDC540FB4284835CB74279DAB1CF3154F0874B0A0A5E63A3EEB 0x3D65D5B172CEF8D31F34A49AB0889F7A10A2238846B6B24569D68AA791F94CB6)(0x0F02699D800DB868A06E3EE4A0C158C90BC48A691E81744FFBCFDA32FF24DCF4 0x2714671243FD8237D339E0AC2C941EE9A642FDF6FCBBE031B42696FD69E831AB))((0x0521F6B05212DC975AF0007CD24D328B2ECED1C82791D2E606059B65BCBE554E 0x36BE6DAC4B773494121F7DD5F8507D36AE6ACC1DC99FA860DED1CA7AE8A3ED01)(0x38B51B590BF50CC6A24AB80474EB147A30C4AF3DD19A5654C1B105559BD14D4D 0x3E11DE8B1B4638FBD8C4D6836A747C0A81578A4D22B84AC58EC061FEB68B3177))((0x2D5328E0BA58995C7066774A463F8A902D7C2B97BD45C10B9D8B4D823DF106AC 0x26933A9C217727C9CDC4A4494D3E332B36BB997396FCA706099FFD3439BB4836)(0x0BB116BA807D12D4DF79557FFB7F60B48858601912530E3F49C890A34AED31CB 0x2462E0396ED302DD10A6EF43AE532333543F4A875599E83FBE4106644DDD3F8E))))(z_1 0x06A616C3A625F92ED65B5CA99D9A1DAAA476481B9C45E4553E7A8E436B13D570)(z_2 0x310AE40CBCE21FA0DC92D1DFE7DF49D939A579FF029F869118036BF8B370438C)(delta(0x366414F4FE9C3DDB27DA5A85452CEDBC65AFD104D1F5C241BE2E594F615ABBBC 0x0B4190D59EEA6EBF8B9316054439E92B5BFDC8CD9BB0C864783D5F1D785DF87E))(challenge_polynomial_commitment(0x1340C10B30AD07F4913C3CDD588C3E8A5A6E6DAC9947378FA97D11F52CCD4AE1 
0x0B110AAD2D1957C9C6944439DED80C9CE9A0EAD35C96903AC1EADBC94AEB5D29))))(evals(((w((0x1BF1CE494D243FEF9253CB66CC3D6300A37ED4A230C154451779FA16F6AAEDD7)(0x2A9AB4178F95EAE6A3D608276A4BCD390A88DAF8C35196061ED79DADB747CA62)(0x2F272FD8DF352C035E81FC1A5C8664AABEF4F62962B7E3D03F6BF53C10C2B398)(0x0967B0F7F74E6558AB86D813EAB8490C43C569BAB9E72761C8D4086810A621B2)(0x3BE58E7E3C8DFFE8317E68E50729FFBD6E22E3FE43F3FD0C469F46768068550B)(0x2417CB5380DAD79780D62428CC09175FBE2DBC443E0767157589A7D581458D33)(0x206FA1779C5057CD0639666D2581A170B83CE654C654544C73F7DFD022FF1597)(0x3EC85737838ED8C4CB90D54523231C950FC641DAA8390AC6612995ADBBFC2947)(0x1A24C3397D2F39F1DFEECCCB66C78BE61279D5C22AD692C23DD5268133793F38)(0x1813C59133F4204F15554D891F94D802D26E2F18343D513E164706636CD7D6E4)(0x0534DF67954B7AAA90DBDFA81468B83F4182B927D5B418E531795998B9825BE3)(0x0F7FC2CEA19984972EE5732743ACDA4C6C406F03A852555019F213E43326B61A)(0x367ADA537033A054A65F0E145E6E79B56F054EEB8011F1EEE163E137D6366B89)(0x1B3232DFA316997F453D7A6F2005E6E096B54B3847F6FE8D581165887F85FD71)(0x0EDC1BCD8B78233F2C5E236D6D05265A586587AB0B1C0F5EE3A26E3EC45C8559)))(z(0x2D46727CABD1AD20E476E7ED8D664640D0565D3F01CBBF7C6258E2F436E0FB64))(s((0x16C1D17F88C267C43D4DFD19768583A2E9AB7AEC6975B09F139DF1AB5C41C815)(0x250EA67AD22E2661208B05E72B1054F60798FD58DDFE3333FAA9B5AB547C6745)(0x258A8C918280C265F825EB72C0B8C625665C2FAF60697D588EC6AACAC73D0B86)(0x072EFAAFC967EFE45BFF2EEC1A8CBF8A0B2CC1F44B25296DA33F73B3E48862D2)(0x3A23A8AA2A3D0DC85299DE4975C8485473C9C1D0D0D84A0BECFFD31351A6071D)(0x0DBC51C9DF923ACB44274742095761E599ED1D8F94EF8F414C151DCC5223A13F)))(generic_selector(0x1AB9C88B53C9CFD0A65823311711ABF1E13E5B352DC2D35C6D34A4508EF42C1D))(poseidon_selector(0x0D4DB96949873B90F365BCBC73B2A1AAE695533742F6472E050D024C47EF051F)))((w((0x044E2486D22B5737733C4933944865079C1D24CB1B62D5A5D99FB4A84D1A7806)(0x2B7D6F8FCA7A01770626488AD8540BDBAD1337C627CD8A9E63212A2A05831410)(0x2D92673EBC67FB88DC3053F021AA44F5ECC10FE56E9D8169EB28B63C86AE5766)(0x11BD17917D68A2E68F4E16998A89F15F53BCEE8524042E87316A917BE118B573)(0x1978EF73627746A050DFFFB981ACCAFDE1ED51690921994DBCEE69E44892C07A)(0x20B24CDDD02F9E3E3864B905A0E34C1910914A3990497208B44D9B7D2F9C04D8)(0x074347DE39DBB739163EC16F4AC610BAFE9328C7677A59ADB0E4949BEA72139F)(0x29F334283A097BEF545ED4BD25FE90538565AFB1ECCFBF12BB636F536950AAE5)(0x1D956F27A2C2B32F5108F9261BF08336CABF3F43A34D76549747C589AB268E26)(0x0F67F822B5005129FDDFA19806B63E2F9293651319E024F470A4E3C093C953FA)(0x07FE17373605026D0611EA8C56D5A5E012737A651B9DB4F2B6D3643E66AE8055)(0x050CA2177E768D190DB1B8EF36BFC9295796447C0F00F1C30D4EAD2C4CCF2576)(0x008B132B8DD971E8BD710E2176BA1A1486E982682603D7C99354FFDDD42ED0DF)(0x386E04A8455ACB87D0E73727740ECD7FD21607BBE70CE413AAA2ED5293FA203B)(0x29225BD92F00CC712E9F3FFCA7660592B80987BE8B35DDFF83194F0799DC3B44)))(z(0x2345A1A7FB004FF4B933E47E914BC762D3321AC74A1EB807F22F75F716A29745))(s((0x384F9DCC50FFCCCD17FE53094FDD6C6E3A189937EF2202055A9E84207D1F990F)(0x3E3C73F348C36B61D52D5DDFF36D766357B58A914875549471351BEAB35952CB)(0x193A462B9731E73C8622E658BAD0DB5A932213978DB3925DBB5ACF07F8AB2B4C)(0x2B6E71A35F8A6C161A22D6CA45CA5766378890C30EA61AF0A179CB6B5496E177)(0x03A7BF41CF46215871DC385F1C4AB03A8C3DD67EC3F789E425BAEC8ED2B4A65F)(0x23C3758C52FE243A5E63FD6AEC2218CC2A001A6F655F2E44F1A13E391FFA4BB8)))(generic_selector(0x2CC43F0A9D98CBE8E5B6FC354E9B090B910754181165DBE475E88A0A02F5A786))(poseidon_selector(0x22A81C50CBBE608CB6F8A807471424EB0A5167B392446F32E192E33EFDBFCE75)))))(ft_eval1 0x34AD5FA8AD38D9FB83534F851F0924BA3B9B43E1C45703F151A19BCCE71F4E7D))))))" - |> 
Side_loaded.Proof.of_base64 - with - | Error e -> - failwith e - | Ok pi -> - pi - in - let statement = - let transaction = - Backend.Tick.Field.t_of_sexp - (Atom - "0x2340A5795E22C7C923991D225400D0052B3A995C35BCCDC612E6205287419EC1" - ) - in - let at_account_update = - Backend.Tick.Field.t_of_sexp - (Atom - "0x2340A5795E22C7C923991D225400D0052B3A995C35BCCDC612E6205287419EC1" - ) - in - [| transaction; at_account_update |] - in - let vk = - Side_loaded.Verification_key.of_base58_check_exn - "VVA53aPgmCXemUiPjxo1dhgdNUSWbJarTh9Xhaki6b1AjVE31nk6wnSKcPa6JSJ8KDTDMryCozStCeisLTXLoYxBo3fjFhgPJn25EnuJMggPrVocSW3SfQBY7dgpPqQVccsqSPcFGJptarG6dRrLcx65M4SqudGDWbzpKd2oLyeTVifRTREq2BibC3rWMpUDuLwXEnp61FfFaktb4WKu3hfHyYBt5vL3Xndi9kynUWuhznijLG2yP7eX7o5M3nbjfkg7NdWaGReZH1yt4ewtrmHEMF5qTdK2UPgNzpScaK7ix8wZV5qECT483DsuY6Wpx3s2FfdmRDYwdr2YejhW4ZnJLNAxMgUkV3xkid5esqnk5TuQrdHMYvLZXju3RrZrvqhmbTFXpANKskZnuH1BUvkeoPvpQeYdoeYDJ6bgM6NFB3oWsPTU3vSMg3Wjsqx6Ekc8MuZHuaziGax9WNxbM3H6HscZFRs4npttEiwj1gSvZNaVc9FfRdCa3CMMWJNR1CkA1zKtCb8Sie1yiHc89hDA7K5mufV1yaX88xmAQrhZpTLCE8Ch62Zp3P1Vy6QVDACZCKSiz3bhikYEXFKZaJfRYVZVPeEBgjnUDrB4SD61KKnvWWESV8a3uGudeBLnJqoPJuBC8bZTUfskxqzkXmz2XTv4HMARJRTg21tFB8mZmLgVuaSWpc6inGxTZeWmE9ECSFzHuazEPNQ6yn1xo7G72ixrmLZrZqhbhPfnqSL5SWnmFWaWTihNNdHac8FDwb8JKvneC5yUur3WAZ8tTULiiNVvQhjhKVUrym2wTWFwhDAy6GqZcYeWRig9gpgdaxEuA7YnDc8XZZ5JS643PBfAWZZ3mZR4NxXPnVfn1xAUD2VFXmA8pzkqRwQ8DSpSPpKuwzwuJQUW6QSGtBheKFSxrXt6qekFX2azueedJZrhnwPW78dM7v3Qd2zTWo8iD2wfBB1Yot8BfUqAk7FYyi9hajKT1qZWQMg3kUVBywX93KBht2RFDJeVwiuE2hHaAzobxnnwsPJKPHaU8SM1EXQ4cFP2zJ2acPig52MNht3Z34fMeZ65bA3eEbcDbJw3pk2YS1pHtEr818b5TisPu6gshwkRGghbnTsQzHCjZVf61rpT4WphBsv6ob6foLwdc5ZSxq2BFzAWUv5j5nrtU9fqnQCx1DooZxAc8BnjxCXQ5TnE4Rpj82JwUR59QFNza2RwK2vZLvrNPt1LK5eCkZV8fBWuYD9J4AnxGA8icQbWBAfsSk9xXJBynEKymAsw6eTFPWCAMjQgJLhJP8MJR3NyNbqMfT1nR924EyZged7US9ogU8CLV5GcMBTSzAyCSFwFN8LGL1uT9sStzwQNbUvKvXYRwWNMYpb7Mxcjz1NjBaMbiWUryMcJc3D19yXt8VNt5g3L3Ty4GtL3WWV2aXRRXcuzYZai6wV8ESPGd3R6o4NJS5Ct5Z98fx25sNtswb77Q18pU379m4wsk8ck872oMZTPp9bDHTVpLoEBHd1gkC6j7pP8dx3cNTWc1NoewCGLi6zLDNfPZDrRXZESnaDRgVGEDinXS5SeAihMcQxvriHyskPW4SidcZsZtPvLnoQz7HQRpDnXfg4j6b8P5EX6sSJbkU9is3k6e8puQirFzLLgh2uC4oZH8EzLRZcGkonQPP5sLTmfwX4s5DJYdS4NLAVYSXndVZ4fazLfqPLukdWQkxZihUq4NtFkfzpNB8MPUBe6T72zhnvqVPegeEhgVvUokcn2DRJUc93DSYSGEJ3eZNFTruCgbM7xMXq83K6eraFRvxGqAgsQcTcQKwEfF9XvuppFDBbEHjdg84w1XiRkZ7xPKDdF6Hvi5G8V6rr6q1T7qypKiFqNrwM6frbJqgjedLpAY6RkPchip2WsZTpEX3EY1ryyGnJxZvb2fjCooQ9u1R6zNArVCV383KNJQZAaWFgzd58F7ZJ1fGU8zeFzDuhqSwqPyDE299sVYMSfbvp7xjWygxrbjApRE2FkjQtjuxaiXzsuemvrrSedVCGrktCHNqPKkJxbLcpz97rRBvwnKSd26x8LKHn2Zjzp2qeyxsY8HN7WVPATxPE4xXqi9dw41o8LBQ3GDGe1ASjphdp4bxj1guHhSZbMKTJDj7hJKyuvBMdG1YKQo3uv2qu5MiB3Afu5SZbZStNKBnxc2DRoDyF45yrQNeoBJogcSLAqWG624ZAdU4BWrqRJNjoAu6GxxE6E8TvFtvyDW1R9Nv7tXzmWE7RarrAL9YUD6uqe7gAanAv1cdAJRcPcdr2YvUL7zeB5d1daPfwJW4PYDvMwnnqDFSXgNqPreh8nFaiReDYjiHkwCojPcCgdcK5gJwpQTasjkWQBk2RmFQdfaLCpiPZGroZ6hTvRBHq2MwdUtkQHZjjCvY9fUtnniMVdUgkAZ9oLj8evpeoDEwyEHE1upmZZN84CMPP32NpHDtH3PwgGR3" - in - assert ( - Promise.block_on_async_exn (fun () -> - Side_loaded.verify_promise ~value_to_field_elements:Fn.id - ~return_typ:Impls.Step.Typ.unit - [ (vk, (statement, ()), pi) ] ) )*) - open Impls.Step let () = Snarky_backendless.Snark0.set_eval_constraints true - module Statement = struct - type t = Field.t - - let to_field_elements x = [| x |] - - module Constant = struct - type t = Field.Constant.t [@@deriving bin_io] - - let to_field_elements x = [| x |] - end - end - (* Currently, a circuit must have at least 1 of every type of constraint. 
*) let dummy_constraints () = Impl.( @@ -412,28 +384,13 @@ module Make_str (_ : Wire_types.Concrete) = struct : Field.t * Field.t )) module No_recursion = struct - module Statement = Statement - - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Input Field.typ) ~auxiliary_typ:Typ.unit ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N0) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self:_ -> [ { identifier = "main" ; prevs = [] @@ -461,42 +418,17 @@ module Make_str (_ : Wire_types.Concrete) = struct Proof.verify_promise [ (Field.Constant.zero, b0) ] ) ) ; (Field.Constant.zero, b0) - let example_input, example_proof = example + let _example_input, _example_proof = example end module No_recursion_return = struct - module Statement = struct - type t = unit - - let to_field_elements () = [||] - - module Constant = struct - type t = unit [@@deriving bin_io] - - let to_field_elements () = [||] - end - end - - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Output Field.typ) ~auxiliary_typ:Typ.unit ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N0) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self:_ -> [ { identifier = "main" ; prevs = [] @@ -524,12 +456,12 @@ module Make_str (_ : Wire_types.Concrete) = struct Proof.verify_promise [ (res, b0) ] ) ) ; (res, b0) - let example_input, example_proof = example + let _example_input, _example_proof = example end - module Simple_chain = struct - module Statement = Statement + [@@@warning "-60"] + module Simple_chain = struct type _ Snarky_backendless.Request.t += | Prev_input : Field.Constant.t Snarky_backendless.Request.t | Proof : (Nat.N1.n, Nat.N1.n) Proof.t Snarky_backendless.Request.t @@ -544,26 +476,13 @@ module Make_str (_ : Wire_types.Concrete) = struct | _ -> respond Unhandled - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] _tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Input Field.typ) ~auxiliary_typ:Typ.unit ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N1) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self -> [ { identifier = "main" ; prevs = [ self ] @@ -622,7 +541,7 @@ module Make_str (_ : Wire_types.Concrete) = struct Proof.verify_promise [ (Field.Constant.one, 
b1) ] ) ) ; (Field.Constant.one, b1) - let example_input, example_proof = example + let _example_input, _example_proof = example end module Tree_proof = struct @@ -651,7 +570,7 @@ module Make_str (_ : Wire_types.Concrete) = struct | _ -> respond Unhandled - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] _tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Input Field.typ) ~override_wrap_domain:Pickles_base.Proofs_verified.N1 @@ -659,19 +578,6 @@ module Make_str (_ : Wire_types.Concrete) = struct ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N2) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self -> [ { identifier = "main" ; feature_flags = Plonk_types.Features.none_bool @@ -744,9 +650,9 @@ module Make_str (_ : Wire_types.Concrete) = struct let examples = [ example1; example2 ] - let example1_input, example_proof = example1 + let _example1_input, _example_proof = example1 - let example2_input, example2_proof = example2 + let _example2_input, _example2_proof = example2 end let%test_unit "verify" = @@ -755,8 +661,6 @@ module Make_str (_ : Wire_types.Concrete) = struct Tree_proof.Proof.verify_promise Tree_proof.examples ) ) module Tree_proof_return = struct - module Statement = No_recursion_return.Statement - type _ Snarky_backendless.Request.t += | Is_base_case : bool Snarky_backendless.Request.t | No_recursion_input : Field.Constant.t Snarky_backendless.Request.t @@ -785,7 +689,7 @@ module Make_str (_ : Wire_types.Concrete) = struct | _ -> respond Unhandled - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] _tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Output Field.typ) ~override_wrap_domain:Pickles_base.Proofs_verified.N1 @@ -793,19 +697,6 @@ module Make_str (_ : Wire_types.Concrete) = struct ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N2) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self -> [ { identifier = "main" ; feature_flags = Plonk_types.Features.none_bool @@ -885,9 +776,9 @@ module Make_str (_ : Wire_types.Concrete) = struct let examples = [ example1; example2 ] - let example1_input, example1_proof = example1 + let _example1_input, _example1_proof = example1 - let example2_input, example2_proof = example2 + let _example2_input, _example2_proof = example2 end let%test_unit "verify" = @@ -897,19 +788,7 @@ module Make_str (_ : Wire_types.Concrete) = struct ) module Add_one_return = struct - module Statement = struct - type t = Field.t - - let to_field_elements x = [| x |] - - module Constant = struct - type t = Field.Constant.t [@@deriving bin_io] - - let to_field_elements x = [| x |] - end - end - - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] _tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> 
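(* [Input_and_output] makes the statement a pair: the caller-supplied
   field element together with a second field element computed and
   returned by [main] (per this module's name, presumably the input
   plus one). *)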
compile_promise () ~public_input:(Input_and_output (Field.typ, Field.typ)) @@ -917,19 +796,6 @@ module Make_str (_ : Wire_types.Concrete) = struct ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N0) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self:_ -> [ { identifier = "main" ; feature_flags = Plonk_types.Features.none_bool @@ -958,23 +824,11 @@ module Make_str (_ : Wire_types.Concrete) = struct Proof.verify_promise [ ((input, res), b0) ] ) ) ; ((input, res), b0) - let example_input, example_proof = example + let _example_input, _example_proof = example end module Auxiliary_return = struct - module Statement = struct - type t = Field.t - - let to_field_elements x = [| x |] - - module Constant = struct - type t = Field.Constant.t [@@deriving bin_io] - - let to_field_elements x = [| x |] - end - end - - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] _tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Input_and_output (Field.typ, Field.typ)) @@ -982,19 +836,6 @@ module Make_str (_ : Wire_types.Concrete) = struct ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N0) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self:_ -> [ { identifier = "main" ; feature_flags = Plonk_types.Features.none_bool @@ -1040,7 +881,7 @@ module Make_str (_ : Wire_types.Concrete) = struct Proof.verify_promise [ ((input, result), b0) ] ) ) ; ((input, result), b0) - let example_input, example_proof = example + let _example_input, _example_proof = example end end ) @@ -1109,7 +950,6 @@ module Make_str (_ : Wire_types.Concrete) = struct module M = struct module IR = Inductive_rule.T (A) (A_value) (A) (A_value) (A) (A_value) - module HIR = H4.T (IR) let max_local_max_proofs_verifieds ~self (type n) (module Max_proofs_verified : Nat.Intf with type n = n) branches @@ -1148,13 +988,9 @@ module Make_str (_ : Wire_types.Concrete) = struct let padded = V.f branches (M.f choices) |> Vector.transpose in (padded, Maxes.m padded) - module Lazy_ (A : T0) = struct - type t = A.t Lazy.t - end - module Lazy_keys = struct type t = - (Impls.Step.Keypair.t * Dirty.t) Lazy.t + (Impls.Step.Proving_key.t * Dirty.t) Lazy.t * (Kimchi_bindings.Protocol.VerifierIndex.Fp.t * Dirty.t) Lazy.t (* TODO Think this is right.. *) @@ -1172,12 +1008,9 @@ module Make_str (_ : Wire_types.Concrete) = struct Snark_keys_header.header_version ; kind ; constraint_constants - ; commits = - { mina = Mina_version.commit_id - ; marlin = Mina_version.marlin_commit_id - } + ; commits = { mina = "[NOT SPECIFIED]"; marlin = "[NOT SPECIFIED]" } ; length = (* This is a dummy, it gets filled in on read/write. *) 0 - ; commit_date = Mina_version.commit_date + ; commit_date = "UNKNOWN" ; constraint_system_hash ; identifying_hash = (* TODO: Proper identifying hash. 
*) @@ -1196,13 +1029,14 @@ module Make_str (_ : Wire_types.Concrete) = struct let full_signature = { Full_signature.padded; maxes = (module Maxes) } in - let feature_flags = Plonk_types.Features.none in + let feature_flags = Plonk_types.Features.Full.none in let actual_feature_flags = Plonk_types.Features.none_bool in let wrap_domains = let module M = Wrap_domains.Make (A) (A_value) (A) (A_value) (A) (A_value) in M.f full_signature prev_varss_n prev_varss_length ~feature_flags + ~num_chunks:1 ~max_proofs_verified:(module Max_proofs_verified) in let module Branch_data = struct @@ -1221,19 +1055,18 @@ module Make_str (_ : Wire_types.Concrete) = struct , 'm ) Step_branch_data.t end in - let proofs_verifieds = Vector.[ 2 ] in + let proofs_verifieds = Vector.singleton 2 in let (T inner_step_data as step_data) = - Step_branch_data.create ~index:0 ~feature_flags + Step_branch_data.create ~index:0 ~feature_flags ~num_chunks:1 ~actual_feature_flags ~max_proofs_verified:Max_proofs_verified.n ~branches:Branches.n ~self ~public_input:(Input typ) ~auxiliary_typ:typ A.to_field_elements A_value.to_field_elements rule ~wrap_domains ~proofs_verifieds in - let step_domains = Vector.[ inner_step_data.domains ] in + let step_domains = Vector.singleton inner_step_data.domains in let step_keypair = let etyp = - Impls.Step.input ~feature_flags - ~proofs_verified:Max_proofs_verified.n + Impls.Step.input ~proofs_verified:Max_proofs_verified.n ~wrap_rounds:Tock.Rounds.n in let (T (typ, _conv, conv_inv)) = etyp in @@ -1278,7 +1111,6 @@ module Make_str (_ : Wire_types.Concrete) = struct typ main in let step_vks = - let module V = H4.To_vector (Lazy_keys) in lazy (Vector.map [ step_keypair ] ~f:(fun (_, vk) -> Tick.Keypair.vk_commitments (fst (Lazy.force vk)) ) ) @@ -1287,7 +1119,6 @@ module Make_str (_ : Wire_types.Concrete) = struct let module SC' = SC in let open Impls.Wrap in let open Wrap_main_inputs in - let open Wrap_main in let x = exists Field.typ ~compute:(fun () -> Field.Constant.of_int 3) in @@ -1313,13 +1144,13 @@ module Make_str (_ : Wire_types.Concrete) = struct ( Wrap_verifier.Scalar_challenge.endo g ~num_bits:4 (Kimchi_backend_common.Scalar_challenge.create x) : Field.t * Field.t ) ; - for i = 0 to 64000 do + for _i = 0 to 64000 do assert_r1cs x y z done in let (wrap_pk, wrap_vk), disk_key = let open Impls.Wrap in - let (T (typ, conv, _conv_inv)) = input () in + let (T (typ, conv, _conv_inv)) = input ~feature_flags () in let main x () : unit = wrap_main (conv x) in let self_id = Type_equal.Id.uid self.id in let disk_key_prover = @@ -1354,7 +1185,6 @@ module Make_str (_ : Wire_types.Concrete) = struct let wrap_vk = Lazy.map wrap_vk ~f:fst in let module S = Step.Make (A) (A_value) (Max_proofs_verified) in let prover = - let module Z = H4.Zip (Branch_data) (E04 (Impls.Step.Keypair)) in let f : ( unit * (unit * unit) , unit * (unit * unit) @@ -1366,20 +1196,20 @@ module Make_str (_ : Wire_types.Concrete) = struct -> (Max_proofs_verified.n, Max_proofs_verified.n) Proof.t Promise.t = fun (T b as branch_data) (step_pk, step_vk) () -> - let (( module - Req ) - : (Max_proofs_verified.n, Maxes.ns) Requests.Wrap.t ) = + let (_ : (Max_proofs_verified.n, Maxes.ns) Requests.Wrap.t) = Requests.Wrap.create () in - let (module Requests) = b.requests in let _, prev_vars_length = b.proofs_verified in let step = let wrap_vk = Lazy.force wrap_vk in S.f branch_data () ~feature_flags ~prevs_length:prev_vars_length ~self ~public_input:(Input typ) ~auxiliary_typ:Impls.Step.Typ.unit ~step_domains - 
~self_dlog_plonk_index:wrap_vk.commitments - (Impls.Step.Keypair.pk (fst (Lazy.force step_pk))) + ~self_dlog_plonk_index: + ((* TODO *) Plonk_verification_key_evals.map + ~f:(fun x -> [| x |]) + wrap_vk.commitments ) + (fst (Lazy.force step_pk)) wrap_vk.index in let pairing_vk = fst (Lazy.force step_vk) in @@ -1400,7 +1230,6 @@ module Make_str (_ : Wire_types.Concrete) = struct } in let%map.Promise proof = - let module Pairing_acc = Tock.Inner_curve.Affine in (* The prover for wrapping a proof *) let wrap (type actual_branching) ~(max_proofs_verified : Max_proofs_verified.n Nat.t) @@ -1408,11 +1237,12 @@ module Make_str (_ : Wire_types.Concrete) = struct with type ns = Maxes.ns and type length = Max_proofs_verified.n ) ~dlog_plonk_index wrap_main to_field_elements ~pairing_vk - ~step_domains ~wrap_domains ~pairing_plonk_indices pk + ~step_domains:_ ~wrap_domains:_ ~pairing_plonk_indices:_ + pk ({ statement = prev_statement - ; prev_evals + ; prev_evals = _ ; proof - ; index = which_index + ; index = _which_index } : ( _ , _ @@ -1491,7 +1321,6 @@ module Make_str (_ : Wire_types.Concrete) = struct let module O = Tick.Oracles in let public_input = tick_public_input_of_statement ~max_proofs_verified - ~feature_flags:Plonk_types.Features.none prev_statement_with_hashes in let prev_challenges = @@ -1530,7 +1359,7 @@ module Make_str (_ : Wire_types.Concrete) = struct V.f Max_local_max_proofs_verifieds.length (M.f prev_messages_for_next_wrap_proof) in - O.create pairing_vk + O.create_with_public_evals pairing_vk Vector.( map2 (Vector.trim_front sgs lte) prev_challenges ~f:(fun commitment cs -> @@ -1603,7 +1432,7 @@ module Make_str (_ : Wire_types.Concrete) = struct let tick_combined_evals = Plonk_checks.evals_of_split_evals (module Tick.Field) - proof.openings.evals + proof.proof.openings.evals ~rounds:(Nat.to_int Tick.Rounds.n) ~zeta:As_field.zeta ~zetaw in @@ -1644,6 +1473,7 @@ module Make_str (_ : Wire_types.Concrete) = struct ~endo:Endo.Step_inner_curve.base ~mds:Tick_field_sponge.params.mds ~srs_length_log2:Common.Max_degree.step_log2 + ~zk_rows:3 ~field_of_hex:(fun s -> Kimchi_pasta.Pasta.Bigint256.of_hex_string s |> Kimchi_pasta.Pasta.Fp.of_bigint ) @@ -1656,11 +1486,15 @@ module Make_str (_ : Wire_types.Concrete) = struct (* Note: We do not pad here. 
*) ~actual_proofs_verified: (Nat.Add.create actual_proofs_verified) - { evals = proof.openings.evals; public_input = x_hat } + { evals = proof.proof.openings.evals + ; public_input = + (let x1, x2 = x_hat in + ([| x1 |], [| x2 |]) ) + } ~r ~xi ~zeta ~zetaw ~old_bulletproof_challenges:prev_challenges ~env:tick_env ~domain:tick_domain - ~ft_eval1:proof.openings.ft_eval1 + ~ft_eval1:proof.proof.openings.ft_eval1 ~plonk:tick_plonk_minimal in let chal = Challenge.Constant.of_tick_field in @@ -1702,7 +1536,7 @@ module Make_str (_ : Wire_types.Concrete) = struct Kimchi_bindings.Protocol.SRS.Fp .batch_accumulator_generate urs 1 chals in - let sg_new = + let[@warning "-4"] sg_new = match sg_new with | [| Kimchi_types.Finite x |] -> x @@ -1719,8 +1553,6 @@ module Make_str (_ : Wire_types.Concrete) = struct let plonk = let module Field = struct include Tick.Field - - type nonrec bool = bool end in Wrap.Type1.derive_plonk (module Field) @@ -1741,7 +1573,7 @@ module Make_str (_ : Wire_types.Concrete) = struct N1 | S (S Z) -> N2 - | _ -> + | S _ -> assert false ) ; domain_log2 = Composition_types.Branch_data.Domain_log2.of_int_exn @@ -1776,7 +1608,7 @@ module Make_str (_ : Wire_types.Concrete) = struct ; alpha = plonk0.alpha ; beta = chal plonk0.beta ; gamma = chal plonk0.gamma - ; lookup = Plonk_types.Opt.None + ; joint_combiner = Opt.nothing } } ; sponge_digest_before_evaluations = @@ -1795,7 +1627,9 @@ module Make_str (_ : Wire_types.Concrete) = struct next_statement.proof_state.messages_for_next_wrap_proof in let%map.Promise next_proof = - let (T (input, conv, _conv_inv)) = Impls.Wrap.input () in + let (T (input, conv, _conv_inv)) = + Impls.Wrap.input ~feature_flags () + in Common.time "wrap proof" (fun () -> Impls.Wrap.generate_witness_conv ~f:(fun { Impls.Wrap.Proof_inputs.auxiliary_inputs @@ -1839,32 +1673,38 @@ module Make_str (_ : Wire_types.Concrete) = struct .deferred_values .plonk with - lookup = None + joint_combiner = None } } } } ) in - ( { proof = Wrap_wire_proof.of_kimchi_proof next_proof + ( { proof = Wrap_wire_proof.of_kimchi_proof next_proof.proof ; statement = Types.Wrap.Statement.to_minimal - ~to_option:Plonk_types.Opt.to_option next_statement + ~to_option:Opt.to_option next_statement ; prev_evals = { Plonk_types.All_evals.evals = - { public_input = x_hat - ; evals = proof.openings.evals + { public_input = + (let x1, x2 = x_hat in + ([| x1 |], [| x2 |]) ) + ; evals = proof.proof.openings.evals } - ; ft_eval1 = proof.openings.ft_eval1 + ; ft_eval1 = proof.proof.openings.ft_eval1 } } : _ P.Base.Wrap.t ) in wrap ~max_proofs_verified:Max_proofs_verified.n - full_signature.maxes ~dlog_plonk_index:wrap_vk.commitments + full_signature.maxes + ~dlog_plonk_index: + ((* TODO *) Plonk_verification_key_evals.map + ~f:(fun x -> [| x |]) + wrap_vk.commitments ) wrap_main A_value.to_field_elements ~pairing_vk ~step_domains:b.domains ~pairing_plonk_indices:(Lazy.force step_vks) ~wrap_domains - (Impls.Wrap.Keypair.pk (fst (Lazy.force wrap_pk))) + (fst (Lazy.force wrap_pk)) proof in Proof.T @@ -1888,10 +1728,17 @@ module Make_str (_ : Wire_types.Concrete) = struct ; proofs_verifieds ; max_proofs_verified = (module Max_proofs_verified) ; public_input = typ - ; wrap_key = Lazy.map wrap_vk ~f:Verification_key.commitments + ; wrap_key = + Lazy.map wrap_vk ~f:(fun x -> + (* TODO *) + Plonk_verification_key_evals.map + ~f:(fun x -> [| x |]) + (Verification_key.commitments x) ) ; wrap_vk = Lazy.map wrap_vk ~f:Verification_key.index ; wrap_domains ; step_domains + ; num_chunks = 1 + ; zk_rows = 3 } in 
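(* Register the compiled data under [self]'s tag; the hard-coded
   [num_chunks = 1] and [zk_rows = 3] match the defaults this patch
   uses elsewhere (e.g. in the side-loaded data above). *)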
Types_map.add_exn self data ; @@ -1901,13 +1748,10 @@ module Make_str (_ : Wire_types.Concrete) = struct let step, wrap_vk, wrap_disk_key = M.compile module Proof = struct - type statement = A_value.t - module Max_local_max_proofs_verified = Max_proofs_verified - module Max_proofs_verified_vec = Nvector (Max_proofs_verified) include Proof.Make (Max_proofs_verified) (Max_local_max_proofs_verified) - let id = wrap_disk_key + let _id = wrap_disk_key let verification_key = wrap_vk @@ -1918,7 +1762,7 @@ module Make_str (_ : Wire_types.Concrete) = struct (Lazy.force verification_key) ts - let statement (T p : t) = + let _statement (T p : t) = p.statement.messages_for_next_step_proof.app_state end @@ -1934,7 +1778,7 @@ module Make_str (_ : Wire_types.Concrete) = struct module Recurse_on_bad_proof = struct open Impls.Step - let dummy_proof = + let _dummy_proof = Proof0.dummy Nat.N2.n Nat.N2.n Nat.N2.n ~domain_log2:15 type _ Snarky_backendless.Request.t += @@ -1948,13 +1792,13 @@ module Make_str (_ : Wire_types.Concrete) = struct | _ -> respond Unhandled - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] _tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Input Typ.unit) ~auxiliary_typ:Typ.unit ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N2) - ~name:"recurse-on-bad" ~constraint_constants + ~name:"recurse-on-bad" ~choices:(fun ~self:_ -> [ { identifier = "main" ; feature_flags = Plonk_types.Features.none_bool @@ -2001,6 +1845,8 @@ module Make_str (_ : Wire_types.Concrete) = struct let%test_module "adversarial_tests" = ( module struct + [@@@warning "-60"] + let () = Backtrace.elide := false let () = Snarky_backendless.Snark0.set_eval_constraints true @@ -2045,18 +1891,6 @@ module Make_str (_ : Wire_types.Concrete) = struct ( module struct open Impls.Step - module Statement = struct - type t = Field.t - - let to_field_elements x = [| x |] - - module Constant = struct - type t = Field.Constant.t [@@deriving bin_io] - - let to_field_elements x = [| x |] - end - end - (* Currently, a circuit must have at least 1 of every type of constraint. 
*) let dummy_constraints () = Impl.( @@ -2085,28 +1919,13 @@ module Make_str (_ : Wire_types.Concrete) = struct : Field.t * Field.t )) module No_recursion = struct - module Statement = Statement - - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Input Field.typ) ~auxiliary_typ:Typ.unit ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N0) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self:_ -> [ { identifier = "main" ; prevs = [] @@ -2138,28 +1957,13 @@ module Make_str (_ : Wire_types.Concrete) = struct end module Fake_1_recursion = struct - module Statement = Statement - - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Input Field.typ) ~auxiliary_typ:Typ.unit ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N1) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self:_ -> [ { identifier = "main" ; prevs = [] @@ -2191,9 +1995,7 @@ module Make_str (_ : Wire_types.Concrete) = struct end module Fake_2_recursion = struct - module Statement = Statement - - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Input Field.typ) ~override_wrap_domain:Pickles_base.Proofs_verified.N1 @@ -2201,19 +2003,6 @@ module Make_str (_ : Wire_types.Concrete) = struct ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N2) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self:_ -> [ { identifier = "main" ; prevs = [] @@ -2244,9 +2033,9 @@ module Make_str (_ : Wire_types.Concrete) = struct let example_input, example_proof = example end - module Simple_chain = struct - module Statement = Statement + [@@@warning "-60"] + module Simple_chain = struct type _ Snarky_backendless.Request.t += | Prev_input : Field.Constant.t Snarky_backendless.Request.t | Proof : Side_loaded.Proof.t Snarky_backendless.Request.t @@ -2271,26 +2060,13 @@ module Make_str (_ : Wire_types.Concrete) = struct ~max_proofs_verified:(Nat.Add.create Nat.N2.n) ~feature_flags:Plonk_types.Features.none ~typ:Field.typ - let _tag, _, p, Provers.[ step ] = + let[@warning "-45"] _tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Input Field.typ) ~auxiliary_typ:Typ.unit ~branches:(module Nat.N1) 
~max_proofs_verified:(module Nat.N1) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self:_ -> [ { identifier = "main" ; prevs = [ side_loaded_tag ] @@ -2393,7 +2169,11 @@ module Make_str (_ : Wire_types.Concrete) = struct ( module struct open Impls.Step + [@@@warning "-60"] + module Statement = struct + [@@@warning "-32-34"] + type t = Field.t let to_field_elements x = [| x |] @@ -2401,6 +2181,8 @@ module Make_str (_ : Wire_types.Concrete) = struct module Constant = struct type t = Field.Constant.t [@@deriving bin_io] + [@@@warning "-32"] + let to_field_elements x = [| x |] end end @@ -2433,28 +2215,13 @@ module Make_str (_ : Wire_types.Concrete) = struct : Field.t * Field.t )) module No_recursion = struct - module Statement = Statement - - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Input Field.typ) ~auxiliary_typ:Typ.unit ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N0) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self:_ -> [ { identifier = "main" ; prevs = [] @@ -2486,28 +2253,13 @@ module Make_str (_ : Wire_types.Concrete) = struct end module Fake_1_recursion = struct - module Statement = Statement - - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Input Field.typ) ~auxiliary_typ:Typ.unit ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N1) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self:_ -> [ { identifier = "main" ; prevs = [] @@ -2539,9 +2291,7 @@ module Make_str (_ : Wire_types.Concrete) = struct end module Fake_2_recursion = struct - module Statement = Statement - - let tag, _, p, Provers.[ step ] = + let[@warning "-45"] tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Input Field.typ) ~override_wrap_domain:Pickles_base.Proofs_verified.N1 @@ -2549,19 +2299,6 @@ module Make_str (_ : Wire_types.Concrete) = struct ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N2) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = 
Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self:_ -> [ { identifier = "main" ; prevs = [] @@ -2592,9 +2329,9 @@ module Make_str (_ : Wire_types.Concrete) = struct let example_input, example_proof = example end - module Simple_chain = struct - module Statement = Statement + [@@@warning "-60"] + module Simple_chain = struct type _ Snarky_backendless.Request.t += | Prev_input : Field.Constant.t Snarky_backendless.Request.t | Proof : Side_loaded.Proof.t Snarky_backendless.Request.t @@ -2615,34 +2352,20 @@ module Make_str (_ : Wire_types.Concrete) = struct respond Unhandled let maybe_features = - Plonk_types.Features.( - map none ~f:(fun _ -> Plonk_types.Opt.Flag.Maybe)) + Plonk_types.Features.(map none ~f:(fun _ -> Opt.Flag.Maybe)) let side_loaded_tag = Side_loaded.create ~name:"foo" ~max_proofs_verified:(Nat.Add.create Nat.N2.n) ~feature_flags:maybe_features ~typ:Field.typ - let _tag, _, p, Provers.[ step ] = + let[@warning "-45"] _tag, _, p, Provers.[ step ] = Common.time "compile" (fun () -> compile_promise () ~public_input:(Input Field.typ) ~auxiliary_typ:Typ.unit ~branches:(module Nat.N1) ~max_proofs_verified:(module Nat.N1) ~name:"blockchain-snark" - ~constraint_constants: - (* Dummy values *) - { sub_windows_per_window = 0 - ; ledger_depth = 0 - ; work_delay = 0 - ; block_window_duration_ms = 0 - ; transaction_capacity = Log_2 0 - ; pending_coinbase_depth = 0 - ; coinbase_amount = Unsigned.UInt64.of_int 0 - ; supercharged_coinbase_factor = 0 - ; account_creation_fee = Unsigned.UInt64.of_int 0 - ; fork = None - } ~choices:(fun ~self:_ -> [ { identifier = "main" ; prevs = [ side_loaded_tag ] diff --git a/src/lib/pickles/pickles_intf.ml b/src/lib/pickles/pickles_intf.mli similarity index 93% rename from src/lib/pickles/pickles_intf.ml rename to src/lib/pickles/pickles_intf.mli index 012b0a49a95..a237aa36ec8 100644 --- a/src/lib/pickles/pickles_intf.ml +++ b/src/lib/pickles/pickles_intf.mli @@ -16,8 +16,8 @@ module type S = sig module Step_verifier = Step_verifier module Common = Common module Proof_cache = Proof_cache - - exception Return_digest of Md5.t + module Cache = Cache + module Ro = Ro module type Statement_intf = sig type field @@ -37,7 +37,7 @@ module type S = sig [%%versioned: module Stable : sig module V2 : sig - type t [@@deriving to_yojson] + type t [@@deriving to_yojson, of_yojson] end end] @@ -189,20 +189,20 @@ module type S = sig The types parameters are: - ['prev_vars] the tuple-list of public input circuit types to the previous proofs. - - For example, [Boolean.var * (Boolean.var * unit)] represents 2 previous + For example, [Boolean.var * (Boolean.var * unit)] represents 2 previous proofs whose public inputs are booleans - ['prev_values] the tuple-list of public input non-circuit types to the previous proofs. - - For example, [bool * (bool * unit)] represents 2 previous proofs whose + For example, [bool * (bool * unit)] represents 2 previous proofs whose public inputs are booleans. - ['widths] is a tuple list of the maximum number of previous proofs each previous proof itself had. - - For example, [Nat.z Nat.s * (Nat.z * unit)] represents 2 previous + For example, [Nat.z Nat.s * (Nat.z * unit)] represents 2 previous proofs where the first has at most 1 previous proof and the second had zero previous proofs. 
- ['heights] is a tuple list of the number of inductive rules in each of the previous proofs - - For example, [Nat.z Nat.s Nat.s * (Nat.z Nat.s * unit)] represents 2 + For example, [Nat.z Nat.s Nat.s * (Nat.z Nat.s * unit)] represents 2 previous proofs where the first had 2 inductive rules and the second had 1. - ['a_var] is the in-circuit type of the [main] function's public input. @@ -238,8 +238,12 @@ module type S = sig } end + type chunking_data = Verify.Instance.chunking_data = + { num_chunks : int; domain_size : int; zk_rows : int } + val verify_promise : - (module Nat.Intf with type n = 'n) + ?chunking_data:chunking_data + -> (module Nat.Intf with type n = 'n) -> (module Statement_value_intf with type t = 'a) -> Verification_key.t -> ('a * ('n, 'n) Proof.t) list @@ -275,6 +279,10 @@ module type S = sig val generate_or_load : t -> Dirty.t end + module Storables : sig + type t = Compile.Storables.t + end + module Side_loaded : sig module Verification_key : sig [%%versioned: @@ -334,7 +342,7 @@ module type S = sig val create : name:string -> max_proofs_verified:(module Nat.Add.Intf with type n = 'n1) - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t + -> feature_flags:Opt.Flag.t Plonk_types.Features.t -> typ:('var, 'value) Impls.Step.Typ.t -> ('var, 'value, 'n1, Verification_key.Max_branches.n) Tag.t @@ -366,12 +374,13 @@ module type S = sig val compile_promise : ?self:('var, 'value, 'max_proofs_verified, 'branches) Tag.t -> ?cache:Key_cache.Spec.t list + -> ?storables:Storables.t -> ?proof_cache:Proof_cache.t -> ?disk_keys: (Cache.Step.Key.Verification.t, 'branches) Vector.t * Cache.Wrap.Key.Verification.t - -> ?return_early_digest_exception:bool -> ?override_wrap_domain:Pickles_base.Proofs_verified.t + -> ?num_chunks:int -> public_input: ( 'var , 'value @@ -385,7 +394,8 @@ module type S = sig -> max_proofs_verified: (module Nat.Add.Intf with type n = 'max_proofs_verified) -> name:string - -> constraint_constants:Snark_keys_header.Constraint_constants.t + -> ?constraint_constants:Snark_keys_header.Constraint_constants.t + -> ?commits:Snark_keys_header.Commits.With_date.t -> choices: ( self:('var, 'value, 'max_proofs_verified, 'branches) Tag.t -> ( 'prev_varss @@ -421,11 +431,13 @@ module type S = sig val compile : ?self:('var, 'value, 'max_proofs_verified, 'branches) Tag.t -> ?cache:Key_cache.Spec.t list + -> ?storables:Storables.t -> ?proof_cache:Proof_cache.t -> ?disk_keys: (Cache.Step.Key.Verification.t, 'branches) Vector.t * Cache.Wrap.Key.Verification.t -> ?override_wrap_domain:Pickles_base.Proofs_verified.t + -> ?num_chunks:int -> public_input: ( 'var , 'value @@ -439,7 +451,8 @@ module type S = sig -> max_proofs_verified: (module Nat.Add.Intf with type n = 'max_proofs_verified) -> name:string - -> constraint_constants:Snark_keys_header.Constraint_constants.t + -> ?constraint_constants:Snark_keys_header.Constraint_constants.t + -> ?commits:Snark_keys_header.Commits.With_date.t -> choices: ( self:('var, 'value, 'max_proofs_verified, 'branches) Tag.t -> ( 'prev_varss diff --git a/src/lib/pickles/plonk_checks/dune b/src/lib/pickles/plonk_checks/dune index 33743f8756e..a545289b3c1 100644 --- a/src/lib/pickles/plonk_checks/dune +++ b/src/lib/pickles/plonk_checks/dune @@ -2,6 +2,11 @@ (name plonk_checks) (public_name pickles.plonk_checks) (instrumentation (backend bisect_ppx)) + (flags + (:standard + -w +a-40..42-44 + -warn-error +a-4-70) + -open Core_kernel) (preprocess (pps ppx_mina ppx_version ppx_jane ppx_deriving.std ppx_deriving_yojson)) (libraries ;; opam 
libraries diff --git a/src/lib/pickles/plonk_checks/gen_scalars/dune b/src/lib/pickles/plonk_checks/gen_scalars/dune index ffa7a1ae6d0..2c0d1635198 100644 --- a/src/lib/pickles/plonk_checks/gen_scalars/dune +++ b/src/lib/pickles/plonk_checks/gen_scalars/dune @@ -1,6 +1,11 @@ (executable (name gen_scalars) (modes native) + (flags + (:standard + -w +a-40..42-44 + -warn-error +a) + -open Core_kernel) (link_flags (-linkall)) (libraries ;; opam libraries diff --git a/src/lib/pickles/plonk_checks/gen_scalars/gen_scalars.ml b/src/lib/pickles/plonk_checks/gen_scalars/gen_scalars.ml index 6575665580c..157ed7794c7 100644 --- a/src/lib/pickles/plonk_checks/gen_scalars/gen_scalars.ml +++ b/src/lib/pickles/plonk_checks/gen_scalars/gen_scalars.ml @@ -5,11 +5,17 @@ let output_file = Out_channel.create Sys.argv.(1) let output_string str = Out_channel.output_string output_file str let () = + (* We turn off warning 4 (fragile pattern-matching) globally for the generated + code *) output_string {ocaml| (* This file is generated by gen_scalars/gen_scalars.exe. *) -type curr_or_next = Curr | Next [@@deriving hash, eq, compare, sexp] +(* turn off fragile pattern-matching warning from sexp ppx *) +[@@@warning "-4"] + +type curr_or_next = Curr | Next +[@@deriving hash, eq, compare, sexp] module Gate_type = struct module T = struct @@ -85,8 +91,9 @@ module Env = struct ; pow : 'a * int -> 'a ; square : 'a -> 'a ; zk_polynomial : 'a - ; omega_to_minus_3 : 'a + ; omega_to_minus_zk_rows : 'a ; zeta_to_n_minus_1 : 'a + ; zeta_to_srs_length : 'a Lazy.t ; var : Column.t * curr_or_next -> 'a ; field : string -> 'a ; cell : 'a -> 'a @@ -95,11 +102,11 @@ module Env = struct ; endo_coefficient : 'a ; mds : int * int -> 'a ; srs_length_log2 : int - ; vanishes_on_last_4_rows : 'a + ; vanishes_on_zero_knowledge_and_previous_rows : 'a ; joint_combiner : 'a ; beta : 'a ; gamma : 'a - ; unnormalized_lagrange_basis : int -> 'a + ; unnormalized_lagrange_basis : bool * int -> 'a ; if_feature : Kimchi_types.feature_flag * (unit -> 'a) * (unit -> 'a) -> 'a } end @@ -126,10 +133,11 @@ module Tick : S = struct ; alpha_pow ; double ; zk_polynomial = _ - ; omega_to_minus_3 = _ + ; omega_to_minus_zk_rows = _ ; zeta_to_n_minus_1 = _ + ; zeta_to_srs_length = _ ; srs_length_log2 = _ - ; vanishes_on_last_4_rows + ; vanishes_on_zero_knowledge_and_previous_rows ; joint_combiner ; beta ; gamma @@ -189,10 +197,11 @@ module Tock : S = struct ; alpha_pow ; double ; zk_polynomial = _ - ; omega_to_minus_3 = _ + ; omega_to_minus_zk_rows = _ ; zeta_to_n_minus_1 = _ + ; zeta_to_srs_length = _ ; srs_length_log2 = _ - ; vanishes_on_last_4_rows = _ + ; vanishes_on_zero_knowledge_and_previous_rows = _ ; joint_combiner = _ ; beta = _ ; gamma = _ diff --git a/src/lib/pickles/plonk_checks/plonk_checks.ml b/src/lib/pickles/plonk_checks/plonk_checks.ml index dabc84edd9e..15062fe1cc2 100644 --- a/src/lib/pickles/plonk_checks/plonk_checks.ml +++ b/src/lib/pickles/plonk_checks/plonk_checks.ml @@ -3,10 +3,7 @@ open Pickles_types open Pickles_base module Scalars = Scalars module Domain = Domain -module Opt = Plonk_types.Opt - -type 'field vanishing_polynomial_domain = - < vanishing_polynomial : 'field -> 'field > +module Opt = Opt type 'field plonk_domain = < vanishing_polynomial : 'field -> 'field @@ -104,83 +101,20 @@ let evals_of_split_evals field ~zeta ~zetaw (es : _ Plonk_types.Evals.t) ~rounds open Composition_types.Wrap.Proof_state.Deferred_values.Plonk -type 'bool all_feature_flags = - { lookup_tables : 'bool Lazy.t - ; table_width_at_least_1 : 'bool Lazy.t - 
; table_width_at_least_2 : 'bool Lazy.t - ; table_width_3 : 'bool Lazy.t - ; lookups_per_row_3 : 'bool Lazy.t - ; lookups_per_row_4 : 'bool Lazy.t - ; lookup_pattern_xor : 'bool Lazy.t - ; lookup_pattern_range_check : 'bool Lazy.t - ; features : 'bool Plonk_types.Features.t - } +type 'bool all_feature_flags = 'bool Lazy.t Plonk_types.Features.Full.t let expand_feature_flags (type boolean) (module B : Bool_intf with type t = boolean) - ({ range_check0 - ; range_check1 - ; foreign_field_add = _ - ; foreign_field_mul - ; xor - ; rot - ; lookup - ; runtime_tables = _ - } as features : - boolean Plonk_types.Features.t ) : boolean all_feature_flags = - let lookup_pattern_range_check = - (* RangeCheck, Rot gates use RangeCheck lookup pattern *) - lazy B.(range_check0 ||| range_check1 ||| rot) - in - let lookup_pattern_xor = - (* Xor lookup pattern *) - lazy xor - in - (* Make sure these stay up-to-date with the layouts!! *) - let table_width_3 = - (* Xor have max_joint_size = 3 *) - lookup_pattern_xor - in - let table_width_at_least_2 = - (* Lookup has max_joint_size = 2 *) - lazy (B.( ||| ) (Lazy.force table_width_3) lookup) - in - let table_width_at_least_1 = - (* RangeCheck, ForeignFieldMul have max_joint_size = 1 *) - lazy - (B.any - [ Lazy.force table_width_at_least_2 - ; Lazy.force lookup_pattern_range_check - ; foreign_field_mul - ] ) - in - let lookups_per_row_4 = - (* Xor, RangeCheckGate, ForeignFieldMul, have max_lookups_per_row = 4 *) - lazy - (B.any - [ Lazy.force lookup_pattern_xor - ; Lazy.force lookup_pattern_range_check - ; foreign_field_mul - ] ) - in - let lookups_per_row_3 = - (* Lookup has max_lookups_per_row = 3 *) - lazy (B.( ||| ) (Lazy.force lookups_per_row_4) lookup) - in - { lookup_tables = lookups_per_row_3 - ; table_width_at_least_1 - ; table_width_at_least_2 - ; table_width_3 - ; lookups_per_row_3 - ; lookups_per_row_4 - ; lookup_pattern_xor - ; lookup_pattern_range_check - ; features - } + (features : boolean Plonk_types.Features.t) : boolean all_feature_flags = + features + |> Plonk_types.Features.map ~f:(fun x -> lazy x) + |> Plonk_types.Features.to_full + ~or_:(fun x y -> lazy B.(Lazy.force x ||| Lazy.force y)) + ~any:(fun x -> lazy (B.any (List.map ~f:Lazy.force x))) let lookup_tables_used feature_flags = let module Bool = struct - type t = Plonk_types.Opt.Flag.t + type t = Opt.Flag.t let (true_ : t) = Yes @@ -207,53 +141,18 @@ let lookup_tables_used feature_flags = let any = List.fold_left ~f:( ||| ) ~init:false_ end in let all_feature_flags = expand_feature_flags (module Bool) feature_flags in - Lazy.force all_feature_flags.lookup_tables + Lazy.force all_feature_flags.uses_lookups let get_feature_flag (feature_flags : _ all_feature_flags) (feature : Kimchi_types.feature_flag) = - match feature with - | RangeCheck0 -> - Some feature_flags.features.range_check0 - | RangeCheck1 -> - Some feature_flags.features.range_check1 - | ForeignFieldAdd -> - Some feature_flags.features.foreign_field_add - | ForeignFieldMul -> - Some feature_flags.features.foreign_field_mul - | Xor -> - Some feature_flags.features.xor - | Rot -> - Some feature_flags.features.rot - | LookupTables -> - Some (Lazy.force feature_flags.lookup_tables) - | RuntimeLookupTables -> - Some feature_flags.features.runtime_tables - | TableWidth 3 -> - Some (Lazy.force feature_flags.table_width_3) - | TableWidth 2 -> - Some (Lazy.force feature_flags.table_width_at_least_2) - | TableWidth i when i <= 1 -> - Some (Lazy.force feature_flags.table_width_at_least_1) - | TableWidth _ -> - None - | LookupsPerRow 4 
-> - Some (Lazy.force feature_flags.lookups_per_row_4) - | LookupsPerRow i when i <= 3 -> - Some (Lazy.force feature_flags.lookups_per_row_3) - | LookupsPerRow _ -> - None - | LookupPattern Lookup -> - Some feature_flags.features.lookup - | LookupPattern Xor -> - Some (Lazy.force feature_flags.lookup_pattern_xor) - | LookupPattern RangeCheck -> - Some (Lazy.force feature_flags.lookup_pattern_range_check) - | LookupPattern ForeignFieldMul -> - Some feature_flags.features.foreign_field_mul + let lazy_flag = + Plonk_types.Features.Full.get_feature_flag feature_flags feature + in + Option.map ~f:Lazy.force lazy_flag let scalars_env (type boolean t) (module B : Bool_intf with type t = boolean) (module F : Field_with_if_intf with type t = t and type bool = boolean) - ~endo ~mds ~field_of_hex ~domain ~srs_length_log2 + ~endo ~mds ~field_of_hex ~domain ~zk_rows ~srs_length_log2 ({ alpha; beta; gamma; zeta; joint_combiner; feature_flags } : (t, _, boolean) Minimal.t ) (e : (_ * _, _) Plonk_types.Evals.In_circuit.t) = @@ -264,7 +163,7 @@ let scalars_env (type boolean t) (module B : Bool_intf with type t = boolean) let get_eval = match (row : Scalars.curr_or_next) with Curr -> fst | Next -> snd in - match (col : Scalars.Column.t) with + match[@warning "-4"] (col : Scalars.Column.t) with | Witness i -> get_eval witness.(i) | Index Poseidon -> @@ -335,21 +234,44 @@ let scalars_env (type boolean t) (module B : Bool_intf with type t = boolean) done ; arr in - let w4, w3, w2, w1 = + let ( omega_to_zk_minus_1 + , omega_to_zk + , omega_to_intermediate_powers + , omega_to_zk_plus_1 + , omega_to_minus_1 ) = (* generator^{n - 3} *) let gen = domain#generator in (* gen_inv = gen^{n - 1} = gen^{-1} *) - let w1 = one / gen in - let w2 = square w1 in - let w3 = w2 * w1 in - let w4 = lazy (w3 * w1) in - (w4, w3, w2, w1) + let omega_to_minus_1 = one / gen in + let omega_to_minus_2 = square omega_to_minus_1 in + let omega_to_intermediate_powers, omega_to_zk_plus_1 = + let next_term = ref omega_to_minus_2 in + let omega_to_intermediate_powers = + Array.init + Stdlib.(zk_rows - 3) + ~f:(fun _ -> + let term = !next_term in + next_term := term * omega_to_minus_1 ; + term ) + in + (omega_to_intermediate_powers, !next_term) + in + let omega_to_zk = omega_to_zk_plus_1 * omega_to_minus_1 in + let omega_to_zk_minus_1 = lazy (omega_to_zk * omega_to_minus_1) in + ( omega_to_zk_minus_1 + , omega_to_zk + , omega_to_intermediate_powers + , omega_to_zk_plus_1 + , omega_to_minus_1 ) in let zk_polynomial = - (* Vanishing polynomial of [w1, w2, w3] - evaluated at x = zeta + (* Vanishing polynomial of + [omega_to_minus_1, omega_to_zk_plus_1, omega_to_zk] + evaluated at x = zeta *) - (zeta - w1) * (zeta - w2) * (zeta - w3) + (zeta - omega_to_minus_1) + * (zeta - omega_to_zk_plus_1) + * (zeta - omega_to_zk) in let zeta_to_n_minus_1 = lazy (domain#vanishing_polynomial zeta) in { Scalars.Env.add = ( + ) @@ -363,18 +285,21 @@ let scalars_env (type boolean t) (module B : Bool_intf with type t = boolean) ; cell = Fn.id ; double = (fun x -> of_int 2 * x) ; zk_polynomial - ; omega_to_minus_3 = w3 + ; omega_to_minus_zk_rows = omega_to_zk ; zeta_to_n_minus_1 = domain#vanishing_polynomial zeta + ; zeta_to_srs_length = lazy (pow2pow (module F) zeta srs_length_log2) ; endo_coefficient = endo ; mds = (fun (row, col) -> mds.(row).(col)) ; srs_length_log2 - ; vanishes_on_last_4_rows = + ; vanishes_on_zero_knowledge_and_previous_rows = ( match joint_combiner with | None -> (* No need to compute anything when not using lookups *) F.one | Some _ -> - 
zk_polynomial * (zeta - Lazy.force w4) ) + Array.fold omega_to_intermediate_powers + ~init:(zk_polynomial * (zeta - Lazy.force omega_to_zk_minus_1)) + ~f:(fun acc omega_pow -> acc * (zeta - omega_pow)) ) ; joint_combiner = Option.value joint_combiner ~default:F.one ; beta ; gamma @@ -382,20 +307,20 @@ let scalars_env (type boolean t) (module B : Bool_intf with type t = boolean) (fun i -> let w_to_i = match i with - | 0 -> + | false, 0 -> one - | 1 -> + | false, 1 -> domain#generator - | -1 -> - w1 - | -2 -> - w2 - | -3 -> - w3 - | -4 -> - Lazy.force w4 - | _ -> - failwith "TODO" + | false, -1 -> + omega_to_minus_1 + | false, -2 -> + omega_to_zk_plus_1 + | false, -3 | true, 0 -> + omega_to_zk + | true, -1 -> + Lazy.force omega_to_zk_minus_1 + | b, i -> + failwithf "TODO: unnormalized_lagrange_basis(%b, %i)" b i () in Lazy.force zeta_to_n_minus_1 / (zeta - w_to_i) ) ; if_feature = @@ -428,6 +353,16 @@ module Make (Shifted_value : Shifted_value.S) (Sc : Scalars.S) = struct let zkp = env.zk_polynomial in let alpha_pow = env.alpha_pow in let zeta1m1 = env.zeta_to_n_minus_1 in + let p_eval0 = + Option.value_exn + (Array.fold_right ~init:None p_eval0 ~f:(fun p_eval0 acc -> + match acc with + | None -> + Some p_eval0 + | Some acc -> + let zeta1 = Lazy.force env.zeta_to_srs_length in + Some F.(p_eval0 + (zeta1 * acc)) ) ) + in let open F in let w0 = Vector.to_array e.w |> Array.map ~f:fst in let ft_eval0 = @@ -450,11 +385,11 @@ module Make (Shifted_value : Shifted_value.S) (Sc : Scalars.S) = struct let nominator = ( zeta1m1 * alpha_pow Int.(perm_alpha0 + 1) - * (zeta - env.omega_to_minus_3) + * (zeta - env.omega_to_minus_zk_rows) + (zeta1m1 * alpha_pow Int.(perm_alpha0 + 2) * (zeta - one)) ) * (one - e0 z) in - let denominator = (zeta - env.omega_to_minus_3) * (zeta - one) in + let denominator = (zeta - env.omega_to_minus_zk_rows) * (zeta - one) in let ft_eval0 = ft_eval0 + (nominator / denominator) in let constant_term = Sc.constant_term env in ft_eval0 - constant_term @@ -494,14 +429,9 @@ module Make (Shifted_value : Shifted_value.S) (Sc : Scalars.S) = struct ; gamma ; zeta ; zeta_to_domain_size = env.zeta_to_n_minus_1 + F.one - ; zeta_to_srs_length = pow2pow (module F) zeta env.srs_length_log2 + ; zeta_to_srs_length = Lazy.force env.zeta_to_srs_length ; perm - ; lookup = - ( match joint_combiner with - | None -> - Plonk_types.Opt.None - | Some joint_combiner -> - Some { joint_combiner } ) + ; joint_combiner = Opt.of_option joint_combiner ; feature_flags = actual_feature_flags } @@ -524,12 +454,7 @@ module Make (Shifted_value : Shifted_value.S) (Sc : Scalars.S) = struct ; beta = plonk.beta ; gamma = plonk.gamma ; zeta = plonk.zeta - ; joint_combiner = - ( match plonk.lookup with - | Plonk_types.Opt.None -> - None - | Some l | Maybe (_, l) -> - Some l.In_circuit.Lookup.joint_combiner ) + ; joint_combiner = Opt.to_option_unsafe plonk.joint_combiner ; feature_flags = plonk.feature_flags } evals diff --git a/src/lib/pickles/plonk_checks/plonk_checks.mli b/src/lib/pickles/plonk_checks/plonk_checks.mli index 2d052252742..7479db4491a 100644 --- a/src/lib/pickles/plonk_checks/plonk_checks.mli +++ b/src/lib/pickles/plonk_checks/plonk_checks.mli @@ -1,8 +1,5 @@ open Pickles_types -type 'field vanishing_polynomial_domain = - < vanishing_polynomial : 'field -> 'field > - type 'field plonk_domain = < vanishing_polynomial : 'field -> 'field ; shifts : 'field Pickles_types.Plonk_types.Shifts.t @@ -58,8 +55,7 @@ end type 'f field = (module Field_intf with type t = 'f) -val lookup_tables_used : - 
Plonk_types.Opt.Flag.t Plonk_types.Features.t -> Plonk_types.Opt.Flag.t +val lookup_tables_used : Opt.Flag.t Plonk_types.Features.t -> Opt.Flag.t val domain : 't field @@ -83,6 +79,7 @@ val scalars_env : -> mds:'t array array -> field_of_hex:(string -> 't) -> domain:< generator : 't ; vanishing_polynomial : 't -> 't ; .. > + -> zk_rows:int -> srs_length_log2:int -> ( 't , 't @@ -91,7 +88,7 @@ val scalars_env : -> ('t * 't, 'a) Pickles_types.Plonk_types.Evals.In_circuit.t -> 't Scalars.Env.t -module Make (Shifted_value : Pickles_types.Shifted_value.S) (Sc : Scalars.S) : sig +module Make (Shifted_value : Pickles_types.Shifted_value.S) (_ : Scalars.S) : sig val ft_eval0 : 't field -> domain:< shifts : 't array ; .. > @@ -101,7 +98,7 @@ module Make (Shifted_value : Pickles_types.Shifted_value.S) (Sc : Scalars.S) : s , 'b ) Composition_types.Wrap.Proof_state.Deferred_values.Plonk.Minimal.t -> ('t * 't, 'a) Pickles_types.Plonk_types.Evals.In_circuit.t - -> 't + -> 't array -> 't val derive_plonk : @@ -117,13 +114,8 @@ module Make (Shifted_value : Pickles_types.Shifted_value.S) (Sc : Scalars.S) : s -> ( 't , 't , 't Shifted_value.t - , ('t Shifted_value.t, 'b) Pickles_types.Plonk_types.Opt.t - , ( 't - Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit - .Lookup - .t - , 'b ) - Pickles_types.Plonk_types.Opt.t + , ('t Shifted_value.t, 'b) Pickles_types.Opt.t + , ('t, 'b) Pickles_types.Opt.t , 'b ) Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.t @@ -136,13 +128,10 @@ module Make (Shifted_value : Pickles_types.Shifted_value.S) (Sc : Scalars.S) : s , 't Snarky_backendless.Cvar.t Shifted_value.t , ( 't Snarky_backendless.Cvar.t Shifted_value.t , 't Snarky_backendless.Cvar.t Snarky_backendless.Boolean.t ) - Pickles_types.Plonk_types.Opt.t + Pickles_types.Opt.t , ( 't Snarky_backendless.Cvar.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit - .Lookup - .t , 't Snarky_backendless.Cvar.t Snarky_backendless.Boolean.t ) - Pickles_types.Plonk_types.Opt.t + Pickles_types.Opt.t , 't Snarky_backendless.Cvar.t Snarky_backendless.Boolean.t ) Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.t -> ( 't Snarky_backendless.Cvar.t * 't Snarky_backendless.Cvar.t diff --git a/src/lib/pickles/plonk_checks/scalars.ml b/src/lib/pickles/plonk_checks/scalars.ml index a994f983421..74cc2136a21 100644 --- a/src/lib/pickles/plonk_checks/scalars.ml +++ b/src/lib/pickles/plonk_checks/scalars.ml @@ -1,5 +1,8 @@ (* This file is generated by gen_scalars/gen_scalars.exe. 
*) +(* turn off fragile pattern-matching warning from sexp ppx *) +[@@@warning "-4"] + type curr_or_next = Curr | Next [@@deriving hash, eq, compare, sexp] module Gate_type = struct @@ -76,8 +79,9 @@ module Env = struct ; pow : 'a * int -> 'a ; square : 'a -> 'a ; zk_polynomial : 'a - ; omega_to_minus_3 : 'a + ; omega_to_minus_zk_rows : 'a ; zeta_to_n_minus_1 : 'a + ; zeta_to_srs_length : 'a Lazy.t ; var : Column.t * curr_or_next -> 'a ; field : string -> 'a ; cell : 'a -> 'a @@ -86,11 +90,11 @@ module Env = struct ; endo_coefficient : 'a ; mds : int * int -> 'a ; srs_length_log2 : int - ; vanishes_on_last_4_rows : 'a + ; vanishes_on_zero_knowledge_and_previous_rows : 'a ; joint_combiner : 'a ; beta : 'a ; gamma : 'a - ; unnormalized_lagrange_basis : int -> 'a + ; unnormalized_lagrange_basis : bool * int -> 'a ; if_feature : Kimchi_types.feature_flag * (unit -> 'a) * (unit -> 'a) -> 'a } end @@ -117,10 +121,11 @@ module Tick : S = struct ; alpha_pow ; double ; zk_polynomial = _ - ; omega_to_minus_3 = _ + ; omega_to_minus_zk_rows = _ ; zeta_to_n_minus_1 = _ + ; zeta_to_srs_length = _ ; srs_length_log2 = _ - ; vanishes_on_last_4_rows + ; vanishes_on_zero_knowledge_and_previous_rows ; joint_combiner ; beta ; gamma @@ -2758,7 +2763,7 @@ module Tick : S = struct ( LookupTables , (fun () -> alpha_pow 24 - * ( vanishes_on_last_4_rows + * ( vanishes_on_zero_knowledge_and_previous_rows * ( cell (var (LookupAggreg, Next)) * ( if_feature ( LookupsPerRow 0 @@ -3329,13 +3334,13 @@ module Tick : S = struct + cell (var (LookupTable, Curr)) + (beta * cell (var (LookupTable, Next))) ) ) ) ) + alpha_pow 25 - * ( unnormalized_lagrange_basis 0 + * ( unnormalized_lagrange_basis (false, 0) * ( cell (var (LookupAggreg, Curr)) - field "0x0000000000000000000000000000000000000000000000000000000000000001" ) ) + alpha_pow 26 - * ( unnormalized_lagrange_basis (-4) + * ( unnormalized_lagrange_basis (true, -1) * ( cell (var (LookupAggreg, Curr)) - field "0x0000000000000000000000000000000000000000000000000000000000000001" @@ -3344,7 +3349,7 @@ module Tick : S = struct * if_feature ( LookupsPerRow 1 , (fun () -> - unnormalized_lagrange_basis (-4) + unnormalized_lagrange_basis (true, -1) * ( cell (var (LookupSorted 0, Curr)) - cell (var (LookupSorted 1, Curr)) ) ) , fun () -> @@ -3355,7 +3360,7 @@ module Tick : S = struct * if_feature ( LookupsPerRow 2 , (fun () -> - unnormalized_lagrange_basis 0 + unnormalized_lagrange_basis (false, 0) * ( cell (var (LookupSorted 1, Curr)) - cell (var (LookupSorted 2, Curr)) ) ) , fun () -> @@ -3366,7 +3371,7 @@ module Tick : S = struct * if_feature ( LookupsPerRow 3 , (fun () -> - unnormalized_lagrange_basis (-4) + unnormalized_lagrange_basis (true, -1) * ( cell (var (LookupSorted 2, Curr)) - cell (var (LookupSorted 3, Curr)) ) ) , fun () -> @@ -3377,7 +3382,7 @@ module Tick : S = struct * if_feature ( LookupsPerRow 4 , (fun () -> - unnormalized_lagrange_basis 0 + unnormalized_lagrange_basis (false, 0) * ( cell (var (LookupSorted 3, Curr)) - cell (var (LookupSorted 4, Curr)) ) ) , fun () -> @@ -3417,10 +3422,11 @@ module Tock : S = struct ; alpha_pow ; double ; zk_polynomial = _ - ; omega_to_minus_3 = _ + ; omega_to_minus_zk_rows = _ ; zeta_to_n_minus_1 = _ + ; zeta_to_srs_length = _ ; srs_length_log2 = _ - ; vanishes_on_last_4_rows = _ + ; vanishes_on_zero_knowledge_and_previous_rows = _ ; joint_combiner = _ ; beta = _ ; gamma = _ diff --git a/src/lib/pickles/plonk_curve_ops.ml b/src/lib/pickles/plonk_curve_ops.ml index ea4b52a5a9e..65c154a2056 100644 --- 
a/src/lib/pickles/plonk_curve_ops.ml +++ b/src/lib/pickles/plonk_curve_ops.ml @@ -45,7 +45,7 @@ let add_fast (type f) let p3 = (x3, y3) in with_label "add_fast" (fun () -> assert_ - { annotation = Some __LOC__ + { Snarky_backendless.Constraint.annotation = Some __LOC__ ; basic = Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T (EC_add_complete @@ -125,7 +125,7 @@ struct :: !rounds_rev done ; assert_ - { annotation = Some __LOC__ + { Snarky_backendless.Constraint.annotation = Some __LOC__ ; basic = Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T (EC_scale { state = Array.of_list_rev !rounds_rev }) @@ -200,7 +200,7 @@ struct :: !rounds_rev done ; assert_ - { annotation = Some __LOC__ + { Snarky_backendless.Constraint.annotation = Some __LOC__ ; basic = Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T (EC_scale { state = Array.of_list_rev !rounds_rev }) @@ -289,82 +289,8 @@ struct in the other field. *) with_label __LOC__ (fun () -> Field.Assert.equal Field.((of_int 2 * s_div_2) + (s_odd :> Field.t)) s ) ; - scale_fast2 g (Shifted_value s_parts) ~num_bits + scale_fast2 g (Pickles_types.Shifted_value.Type2.Shifted_value s_parts) + ~num_bits let scale_fast a b = with_label __LOC__ (fun () -> scale_fast a b) - - let%test_module "curve_ops" = - ( module struct - module T = Internal_Basic - - let random_point = - let rec pt x = - let y2 = G.Params.(T.Field.(b + (x * (a + (x * x))))) in - if T.Field.is_square y2 then (x, T.Field.sqrt y2) - else pt T.Field.(x + one) - in - G.Constant.of_affine (pt (T.Field.of_int 0)) - - let n = Field.size_in_bits - - let%test_unit "scale fast 2" = - Quickcheck.test ~trials:5 Field.Constant.gen ~f:(fun s -> - let input = - let s_odd = T.Bigint.test_bit (T.Bigint.of_field s) 0 in - Field.Constant.((if s_odd then s - one else s) / of_int 2, s_odd) - in - T.Test.test_equal ~equal:G.Constant.equal - ~sexp_of_t:G.Constant.sexp_of_t - (Typ.tuple2 G.typ (Typ.tuple2 Field.typ Boolean.typ)) - G.typ - (fun (g, s) -> - make_checked (fun () -> - scale_fast2 ~num_bits:n g (Shifted_value s) ) ) - (fun (g, _) -> - let x = - let chunks_needed = chunks_needed ~num_bits:(n - 1) in - let actual_bits_used = chunks_needed * bits_per_chunk in - Pickles_types.Pcs_batch.pow ~one:G.Constant.Scalar.one - ~mul:G.Constant.Scalar.( * ) - G.Constant.Scalar.(of_int 2) - actual_bits_used - |> G.Constant.Scalar.( + ) - (G.Constant.Scalar.project (Field.Constant.unpack s)) - in - G.Constant.scale g x ) - (random_point, input) ) - - let%test_unit "scale fast" = - let open Pickles_types in - let shift = - Shifted_value.Type1.Shift.create (module G.Constant.Scalar) - in - Quickcheck.test ~trials:10 - Quickcheck.Generator.( - map (list_with_length n Bool.quickcheck_generator) ~f:(fun bs -> - Field.Constant.project bs |> Field.Constant.unpack )) - ~f:(fun xs -> - try - T.Test.test_equal ~equal:G.Constant.equal - ~sexp_of_t:G.Constant.sexp_of_t - (Typ.tuple2 G.typ (Typ.list ~length:n Boolean.typ)) - G.typ - (fun (g, s) -> - make_checked (fun () -> - scale_fast ~num_bits:n g (Shifted_value (Field.project s)) ) - ) - (fun (g, s) -> - let open G.Constant.Scalar in - let s = project s in - let x = - Shifted_value.Type1.to_field - (module G.Constant.Scalar) - ~shift (Shifted_value s) - in - G.Constant.scale g x ) - (random_point, xs) - with e -> - eprintf !"Input %{sexp: bool list}\n%!" 
xs ; - raise e ) - end ) end diff --git a/src/lib/pickles/plonk_curve_ops.mli b/src/lib/pickles/plonk_curve_ops.mli index 2dcebf8ed37..be69a9f2b4d 100644 --- a/src/lib/pickles/plonk_curve_ops.mli +++ b/src/lib/pickles/plonk_curve_ops.mli @@ -41,28 +41,51 @@ module Make -> num_bits:int -> G.t + (** Interface for the scalar field of the curve *) module type Scalar_field_intf = sig module Constant : sig + (** Represents an element of the field *) type t + (** The number of bits in the field's order, i.e. + [1 + log2(field_order)] *) val size_in_bits : int + (** The identity element for the addition *) val zero : t + (** The identity element for the multiplication *) val one : t + (** [of_int x] builds an element of type [t]. [x] is supposed to be the + canonical representation of the field element. + *) val of_int : int -> t + (** [a * b] returns the unique value [c] such that [a * b = c mod p] where + [p] is the order of the field *) val ( * ) : t -> t -> t + (** [a / b] returns the unique value [c] such that [b * c = a mod p] where + [p] is the order of the field + *) val ( / ) : t -> t -> t + (** [a + b] returns the unique value [c] such that [a + b = c mod p] where + [p] is the order of the field *) val ( + ) : t -> t -> t + (** [a - b] returns the unique value [c] such that [b + c = a mod p] where + [p] is the order of the field *) val ( - ) : t -> t -> t + (** [inv x] returns the unique value [y] such that [x * y = one mod p] + where [p] is the order of the field. + *) val inv : t -> t + (** [negate x] returns the unique value [y] such that [x + y = zero mod p] + where [p] is the order of the field *) val negate : t -> t val to_bigint : t -> Impl.Bigint.t diff --git a/src/lib/pickles/proof.ml b/src/lib/pickles/proof.ml index 118c5488d58..a1a905fb2d2 100644 --- a/src/lib/pickles/proof.ml +++ b/src/lib/pickles/proof.ml @@ -1,7 +1,6 @@ open Core_kernel open Pickles_types open Import -open Common open Backend let hash_fold_array = Pickles_types.Plonk_types.hash_fold_array @@ -25,19 +24,10 @@ module Base = struct Types.Step.Statement.t ; index : int ; prev_evals : 'prev_evals - ; proof : Tick.Proof.t + ; proof : Tick.Proof.with_public_evals } end - module Double = struct - [%%versioned - module Stable = struct - module V1 = struct - type 'a t = 'a * 'a [@@deriving compare, sexp, yojson, hash, equal] - end - end] - end - module Wrap = struct [%%versioned module Stable = struct @@ -45,9 +35,6 @@ module Base = struct module V2 = struct type ('messages_for_next_wrap_proof, 'messages_for_next_step_proof) t = - ( 'messages_for_next_wrap_proof - , 'messages_for_next_step_proof ) - Mina_wire_types.Pickles.Concrete_.Proof.Base.Wrap.V2.t = { statement : ( Limb_vector.Constant.Hex64.Stable.V1.t Vector.Vector_2.Stable.V1.t , Challenge.Constant.t Scalar_challenge.Stable.V2.t , Challenge.Constant.t Scalar_challenge.Stable.V2.t @@ -77,9 +64,13 @@ end] type ('messages_for_next_wrap_proof, 'messages_for_next_step_proof) t = (* NB: This should be on the *serialized type*. However, the actual + serialized type [Repr.t] is hidden by this module, so this alias is + effectively junk anyway..
+ *) + ( 'messages_for_next_wrap_proof + , 'messages_for_next_step_proof ) + Mina_wire_types.Pickles.Concrete_.Proof.Base.Wrap.V2.t = { statement : ( Challenge.Constant.t , Challenge.Constant.t Scalar_challenge.t @@ -126,7 +117,7 @@ let dummy (type w h r) (_w : w Nat.t) (h : h Nat.t) let g0 = Tock.Curve.(to_affine_exn one) in let g len = Array.create ~len g0 in let tick_arr len = Array.init len ~f:(fun _ -> tick ()) in - let lengths = Commitment_lengths.create ~of_int:Fn.id in + let lengths = Commitment_lengths.default ~num_chunks:1 (* TODO *) in T { statement = { proof_state = @@ -140,7 +131,7 @@ let dummy (type w h r) (_w : w Nat.t) (h : h Nat.t) N1 | S (S Z) -> N2 - | _ -> + | S _ -> assert false ) ; domain_log2 = Branch_data.Domain_log2.of_int_exn domain_log2 @@ -184,27 +175,28 @@ let dummy (type w h r) (_w : w Nat.t) (h : h Nat.t) ; lookup = None } ; openings = - { proof = - { lr = - Array.init (Nat.to_int Tock.Rounds.n) ~f:(fun _ -> - (g0, g0) ) - ; z_1 = Ro.tock () - ; z_2 = Ro.tock () - ; delta = g0 - ; challenge_polynomial_commitment = g0 - } - ; evals = Dummy.evals.evals.evals - ; ft_eval1 = Dummy.evals.ft_eval1 - } + (let evals = Lazy.force Dummy.evals in + { proof = + { lr = + Array.init (Nat.to_int Tock.Rounds.n) ~f:(fun _ -> + (g0, g0) ) + ; z_1 = Ro.tock () + ; z_2 = Ro.tock () + ; delta = g0 + ; challenge_polynomial_commitment = g0 + } + ; evals = evals.evals.evals + ; ft_eval1 = evals.ft_eval1 + } ) } ; prev_evals = (let e = - Plonk_types.Evals.map (Evaluation_lengths.create ~of_int:Fn.id) - ~f:(fun n -> (tick_arr n, tick_arr n)) + Plonk_types.Evals.map Evaluation_lengths.default ~f:(fun n -> + (tick_arr n, tick_arr n) ) in let ex = { Plonk_types.All_evals.With_public_input.public_input = - (tick (), tick ()) + ([| tick () |], [| tick () |]) ; evals = e } in @@ -365,8 +357,43 @@ module Proofs_verified_2 = struct include T.Repr - (* Force the typechecker to verify that these types are equal. *) - let (_ : (t, Stable.Latest.t) Type_equal.t) = Type_equal.T + let to_binable + ({ statement + ; prev_evals = { evals = { public_input; evals }; ft_eval1 } + ; proof + } : + t ) : Stable.Latest.t = + { statement + ; prev_evals = + { evals = + { public_input = + (let x1, x2 = public_input in + (x1.(0), x2.(0)) ) + ; evals + } + ; ft_eval1 + } + ; proof + } + + let of_binable + ({ statement + ; prev_evals = { evals = { public_input; evals }; ft_eval1 } + ; proof + } : + Stable.Latest.t ) : t = + { statement + ; prev_evals = + { evals = + { public_input = + (let x1, x2 = public_input in + ([| x1 |], [| x2 |]) ) + ; evals + } + ; ft_eval1 + } + ; proof + } end [%%versioned_binable @@ -386,9 +413,9 @@ module Proofs_verified_2 = struct (struct type nonrec t = t - let to_binable = to_repr + let to_binable x = Repr.to_binable (to_repr x) - let of_binable = of_repr + let of_binable x = of_repr (Repr.of_binable x) end) end end] @@ -435,8 +462,43 @@ module Proofs_verified_max = struct include T.Repr - (* Force the typechecker to verify that these types are equal. 
*) - let (_ : (t, Stable.Latest.t) Type_equal.t) = Type_equal.T + let to_binable + ({ statement + ; prev_evals = { evals = { public_input; evals }; ft_eval1 } + ; proof + } : + t ) : Stable.Latest.t = + { statement + ; prev_evals = + { evals = + { public_input = + (let x1, x2 = public_input in + (x1.(0), x2.(0)) ) + ; evals + } + ; ft_eval1 + } + ; proof + } + + let of_binable + ({ statement + ; prev_evals = { evals = { public_input; evals }; ft_eval1 } + ; proof + } : + Stable.Latest.t ) : t = + { statement + ; prev_evals = + { evals = + { public_input = + (let x1, x2 = public_input in + ([| x1 |], [| x2 |]) ) + ; evals + } + ; ft_eval1 + } + ; proof + } end [%%versioned_binable @@ -456,9 +518,9 @@ module Proofs_verified_max = struct (struct type nonrec t = t - let to_binable = to_repr + let to_binable x = Repr.to_binable (to_repr x) - let of_binable = of_repr + let of_binable x = of_repr (Repr.of_binable x) end) end end] diff --git a/src/lib/pickles/proof.mli b/src/lib/pickles/proof.mli index 00b0d1bb18d..7c78667abdd 100644 --- a/src/lib/pickles/proof.mli +++ b/src/lib/pickles/proof.mli @@ -1,3 +1,7 @@ +(** The type of intermediate (step) and emitted (wrap) proofs that pickles + generates +*) + module Base : sig module Messages_for_next_proof_over_same_field = Reduced_messages_for_next_proof_over_same_field @@ -17,18 +21,17 @@ module Base : sig Import.Types.Step.Statement.t ; index : int ; prev_evals : 'prev_evals - ; proof : Backend.Tick.Proof.t + ; proof : Backend.Tick.Proof.with_public_evals } end module Wrap : sig [%%versioned: module Stable : sig + [@@@no_toplevel_latest_type] + module V2 : sig type ('messages_for_next_wrap_proof, 'messages_for_next_step_proof) t = - ( 'messages_for_next_wrap_proof - , 'messages_for_next_step_proof ) - Mina_wire_types.Pickles.Concrete_.Proof.Base.Wrap.V2.t = { statement : ( Limb_vector.Constant.Hex64.Stable.V1.t Pickles_types.Vector.Vector_2.Stable.V1.t @@ -54,14 +57,18 @@ module Base : sig Pickles_types.Plonk_types.All_evals.Stable.V1.t ; proof : Wrap_wire_proof.Stable.V1.t } - [@@deriving compare, sexp, yojson, hash, equal] + [@@deriving compare, sexp, hash, equal] end end] type ('messages_for_next_wrap_proof, 'messages_for_next_step_proof) t = - ( 'messages_for_next_wrap_proof - , 'messages_for_next_step_proof ) - Stable.Latest.t = + (* NB: This should be on the *serialized type*. However, the actual + serialized type [Repr.t] is hidden by this module, so this alias is + effectively junk anyway.. + *) + ( 'messages_for_next_wrap_proof + , 'messages_for_next_step_proof ) + Mina_wire_types.Pickles.Concrete_.Proof.Base.Wrap.V2.t = { statement : ( Import.Challenge.Constant.t , Import.Challenge.Constant.t Import.Scalar_challenge.t diff --git a/src/lib/pickles/proof_cache.ml b/src/lib/pickles/proof_cache.ml index b1540bb2e48..56784778e0f 100644 --- a/src/lib/pickles/proof_cache.ml +++ b/src/lib/pickles/proof_cache.ml @@ -1,5 +1,7 @@ open Core_kernel +[@@@warning "-4-27"] + module Yojson_map = Map.Make (struct type t = [ `Null @@ -15,6 +17,8 @@ module Yojson_map = Map.Make (struct [@@deriving compare, sexp] end) +[@@@warning "+4+27"] + type t = Yojson.Safe.t Yojson_map.t Yojson_map.t ref (* We use a slightly more verbose format here, so that it's easy to debug. 
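(* A reading aid, not part of the patch: the cache above is a two-level map,
   keyed first by the JSON of the verification key and then by the JSON of the
   public input. A lookup over that structure looks roughly like the sketch
   below ([get_proof] here is illustrative; the accessor in this file may
   differ):

     let get_proof (t : t) ~verification_key ~public_input =
       let open Option.Let_syntax in
       let%bind for_vk = Map.find !t verification_key in
       Map.find for_vk public_input
*)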
@@ -136,6 +140,12 @@ module Json = struct ; mul_comm : 'poly_comm ; emul_comm : 'poly_comm ; endomul_scalar_comm : 'poly_comm + ; xor_comm : 'poly_comm option [@default None] + ; range_check0_comm : 'poly_comm option [@default None] + ; range_check1_comm : 'poly_comm option [@default None] + ; foreign_field_add_comm : 'poly_comm option [@default None] + ; foreign_field_mul_comm : 'poly_comm option [@default None] + ; rot_comm : 'poly_comm option [@default None] } [@@deriving to_yojson] @@ -149,6 +159,7 @@ module Json = struct ; evals : 'poly_comm verification_evals ; shifts : 'fr array ; lookup_index : 'poly_comm lookup option + ; zk_rows : int [@default 3] } [@@deriving to_yojson] @@ -215,7 +226,6 @@ let set_proof t ~verification_key ~public_input proof = Map.set for_vk ~key:public_input ~data:proof ) let set_step_proof t ~keypair ~public_input proof = - let open Option.Let_syntax in let public_input = let len = Kimchi_bindings.FieldVectors.Fp.length public_input in Array.init len ~f:(fun i -> @@ -229,7 +239,6 @@ let set_step_proof t ~keypair ~public_input proof = let set_wrap_proof t ~keypair ~public_input proof = - let open Option.Let_syntax in let public_input = let len = Kimchi_bindings.FieldVectors.Fq.length public_input in Array.init len ~f:(fun i -> diff --git a/src/lib/pickles/pseudo/dune b/src/lib/pickles/pseudo/dune index 529d90fdc13..76032929257 100644 --- a/src/lib/pickles/pseudo/dune +++ b/src/lib/pickles/pseudo/dune @@ -1,7 +1,11 @@ (library (name pseudo) (public_name pickles.pseudo) - (flags -warn-error -27) + (flags + (:standard + -w +a-40..42-44 + -warn-error +a) + -open Core_kernel) (instrumentation (backend bisect_ppx)) (preprocess (pps ppx_version ppx_mina ppx_jane ppx_deriving.std ppx_deriving_yojson )) (libraries diff --git a/src/lib/pickles/pseudo/pseudo.ml b/src/lib/pickles/pseudo/pseudo.ml index 17199ecb9b6..8a8e4d712a9 100644 --- a/src/lib/pickles/pseudo/pseudo.ml +++ b/src/lib/pickles/pseudo/pseudo.ml @@ -88,7 +88,6 @@ module Make (Impl : Snarky_backendless.Snark_intf.Run) = struct else if disabled_not_the_same then failwith "Pseudo.Domain.shifts: found variable shifts" else - let open Pickles_types.Plonk_types.Shifts in let get_ith_shift i = mask which (Vector.map all_shifts ~f:(fun a -> Field.constant a.(i))) diff --git a/src/lib/pickles/pseudo/pseudo.mli b/src/lib/pickles/pseudo/pseudo.mli index a3f1b764dfd..4f4794d0959 100644 --- a/src/lib/pickles/pseudo/pseudo.mli +++ b/src/lib/pickles/pseudo/pseudo.mli @@ -1,6 +1,10 @@ -(* Pseudo *) +(** Encode a mask on a vector with a one-hot vector. Can be used to select at + compile time an element of a vector. Users include the step/wrap circuits, + which use it to select the correct verification key. *) module Make (Impl : Snarky_backendless.Snark_intf.Run) : sig + (** The type parameter ['n] is the size of the vector, and ['a] is the type + of the vector elements.
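+
+      For instance, selecting the element designated by the one-hot mask
+      amounts to a dot product. A minimal sketch, assuming the usual snarky
+      [Field] and [Boolean] operations ([select] is illustrative, not this
+      module's API):
+      {[
+        let select (bits, xs) =
+          Pickles_types.Vector.map2
+            (bits :> (Boolean.var, _) Pickles_types.Vector.t)
+            xs
+            ~f:(fun b x -> Field.((b :> t) * x))
+          |> Pickles_types.Vector.fold ~init:Field.zero ~f:Field.( + )
+      ]}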
*) type ('a, 'n) t = 'n One_hot_vector.T(Impl).t * ('a, 'n) Pickles_types.Vector.t diff --git a/src/lib/pickles/reduced_messages_for_next_proof_over_same_field.ml b/src/lib/pickles/reduced_messages_for_next_proof_over_same_field.ml index 7489d36ac1c..73f5be08ab0 100644 --- a/src/lib/pickles/reduced_messages_for_next_proof_over_same_field.ml +++ b/src/lib/pickles/reduced_messages_for_next_proof_over_same_field.ml @@ -1,7 +1,6 @@ open Core_kernel open Import open Pickles_types -open Types open Common open Backend diff --git a/src/lib/pickles/reduced_messages_for_next_proof_over_same_field.mli b/src/lib/pickles/reduced_messages_for_next_proof_over_same_field.mli index d539add44f7..fef302aa808 100644 --- a/src/lib/pickles/reduced_messages_for_next_proof_over_same_field.mli +++ b/src/lib/pickles/reduced_messages_for_next_proof_over_same_field.mli @@ -1,3 +1,7 @@ +(** Structures for message passing between proofs over the same field, passed + opaquely as a hash through the intermediate proof over the other field. +*) + module Step : sig module Stable : sig module V1 : sig diff --git a/src/lib/pickles/requests.ml b/src/lib/pickles/requests.ml index 94b086a8fed..352f4a22eaa 100644 --- a/src/lib/pickles/requests.ml +++ b/src/lib/pickles/requests.ml @@ -1,10 +1,8 @@ -open Core_kernel open Import open Types open Pickles_types open Hlist open Snarky_backendless.Request -open Common open Backend module Wrap = struct @@ -14,7 +12,6 @@ module Wrap = struct type max_local_max_proofs_verifieds open Impls.Wrap - open Wrap_main_inputs open Snarky_backendless.Request type _ t += @@ -132,7 +129,7 @@ module Step = struct , local_branches ) H3.T(Per_proof_witness.Constant.No_app_state).t t - | Wrap_index : Tock.Curve.Affine.t Plonk_verification_key_evals.t t + | Wrap_index : Tock.Curve.Affine.t array Plonk_verification_key_evals.t t | App_state : statement t | Return_value : return_value -> unit t | Auxiliary_value : auxiliary_value -> unit t @@ -145,7 +142,7 @@ module Step = struct end let create : - type proofs_verified local_signature local_branches statement return_value auxiliary_value prev_values prev_ret_values max_proofs_verified. + type proofs_verified local_signature local_branches statement return_value auxiliary_value prev_values max_proofs_verified. unit -> (module S with type local_signature = local_signature @@ -186,7 +183,8 @@ module Step = struct , local_branches ) H3.T(Per_proof_witness.Constant.No_app_state).t t - | Wrap_index : Tock.Curve.Affine.t Plonk_verification_key_evals.t t + | Wrap_index : + Tock.Curve.Affine.t array Plonk_verification_key_evals.t t | App_state : statement t | Return_value : return_value -> unit t | Auxiliary_value : auxiliary_value -> unit t diff --git a/src/lib/pickles/requests.mli b/src/lib/pickles/requests.mli index df2de0d1246..6f9439de8ff 100644 --- a/src/lib/pickles/requests.mli +++ b/src/lib/pickles/requests.mli @@ -1,3 +1,7 @@ +(** The snarky request types that the step and wrap circuits use to get + non-deterministic witness data from the prover. 
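+
+    A rule's [main] function issues these requests while generating its
+    witness, and the prover supplies a handler for them. A minimal sketch in
+    the style of the test handlers elsewhere in this patch, where
+    [Prev_input] is a request constructor carrying a [Field.Constant.t]:
+    {[
+      let handler (prev_input : Field.Constant.t)
+          (Snarky_backendless.Request.With { request; respond }) =
+        match request with
+        | Prev_input ->
+            respond (Provide prev_input)
+        | _ ->
+            respond Unhandled
+    ]}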
+*) + open Pickles_types module Step : sig @@ -31,7 +35,7 @@ module Step : sig Hlist.H3.T(Per_proof_witness.Constant.No_app_state).t Snarky_backendless.Request.t | Wrap_index : - Backend.Tock.Curve.Affine.t Plonk_verification_key_evals.t + Backend.Tock.Curve.Affine.t array Plonk_verification_key_evals.t Snarky_backendless.Request.t | App_state : statement Snarky_backendless.Request.t | Return_value : return_value -> unit Snarky_backendless.Request.t diff --git a/src/lib/pickles/ro.ml b/src/lib/pickles/ro.ml index 4770a966d63..bcc9f0f07aa 100644 --- a/src/lib/pickles/ro.ml +++ b/src/lib/pickles/ro.ml @@ -1,6 +1,5 @@ open Core_kernel open Backend -open Pickles_types open Import let bits_random_oracle = diff --git a/src/lib/pickles/ro.mli b/src/lib/pickles/ro.mli index c340b25e10e..ca3c9dfacc0 100644 --- a/src/lib/pickles/ro.mli +++ b/src/lib/pickles/ro.mli @@ -1,11 +1,21 @@ +(** Implements an interface simulating a {{: https://en.wikipedia.org/wiki/Random_oracle} random oracle}. *) + +(** Bits alias *) type bits := bool list +(** [bits_random_oracle ~length seed] generates a list of [length] bits using + the seed [seed]. Blake2s is used. *) val bits_random_oracle : length:int -> String.t -> bits +(** [ro seed length f] generates a sequence of [length] bits using a random + oracle seeded with [seed] and converts it into a value of type ['a] using the + function [f] *) val ro : string -> int -> (bits -> 'a) -> unit -> 'a +(** Random oracle generating elements of the Tock field *) val tock : unit -> Backend.Tock.Field.t +(** Random oracle generating elements of the Tick field *) val tick : unit -> Backend.Tick.Field.t val scalar_chal : diff --git a/src/lib/pickles/scalar_challenge.ml b/src/lib/pickles/scalar_challenge.ml index 7c1b67ceb82..8fffbce66f2 100644 --- a/src/lib/pickles/scalar_challenge.ml +++ b/src/lib/pickles/scalar_challenge.ml @@ -120,11 +120,12 @@ let to_field_checked' (type f) ?(num_bits = num_bits) done ; with_label __LOC__ (fun () -> assert_ - { annotation = Some __LOC__ - ; basic = - Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.( - T (EC_endoscalar { state = Array.of_list_rev !state })) - } ) ; + Snarky_backendless.Constraint. + { annotation = Some __LOC__ + ; basic = + Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.( + T (EC_endoscalar { state = Array.of_list_rev !state })) + } ) ; (!a, !b, !n) let to_field_checked (type f) ?num_bits @@ -151,43 +152,6 @@ let to_field_constant (type f) ~endo done ; F.((!a * endo) + !b) -let test (type f) - (module Impl : Snarky_backendless.Snark_intf.Run with type field = f) - ~(endo : f) = - let open Impl in - let module T = Internal_Basic in - let n = 128 in - let module Field_constant = struct - include Field.Constant - - type nonrec bool = bool - - let if_ b ~then_ ~else_ = if b then then_ () else else_ () - end in - Quickcheck.test ~trials:10 - (Quickcheck.Generator.list_with_length n Bool.quickcheck_generator) - ~f:(fun xs -> - try - T.Test.test_equal ~equal:Field.Constant.equal - ~sexp_of_t:Field.Constant.sexp_of_t - (Typ.list ~length:n Boolean.typ) - Field.typ - (fun s -> - make_checked (fun () -> - to_field_checked - (module Impl) - ~endo - (SC.create (Impl.Field.pack s)) ) ) - (fun s -> - to_field_constant - (module Field_constant) - ~endo - (SC.create (Challenge.Constant.of_bits s)) ) - xs - with e -> - eprintf !"Input %{sexp: bool list}\n%!"
xs ; - raise e ) - module Make (Impl : Snarky_backendless.Snark_intf.Run) (G : Intf.Group(Impl).S with type t = Impl.Field.t * Impl.Field.t) @@ -309,37 +273,6 @@ struct let endo ?num_bits t s = with_label "endo" (fun () -> endo ?num_bits t s) - let%test_unit "endo" = - let module T = Internal_Basic in - let random_point = - let rec pt x = - let y2 = G.Params.(T.Field.(b + (x * (a + (x * x))))) in - if T.Field.is_square y2 then (x, T.Field.sqrt y2) - else pt T.Field.(x + one) - in - G.Constant.of_affine (pt (T.Field.random ())) - in - let n = 128 in - Quickcheck.test ~trials:10 - (Quickcheck.Generator.list_with_length n Bool.quickcheck_generator) - ~f:(fun xs -> - try - T.Test.test_equal ~equal:G.Constant.equal - ~sexp_of_t:G.Constant.sexp_of_t - (Typ.tuple2 G.typ (Typ.list ~length:n Boolean.typ)) - G.typ - (fun (g, s) -> - make_checked (fun () -> endo g (SC.create (Field.pack s))) ) - (fun (g, s) -> - let x = - Constant.to_field (SC.create (Challenge.Constant.of_bits s)) - in - G.Constant.scale g x ) - (random_point, xs) - with e -> - eprintf !"Input %{sexp: bool list}\n%!" xs ; - raise e ) - let endo_inv ((gx, gy) as g) chal = let res = exists G.typ diff --git a/src/lib/pickles/scalar_challenge.mli b/src/lib/pickles/scalar_challenge.mli index e3fe0a9743a..f5a8a74c896 100644 --- a/src/lib/pickles/scalar_challenge.mli +++ b/src/lib/pickles/scalar_challenge.mli @@ -23,13 +23,11 @@ val to_field_checked : -> 'f Snarky_backendless.Cvar.t Import.Scalar_challenge.t -> 'f Snarky_backendless.Cvar.t -val test : 'f Import.Spec.impl -> endo:'f -> unit - module Make : functor (Impl : Snarky_backendless.Snark_intf.Run) (G : Intf.Group(Impl).S with type t = Impl.Field.t * Impl.Field.t) (Challenge : Import.Challenge.S with module Impl := Impl) - (Endo : sig + (_ : sig val base : Impl.Field.Constant.t val scalar : G.Constant.Scalar.t diff --git a/src/lib/pickles/side_loaded_verification_key.ml b/src/lib/pickles/side_loaded_verification_key.ml index 693c1609754..df697d54471 100644 --- a/src/lib/pickles/side_loaded_verification_key.ml +++ b/src/lib/pickles/side_loaded_verification_key.ml @@ -25,7 +25,6 @@ open Core_kernel open Pickles_types -open Common open Import module V = Pickles_base.Side_loaded_verification_key @@ -37,20 +36,6 @@ include ( let bits = V.bits -let input_size ~of_int ~add ~mul w = - let open Composition_types in - (* This should be an affine function in [a]. 
*) - let size a = - let (T (Typ typ, _conv, _conv_inv)) = - Impls.Step.input ~proofs_verified:a ~wrap_rounds:Backend.Tock.Rounds.n - ~feature_flags:Plonk_types.Features.none - in - typ.size_in_field_elements - in - let f0 = size Nat.N0.n in - let slope = size Nat.N1.n - f0 in - add (of_int f0) (mul (of_int slope) w) - module Width : sig [%%versioned: module Stable : sig @@ -60,14 +45,6 @@ module Width : sig end end] - val of_int_exn : int -> t - - val to_int : t -> int - - val to_bits : t -> bool list - - val zero : t - open Impls.Step module Checked : sig @@ -125,11 +102,12 @@ module Domain = struct let log2_size (Pow_2_roots_of_unity x) = x end +[@@warning "-4"] module Domains = struct include V.Domains - let typ = + let _typ = let open Impls.Step in let dom = Typ.transport Typ.field @@ -150,17 +128,7 @@ let max_domains = module Vk = struct type t = (Impls.Wrap.Verification_key.t[@sexp.opaque]) [@@deriving sexp] - let to_yojson _ = `String "opaque" - - let of_yojson _ = Error "Vk: yojson not supported" - - let hash _ = Unit.hash () - let hash_fold_t s _ = Unit.hash_fold_t s () - - let equal _ _ = true - - let compare _ _ = 0 end module R = struct @@ -217,7 +185,9 @@ module Stable = struct in let log2_size = Import.Domain.log2_size d in let public = - let (T (input, conv, _conv_inv)) = Impls.Wrap.input () in + let (T (input, _conv, _conv_inv)) = + Impls.Wrap.input ~feature_flags:Plonk_types.Features.Full.maybe () + in let (Typ typ) = input in typ.size_in_field_elements in @@ -250,9 +220,16 @@ module Stable = struct ; emul_comm = g c.emul_comm ; complete_add_comm = g c.complete_add_comm ; endomul_scalar_comm = g c.endomul_scalar_comm + ; xor_comm = None + ; range_check0_comm = None + ; range_check1_comm = None + ; foreign_field_add_comm = None + ; foreign_field_mul_comm = None + ; rot_comm = None } ) ; shifts = Common.tock_shifts ~log2_size ; lookup_index = None + ; zk_rows = 3 } ) in { Poly.max_proofs_verified @@ -267,9 +244,9 @@ module Stable = struct let t_of_sexp sexp = of_repr (R.t_of_sexp sexp) - let to_yojson t = R.to_yojson (to_repr t) + let _to_yojson t = R.to_yojson (to_repr t) - let of_yojson json = Result.map ~f:of_repr (R.of_yojson json) + let _of_yojson json = Result.map ~f:of_repr (R.of_yojson json) let equal x y = R.equal (to_repr x) (to_repr y) @@ -343,7 +320,7 @@ module Checked = struct [@@deriving hlist, fields] (** [log_2] of the width. *) - let width_size = Nat.to_int Width.Length.n + let _width_size = Nat.to_int Width.Length.n let to_input = let open Random_oracle_input.Chunked in @@ -366,20 +343,6 @@ module Checked = struct ] end -let%test_unit "input_size" = - List.iter - (List.range 0 (Nat.to_int Width.Max.n) ~stop:`inclusive ~start:`inclusive) - ~f:(fun n -> - [%test_eq: int] - (input_size ~of_int:Fn.id ~add:( + ) ~mul:( * ) n) - (let (T a) = Nat.of_int n in - let (T (Typ typ, _conv, _conv_inv)) = - Impls.Step.input ~proofs_verified:a - ~wrap_rounds:Backend.Tock.Rounds.n - ~feature_flags:Plonk_types.Features.none - in - typ.size_in_field_elements ) ) - let typ : (Checked.t, t) Impls.Step.Typ.t = let open Step_main_inputs in let open Impl in diff --git a/src/lib/pickles/side_loaded_verification_key.mli b/src/lib/pickles/side_loaded_verification_key.mli index 8267890e287..4bd67729897 100644 --- a/src/lib/pickles/side_loaded_verification_key.mli +++ b/src/lib/pickles/side_loaded_verification_key.mli @@ -1,3 +1,7 @@ +(** A homogenized verification key type, used to 'side load' and verify any + pickles proof regardless of its original structure. 
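+
+    A side-loaded circuit is declared by creating a tag up front, without
+    compiling anything; the sketch below mirrors the test harness elsewhere in
+    this patch ([maybe_features] is the feature-flag vector defined there).
+    The concrete verification key is only supplied later, at proving or
+    verification time, against that tag:
+    {[
+      let side_loaded_tag =
+        Side_loaded.create ~name:"foo"
+          ~max_proofs_verified:(Nat.Add.create Nat.N2.n)
+          ~feature_flags:maybe_features ~typ:Field.typ
+    ]}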
+*) + module V = Pickles_base.Side_loaded_verification_key include @@ -66,6 +70,8 @@ module Vk : sig type t = (Impls.Wrap.Verification_key.t[@sexp.opaque]) [@@deriving sexp] end +[@@@warning "-32"] [%%versioned: module Stable : sig module V2 : sig diff --git a/src/lib/pickles/sponge_inputs.ml b/src/lib/pickles/sponge_inputs.ml index c9ed46e1377..1050725221f 100644 --- a/src/lib/pickles/sponge_inputs.ml +++ b/src/lib/pickles/sponge_inputs.ml @@ -1,11 +1,3 @@ -open Core_kernel - -module type Field = sig - include Sponge.Intf.Field - - val square : t -> t -end - module Make (Impl : Snarky_backendless.Snark_intf.Run) (B : sig open Impl @@ -39,11 +31,10 @@ struct done ; res - open Impl - open Field - module Field = Field + module Field = Impl.Field - let block_cipher (params : _ Sponge.Params.t) init = + let block_cipher (_params : _ Sponge.Params.t) init = + let open Impl in Impl.with_label __LOC__ (fun () -> let t = exists @@ -63,7 +54,7 @@ struct t.(Int.(Array.length t - 1)) ) let add_assign ~state i x = - state.(i) <- Util.seal (module Impl) (state.(i) + x) + state.(i) <- Util.seal (module Impl) Field.(state.(i) + x) let copy = Array.copy end diff --git a/src/lib/pickles/sponge_inputs.mli b/src/lib/pickles/sponge_inputs.mli index 691a3052f60..a72f12efc01 100644 --- a/src/lib/pickles/sponge_inputs.mli +++ b/src/lib/pickles/sponge_inputs.mli @@ -1,10 +1,27 @@ +(** This implements a functor to instantiate a + {{: https://en.wikipedia.org/wiki/Substitution%E2%80%93permutation_network} + SPN} used in sponge construction. + + A Substitution-Permutation Network applies, in sequence, a non-linear + operation, a linear operation and the addition of round constants to a + state S. Hash functions like the SHA family, Poseidon, Rescue and others + are based on this generic construction, and consist of applying the same + permutation N times to an initial state S.
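+
+   Concretely, one round of such a permutation can be written in terms of the
+   functor parameters declared below ([round] is illustrative, not part of
+   this interface):
+   {[
+     let round (mds, round_constants) state =
+       (* non-linear layer: raise each element of the state to the power
+          alpha *)
+       let state = Array.map ~f:to_the_alpha state in
+       (* linear layer: multiply by the matrix [mds], then add the round
+          constants [round_constants] *)
+       Operations.apply_affine_map (mds, round_constants) state
+   ]}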
+*) + module Make - (Impl : Snarky_backendless.Snark_intf.Run) (B : sig + (Impl : Snarky_backendless.Snark_intf.Run) (_ : sig + (** The parameters of the permutation *) val params : Impl.field Sponge.Params.t + (** The exponent used in the SBOX *) val to_the_alpha : Impl.field -> Impl.field + (** Internal operations of the permutation *) module Operations : sig + (** [apply_affine_map (mds, rc) state] computes the linear layer of the + permutation using the matrix [mds] and the round constants [rc] with the + state [state] *) val apply_affine_map : Impl.field array array * Impl.field array -> Impl.field array diff --git a/src/lib/pickles/step.ml b/src/lib/pickles/step.ml index bb12f4fedcc..2f2bdab1ad4 100644 --- a/src/lib/pickles/step.ml +++ b/src/lib/pickles/step.ml @@ -1,7 +1,4 @@ module SC = Scalar_challenge -open Core_kernel -open Async_kernel -module P = Proof open Pickles_types open Poly_types open Hlist @@ -19,7 +16,7 @@ module Make end) (Max_proofs_verified : Nat.Add.Intf_transparent) = struct - let double_zip = Double.map2 ~f:Core_kernel.Tuple2.create + let _double_zip = Double.map2 ~f:Core_kernel.Tuple2.create module E = struct type t = Tock.Field.t array Double.t Plonk_types.Evals.t * Tock.Field.t @@ -79,13 +76,15 @@ struct , (_, prevs_length) Vector.t , _ , (_, Max_proofs_verified.n) Vector.t ) - P.Base.Step.t + Proof.Base.Step.t * ret_value * auxiliary_value * (int, prevs_length) Vector.t ) Promise.t = let logger = Internal_tracing_context_logger.get () in [%log internal] "Pickles_step_proof" ; + let _ = auxiliary_typ in + (* unused *) let _, prev_vars_length = branch_data.proofs_verified in let T = Length.contr prev_vars_length prevs_length in let (module Req) = branch_data.requests in @@ -106,9 +105,7 @@ struct , Challenge.Constant.t Scalar_challenge.t , Tick.Field.t Shifted_value.Type1.t , Tick.Field.t Shifted_value.Type1.t option - , Challenge.Constant.t Scalar_challenge.t - Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.Lookup.t - option + , Challenge.Constant.t Scalar_challenge.t option , bool , Digest.Constant.t , Digest.Constant.t @@ -119,15 +116,14 @@ struct Wrap.Statement.In_circuit.t end in let challenge_polynomial = - let open Backend.Tock.Field in - Wrap_verifier.challenge_polynomial ~add ~mul ~one + Wrap_verifier.challenge_polynomial (module Backend.Tock.Field) in let expand_proof : type var value local_max_proofs_verified m. 
Impls.Wrap.Verification_key.t - -> 'a + -> _ array Plonk_verification_key_evals.t -> value - -> (local_max_proofs_verified, local_max_proofs_verified) P.t + -> (local_max_proofs_verified, local_max_proofs_verified) Proof.t -> (var, value, local_max_proofs_verified, m) Tag.t -> must_verify:bool -> [ `Sg of Tock.Curve.Affine.t ] @@ -208,7 +204,7 @@ struct Plonk_checks.scalars_env (module Env_bool) (module Env_field) - ~srs_length_log2:Common.Max_degree.step_log2 + ~srs_length_log2:Common.Max_degree.step_log2 ~zk_rows:data.zk_rows ~endo:Endo.Step_inner_curve.base ~mds:Tick_field_sponge.params.mds ~field_of_hex:(fun s -> Kimchi_pasta.Pasta.Bigint256.of_hex_string s @@ -223,8 +219,6 @@ struct time "plonk_checks" (fun () -> let module Field = struct include Tick.Field - - type nonrec bool = bool end in Plonk_checks.Type1.derive_plonk (module Field) @@ -243,7 +237,7 @@ struct Wrap_deferred_values.expand_deferred ~evals:t.prev_evals ~old_bulletproof_challenges: statement.messages_for_next_step_proof.old_bulletproof_challenges - ~proof_state:statement.proof_state + ~zk_rows:data.zk_rows ~proof_state:statement.proof_state in let prev_statement_with_hashes : ( _ @@ -278,17 +272,7 @@ struct ; alpha = plonk0.alpha ; beta = plonk0.beta ; gamma = plonk0.gamma - ; lookup = - Option.map (Opt.to_option_unsafe plonk.lookup) - ~f:(fun l -> - { Composition_types.Wrap.Proof_state - .Deferred_values - .Plonk - .In_circuit - .Lookup - .joint_combiner = - Option.value_exn plonk0.joint_combiner - } ) + ; joint_combiner = plonk0.joint_combiner } ; combined_inner_product = deferred_values.combined_inner_product @@ -315,7 +299,8 @@ struct let module O = Tock.Oracles in let o = let public_input = - tock_public_input_of_statement prev_statement_with_hashes + tock_public_input_of_statement ~feature_flags + prev_statement_with_hashes in O.create dlog_vk ( Vector.map2 @@ -332,7 +317,7 @@ struct |> Wrap_hack.pad_accumulator ) public_input proof in - let ((x_hat_1, x_hat_2) as x_hat) = O.(p_eval_1 o, p_eval_2 o) in + let ((x_hat_1, _x_hat_2) as x_hat) = O.(p_eval_1 o, p_eval_2 o) in let scalar_chal f = Scalar_challenge.map ~f:Challenge.Constant.of_tock_field (f o) in @@ -415,7 +400,8 @@ struct (Vector.map t.statement.messages_for_next_step_proof .old_bulletproof_challenges ~f:Ipa.Step.compute_challenges ) - Local_max_proofs_verified.n Dummy.Ipa.Step.challenges_computed + Local_max_proofs_verified.n + (Lazy.force Dummy.Ipa.Step.challenges_computed) ; wrap_proof = { opening = { proof.openings.proof with challenge_polynomial_commitment } @@ -469,6 +455,7 @@ struct (module Env_bool) (module Env_field) ~domain:tock_domain ~srs_length_log2:Common.Max_degree.wrap_log2 + ~zk_rows:3 ~field_of_hex:(fun s -> Kimchi_pasta.Pasta.Bigint256.of_hex_string s |> Kimchi_pasta.Pasta.Fq.of_bigint ) @@ -502,7 +489,7 @@ struct Plonk_checks.Type2.ft_eval0 (module Tock.Field) ~domain:tock_domain ~env:tock_env tock_plonk_minimal - tock_combined_evals x_hat_1 + tock_combined_evals [| x_hat_1 |] in let open Tock.Field in combine ~which_eval:`Fst ~ft_eval:ft_eval0 As_field.zeta @@ -512,8 +499,6 @@ struct let plonk = let module Field = struct include Tock.Field - - type nonrec bool = bool end in (* Wrap proof, no features *) Plonk_checks.Type2.derive_plonk @@ -568,7 +553,7 @@ struct , witnesses' , prev_proofs' , actual_wrap_domains' ) = - let rec go : + let[@warning "-4"] rec go : type vars values ns ms k. 
(vars, values, ns, ms) H4.T(Tag).t -> ( values @@ -640,14 +625,14 @@ struct module type S = sig type res - val f : _ P.t -> res + val f : _ Proof.t -> res end end in let extract_from_proofs (type res) (module Extract : Extract.S with type res = res) = let rec go : type vars values ns ms len. - (ns, ns) H2.T(P).t + (ns, ns) H2.T(Proof).t -> (values, vars, ns, ms) H4.T(Tag).t -> (vars, len) Length.t -> (res, len) Vector.t = @@ -672,7 +657,7 @@ struct Challenge.Constant.t Scalar_challenge.t Bulletproof_challenge.t Step_bp_vec.t - let f (T t : _ P.t) = + let f (T t : _ Proof.t) = t.statement.proof_state.deferred_values.bulletproof_challenges end ) in @@ -701,7 +686,7 @@ struct in let messages_for_next_wrap_proof_padded = let rec pad : - type n k maxes pvals lws lhs. + type n k maxes. (Digest.Constant.t, k) Vector.t -> maxes H1.T(Nat).t -> (maxes, n) Hlist.Length.t @@ -710,16 +695,16 @@ struct match (xs, maxes, l) with | [], [], Z -> [] - | x :: xs, [], Z -> + | _x :: _xs, [], Z -> assert false | x :: xs, _ :: ms, S n -> x :: pad xs ms n - | [], m :: ms, S n -> + | [], _m :: ms, S n -> let t : _ Types.Wrap.Proof_state.Messages_for_next_wrap_proof.t = { challenge_polynomial_commitment = Lazy.force Dummy.Ipa.Step.sg ; old_bulletproof_challenges = Vector.init Max_proofs_verified.n ~f:(fun _ -> - Dummy.Ipa.Wrap.challenges_computed ) + Lazy.force Dummy.Ipa.Wrap.challenges_computed ) } in Wrap_hack.hash_messages_for_next_wrap_proof Max_proofs_verified.n t @@ -767,7 +752,7 @@ struct ~f:(fun j acc (Pow_2_roots_of_unity domain) -> if Int.equal domain domain_size then j else acc ) in - Pickles_base.Proofs_verified.of_int domain_index ) + Pickles_base.Proofs_verified.of_int_exn domain_index ) in k wrap_domain_indices | _ -> ( @@ -784,7 +769,7 @@ struct ( module struct type res = Tick.Curve.Affine.t - let f (T t : _ P.t) = + let f (T t : _ Proof.t) = t.statement.proof_state.messages_for_next_wrap_proof .challenge_polynomial_commitment end ) @@ -799,14 +784,13 @@ struct } ) |> to_list) ) in - let%map.Promise (next_proof : Tick.Proof.t), next_statement_hashed = + let%map.Promise ( (next_proof : Tick.Proof.with_public_evals) + , _next_statement_hashed ) = let (T (input, _conv, conv_inv)) = Impls.Step.input ~proofs_verified:Max_proofs_verified.n - ~wrap_rounds:Tock.Rounds.n ~feature_flags - in - let { Domains.h } = - List.nth_exn (Vector.to_list step_domains) branch_data.index + ~wrap_rounds:Tock.Rounds.n in + let { Domains.h } = Vector.nth_exn step_domains branch_data.index in ksprintf Common.time "step-prover %d (%d)" branch_data.index (Domain.size h) (fun () -> @@ -848,10 +832,12 @@ struct then failwith "Regenerated proof" ; let%map.Promise proof = create_proof () in Proof_cache.set_step_proof proof_cache ~keypair:pk - ~public_input:public_inputs proof ; + ~public_input:public_inputs proof.proof ; proof | Some proof -> - Promise.return proof ) + Promise.return + ( { proof; public_evals = None } + : Tick.Proof.with_public_evals ) ) in [%log internal] "Backend_tick_proof_create_async_done" ; (proof, next_statement_hashed) ) @@ -870,16 +856,16 @@ struct ( module struct type res = E.t - let f (T t : _ P.t) = + let f (T t : _ Proof.t) = let proof = Wrap_wire_proof.to_kimchi_proof t.proof in (proof.openings.evals, proof.openings.ft_eval1) end ) in let messages_for_next_wrap_proof = let rec go : - type a a. - (a, a) H2.T(P).t - -> a H1.T(P.Base.Messages_for_next_proof_over_same_field.Wrap).t = + type a. 
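(* Descriptive note: when fewer proofs than [Max_proofs_verified] are
   supplied, the [pad] function above fills the remaining slots with the
   dummy commitment [Dummy.Ipa.Step.sg] and dummy wrap challenges, so that
   the hashed messages always form a fixed-width vector. *)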
+ (a, a) H2.T(Proof).t + -> a H1.T(Proof.Base.Messages_for_next_proof_over_same_field.Wrap).t = function | [] -> [] @@ -898,7 +884,7 @@ struct } in [%log internal] "Pickles_step_proof_done" ; - ( { P.Base.Step.proof = next_proof + ( { Proof.Base.Step.proof = next_proof ; statement = next_statement ; index = branch_data.index ; prev_evals = @@ -908,9 +894,13 @@ struct Plonk_types.All_evals. { ft_eval1 ; evals = - { With_public_input.evals = es; public_input = x_hat } + { With_public_input.evals = es + ; public_input = + (let x1, x2 = x_hat in + ([| x1 |], [| x2 |]) ) + } } ) ) - lte Max_proofs_verified.n Dummy.evals + lte Max_proofs_verified.n (Lazy.force Dummy.evals) } , Option.value_exn !return_value , Option.value_exn !auxiliary_value diff --git a/src/lib/pickles/step.mli b/src/lib/pickles/step.mli index 508472f2fdc..54a2271ed40 100644 --- a/src/lib/pickles/step.mli +++ b/src/lib/pickles/step.mli @@ -31,9 +31,9 @@ module Make -> prevs_length:('prev_vars, 'prevs_length) Pickles_types.Hlist.Length.t -> self:('a, 'b, 'c, 'd) Tag.t -> step_domains:(Import.Domains.t, 'self_branches) Pickles_types.Vector.t - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t + -> feature_flags:Opt.Flag.t Plonk_types.Features.Full.t -> self_dlog_plonk_index: - Backend.Tick.Inner_curve.Affine.t + Backend.Tick.Inner_curve.Affine.t array Pickles_types.Plonk_verification_key_evals.t -> public_input: ( 'var diff --git a/src/lib/pickles/step_branch_data.ml b/src/lib/pickles/step_branch_data.ml index fce63022c83..44d88f75546 100644 --- a/src/lib/pickles/step_branch_data.ml +++ b/src/lib/pickles/step_branch_data.ml @@ -1,7 +1,6 @@ open Core_kernel open Pickles_types open Hlist -open Common open Import (* The data obtained from "compiling" an inductive rule into a circuit. *) @@ -71,11 +70,10 @@ type ( 'a_var (* Compile an inductive rule. 
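(* A worked instance of the [zk_rows] computation above, with [permuts = 7]:
   for [num_chunks = 2] it gives ((2 * (7 + 1) * 2) - 1 + 7) / 7 = 38 / 7 = 5
   in integer division, and the same formula yields 22 / 7 = 3 for
   [num_chunks = 1], agreeing with the special case in the match. *)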
*) let create - (type branches max_proofs_verified local_signature local_branches var value - a_var a_value ret_var ret_value prev_vars prev_values ) ~index - ~(self : (var, value, max_proofs_verified, branches) Tag.t) ~wrap_domains - ~(feature_flags : Plonk_types.Opt.Flag.t Plonk_types.Features.t) - ~(actual_feature_flags : bool Plonk_types.Features.t) + (type branches max_proofs_verified var value a_var a_value ret_var ret_value) + ~index ~(self : (var, value, max_proofs_verified, branches) Tag.t) + ~wrap_domains ~(feature_flags : Opt.Flag.t Plonk_types.Features.Full.t) + ~num_chunks ~(actual_feature_flags : bool Plonk_types.Features.t) ~(max_proofs_verified : max_proofs_verified Nat.t) ~(proofs_verifieds : (int, branches) Vector.t) ~(branches : branches Nat.t) ~(public_input : @@ -85,8 +83,8 @@ let create , a_value , ret_var , ret_value ) - Inductive_rule.public_input ) ~auxiliary_typ var_to_field_elements - value_to_field_elements (rule : _ Inductive_rule.t) = + Inductive_rule.public_input ) ~auxiliary_typ _var_to_field_elements + _value_to_field_elements (rule : _ Inductive_rule.t) = Timer.clock __LOC__ ; let module HT = H4.T (Tag) in let (T (self_width, proofs_verified)) = HT.length rule.prevs in @@ -143,6 +141,14 @@ let create ; wrap_domains ; step_domains ; feature_flags + ; num_chunks + ; zk_rows = + ( match num_chunks with + | 1 -> + 3 + | num_chunks -> + let permuts = 7 in + ((2 * (permuts + 1) * num_chunks) - 1 + permuts) / permuts ) } ~public_input ~auxiliary_typ ~self_branches:branches ~proofs_verified ~local_signature:widths ~local_signature_length ~local_branches:heights @@ -158,10 +164,10 @@ let create in let etyp = Impls.Step.input ~proofs_verified:max_proofs_verified - ~wrap_rounds:Backend.Tock.Rounds.n ~feature_flags + ~wrap_rounds:Backend.Tock.Rounds.n (* TODO *) in - Fix_domains.domains + Fix_domains.domains ~feature_flags:actual_feature_flags (module Impls.Step) (T (Snarky_backendless.Typ.unit (), Fn.id, Fn.id)) etyp main diff --git a/src/lib/pickles/step_branch_data.mli b/src/lib/pickles/step_branch_data.mli index be2347631fd..3daf177a299 100644 --- a/src/lib/pickles/step_branch_data.mli +++ b/src/lib/pickles/step_branch_data.mli @@ -1,16 +1,20 @@ open Pickles_types +(** The data obtained from "compiling" an inductive rule into a circuit. 
*) type ( 'a_var , 'a_value , 'ret_var , 'ret_value , 'auxiliary_var , 'auxiliary_value + (* type level nat *) , 'max_proofs_verified , 'branches , 'prev_vars , 'prev_values + (* type level nat *) , 'local_widths + (* type level nat *) , 'local_heights ) t = | T : @@ -32,6 +36,7 @@ type ( 'a_var , 'auxiliary_var , 'auxiliary_value ) Inductive_rule.t + (* Main functions to compute *) ; main : step_domains:(Import.Domains.t, 'branches) Pickles_types.Vector.t -> unit @@ -58,19 +63,28 @@ type ( 'a_var , 'ret_value , 'auxiliary_var , 'auxiliary_value + (* type level nat *) , 'max_proofs_verified , 'branches , 'prev_vars , 'prev_values + (* type level nat *) , 'local_widths + (* type level nat *) , 'local_heights ) t +(** Compile one rule into a value of type [t] + [create idx self wrap_domains feature_flags actual_feature_flags + max_proofs_verified branches public_input aux_typ var_to_field_elem + val_to_field_elem rule] +*) val create : index:int -> self:('var, 'value, 'max_proofs_verified, 'branches) Tag.t -> wrap_domains:Import.Domains.t - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t + -> feature_flags:Opt.Flag.t Plonk_types.Features.Full.t + -> num_chunks:int -> actual_feature_flags:bool Plonk_types.Features.t -> max_proofs_verified:'max_proofs_verified Pickles_types.Nat.t -> proofs_verifieds:(int, 'branches) Pickles_types.Vector.t diff --git a/src/lib/pickles/step_main.ml b/src/lib/pickles/step_main.ml index f5c610a160e..b871cf0d443 100644 --- a/src/lib/pickles/step_main.ml +++ b/src/lib/pickles/step_main.ml @@ -1,13 +1,7 @@ -module S = Sponge -open Core_kernel open Pickles_types -open Common -open Poly_types open Hlist open Import open Impls.Step -open Step_main_inputs -open Step_verifier module B = Inductive_rule.B (* Converts from the one hot vector representation of a number @@ -18,9 +12,10 @@ module B = Inductive_rule.B to the numeric representation i. 
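(* For example, over plain booleans the one-hot vector
   [| false; false; true; false |] encodes the number 2. A hypothetical
   unchecked analogue of the circuit computation below:

     let to_num v = fst (Array.findi_exn v ~f:(fun _ b -> b))
*)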
*) -let one_hot_vector_to_num (type n) (v : n Per_proof_witness.One_hot_vector.t) : +let _one_hot_vector_to_num (type n) (v : n Per_proof_witness.One_hot_vector.t) : Field.t = let n = Vector.length (v :> (Boolean.var, n) Vector.t) in + let open Step_verifier in Pseudo.choose (v, Vector.init n ~f:Field.of_int) ~f:Fn.id let verify_one ~srs @@ -33,8 +28,8 @@ let verify_one ~srs } : _ Per_proof_witness.t ) (d : _ Types_map.For_step.t) (messages_for_next_wrap_proof : Digest.t) (unfinalized : Unfinalized.t) - (should_verify : B.t) : _ Vector.t * B.t = - Boolean.Assert.( = ) unfinalized.should_finalize should_verify ; + (must_verify : B.t) : _ Vector.t * B.t = + Boolean.Assert.( = ) unfinalized.should_finalize must_verify ; let deferred_values = proof_state.deferred_values in let finalized, chals = with_label __LOC__ (fun () -> @@ -46,9 +41,9 @@ let verify_one ~srs sponge in (* TODO: Refactor args into an "unfinalized proof" struct *) - finalize_other_proof d.max_proofs_verified ~step_domains:d.step_domains - ~feature_flags:d.feature_flags ~sponge ~prev_challenges - deferred_values prev_proof_evals ) + Step_verifier.finalize_other_proof d.max_proofs_verified + ~step_domains:d.step_domains ~zk_rows:d.zk_rows ~sponge + ~prev_challenges deferred_values prev_proof_evals ) in let branch_data = deferred_values.branch_data in let sponge_after_index, hash_messages_for_next_step_proof = @@ -58,10 +53,12 @@ let verify_one ~srs in let sponge_after_index, hash_messages_for_next_step_proof = (* TODO: Don't rehash when it's not necessary *) - hash_messages_for_next_step_proof_opt ~index:d.wrap_key to_field_elements + Step_verifier.hash_messages_for_next_step_proof_opt ~index:d.wrap_key + to_field_elements in (sponge_after_index, unstage hash_messages_for_next_step_proof) in + (* prepare the statement to be verified below *) let statement = let prev_messages_for_next_step_proof = with_label __LOC__ (fun () -> @@ -80,16 +77,21 @@ let verify_one ~srs ; old_bulletproof_challenges = prev_challenges } ) in + (* Returns messages for the next step proof and messages for the next + wrap proof *) { Types.Wrap.Statement.messages_for_next_step_proof = prev_messages_for_next_step_proof ; proof_state = { proof_state with messages_for_next_wrap_proof } } in + (* and when the statement is prepared, we call the step verifier with this + statement *) let verified = with_label __LOC__ (fun () -> - verify ~srs ~feature_flags:d.feature_flags + Step_verifier.verify ~srs + ~feature_flags:(Plonk_types.Features.of_full d.feature_flags) ~lookup_parameters: - { use = Plonk_checks.lookup_tables_used d.feature_flags + { use = d.feature_flags.uses_lookups ; zero = { var = { challenge = Field.zero @@ -103,10 +105,9 @@ let verify_one ~srs } } ~proofs_verified:d.max_proofs_verified ~wrap_domain:d.wrap_domain - ~is_base_case:(Boolean.not should_verify) - ~sponge_after_index ~sg_old:prev_challenge_polynomial_commitments - ~proof:wrap_proof ~wrap_verification_key:d.wrap_key statement - unfinalized ) + ~is_base_case:(Boolean.not must_verify) ~sponge_after_index + ~sg_old:prev_challenge_polynomial_commitments ~proof:wrap_proof + ~wrap_verification_key:d.wrap_key statement unfinalized ) in if debug then as_prover @@ -114,17 +115,15 @@ let verify_one ~srs fun () -> let finalized = read Boolean.typ finalized in let verified = read Boolean.typ verified in - let should_verify = read Boolean.typ should_verify in + let must_verify = read Boolean.typ must_verify in printf "finalized: %b\n%!" finalized ; printf "verified: %b\n%!" 
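(* Plain-boolean reading of the acceptance condition returned below, as a
   sketch with our own names:

     let accepts ~verified ~finalized ~must_verify =
       (verified && finalized) || not must_verify

   A base-case proof, with [must_verify = false], is accepted
   unconditionally; a genuine recursive proof must both verify and have
   correctly finalized deferred values. *)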
verified ; - printf "should_verify: %b\n\n%!" should_verify) ; - (chals, Boolean.(verified &&& finalized ||| not should_verify)) - -let finalize_previous_and_verify = () + printf "must_verify: %b\n\n%!" must_verify) ; + (chals, Boolean.(verified &&& finalized ||| not must_verify)) (* The SNARK function corresponding to the input inductive rule. *) let step_main : - type proofs_verified self_branches prev_vars prev_values prev_ret_vars var value a_var a_value ret_var ret_value auxiliary_var auxiliary_value max_proofs_verified local_branches local_signature. + type proofs_verified self_branches prev_vars prev_values var value a_var a_value ret_var ret_value auxiliary_var auxiliary_value max_proofs_verified local_branches local_signature. (module Requests.Step.S with type local_signature = local_signature and type local_branches = local_branches @@ -183,11 +182,6 @@ let step_main : fun (module Req) max_proofs_verified ~self_branches ~local_signature ~local_signature_length ~local_branches ~local_branches_length ~proofs_verified ~lte ~public_input ~auxiliary_typ ~basic ~self rule -> - let module T (F : T4) = struct - type ('a, 'b, 'n, 'm) t = - | Other of ('a, 'b, 'n, 'm) F.t - | Self : (a_var, a_value, max_proofs_verified, self_branches) t - end in let module Typ_with_max_proofs_verified = struct type ('var, 'value, 'local_max_proofs_verified, 'local_branches) t = ( ( 'var @@ -200,22 +194,23 @@ let step_main : Per_proof_witness.Constant.No_app_state.t ) Typ.t end in - let feature_flags (d : _ Tag.t) = - if Type_equal.Id.same self.id d.id then basic.feature_flags - else Types_map.feature_flags d + let feature_flags_and_num_chunks (d : _ Tag.t) = + if Type_equal.Id.same self.id d.id then + (basic.feature_flags, basic.num_chunks) + else (Types_map.feature_flags d, Types_map.num_chunks d) in - let feature_flags = + let feature_flags_and_num_chunks = let rec go : - type e pvars pvals ns1 ns2 br. + type pvars pvals ns1 ns2 br. (pvars, pvals, ns1, ns2) H4.T(Tag).t -> (pvars, br) Length.t - -> (Plonk_types.Opt.Flag.t Plonk_types.Features.t, br) Vector.t = + -> (Opt.Flag.t Plonk_types.Features.Full.t * int, br) Vector.t = fun ds ld -> - match (ds, ld) with + match[@warning "-4"] (ds, ld) with | [], Z -> [] | d :: ds, S ld -> - feature_flags d :: go ds ld + feature_flags_and_num_chunks d :: go ds ld | [], _ -> . | _ :: _, _ -> @@ -225,35 +220,39 @@ let step_main : in let prev_proof_typs = let rec join : - type e pvars pvals ns1 ns2 br. + type pvars pvals ns1 ns2 br. 
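(* Descriptive note: [feature_flags_and_num_chunks] above resolves the tag of
   the rule being compiled to the compile-time [basic] data, and every other
   tag through the global [Types_map]; presumably the circuit currently being
   compiled is not yet registered in that map. *)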
(pvars, pvals, ns1, ns2) H4.T(Tag).t -> ns1 H1.T(Nat).t -> ns2 H1.T(Nat).t -> (pvars, br) Length.t -> (ns1, br) Length.t -> (ns2, br) Length.t - -> (Plonk_types.Opt.Flag.t Plonk_types.Features.t, br) Vector.t + -> (Opt.Flag.t Plonk_types.Features.Full.t * int, br) Vector.t -> (pvars, pvals, ns1, ns2) H4.T(Typ_with_max_proofs_verified).t = - fun ds ns1 ns2 ld ln1 ln2 feature_flagss -> - match (ds, ns1, ns2, ld, ln1, ln2, feature_flagss) with + fun ds ns1 ns2 ld ln1 ln2 feature_flags_and_num_chunkss -> + match[@warning "-4"] + (ds, ns1, ns2, ld, ln1, ln2, feature_flags_and_num_chunkss) + with | [], [], [], Z, Z, Z, [] -> [] - | ( d :: ds + | ( _d :: ds , n1 :: ns1 - , n2 :: ns2 + , _n2 :: ns2 , S ld , S ln1 , S ln2 - , feature_flags :: feature_flagss ) -> - let t = Per_proof_witness.typ Typ.unit n1 n2 ~feature_flags in - t :: join ds ns1 ns2 ld ln1 ln2 feature_flagss + , (feature_flags, num_chunks) :: feature_flags_and_num_chunkss ) -> + let t = + Per_proof_witness.typ Typ.unit n1 ~feature_flags ~num_chunks + in + t :: join ds ns1 ns2 ld ln1 ln2 feature_flags_and_num_chunkss | [], _, _, _, _, _, _ -> . | _ :: _, _, _, _, _, _, _ -> . in join rule.prevs local_signature local_branches proofs_verified - local_signature_length local_branches_length feature_flags + local_signature_length local_branches_length feature_flags_and_num_chunks in let module Prev_typ = H4.Typ (Impls.Step) (Typ_with_max_proofs_verified) @@ -274,7 +273,6 @@ let step_main : (input_typ, output_typ) in let main () : _ Types.Step.Statement.t = - let open Requests.Step in let open Impls.Step in let logger = Internal_tracing_context_logger.get () in with_label "step_main" (fun () -> @@ -347,9 +345,11 @@ let step_main : in Req.Compute_prev_proof_parts previous_proof_statements ) ; let dlog_plonk_index = + let num_chunks = (* TODO *) 1 in exists ~request:(fun () -> Req.Wrap_index) - (Plonk_verification_key_evals.typ Inner_curve.typ) + (Plonk_verification_key_evals.typ + (Typ.array ~length:num_chunks Step_verifier.Inner_curve.typ) ) and prevs = exists (Prev_typ.f prev_proof_typs) ~request:(fun () -> Req.Proof_with_datas ) @@ -359,7 +359,7 @@ let step_main : (Vector.map ~f:(fun _feature_flags -> Unfinalized.typ ~wrap_rounds:Backend.Tock.Rounds.n ) - feature_flags ) ) + feature_flags_and_num_chunks ) ) ~request:(fun () -> Req.Unfinalized_proofs) and messages_for_next_wrap_proof = exists (Vector.typ Digest.typ Max_proofs_verified.n) @@ -369,7 +369,7 @@ let step_main : (Vector.typ (Typ.Internal.ref ()) (Length.to_nat proofs_verified)) ~request:(fun () -> Req.Wrap_domain_indices) in - let prevs = + let proof_witnesses = (* Inject the app-state values into the per-proof witnesses. *) let rec go : type vars ns1 ns2. @@ -390,7 +390,7 @@ let step_main : let bulletproof_challenges = with_label "prevs_verified" (fun () -> let rec go : - type vars vals prev_vals ns1 ns2 n. + type vars vals ns1 ns2 n. 
(vars, ns1, ns2) H3.T(Per_proof_witness).t -> (vars, vals, ns1, ns2) H4.T(Types_map.For_step).t -> vars H1.T(E01(Digest)).t @@ -402,10 +402,10 @@ let step_main : , n ) Vector.t -> (_, n) Vector.t * B.t list = - fun proofs datas messages_for_next_wrap_proofs unfinalizeds stmts - pi ~actual_wrap_domains -> + fun proof_witnesses datas messages_for_next_wrap_proofs + unfinalizeds stmts pi ~actual_wrap_domains -> match - ( proofs + ( proof_witnesses , datas , messages_for_next_wrap_proofs , unfinalizeds @@ -415,12 +415,12 @@ let step_main : with | [], [], [], [], [], Z, [] -> ([], []) - | ( p :: proofs + | ( pw :: proof_witnesses , d :: datas , messages_for_next_wrap_proof :: messages_for_next_wrap_proofs , unfinalized :: unfinalizeds - , { proof_must_verify = should_verify; _ } :: stmts + , { proof_must_verify = must_verify; _ } :: stmts , S pi , actual_wrap_domain :: actual_wrap_domains ) -> let () = @@ -456,12 +456,12 @@ let step_main : () in let chals, v = - verify_one ~srs p d messages_for_next_wrap_proof - unfinalized should_verify + verify_one ~srs pw d messages_for_next_wrap_proof + unfinalized must_verify in let chalss, vs = - go proofs datas messages_for_next_wrap_proofs unfinalizeds - stmts pi ~actual_wrap_domains + go proof_witnesses datas messages_for_next_wrap_proofs + unfinalizeds stmts pi ~actual_wrap_domains in (chals :: chalss, v :: vs) in @@ -491,6 +491,8 @@ let step_main : ; step_domains = `Known basic.step_domains ; wrap_key = dlog_plonk_index ; feature_flags = basic.feature_flags + ; num_chunks = basic.num_chunks + ; zk_rows = basic.zk_rows } in let module M = @@ -516,8 +518,9 @@ let step_main : in M.f rule.prevs in - go prevs datas messages_for_next_wrap_proofs unfinalized_proofs - previous_proof_statements proofs_verified ~actual_wrap_domains + go proof_witnesses datas messages_for_next_wrap_proofs + unfinalized_proofs previous_proof_statements proofs_verified + ~actual_wrap_domains in Boolean.Assert.all vs ; chalss ) in @@ -525,17 +528,18 @@ let step_main : let messages_for_next_step_proof = let challenge_polynomial_commitments = let module M = - H3.Map (Per_proof_witness) (E03 (Inner_curve)) + H3.Map (Per_proof_witness) (E03 (Step_verifier.Inner_curve)) (struct let f : - type a b c. (a, b, c) Per_proof_witness.t -> Inner_curve.t - = + type a b c. 
+ (a, b, c) Per_proof_witness.t + -> Step_verifier.Inner_curve.t = fun acc -> acc.wrap_proof.opening.challenge_polynomial_commitment end) in - let module V = H3.To_vector (Inner_curve) in - V.f proofs_verified (M.f prevs) + let module V = H3.To_vector (Step_verifier.Inner_curve) in + V.f proofs_verified (M.f proof_witnesses) in with_label "hash_messages_for_next_step_proof" (fun () -> let hash_messages_for_next_step_proof = @@ -544,8 +548,8 @@ let step_main : fun x -> fst (typ.var_to_fields x) in unstage - (hash_messages_for_next_step_proof ~index:dlog_plonk_index - to_field_elements ) + (Step_verifier.hash_messages_for_next_step_proof + ~index:dlog_plonk_index to_field_elements ) in let (app_state : var) = match public_input with diff --git a/src/lib/pickles/step_main_inputs.ml b/src/lib/pickles/step_main_inputs.ml index 5ea29d5bf9e..2b6d012cdf3 100644 --- a/src/lib/pickles/step_main_inputs.ml +++ b/src/lib/pickles/step_main_inputs.ml @@ -1,19 +1,16 @@ open Core_kernel open Common open Backend -open Pickles_types module Impl = Impls.Step -open Import -let high_entropy_bits = 128 +let _high_entropy_bits = 128 -let sponge_params_constant = - Sponge.Params.(map pasta_p_kimchi ~f:Impl.Field.Constant.of_string) +let sponge_params_constant = Kimchi_pasta_basic.poseidon_params_fp let tick_field_random_oracle ?(length = Tick.Field.size_in_bits - 1) s = Tick.Field.of_bits (Ro.bits_random_oracle ~length s) -let unrelated_g = +let _unrelated_g = let group_map = unstage (group_map @@ -41,18 +38,18 @@ end let sponge_params = Sponge.Params.(map sponge_params_constant ~f:Impl.Field.constant) -module Unsafe = struct - let unpack_unboolean ?(length = Field.size_in_bits) x = - let res = - exists - (Typ.list Boolean.typ_unchecked ~length) - ~compute: - As_prover.( - fun () -> List.take (Field.Constant.unpack (read_var x)) length) - in - Field.Assert.equal x (Field.project res) ; - res -end +(* module Unsafe = struct + let _unpack_unboolean ?(length = Field.size_in_bits) x = + let res = + exists + (Typ.list Boolean.typ_unchecked ~length) + ~compute: + As_prover.( + fun () -> List.take (Field.Constant.unpack (read_var x)) length) + in + Field.Assert.equal x (Field.project res) ; + res + end *) module Sponge = struct module Permutation = @@ -89,27 +86,23 @@ module Sponge = struct absorb t (Field.pack bs) end -let%test_unit "sponge" = - let module T = Make_sponge.Test (Impl) (Tick_field_sponge.Field) (Sponge.S) in - T.test Tick_field_sponge.params - -module Input_domain = struct - let domain = Domain.Pow_2_roots_of_unity 6 - - let lagrange_commitments = - lazy - (let domain_size = Domain.size domain in - time "lagrange" (fun () -> - Array.init domain_size ~f:(fun i -> - let v = - (Kimchi_bindings.Protocol.SRS.Fq.lagrange_commitment - (Backend.Tock.Keypair.load_urs ()) - domain_size i ) - .unshifted - in - assert (Array.length v = 1) ; - v.(0) |> Common.finite_exn ) ) ) -end +(* module Input_domain = struct + let domain = Import.Domain.Pow_2_roots_of_unity 6 + + let _lagrange_commitments = + lazy + (let domain_size = Import.Domain.size domain in + Common.time "lagrange" (fun () -> + Array.init domain_size ~f:(fun i -> + let v = + (Kimchi_bindings.Protocol.SRS.Fq.lagrange_commitment + (Backend.Tock.Keypair.load_urs ()) + domain_size i ) + .unshifted + in + assert (Array.length v = 1) ; + v.(0) |> Common.finite_exn ) ) ) + end *) module Inner_curve = struct module C = Kimchi_pasta.Pasta.Pallas @@ -118,7 +111,6 @@ module Inner_curve = struct module Impl = Impl module Params = struct - open Impl.Field.Constant 
include C.Params let one = C.to_affine_exn C.one @@ -226,83 +218,6 @@ end module Ops = Plonk_curve_ops.Make (Impl) (Inner_curve) -let%test_unit "scale fast 2'" = - let open Impl in - let module T = Internal_Basic in - let module G = Inner_curve in - let n = Field.size_in_bits in - let module F = struct - type t = Field.t - - let typ = Field.typ - - module Constant = struct - include Field.Constant - - let to_bigint = Impl.Bigint.of_field - end - end in - Quickcheck.test ~trials:5 Field.Constant.gen ~f:(fun s -> - T.Test.test_equal ~equal:G.Constant.equal ~sexp_of_t:G.Constant.sexp_of_t - (Typ.tuple2 G.typ Field.typ) - G.typ - (fun (g, s) -> - make_checked (fun () -> Ops.scale_fast2' ~num_bits:n (module F) g s) - ) - (fun (g, _) -> - let x = - let chunks_needed = Ops.chunks_needed ~num_bits:(n - 1) in - let actual_bits_used = chunks_needed * Ops.bits_per_chunk in - Pickles_types.Pcs_batch.pow ~one:G.Constant.Scalar.one - ~mul:G.Constant.Scalar.( * ) - G.Constant.Scalar.(of_int 2) - actual_bits_used - |> G.Constant.Scalar.( + ) - (G.Constant.Scalar.project (Field.Constant.unpack s)) - in - G.Constant.scale g x ) - (G.Constant.random (), s) ) - -let%test_unit "scale fast 2 small" = - let open Impl in - let module T = Internal_Basic in - let module G = Inner_curve in - let n = 8 in - let module F = struct - type t = Field.t - - let typ = Field.typ - - module Constant = struct - include Field.Constant - - let to_bigint = Impl.Bigint.of_field - end - end in - Quickcheck.test ~trials:5 Field.Constant.gen ~f:(fun s -> - let s = - Field.Constant.unpack s |> Fn.flip List.take n |> Field.Constant.project - in - T.Test.test_equal ~equal:G.Constant.equal ~sexp_of_t:G.Constant.sexp_of_t - (Typ.tuple2 G.typ Field.typ) - G.typ - (fun (g, s) -> - make_checked (fun () -> Ops.scale_fast2' ~num_bits:n (module F) g s) - ) - (fun (g, _) -> - let x = - let chunks_needed = Ops.chunks_needed ~num_bits:(n - 1) in - let actual_bits_used = chunks_needed * Ops.bits_per_chunk in - Pickles_types.Pcs_batch.pow ~one:G.Constant.Scalar.one - ~mul:G.Constant.Scalar.( * ) - G.Constant.Scalar.(of_int 2) - actual_bits_used - |> G.Constant.Scalar.( + ) - (G.Constant.Scalar.project (Field.Constant.unpack s)) - in - G.Constant.scale g x ) - (G.Constant.random (), s) ) - module Generators = struct let h = lazy diff --git a/src/lib/pickles/step_main_inputs.mli b/src/lib/pickles/step_main_inputs.mli index 9c9343672bd..24d2a77eba7 100644 --- a/src/lib/pickles/step_main_inputs.mli +++ b/src/lib/pickles/step_main_inputs.mli @@ -109,7 +109,7 @@ module Inner_curve : sig type t = Inputs.F.t * Inputs.F.t - val double : t -> t + (* val double : t -> t *) val add' : div:(Inputs.F.t -> Inputs.F.t -> Inputs.F.t) @@ -123,9 +123,9 @@ module Inner_curve : sig val constant : Inputs.Constant.t -> t - val negate : t -> t + (* val negate : t -> t *) - val one : t + (* val one : t *) val assert_on_curve : t -> unit @@ -133,7 +133,7 @@ module Inner_curve : sig val typ : (t, Inputs.Constant.t) Inputs.Impl.Typ.t - val if_ : Inputs.Impl.Boolean.var -> then_:t -> else_:t -> t + (* val if_ : Inputs.Impl.Boolean.var -> then_:t -> else_:t -> t *) module Scalar : sig type t = Inputs.Impl.Boolean.var Bitstring_lib.Bitstring.Lsb_first.t @@ -158,7 +158,7 @@ module Inner_curve : sig end module Shifted : functor - (M : sig + (_ : sig val shift : t end) () @@ -166,7 +166,7 @@ module Inner_curve : sig val shifted : unit -> (module Shifted_intf) - val scale : ?init:t -> t -> Scalar.t -> t + (* val scale : ?init:t -> t -> Scalar.t -> t *) module Window_table : sig type 
t = Inputs.Constant.t Tuple_lib.Quadruple.t array diff --git a/src/lib/pickles/step_verifier.ml b/src/lib/pickles/step_verifier.ml index 0a109bac8d9..70011c5d428 100644 --- a/src/lib/pickles/step_verifier.ml +++ b/src/lib/pickles/step_verifier.ml @@ -2,14 +2,10 @@ open Core_kernel module SC = Scalar_challenge open Import +open Common open Util open Types.Step open Pickles_types -open Common -open Import -module S = Sponge - -let lookup_verification_enabled = false module Make (Inputs : Intf.Step_main_inputs.S @@ -19,41 +15,18 @@ module Make struct open Inputs open Impl - module PC = Inner_curve module Challenge = Challenge.Make (Impl) module Digest = Digest.Make (Impl) - module Number = Snarky_backendless.Number.Run.Make (Impl) (* Other_field.size > Field.size *) module Other_field = struct let size_in_bits = Field.size_in_bits - module Constant = Other_field - type t = Impls.Step.Other_field.t let typ = Impls.Step.Other_field.typ end - let print_g lab (x, y) = - if debug then - as_prover - As_prover.( - fun () -> - printf - !"%s: %{sexp:Backend.Tick.Field.t}, %{sexp:Backend.Tick.Field.t}\n\ - %!" - lab (read_var x) (read_var y)) - - let print_chal lab chal = - if debug then - as_prover - As_prover.( - fun () -> - printf - !"%s: %{sexp:Challenge.Constant.t}\n%!" - lab (read Challenge.typ chal)) - let print_fp lab x = if debug then as_prover @@ -83,7 +56,7 @@ struct Field.((b :> t) * x, (b :> t) * y) ) ty t - let scalar_to_field s = + let _scalar_to_field s = SC.to_field_checked (module Impl) s ~endo:Endo.Wrap_inner_curve.scalar let assert_n_bits ~n a = @@ -134,7 +107,7 @@ struct with_label __LOC__ (fun () -> let constant_part, non_constant_part = List.partition_map (Array.to_list ts) ~f:(fun (t, g) -> - match t with + match[@warning "-4"] t with | `Field (Constant c) | `Packed_bits (Constant c, _) -> First ( if Field.Constant.(equal zero) c then None @@ -158,7 +131,7 @@ struct |> List.fold ~init:None ~f:(fun acc x -> Some (add_opt acc x)) in let correction, acc = - List.mapi non_constant_part ~f:(fun i (s, x) -> + List.map non_constant_part ~f:(fun (s, x) -> let rr, n = match s with | `Packed_bits (s, n) -> @@ -195,7 +168,7 @@ struct with_label __LOC__ (fun () -> let absorb t = absorb sponge t in let prechallenges = - Array.mapi gammas ~f:(fun i gammas_i -> + Array.map gammas ~f:(fun gammas_i -> absorb (PC :: PC) gammas_i ; squeeze_scalar sponge ) in @@ -209,7 +182,7 @@ struct Array.map2_exn gammas prechallenges ~f:term_and_challenge |> Array.unzip in - (Array.reduce_exn terms ~f:Inner_curve.( + ), challenges) ) + (Array.reduce_exn terms ~f:(fun x y -> Inner_curve.(x + y)), challenges) ) let group_map = let f = @@ -236,7 +209,7 @@ struct in fun x -> Lazy.force f x - let scale_fast p s = + let _scale_fast p s = with_label __LOC__ (fun () -> Ops.scale_fast p s ~num_bits:Field.size_in_bits ) @@ -268,6 +241,13 @@ struct let combined_polynomial (* Corresponds to xi in figure 7 of WTS *) = with_label "combined_polynomial" (fun () -> Pcs_batch.combine_split_commitments pcs_batch + ~reduce_without_degree_bound:Array.to_list + ~reduce_with_degree_bound:(fun { Plonk_types.Poly_comm + .With_degree_bound + .unshifted + ; shifted + } -> + Array.to_list unshifted @ [ shifted ] ) ~scale_and_add:(fun ~(acc : [ `Maybe_finite of Boolean.var * Inner_curve.t @@ -298,8 +278,10 @@ struct ) ~xi ~init:(function - | `Finite x -> `Finite x | `Maybe_finite x -> `Maybe_finite x - ) + | `Finite x -> + Some (`Finite x) + | `Maybe_finite x -> + Some (`Maybe_finite x) ) (Vector.map without_degree_bound ~f:(Array.map 
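(* Scalar-side sketch of the xi-combination performed by
   [combine_split_commitments] above, with illustrative names: after
   with-degree-bound commitments are flattened to [unshifted @ [shifted]],
   the pieces [c0; c1; ...; cn] are combined Horner-style into
   c0 + xi * (c1 + xi * (... + xi * cn)):

     let combine ~xi cs =
       List.reduce_exn (List.rev cs) ~f:(fun acc c ->
           Field.Constant.(c + (xi * acc)) )
*)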
~f:(fun x -> `Finite x)) ) (Vector.map with_degree_bound @@ -364,7 +346,7 @@ struct let lagrange_commitment ~domain srs i = let d = Int.pow 2 (Domain.log2_size domain) in - match + match[@warning "-4"] (Kimchi_bindings.Protocol.SRS.Fq.lagrange_commitment srs d i).unshifted with | [| Finite g |] -> @@ -385,14 +367,7 @@ struct [ 0; 1 ; 2 ] in *) let lagrange_commitment (d : Domains.t) (i : int) : Inner_curve.Constant.t = - let d = Int.pow 2 (Domain.log2_size d.h) in - match - (Kimchi_bindings.Protocol.SRS.Fq.lagrange_commitment srs d i).unshifted - with - | [| Finite g |] -> - Inner_curve.Constant.of_affine g - | _ -> - assert false + lagrange_commitment ~domain:d.h srs i in let select_curve_points (type k) ~(points_for_domain : Domains.t -> (Inner_curve.Constant.t, k) Vector.t) @@ -421,7 +396,7 @@ struct [ lagrange_commitment d i ] ) |> Vector.unsingleton in - let lagrange_with_correction (type n) ~input_length i : + let lagrange_with_correction ~input_length i : (Inner_curve.t, Nat.N2.n) Vector.t = let actual_shift = (* TODO: num_bits should maybe be input_length - 1. *) @@ -440,7 +415,7 @@ struct List.partition_map (Array.to_list (Array.mapi ~f:(fun i t -> (i, t)) public_input)) ~f:(fun (i, t) -> - match t with + match[@warning "-4"] t with | `Field (Constant c) | `Packed_bits (Constant c, _) -> First ( if Field.Constant.(equal zero) c then None @@ -469,6 +444,7 @@ struct `Add_with_correction ((x, n), lagrange_with_correction ~input_length:n i) ) in + let f = Ops.add_fast ?check_finite:None in let correction = List.reduce_exn (List.filter_map terms ~f:(function @@ -476,12 +452,10 @@ struct None | `Add_with_correction (_, [ _; corr ]) -> Some corr ) ) - ~f:Ops.add_fast + ~f in let init = - List.fold - (List.filter_map constant_part ~f:Fn.id) - ~init:correction ~f:Ops.add_fast + List.fold (List.filter_map constant_part ~f:Fn.id) ~init:correction ~f in List.fold terms ~init ~f:(fun acc term -> match term with @@ -496,7 +470,7 @@ struct x_hat let incrementally_verify_proof (type b) - (module Proofs_verified : Nat.Add.Intf with type n = b) ~srs + (module Proofs_verified : Nat.Add.Intf with type n = b) ~srs:_ ~(domain : [ `Known of Domain.t | `Side_loaded of @@ -586,9 +560,10 @@ struct in let ft_comm = with_label __LOC__ (fun () -> - Common.ft_comm ~add:Ops.add_fast ~scale:scale_fast2 - ~negate:Inner_curve.negate ~endoscale:Scalar_challenge.endo - ~verification_key:m ~plonk ~alpha ~t_comm ) + Common.ft_comm + ~add:(Ops.add_fast ?check_finite:None) + ~scale:scale_fast2 ~negate:Inner_curve.negate + ~verification_key:m ~plonk ~t_comm ) in let bulletproof_challenges = (* This sponge needs to be initialized with (some derivative of) @@ -603,14 +578,11 @@ struct let without_degree_bound = Vector.append (Vector.map sg_old ~f:(fun g -> [| g |])) - ( [| x_hat |] :: [| ft_comm |] :: z_comm :: [| m.generic_comm |] - :: [| m.psm_comm |] :: [| m.complete_add_comm |] - :: [| m.mul_comm |] :: [| m.emul_comm |] - :: [| m.endomul_scalar_comm |] + ( [| x_hat |] :: [| ft_comm |] :: z_comm :: m.generic_comm + :: m.psm_comm :: m.complete_add_comm :: m.mul_comm :: m.emul_comm + :: m.endomul_scalar_comm :: Vector.append w_comm - (Vector.append - (Vector.map m.coefficients_comm ~f:(fun g -> [| g |])) - (Vector.map sigma_comm_init ~f:(fun g -> [| g |])) + (Vector.append m.coefficients_comm sigma_comm_init (snd Plonk_types.(Columns.add Permuts_minus_1.n)) ) (snd Plonk_types.( @@ -628,9 +600,7 @@ struct ~sponge:sponge_before_evaluations ~xi ~advice ~opening ~polynomials:(without_degree_bound, []) ) in - let 
joint_combiner = - if lookup_verification_enabled then failwith "TODO" else None - in + let joint_combiner = None in assert_eq_deferred_values { alpha = plonk.alpha ; beta = plonk.beta @@ -652,20 +622,18 @@ struct with_label "compute_challenges" (fun () -> Vector.map chals ~f:(fun b -> Bulletproof_challenge.pack b |> scalar) ) - let challenge_polynomial = - let open Field in - Wrap_verifier.challenge_polynomial ~add ~mul ~one + let challenge_polynomial = Wrap_verifier.challenge_polynomial (module Field) module Pseudo = Pseudo.Make (Impl) - module Bounded = struct - type t = { max : int; actual : Field.t } + (* module Bounded = struct + type t = { max : int; actual : Field.t } - let of_pseudo ((_, ns) as p : _ Pseudo.t) = - { max = Vector.reduce_exn ~f:Int.max ns - ; actual = Pseudo.choose p ~f:Field.of_int - } - end + let _of_pseudo ((_, ns) as p : _ Pseudo.t) = + { max = Vector.reduce_exn ~f:Int.max ns + ; actual = Pseudo.choose p ~f:Field.of_int + } + end *) let vanishing_polynomial mask = with_label "vanishing_polynomial" (fun () -> @@ -688,7 +656,7 @@ struct let domain_generator ~log2_size = Backend.Tick.Field.domain_generator ~log2_size |> Impl.Field.constant - let side_loaded_domain (type branches) = + let side_loaded_domain = let open Side_loaded_verification_key in fun ~(log2_size : Field.t) -> let domain ~max = @@ -713,89 +681,58 @@ struct in domain ~max:(Domain.log2_size max_domains.h) - let%test_module "side loaded domains" = - ( module struct - let run k = - let y = - run_and_check (fun () -> - let y = k () in - fun () -> As_prover.read_var y ) - |> Or_error.ok_exn - in - y - - let%test_unit "side loaded domains" = - let module O = One_hot_vector.Make (Impl) in - let open Side_loaded_verification_key in - let domains = [ { Domains.h = 10 }; { h = 15 } ] in - let pt = Field.Constant.random () in - List.iteri domains ~f:(fun i ds -> - let d_unchecked = - Plonk_checks.domain - (module Field.Constant) - (Pow_2_roots_of_unity ds.h) ~shifts:Common.tick_shifts - ~domain_generator:Backend.Tick.Field.domain_generator - in - let checked_domain () = - side_loaded_domain ~log2_size:(Field.of_int ds.h) - in - [%test_eq: Field.Constant.t] - (d_unchecked#vanishing_polynomial pt) - (run (fun () -> - (checked_domain ())#vanishing_polynomial (Field.constant pt) ) - ) ) - end ) - - module Split_evaluations = struct - open Plonk_types - - let mask' { Bounded.max; actual } : Boolean.var array = - let (T max) = Nat.of_int max in - Vector.to_array (ones_vector (module Impl) ~first_zero:actual max) - - let mask (type n) ~(lengths : (int, n) Vector.t) - (choice : n One_hot_vector.T(Impl).t) : Boolean.var array = - let max = - Option.value_exn - (List.max_elt ~compare:Int.compare (Vector.to_list lengths)) - in - let actual = Pseudo.choose (choice, lengths) ~f:Field.of_int in - mask' { max; actual } - - let last = - Array.reduce_exn ~f:(fun (b_acc, x_acc) (b, x) -> - (Boolean.(b_acc ||| b), Field.if_ b ~then_:x ~else_:x_acc) ) - - let rec pow x bits_lsb = - with_label "pow" (fun () -> - let rec go acc bs = - match bs with - | [] -> - acc - | b :: bs -> - let acc = Field.square acc in - let acc = Field.if_ b ~then_:Field.(x * acc) ~else_:acc in - go acc bs - in - go Field.one (List.rev bits_lsb) ) - - let mod_max_degree = - let k = Nat.to_int Backend.Tick.Rounds.n in - fun d -> - let d = - Number.of_bits - (Field.unpack ~length:Side_loaded_verification_key.max_log2_degree d) - in - Number.mod_pow_2 d (`Two_to_the k) + (* module Split_evaluations = struct + open Plonk_types - let mask_evals (type n) 
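(* Worked example for [pow] above, which takes the exponent bits
   least-significant first: with bits_lsb = [true; false; true], i.e. the
   exponent 5, the loop over the reversed bits goes
   1 -> (square, multiply by x) -> x
     -> (square, skip) -> x^2
     -> (square, multiply by x) -> x^5,
   i.e. standard square-and-multiply. *)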
~(lengths : (int, n) Vector.t Evals.t)
+ (choice : n One_hot_vector.T(Impl).t) (e : Field.t array Evals.t) :
+ (Boolean.var * Field.t) array Evals.t =
+ Evals.map2 lengths e ~f:(fun lengths e ->
+ Array.zip_exn (mask ~lengths choice) e )
+ end *)
+
+ let _absorb_field sponge x = Sponge.absorb sponge (`Field x)

 (* pt^{2^n} *)
 let pow2_pow (pt : Field.t) (n : int) : Field.t =
@@ -818,7 +755,7 @@
 module Opt_sponge = struct
 include Opt_sponge.Make (Impl) (Step_main_inputs.Sponge.Permutation)

- let squeeze_challenge sponge : Field.t =
+ let _squeeze_challenge sponge : Field.t =
 lowest_128_bits (squeeze sponge) ~constrain_low_bits:true
 end
@@ -826,13 +763,10 @@
 Shifted_value.Type1.Shift.(
 map ~f:Field.constant (create (module Field.Constant)))

- let shift2 =
+ let _shift2 =
 Shifted_value.Type2.Shift.(
 map ~f:Field.constant (create (module Field.Constant)))

- let%test_unit "endo scalar" =
- SC.test (module Impl) ~endo:Endo.Wrap_inner_curve.scalar
-
 module Plonk = Types.Wrap.Proof_state.Deferred_values.Plonk

 module Plonk_checks = struct
@@ -870,9 +804,6 @@
 (which_log2, unique_domains)
 ~shifts ~domain_generator

- let field_array_if b ~then_ ~else_ =
- Array.map2_exn then_ else_ ~f:(fun x1 x2 -> Field.if_ b ~then_:x1 ~else_:x2)
-
 (* This finalizes the "deferred values" coming from a previous proof over the
 same field. It
 1. Checks that [xi] and [r] were sampled correctly. I.e., by absorbing all the
@@ -886,9 +817,8 @@
 Meaning it needs opt sponge.
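(Schematically, check 1 amounts to: absorb the challenge digest, [ft_eval1]
and every evaluation opening into the sponge, then squeeze once to recover
[xi] and once more to recover [r], asserting that both equal the claimed
deferred values; the code below follows this absorption order.)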
*) let finalize_other_proof (type b branches) (module Proofs_verified : Nat.Add.Intf with type n = b) - ~(feature_flags : Plonk_types.Opt.Flag.t Plonk_types.Features.t) ~(step_domains : - [ `Known of (Domains.t, branches) Vector.t | `Side_loaded ] ) + [ `Known of (Domains.t, branches) Vector.t | `Side_loaded ] ) ~zk_rows ~(* TODO: Add "actual proofs verified" so that proofs don't carry around dummy "old bulletproof challenges" *) sponge ~(prev_challenges : (_, b) Vector.t) @@ -961,8 +891,12 @@ struct in Sponge.absorb sponge (`Field challenge_digest) ; Sponge.absorb sponge (`Field ft_eval1) ; - Sponge.absorb sponge (`Field (fst evals.public_input)) ; - Sponge.absorb sponge (`Field (snd evals.public_input)) ; + Array.iter + ~f:(fun x -> Sponge.absorb sponge (`Field x)) + (fst evals.public_input) ; + Array.iter + ~f:(fun x -> Sponge.absorb sponge (`Field x)) + (snd evals.public_input) ; let xs = Evals.In_circuit.to_absorption_sequence evals.evals in (* This is a hacky, but much more efficient, version of the opt sponge. This uses the assumption that the sponge 'absorption state' will align @@ -975,9 +909,9 @@ struct Array.iter ~f:(fun x -> Sponge.absorb sponge (`Field x)) in match opt with - | None -> + | Nothing -> () - | Some (x1, x2) -> + | Just (x1, x2) -> absorb x1 ; absorb x2 | Maybe (b, (x1, x2)) -> (* Cache the sponge state before *) @@ -1050,7 +984,7 @@ struct Plonk_checks.scalars_env (module Env_bool) (module Env_field) - ~srs_length_log2:Common.Max_degree.step_log2 + ~srs_length_log2:Common.Max_degree.step_log2 ~zk_rows ~endo:(Impl.Field.constant Endo.Step_inner_curve.base) ~mds:sponge_params.mds ~field_of_hex:(fun s -> @@ -1077,21 +1011,21 @@ struct (e : (Field.t array, _) Evals.In_circuit.t) = let sg_evals = sg_evals |> Vector.to_list - |> List.map ~f:(fun (keep, eval) -> - [| Plonk_types.Opt.Maybe (keep, eval) |] ) + |> List.map ~f:(fun (keep, eval) -> [| Opt.Maybe (keep, eval) |]) in let a = Evals.In_circuit.to_list e |> List.map ~f:(function - | None -> + | Nothing -> [||] - | Some a -> - Array.map a ~f:(fun x -> Plonk_types.Opt.Some x) + | Just a -> + Array.map a ~f:Opt.just | Maybe (b, a) -> - Array.map a ~f:(fun x -> Plonk_types.Opt.Maybe (b, x)) ) + Array.map a ~f:(Opt.maybe b) ) in let v = - List.append sg_evals ([| Some x_hat |] :: [| Some ft |] :: a) + List.append sg_evals + (Array.map ~f:Opt.just x_hat :: [| Opt.just ft |] :: a) in Common.combined_evaluation (module Impl) ~xi v in @@ -1150,8 +1084,9 @@ struct let sponge = Sponge.create sponge_params in Array.iter (Types.index_to_field_elements - ~g:(fun (z : Inputs.Inner_curve.t) -> - List.to_array (Inner_curve.to_field_elements z) ) + ~g: + (Array.concat_map ~f:(fun (z : Inputs.Inner_curve.t) -> + List.to_array (Inner_curve.to_field_elements z) ) ) index ) ~f:(fun x -> Sponge.absorb sponge (`Field x)) ; sponge @@ -1173,8 +1108,8 @@ struct let open Types.Step.Proof_state.Messages_for_next_step_proof in let after_index = sponge_after_index index in ( after_index - , stage (fun t ~widths ~max_width ~proofs_verified_mask -> - (* TODO: Just get rid of the proofs verified mask and always absorb in full *) + , (* TODO: Just get rid of the proofs verified mask and always absorb in full *) + stage (fun t ~widths:_ ~max_width:_ ~proofs_verified_mask -> let sponge = Sponge.copy after_index in let t = { t with @@ -1216,9 +1151,9 @@ struct | `Opt sponge -> Opt_sponge.squeeze sponge ) ) - let accumulation_verifier - (accumulator_verification_key : _ Types_map.For_step.t) prev_accumulators - proof new_accumulator : Boolean.var = + 
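(* A sketch of the [Maybe] absorption trick used in [finalize_other_proof]
   above, assuming a copyable sponge state and a hypothetical [select_state]
   helper; the real code inlines this selection over the sponge fields:

     let absorb_maybe sponge b xs =
       let before = Sponge.copy sponge in
       Array.iter xs ~f:(fun x -> Sponge.absorb sponge (`Field x)) ;
       select_state ~if_:b ~then_:sponge ~else_:before
*)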
let _accumulation_verifier + (_accumulator_verification_key : _ Types_map.For_step.t) + _prev_accumulators _proof _new_accumulator : Boolean.var = Boolean.false_ let verify ~proofs_verified ~is_base_case ~sg_old ~sponge_after_index @@ -1240,9 +1175,8 @@ struct (Types.Wrap.Statement.In_circuit.spec (module Impl) lookup_parameters feature_flags ) - (Types.Wrap.Statement.In_circuit.to_data - ~option_map:Plonk_types.Opt.map statement - ~to_opt:Plonk_types.Opt.to_option_unsafe ) ) + (Types.Wrap.Statement.In_circuit.to_data ~option_map:Opt.map + statement ) ) |> Array.map ~f:(function | `Field (Shifted_value.Type1.Shifted_value x) -> `Field x @@ -1250,8 +1184,11 @@ struct `Packed_bits (x, n) ) in let sponge = Sponge.create sponge_params in - let { Types.Step.Proof_state.Deferred_values.xi; combined_inner_product; b } - = + let { Types.Step.Proof_state.Deferred_values.xi + ; combined_inner_product + ; b + ; _ + } = unfinalized.deferred_values in let ( sponge_digest_before_evaluations_actual @@ -1263,7 +1200,7 @@ struct ~proof ~plonk: (Composition_types.Step.Proof_state.Deferred_values.Plonk.In_circuit - .to_wrap ~opt_none:Opt.None ~false_:Boolean.false_ + .to_wrap ~opt_none:Opt.nothing ~false_:Boolean.false_ unfinalized.deferred_values.plonk ) in with_label __LOC__ (fun () -> @@ -1287,3 +1224,7 @@ struct end include Make (Step_main_inputs) + +module For_tests_only = struct + let side_loaded_domain = side_loaded_domain +end diff --git a/src/lib/pickles/step_verifier.mli b/src/lib/pickles/step_verifier.mli index fca3e24c38a..3c67030594e 100644 --- a/src/lib/pickles/step_verifier.mli +++ b/src/lib/pickles/step_verifier.mli @@ -1,5 +1,3 @@ -open Pickles_types - module Challenge : module type of Import.Challenge.Make (Step_main_inputs.Impl) module Digest : module type of Import.Digest.Make (Step_main_inputs.Impl) @@ -38,18 +36,12 @@ end val assert_n_bits : n:int -> Pasta_bindings.Fp.t Snarky_backendless.Cvar.t -> unit -type field := Step_main_inputs.Impl.Field.t - -type snark_field := field Snarky_backendless.Cvar.t - -type ('a, 'b) vector := ('a, 'b) Pickles_types.Vector.t - val finalize_other_proof : (module Pickles_types.Nat.Add.Intf with type n = 'b) - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t -> step_domains: [ `Known of (Import.Domains.t, 'branches) Pickles_types.Vector.t | `Side_loaded ] + -> zk_rows:int -> sponge:Step_main_inputs.Sponge.t -> prev_challenges: ( (Step_main_inputs.Impl.Field.t, 'a) Pickles_types.Vector.t @@ -64,7 +56,6 @@ val finalize_other_proof : Composition_types.Opt.t , ( Step_main_inputs.Impl.field Snarky_backendless.Cvar.t Import.Scalar_challenge.t - Import.Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.Lookup.t , Step_main_inputs.Impl.Boolean.var ) Composition_types.Opt.t , ( Step_main_inputs.Impl.field Snarky_backendless.Cvar.t @@ -86,7 +77,7 @@ val finalize_other_proof : val hash_messages_for_next_step_proof : index: - Step_main_inputs.Inner_curve.t + Step_main_inputs.Inner_curve.t array Pickles_types.Plonk_verification_key_evals.t -> ('s -> Step_main_inputs.Impl.Field.t array) -> ( ( 'a @@ -101,7 +92,7 @@ val hash_messages_for_next_step_proof : val hash_messages_for_next_step_proof_opt : index: - Step_main_inputs.Inner_curve.t + Step_main_inputs.Inner_curve.t array Pickles_types.Plonk_verification_key_evals.t -> ('s -> Step_main_inputs.Impl.Field.t array) -> Step_main_inputs.Sponge.t @@ -121,6 +112,8 @@ val hash_messages_for_next_step_proof_opt : -> Step_main_inputs.Impl.Field.t ) Core_kernel.Staged.t +(** Actual verification using 
cryptographic tools. Returns [true] (encoded as an
+ in-circuit Boolean variable) if the verification is successful *)
 val verify :
 proofs_verified:(module Pickles_types.Nat.Add.Intf with type n = 'a)
 -> is_base_case:Step_main_inputs.Impl.Boolean.var
@@ -133,7 +126,8 @@
 , Step_main_inputs.Impl.Field.t Pickles_types.Shifted_value.Type1.t
 Pickles_types.Hlist0.Id.t )
 Composition_types.Wrap.Lookup_parameters.t
- -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t
+ (* lookup argument parameters *)
+ -> feature_flags:Pickles_types.Opt.Flag.t Pickles_types.Plonk_types.Features.t
 -> proof:Wrap_proof.Checked.t
 -> srs:Kimchi_bindings.Protocol.SRS.Fq.t
 -> wrap_domain:
@@ -142,7 +136,7 @@
 Step_main_inputs.Impl.field
 Composition_types.Branch_data.Proofs_verified.One_hot.Checked.t ]
 -> wrap_verification_key:
- Step_main_inputs.Inner_curve.t
+ Step_main_inputs.Inner_curve.t array
 Pickles_types.Plonk_verification_key_evals.t
 -> ( Step_main_inputs.Impl.field Limb_vector.Challenge.t
 , Step_main_inputs.Impl.field Limb_vector.Challenge.t
@@ -150,15 +144,12 @@
 , Step_main_inputs.Impl.Field.t Pickles_types.Shifted_value.Type1.t
 , ( Step_main_inputs.Impl.Field.t Pickles_types.Shifted_value.Type1.t
 , Step_main_inputs.Impl.Boolean.var )
- Pickles_types.Plonk_types.Opt.t
+ Pickles_types.Opt.t
 , ( Step_main_inputs.Impl.field Limb_vector.Challenge.t
 Composition_types.Scalar_challenge.t
- Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit
- .Lookup
- .t
 , Step_main_inputs.Impl.field Snarky_backendless.Cvar.t
 Snarky_backendless.Snark_intf.Boolean0.t )
- Pickles_types.Plonk_types.Opt.t
+ Pickles_types.Opt.t
 , Step_main_inputs.Impl.Boolean.var
 , Step_main_inputs.Impl.field Snarky_backendless.Cvar.t
 , Step_main_inputs.Impl.field Snarky_backendless.Cvar.t
@@ -171,6 +162,7 @@
 Pickles_types.Hlist0.Id.t
 , Step_main_inputs.Impl.field Composition_types.Branch_data.Checked.t )
 Import.Types.Wrap.Statement.In_circuit.t
+ (* statement *)
 -> ( Step_main_inputs.Impl.Field.t
 , Step_main_inputs.Impl.Field.t Import.Scalar_challenge.t
 , Other_field.t Pickles_types.Shifted_value.Type2.t
 , Step_main_inputs.Impl.Field.t
 , Step_main_inputs.Impl.Field.t
 , Step_main_inputs.Impl.Field.t
 , Step_main_inputs.Impl.Boolean.var )
 Import.Types.Step.Proof_state.Per_proof.In_circuit.t
+ (* unfinalized *)
 -> Step_main_inputs.Impl.Boolean.var
+
+module For_tests_only : sig
+ type field := Step_main_inputs.Impl.Field.t
+
+ val side_loaded_domain :
+ log2_size:field
+ -> < generator : field
+ ; log2_size : field
+ ; shifts : field Pickles_types.Plonk_types.Shifts.t
+ ; vanishing_polynomial : field -> field >
+end
diff --git a/src/lib/pickles/tag.mli b/src/lib/pickles/tag.mli
index 7c85e667747..c11bf218722 100644
--- a/src/lib/pickles/tag.mli
+++ b/src/lib/pickles/tag.mli
@@ -1,4 +1,6 @@
-(* Tags *)
+(** A globally-unique identifier tag to look up the data for a collection of
+ inductive rules. Used to declare dependencies between families of rules.
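+ For example, the chunked-circuits test added below compiles one set of
+ rules to obtain a [tag], then passes [prevs = [ tag ]] to a second
+ [Pickles.compile] call so that the recursive rule can verify proofs
+ produced by the first.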
+*) open Core_kernel diff --git a/src/lib/pickles/test/chunked_circuits/dune b/src/lib/pickles/test/chunked_circuits/dune new file mode 100644 index 00000000000..41049782a9a --- /dev/null +++ b/src/lib/pickles/test/chunked_circuits/dune @@ -0,0 +1,64 @@ +(tests + (names test_chunked_circuits) + (libraries + ;; opam libraries + alcotest + stdio + integers + result + base.caml + bignum.bigint + core_kernel + base64 + digestif + ppx_inline_test.config + sexplib0 + base + async_kernel + bin_prot.shape + async + async_unix + ;; local libraries + mina_wire_types + kimchi_bindings + kimchi_types + pasta_bindings + kimchi_backend.pasta + kimchi_backend.pasta.basic + kimchi_backend.pasta.constraint_system + bitstring_lib + snarky.intf + pickles.backend + pickles_types + snarky.backendless + snarky_group_map + sponge + pickles + pickles.pseudo + composition_types + pickles.limb_vector + pickles_base + kimchi_backend + mina_version + base58_check + codable + random_oracle_input + pickles.composition_types + pickles.plonk_checks + pickles.one_hot_vector + snarky_log + group_map + snarky_curve + key_cache + snark_keys_header + tuple_lib + promise + kimchi_backend.common + logger + internal_tracing.context_logger + ppx_version.runtime + error_json) + (instrumentation + (backend bisect_ppx)) + (preprocess + (pps ppx_version))) diff --git a/src/lib/pickles/test/chunked_circuits/test_chunked_circuits.ml b/src/lib/pickles/test/chunked_circuits/test_chunked_circuits.ml new file mode 100644 index 00000000000..ed266173f17 --- /dev/null +++ b/src/lib/pickles/test/chunked_circuits/test_chunked_circuits.ml @@ -0,0 +1,145 @@ +open Core_kernel +open Pickles_types +open Pickles.Impls.Step + +let () = Pickles.Backend.Tick.Keypair.set_urs_info [] + +let () = Pickles.Backend.Tock.Keypair.set_urs_info [] + +let constraint_constants = + { Snark_keys_header.Constraint_constants.sub_windows_per_window = 0 + ; ledger_depth = 0 + ; work_delay = 0 + ; block_window_duration_ms = 0 + ; transaction_capacity = Log_2 0 + ; pending_coinbase_depth = 0 + ; coinbase_amount = Unsigned.UInt64.of_int 0 + ; supercharged_coinbase_factor = 0 + ; account_creation_fee = Unsigned.UInt64.of_int 0 + ; fork = None + } + +let test () = + let tag, _cache_handle, proof, Pickles.Provers.[ prove ] = + Pickles.compile ~public_input:(Pickles.Inductive_rule.Input Typ.unit) + ~auxiliary_typ:Typ.unit + ~branches:(module Nat.N1) + ~max_proofs_verified:(module Nat.N0) + ~num_chunks:2 ~override_wrap_domain:N1 ~name:"chunked_circuits" + ~constraint_constants (* TODO(mrmr1993): This was misguided.. Delete. *) + ~choices:(fun ~self:_ -> + [ { identifier = "2^17" + ; prevs = [] + ; main = + (fun _ -> + let fresh_zero () = + exists Field.typ ~compute:(fun _ -> Field.Constant.zero) + in + (* Remember that each of these counts for *half* a row, so we + need 2^17 of them to fill 2^16 columns. + *) + for _ = 0 to 1 lsl 17 do + ignore (Field.mul (fresh_zero ()) (fresh_zero ()) : Field.t) + done ; + (* We must now appease the permutation argument gods, to ensure + that the 7th permuted column has polynomial degree larger + than 2^16, and thus that its high chunks are non-zero. + Suckiness of linearization strikes again! 
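+ (Concretely: 2^17 multiplications at half a row each occupy 2^16 rows,
+ so together with the zero-knowledge rows the witness no longer fits in
+ a single 2^16-sized chunk, which is why this rule is compiled with
+ [~num_chunks:2] above.)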
+ *) + let fresh_zero = fresh_zero () in + Impl.assert_ + { basic = + Kimchi_backend_common.Plonk_constraint_system + .Plonk_constraint + .T + (Raw + { kind = Generic + ; values = + [| fresh_zero + ; fresh_zero + ; fresh_zero + ; fresh_zero + ; fresh_zero + ; fresh_zero + ; fresh_zero + |] + ; coeffs = [||] + } ) + ; annotation = Some __LOC__ + } ; + { previous_proof_statements = [] + ; public_output = () + ; auxiliary_output = () + } ) + ; feature_flags = Pickles_types.Plonk_types.Features.none_bool + } + ] ) + () + in + let module Requests = struct + type _ Snarky_backendless.Request.t += + | Proof : + (Nat.N0.n, Nat.N0.n) Pickles.Proof.t Snarky_backendless.Request.t + + let handler (proof : _ Pickles.Proof.t) + (Snarky_backendless.Request.With { request; respond }) = + match request with + | Proof -> + respond (Provide proof) + | _ -> + respond Unhandled + end in + let _tag, _cache_handle, recursive_proof, Pickles.Provers.[ recursive_prove ] + = + Pickles.compile ~public_input:(Pickles.Inductive_rule.Input Typ.unit) + ~auxiliary_typ:Typ.unit + ~branches:(module Nat.N1) + ~max_proofs_verified:(module Nat.N1) + ~name:"recursion over chunks" + ~constraint_constants (* TODO(mrmr1993): This was misguided.. Delete. *) + ~choices:(fun ~self:_ -> + [ { identifier = "recurse over 2^17" + ; prevs = [ tag ] + ; main = + (fun _ -> + let proof = + exists (Typ.Internal.ref ()) ~request:(fun () -> + Requests.Proof ) + in + { previous_proof_statements = + [ { public_input = () + ; proof + ; proof_must_verify = Boolean.true_ + } + ] + ; public_output = () + ; auxiliary_output = () + } ) + ; feature_flags = Pickles_types.Plonk_types.Features.none_bool + } + ] ) + () + in + let module Proof = (val proof) in + let module Recursive_proof = (val recursive_proof) in + let test_prove () = + let public_input, (), proof = + Async.Thread_safe.block_on_async_exn (fun () -> prove ()) + in + Or_error.ok_exn + (Async.Thread_safe.block_on_async_exn (fun () -> + Proof.verify [ (public_input, proof) ] ) ) ; + let public_input, (), proof = + Async.Thread_safe.block_on_async_exn (fun () -> + recursive_prove ~handler:(Requests.handler proof) () ) + in + Or_error.ok_exn + (Async.Thread_safe.block_on_async_exn (fun () -> + Recursive_proof.verify [ (public_input, proof) ] ) ) + in + test_prove () + +let () = + test () ; + Alcotest.run "Chunked circuit" + [ ("2^16", [ ("prove and verify", `Quick, test) ]) ] diff --git a/src/lib/pickles/test/dune b/src/lib/pickles/test/dune new file mode 100644 index 00000000000..9e10567a82b --- /dev/null +++ b/src/lib/pickles/test/dune @@ -0,0 +1,38 @@ +(tests + (names main) + (flags + (:standard -warn-error +a) + -open Core_kernel + -open Pickles) + (preprocess (pps ppx_jane)) + (package pickles) + (libraries + ; Opam libraries + alcotest + core_kernel + fmt + integers + sexplib0 + ; Mina libraries + kimchi_backend + kimchi_backend.common + kimchi_backend.pasta + kimchi_backend.pasta.basic + kimchi_bindings + kimchi_types + pasta_bindings + pickles + pickles.backend + pickles.composition_types + pickles.limb_vector + pickles.one_hot_vector + pickles.plonk_checks + pickles_base + pickles_types + promise + snark_keys_header + snarky.backendless + sponge + tuple_lib + ) + (action (run %{test}))) diff --git a/src/lib/pickles/test/main.ml b/src/lib/pickles/test/main.ml new file mode 100644 index 00000000000..01ba6a50571 --- /dev/null +++ b/src/lib/pickles/test/main.ml @@ -0,0 +1,8 @@ +let () = + let tests = + Test_impls.tests @ Test_opt_sponge.tests @ Test_plonk_curve_ops.tests + @ 
Test_sponge.tests @ Test_step.tests @ Test_scalar_challenge.tests + @ Test_side_loaded_verification_key.tests @ Test_step_verifier.tests + @ Test_wrap.tests @ Test_wrap_hack.tests + in + Alcotest.run "Pickles" tests diff --git a/src/lib/pickles/test/optional_custom_gates/dune b/src/lib/pickles/test/optional_custom_gates/dune new file mode 100644 index 00000000000..98a8af4bc2f --- /dev/null +++ b/src/lib/pickles/test/optional_custom_gates/dune @@ -0,0 +1,65 @@ +(tests + (names test_fix_domains pickles_test_optional_custom_gates) + (libraries + ;; opam libraries + alcotest + stdio + integers + result + base.caml + bignum.bigint + core_kernel + base64 + digestif + ppx_inline_test.config + sexplib0 + base + async_kernel + bin_prot.shape + async + async_unix + ;; local libraries + mina_wire_types + kimchi_bindings + kimchi_types + pasta_bindings + kimchi_backend.pasta + kimchi_backend.pasta.basic + kimchi_backend.pasta.constraint_system + bitstring_lib + snarky.intf + pickles.backend + pickles_types + snarky.backendless + snarky_group_map + sponge + pickles + pickles.pseudo + composition_types + pickles.limb_vector + pickles_base + kimchi_backend + mina_version + base58_check + codable + random_oracle_input + pickles.composition_types + pickles.plonk_checks + pickles.one_hot_vector + snarky_log + group_map + snarky_curve + key_cache + snark_keys_header + tuple_lib + promise + kimchi_backend.common + logger + internal_tracing.context_logger + ppx_version.runtime + error_json + pickles_optional_custom_gates_circuits) + (instrumentation + (backend bisect_ppx)) + (preprocess + (pps ppx_version))) diff --git a/src/lib/pickles/test/optional_custom_gates/pickles_test_optional_custom_gates.ml b/src/lib/pickles/test/optional_custom_gates/pickles_test_optional_custom_gates.ml new file mode 100644 index 00000000000..0f121a56c14 --- /dev/null +++ b/src/lib/pickles/test/optional_custom_gates/pickles_test_optional_custom_gates.ml @@ -0,0 +1,322 @@ +open Core_kernel +open Pickles_types +open Pickles.Impls.Step +open Pickles_optional_custom_gates_circuits + +(** Testing + ------- + + Component: Pickles + Subject: Testing the integration of custom gates + Invocation: dune exec \ + src/lib/pickles/test/optional_custom_gates/pickles_test_optional_custom_gates.exe +*) + +(* Set this value for reproducibility *) +let seed = [| Random.int 1_000_000 |] + +let state = Random.State.make seed + +let () = Pickles.Backend.Tick.Keypair.set_urs_info [] + +let () = Pickles.Backend.Tock.Keypair.set_urs_info [] + +(* Parameters *) +let random_table_id = 1 + Random.State.int state 1_000 + +let size = 1 + Random.State.int state 1_000 + +let values = + Array.init size ~f:(fun _ -> + let x = Random.State.int state 1_000_000 in + Field.Constant.of_int x ) + +let idx1 = Random.State.int state size + +let idx2 = Random.State.int state size + +let idx3 = Random.State.int state size + +let main_fixed_lookup_tables () = + let table_id = random_table_id in + let indexes = Array.init size ~f:Field.Constant.of_int in + add_plonk_constraint + (AddFixedLookupTable + { id = Int32.of_int_exn table_id; data = [| indexes; values |] } ) ; + let v1 = values.(idx1) in + let v2 = values.(idx2) in + let v3 = values.(idx3) in + add_plonk_constraint + (Lookup + { (* table id *) + w0 = fresh_int table_id + ; (* idx1 *) w1 = fresh_int idx1 + ; (* v1 *) w2 = exists Field.typ ~compute:(fun () -> v1) + ; (* idx2 *) w3 = fresh_int idx2 + ; (* v2 *) w4 = exists Field.typ ~compute:(fun () -> v2) + ; (* idx3 *) w5 = fresh_int idx3 + ; (* v3 *) w6 = exists 
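+(* Added note on the [Lookup] wire layout used throughout this file: [w0]
+   carries the table id, and [(w1, w2)], [(w3, w4)], [(w5, w6)] are three
+   (index, value) pairs that must each occur in that table. *)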
Field.typ ~compute:(fun () -> v3)
+       } )
+
+(* Parameters *)
+(* maximum number of fixed lookup tables *)
+let max_fixed_lt_n = 1 + Random.State.int state 10
+
+(* number of fixed lookups *)
+let fixed_lt_queries_n = 1 + Random.State.int state 100
+
+(* fixed lookup tables data *)
+let fixed_lt_data =
+  (* generate some random unique table ids *)
+  let fixed_table_ids =
+    Int.Set.to_array
+      (Int.Set.of_list
+         (List.init max_fixed_lt_n ~f:(fun _ ->
+              1 + Random.State.int state (max_fixed_lt_n * 4) ) ) )
+  in
+  Array.map fixed_table_ids ~f:(fun table_id ->
+      let max_table_size = 1 + Random.State.int state 100 in
+      let indexes =
+        Int.Set.to_array
+          (Int.Set.of_list
+             (List.init max_table_size ~f:(fun _ ->
+                  1 + Random.State.int state (max_table_size * 4) ) ) )
+      in
+      let table_size = Array.length indexes in
+      let values =
+        Array.init table_size ~f:(fun _ -> Random.State.int state 100_000_000)
+      in
+      (table_id, indexes, values) )
+
+(* lookup queries; randomly selected rows from fixed_lt_data *)
+let lookups =
+  Array.init fixed_lt_queries_n ~f:(fun _ ->
+      let table_id, indexes, values =
+        fixed_lt_data.(Random.State.int state (Array.length fixed_lt_data))
+      in
+      let table_size = Array.length indexes in
+      let idx1 = Random.State.int state table_size in
+      let idx2 = Random.State.int state table_size in
+      let idx3 = Random.State.int state table_size in
+      ( table_id
+      , (indexes.(idx1), values.(idx1))
+      , (indexes.(idx2), values.(idx2))
+      , (indexes.(idx3), values.(idx3)) ) )
+
+let main_fixed_lookup_tables_multiple_tables_multiple_lookups () =
+  Array.iter fixed_lt_data ~f:(fun (table_id, indexes, values) ->
+      add_plonk_constraint
+        (AddFixedLookupTable
+           { id = Int32.of_int_exn table_id
+           ; data =
+               [| Array.map ~f:Field.Constant.of_int indexes
+                ; Array.map ~f:Field.Constant.of_int values
+               |]
+           } ) ) ;
+  Array.iter lookups ~f:(fun (table_id, (idx1, v1), (idx2, v2), (idx3, v3)) ->
+      add_plonk_constraint
+        (Lookup
+           { w0 = fresh_int table_id
+           ; w1 = fresh_int idx1
+           ; w2 = fresh_int v1
+           ; w3 = fresh_int idx2
+           ; w4 = fresh_int v2
+           ; w5 = fresh_int idx3
+           ; w6 = fresh_int v3
+           } ) )
+
+(* maximum number of runtime lookup tables *)
+let max_runtime_lt_n = 1 + Random.State.int state 10
+
+(* number of runtime lookups *)
+let runtime_lt_queries_n = 1 + Random.State.int state 100
+
+(* runtime lookup tables data *)
+let runtime_lt_data =
+  let runtime_table_ids =
+    (* have at least one collision between runtime and fixed table ids *)
+    let random_fixed_table_id =
+      let table_id, _, _ =
+        fixed_lt_data.(Random.State.int state (Array.length fixed_lt_data))
+      in
+      table_id
+    in
+    (* and generate some random table ids *)
+    let other_ids =
+      List.init (max_runtime_lt_n - 1) ~f:(fun _ ->
+          1 + Random.State.int state 100 )
+    in
+    (* making sure they're all unique *)
+    Int.Set.to_array
+      (Int.Set.of_list (List.cons random_fixed_table_id other_ids))
+  in
+
+  Array.map runtime_table_ids ~f:(fun table_id ->
+      let max_table_size = 1 + Random.State.int state 100 in
+      let first_column =
+        Int.Set.to_array
+          (Int.Set.of_list
+             (List.init max_table_size ~f:(fun _ ->
+                  1 + Random.State.int state (max_table_size * 4) ) ) )
+      in
+      let table_size = Array.length first_column in
+      (* We must make sure that if runtime table_id collides with some
+         fixed table_id created earlier, then elements in first_column
+         and (fixed) indexes are either disjoint (k1 != k2), or they
+         map to the same value (v1 = v2).
In other words, in the case + that this fixed table already contains (k,v) with k = + first_column[i], we have to set second_column[i] to v. *) + let second_column = + match + Array.find + ~f:(fun (fixed_table_id, _, _) -> fixed_table_id = table_id) + fixed_lt_data + with + | Some (_, indexes, values) -> + (* This is O(n^2), can be O(nlogn) *) + Array.map first_column ~f:(fun k -> + match Array.findi ~f:(fun _ k2 -> k2 = k) indexes with + | Some (ix, _) -> + values.(ix) + | None -> + Random.State.int state 1_000_000 ) + | None -> + Array.init table_size ~f:(fun _ -> Random.State.int state 1_000_000) + in + (table_id, first_column, second_column) ) + +(* runtime lookup queries *) +let runtime_lookups = + Array.init runtime_lt_queries_n ~f:(fun _ -> + let table_id, first_column, second_column = + runtime_lt_data.(Random.State.int state (Array.length runtime_lt_data)) + in + let table_size = Array.length first_column in + let idx1 = Random.State.int state table_size in + let idx2 = Random.State.int state table_size in + let idx3 = Random.State.int state table_size in + ( table_id + , (first_column.(idx1), second_column.(idx1)) + , (first_column.(idx2), second_column.(idx2)) + , (first_column.(idx3), second_column.(idx3)) ) ) + +let main_runtime_table_cfg () = + Array.iter runtime_lt_data ~f:(fun (table_id, first_column, _) -> + add_plonk_constraint + (AddRuntimeTableCfg + { id = Int32.of_int_exn table_id + ; first_column = Array.map ~f:Field.Constant.of_int first_column + } ) ) ; + Array.iter runtime_lookups ~f:(fun (table_id, (k1, v1), (k2, v2), (k3, v3)) -> + add_plonk_constraint + (Lookup + { w0 = fresh_int table_id + ; w1 = fresh_int k1 + ; w2 = fresh_int v1 + ; w3 = fresh_int k2 + ; w4 = fresh_int v2 + ; w5 = fresh_int k3 + ; w6 = fresh_int v3 + } ) ) + +let add_tests, get_tests = + let tests = ref [] in + ( (fun name testcases -> tests := (name, testcases) :: !tests) + , fun () -> List.rev !tests ) + +let main_body ~(feature_flags : _ Plonk_types.Features.t) () = + Pickles_optional_custom_gates_circuits.main_body ~feature_flags () ; + if feature_flags.runtime_tables then main_runtime_table_cfg () ; + if feature_flags.lookup then ( + main_fixed_lookup_tables () ; + main_fixed_lookup_tables_multiple_tables_multiple_lookups () ) + +let register_test name feature_flags1 feature_flags2 = + let _tag, _cache_handle, proof, Pickles.Provers.[ prove1; prove2 ] = + Pickles.compile ~public_input:(Pickles.Inductive_rule.Input Typ.unit) + ~auxiliary_typ:Typ.unit + ~branches:(module Nat.N2) + ~max_proofs_verified:(module Nat.N0) + ~name:"optional_custom_gates" + ~choices:(fun ~self:_ -> + [ { identifier = "main1" + ; prevs = [] + ; main = + (fun _ -> + main_body ~feature_flags:feature_flags1 () ; + { previous_proof_statements = [] + ; public_output = () + ; auxiliary_output = () + } ) + ; feature_flags = feature_flags1 + } + ; { identifier = "main2" + ; prevs = [] + ; main = + (fun _ -> + main_body ~feature_flags:feature_flags2 () ; + { previous_proof_statements = [] + ; public_output = () + ; auxiliary_output = () + } ) + ; feature_flags = feature_flags2 + } + ] ) + () + in + let module Proof = (val proof) in + let test_prove1 () = + let public_input1, (), proof1 = + Async.Thread_safe.block_on_async_exn (fun () -> prove1 ()) + in + Or_error.ok_exn + (Async.Thread_safe.block_on_async_exn (fun () -> + Proof.verify [ (public_input1, proof1) ] ) ) + in + let test_prove2 () = + let public_input2, (), proof2 = + Async.Thread_safe.block_on_async_exn (fun () -> prove2 ()) + in + Or_error.ok_exn + 
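+(* Added note: [Or_error.ok_exn] raises if verification returns an error,
+   which Alcotest then reports as a failure of this test case. *)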
(Async.Thread_safe.block_on_async_exn (fun () -> + Proof.verify [ (public_input2, proof2) ] ) ) + in + + let open Alcotest in + add_tests name + [ test_case "prove 1" `Quick test_prove1 + ; test_case "prove 2" `Quick test_prove2 + ] + +let register_feature_test (name, specific_feature_flags) = + (* Tests activating "on" logic*) + register_test name specific_feature_flags specific_feature_flags ; + (* Tests activating "maybe on" logic *) + register_test + (Printf.sprintf "%s (maybe)" name) + specific_feature_flags Plonk_types.Features.none_bool + +let () = + let configurations = + [ ("xor", Plonk_types.Features.{ none_bool with xor = true }) + ; ( "range check 0" + , Plonk_types.Features.{ none_bool with range_check0 = true } ) + ; ( "range check 1" + , Plonk_types.Features.{ none_bool with range_check1 = true } ) + ; ("rot", Plonk_types.Features.{ none_bool with rot = true }) + ; ( "foreign field addition" + , Plonk_types.Features.{ none_bool with foreign_field_add = true } ) + ; ( "foreign field multiplication" + , Plonk_types.Features.{ none_bool with foreign_field_mul = true } ) + ; ( "fixed lookup tables" + , Plonk_types.Features.{ none_bool with lookup = true } ) + ; ( "runtime+fixed lookup tables" + , Plonk_types.Features. + { none_bool with lookup = true; runtime_tables = true } ) + ] + in + List.iter ~f:register_feature_test configurations ; + register_test "different sizes of lookup" + Plonk_types.Features.{ none_bool with foreign_field_mul = true } + Plonk_types.Features.{ none_bool with xor = true } ; + Alcotest.run "Custom gates" (get_tests ()) diff --git a/src/lib/pickles/test/optional_custom_gates/test_fix_domains.ml b/src/lib/pickles/test/optional_custom_gates/test_fix_domains.ml new file mode 100644 index 00000000000..c367270db45 --- /dev/null +++ b/src/lib/pickles/test/optional_custom_gates/test_fix_domains.ml @@ -0,0 +1,320 @@ +(** Testing + ------- + Component: Pickles + Subject: Testing computation of the domains when fixed and runtime tables are present + Invocation: dune exec \ + src/lib/pickles/test/optional_custom_gates/test_fix_domains.exe +*) + +open Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint + +open Pickles.Impls.Step + +let add_constraint c = assert_ { basic = T c; annotation = None } + +let add_plonk_constraint c = assert_ { basic = T c; annotation = None } + +let etyp_unit = + Composition_types.Spec.ETyp.T + (Snarky_backendless.Typ.unit (), Core_kernel.Fn.id, Core_kernel.Fn.id) + +let test_fix_domains_with_runtime_table_cfgs () = + let table_sizes = [ [ 1 ]; [ 1; 1 ]; [ 1; 10; 42; 36 ] ] in + (* Log2 value *) + let exp_output = [ 3; 3; 7 ] in + let feature_flags = + Pickles_types.Plonk_types.Features.{ none_bool with runtime_tables = true } + in + assert ( + List.for_all2 + (fun table_sizes exp_output -> + let main () = + List.iteri + (fun i table_size -> + let first_column = + Array.init table_size (fun _ -> + Pickles.Impls.Step.Field.Constant.random () ) + in + add_constraint + (AddRuntimeTableCfg { id = Int32.of_int i; first_column }) ) + table_sizes + in + let domains = + Pickles__Fix_domains.domains ~feature_flags + (module Pickles.Impls.Step) + etyp_unit etyp_unit main + in + let log2_size = Pickles_base.Domain.log2_size domains.h in + log2_size = exp_output ) + table_sizes exp_output ) + +let test_fix_domains_with_runtime_table_cfgs_and_fixed_lookup_tables () = + (* Tables do not share the same ID *) + let fixed_table_sizes = [ [ 1 ]; [ 1; 1 ]; [ 1; 10; 42; 36 ]; []; [ 1 ] ] in + let rt_cfgs_table_sizes = [ [ 1 ]; [ 1; 1 ]; [ 1; 10; 
42; 36 ]; [ 1 ]; [] ] in + let exp_outputs = [ 3; 3; 8; 3; 3 ] in + let feature_flags = + Pickles_types.Plonk_types.Features. + { none_bool with lookup = true; runtime_tables = true } + in + assert ( + List.for_all2 + (fun (fixed_table_sizes, rt_cfgs_table_sizes) exp_output -> + let n_fixed_table_sizes = List.length fixed_table_sizes in + let main () = + List.iteri + (fun i table_size -> + let indexes = + Array.init table_size Pickles.Impls.Step.Field.Constant.of_int + in + let values = + Array.init table_size (fun _ -> + Pickles.Impls.Step.Field.Constant.random () ) + in + add_constraint + (AddFixedLookupTable + { id = Int32.of_int i; data = [| indexes; values |] } ) ) + fixed_table_sizes ; + List.iteri + (fun i table_size -> + let first_column = + Array.init table_size Pickles.Impls.Step.Field.Constant.of_int + in + add_constraint + (AddRuntimeTableCfg + { id = Int32.of_int (n_fixed_table_sizes + i); first_column } + ) ) + rt_cfgs_table_sizes + in + let domains = + Pickles__Fix_domains.domains ~feature_flags + (module Pickles.Impls.Step) + etyp_unit etyp_unit main + in + let log2_size = Pickles_base.Domain.log2_size domains.h in + log2_size = exp_output ) + (List.combine fixed_table_sizes rt_cfgs_table_sizes) + exp_outputs ) + +let test_fix_domains_with_runtime_table_cfgs_and_fixed_lookup_tables_sharing_id + () = + let id = 0l in + let fixed_lt_sizes = [ 3; 1; 7 ] in + let rt_cfg_sizes = [ 3; 7; 8 ] in + (* log2 value *) + let exp_outputs = [ 4; 4; 5 ] in + let feature_flags = + Pickles_types.Plonk_types.Features. + { none_bool with lookup = true; runtime_tables = true } + in + assert ( + List.for_all2 + (fun (fixed_table_size, rt_cfg_table_size) exp_output -> + let main () = + let indexes = + Array.init fixed_table_size Pickles.Impls.Step.Field.Constant.of_int + in + let values = + Array.init fixed_table_size (fun _ -> + Pickles.Impls.Step.Field.Constant.random () ) + in + add_constraint + (AddFixedLookupTable { id; data = [| indexes; values |] }) ; + let first_column = + Array.init rt_cfg_table_size + Pickles.Impls.Step.Field.Constant.of_int + in + add_constraint (AddRuntimeTableCfg { id; first_column }) + in + let domains = + Pickles__Fix_domains.domains ~feature_flags + (module Pickles.Impls.Step) + etyp_unit etyp_unit main + in + let log2_size = Pickles_base.Domain.log2_size domains.h in + log2_size = exp_output ) + (List.combine fixed_lt_sizes rt_cfg_sizes) + exp_outputs ) + +let test_fix_domains_with_fixed_lookup_tables () = + let table_sizes = [ [ 1 ]; [ 1; 1 ]; [ 1; 10; 42; 36 ] ] in + (* Log2 value *) + let exp_output = [ 3; 3; 7 ] in + let feature_flags = + Pickles_types.Plonk_types.Features.{ none_bool with lookup = true } + in + assert ( + List.for_all2 + (fun table_sizes exp_output -> + let main () = + List.iteri + (fun i table_size -> + let indexes = + Array.init table_size Pickles.Impls.Step.Field.Constant.of_int + in + let values = + Array.init table_size (fun _ -> + Pickles.Impls.Step.Field.Constant.random () ) + in + add_constraint + (AddFixedLookupTable + { id = Int32.of_int i; data = [| indexes; values |] } ) ) + table_sizes + in + let domains = + Pickles__Fix_domains.domains ~feature_flags + (module Pickles.Impls.Step) + etyp_unit etyp_unit main + in + let log2_size = Pickles_base.Domain.log2_size domains.h in + log2_size = exp_output ) + table_sizes exp_output ) + +let test_fix_domains_with_xor_table () = + (* The domain size does not depend on the circuit, only on the feature flags *) + let main () = + add_plonk_constraint (Raw { kind = Zero; values = [||]; coeffs 
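+(* Added note: a [Raw] row of kind [Zero], with no witness values and no
+   coefficients, is as close to an empty circuit as we can get; it only
+   gives [Pickles__Fix_domains.domains] something to elaborate, so the
+   resulting domain size reflects the feature flags alone. *)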
= [||] }) + in + (* Log2 value. XOR table size is 256 *) + let exp_output = 9 in + let feature_flags_s = + [ Pickles_types.Plonk_types.Features.{ none_bool with xor = true } + ; Pickles_types.Plonk_types.Features. + { none_bool with foreign_field_add = true; xor = true } + ] + in + assert ( + List.for_all + (fun feature_flags -> + let domains = + Pickles__Fix_domains.domains ~feature_flags + (module Pickles.Impls.Step) + etyp_unit etyp_unit main + in + let log2_size = Pickles_base.Domain.log2_size domains.h in + log2_size = exp_output ) + feature_flags_s ) + +let test_fix_domains_with_range_check_table () = + (* The domain size does not depend on the circuit, only on the feature flags *) + let main () = + add_plonk_constraint (Raw { kind = Zero; values = [||]; coeffs = [||] }) + in + (* Log2 value. Range check table size is 2^12 *) + let exp_output = 13 in + (* TODO: use QCheck to generate the different options *) + let feature_flags_s = + [ Pickles_types.Plonk_types.Features.{ none_bool with range_check0 = true } + ; Pickles_types.Plonk_types.Features.{ none_bool with range_check1 = true } + ; Pickles_types.Plonk_types.Features. + { none_bool with range_check0 = true; foreign_field_add = true } + ; Pickles_types.Plonk_types.Features. + { none_bool with range_check0 = true; range_check1 = true } + ; Pickles_types.Plonk_types.Features. + { none_bool with + range_check0 = true + ; range_check1 = true + ; foreign_field_add = true + } + ; Pickles_types.Plonk_types.Features. + { none_bool with foreign_field_mul = true } + ; Pickles_types.Plonk_types.Features. + { none_bool with foreign_field_mul = true; foreign_field_add = true } + ; Pickles_types.Plonk_types.Features. + { none_bool with + foreign_field_mul = true + ; range_check0 = true + ; range_check1 = true + } + ; Pickles_types.Plonk_types.Features. + { none_bool with + foreign_field_mul = true + ; foreign_field_add = true + ; range_check0 = true + ; range_check1 = true + } + ] + in + assert ( + List.for_all + (fun feature_flags -> + let domains = + Pickles__Fix_domains.domains ~feature_flags + (module Pickles.Impls.Step) + etyp_unit etyp_unit main + in + let log2_size = Pickles_base.Domain.log2_size domains.h in + log2_size = exp_output ) + feature_flags_s ) + +let test_fix_domains_with_range_check_and_xor_table () = + (* The domain size does not depend on the circuit, only on the feature flags *) + let main () = + add_plonk_constraint (Raw { kind = Zero; values = [||]; coeffs = [||] }) + in + let exp_output = 13 in + let feature_flags_s = + [ Pickles_types.Plonk_types.Features. + { none_bool with range_check0 = true; xor = true } + ; Pickles_types.Plonk_types.Features. + { none_bool with range_check1 = true; xor = true } + ; Pickles_types.Plonk_types.Features. + { none_bool with range_check0 = true; range_check1 = true; xor = true } + ; Pickles_types.Plonk_types.Features. + { none_bool with foreign_field_mul = true; xor = true } + ; Pickles_types.Plonk_types.Features. + { none_bool with + foreign_field_mul = true + ; range_check0 = true + ; range_check1 = true + ; xor = true + } + ; Pickles_types.Plonk_types.Features. + { none_bool with + foreign_field_mul = true + ; range_check0 = true + ; range_check1 = true + ; rot = true + } + ; Pickles_types.Plonk_types.Features. 
+ { none_bool with + foreign_field_mul = true + ; range_check0 = true + ; range_check1 = true + ; rot = true + ; xor = true + } + ] + in + assert ( + List.for_all + (fun feature_flags -> + let domains = + Pickles__Fix_domains.domains ~feature_flags + (module Pickles.Impls.Step) + etyp_unit etyp_unit main + in + let log2_size = Pickles_base.Domain.log2_size domains.h in + log2_size = exp_output ) + feature_flags_s ) + +let () = + let open Alcotest in + run "Test Pickles.Fix_domains with custom gates" + [ ( "domains" + , [ test_case "With only fixed lookup tables" `Quick + test_fix_domains_with_fixed_lookup_tables + ; test_case "With only runtime table cfgs" `Quick + test_fix_domains_with_runtime_table_cfgs + ; test_case "With runtime table cfgs and fixed lookup tables" `Quick + test_fix_domains_with_runtime_table_cfgs_and_fixed_lookup_tables + ; test_case "With runtime table cfgs and fixed lookup tables sharing ID" + `Quick + test_fix_domains_with_runtime_table_cfgs_and_fixed_lookup_tables_sharing_id + ; test_case "With XOR table" `Quick test_fix_domains_with_xor_table + ; test_case "With range check table" `Quick + test_fix_domains_with_range_check_table + ; test_case "With range check and XOR tables" `Quick + test_fix_domains_with_range_check_and_xor_table + ] ) + ] diff --git a/src/lib/pickles/test/optional_custom_gates/test_gadgets/dune b/src/lib/pickles/test/optional_custom_gates/test_gadgets/dune new file mode 100644 index 00000000000..34e0dbde4a7 --- /dev/null +++ b/src/lib/pickles/test/optional_custom_gates/test_gadgets/dune @@ -0,0 +1,64 @@ +(library + (name pickles_optional_custom_gates_circuits) + (libraries + ;; opam libraries + alcotest + stdio + integers + result + base.caml + bignum.bigint + core_kernel + base64 + digestif + ppx_inline_test.config + sexplib0 + base + async_kernel + bin_prot.shape + async + async_unix + ;; local libraries + mina_wire_types + kimchi_bindings + kimchi_types + pasta_bindings + kimchi_backend.pasta + kimchi_backend.pasta.basic + kimchi_backend.pasta.constraint_system + bitstring_lib + snarky.intf + pickles.backend + pickles_types + snarky.backendless + snarky_group_map + sponge + pickles + pickles.pseudo + composition_types + pickles.limb_vector + pickles_base + kimchi_backend + mina_version + base58_check + codable + random_oracle_input + pickles.composition_types + pickles.plonk_checks + pickles.one_hot_vector + snarky_log + group_map + snarky_curve + key_cache + snark_keys_header + tuple_lib + promise + kimchi_backend.common + logger + internal_tracing.context_logger + ppx_version.runtime + error_json) + (instrumentation + (backend bisect_ppx)) + (preprocess + (pps ppx_version))) diff --git a/src/lib/pickles/test/optional_custom_gates/test_gadgets/pickles_optional_custom_gates_circuits.ml b/src/lib/pickles/test/optional_custom_gates/test_gadgets/pickles_optional_custom_gates_circuits.ml new file mode 100644 index 00000000000..365b7e029b4 --- /dev/null +++ b/src/lib/pickles/test/optional_custom_gates/test_gadgets/pickles_optional_custom_gates_circuits.ml @@ -0,0 +1,188 @@ +open Core_kernel +open Pickles_types +open Pickles.Impls.Step + +let add_constraint c = assert_ { basic = c; annotation = None } + +let add_plonk_constraint c = + add_constraint + (Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T c) + +let fresh_int i = exists Field.typ ~compute:(fun () -> Field.Constant.of_int i) + +let main_xor () = + add_plonk_constraint + (Xor + { in1 = fresh_int 0 + ; in2 = fresh_int 0 + ; out = fresh_int 0 + ; in1_0 = fresh_int 0 + ; 
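+(* Added note (an assumption about kimchi's Xor16 wire layout): the
+   remaining wires hold the 4-bit slices of [in1], [in2] and [out]; all-zero
+   witnesses satisfy the gate because 0 lxor 0 = 0. *)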
in1_1 = fresh_int 0 + ; in1_2 = fresh_int 0 + ; in1_3 = fresh_int 0 + ; in2_0 = fresh_int 0 + ; in2_1 = fresh_int 0 + ; in2_2 = fresh_int 0 + ; in2_3 = fresh_int 0 + ; out_0 = fresh_int 0 + ; out_1 = fresh_int 0 + ; out_2 = fresh_int 0 + ; out_3 = fresh_int 0 + } ) ; + add_plonk_constraint (Raw { kind = Zero; values = [||]; coeffs = [||] }) + +let main_range_check0 () = + add_plonk_constraint + (RangeCheck0 + { v0 = fresh_int 0 + ; v0p0 = fresh_int 0 + ; v0p1 = fresh_int 0 + ; v0p2 = fresh_int 0 + ; v0p3 = fresh_int 0 + ; v0p4 = fresh_int 0 + ; v0p5 = fresh_int 0 + ; v0c0 = fresh_int 0 + ; v0c1 = fresh_int 0 + ; v0c2 = fresh_int 0 + ; v0c3 = fresh_int 0 + ; v0c4 = fresh_int 0 + ; v0c5 = fresh_int 0 + ; v0c6 = fresh_int 0 + ; v0c7 = fresh_int 0 + ; (* Coefficients *) + compact = Field.Constant.zero + } ) + +let main_range_check1 () = + add_plonk_constraint + (RangeCheck1 + { v2 = fresh_int 0 + ; v12 = fresh_int 0 + ; v2c0 = fresh_int 0 + ; v2p0 = fresh_int 0 + ; v2p1 = fresh_int 0 + ; v2p2 = fresh_int 0 + ; v2p3 = fresh_int 0 + ; v2c1 = fresh_int 0 + ; v2c2 = fresh_int 0 + ; v2c3 = fresh_int 0 + ; v2c4 = fresh_int 0 + ; v2c5 = fresh_int 0 + ; v2c6 = fresh_int 0 + ; v2c7 = fresh_int 0 + ; v2c8 = fresh_int 0 + ; v2c9 = fresh_int 0 + ; v2c10 = fresh_int 0 + ; v2c11 = fresh_int 0 + ; v0p0 = fresh_int 0 + ; v0p1 = fresh_int 0 + ; v1p0 = fresh_int 0 + ; v1p1 = fresh_int 0 + ; v2c12 = fresh_int 0 + ; v2c13 = fresh_int 0 + ; v2c14 = fresh_int 0 + ; v2c15 = fresh_int 0 + ; v2c16 = fresh_int 0 + ; v2c17 = fresh_int 0 + ; v2c18 = fresh_int 0 + ; v2c19 = fresh_int 0 + } ) + +let main_rot () = + add_plonk_constraint + (Rot64 + { word = fresh_int 0 + ; rotated = fresh_int 0 + ; excess = fresh_int 0 + ; bound_limb0 = fresh_int 0xFFF + ; bound_limb1 = fresh_int 0xFFF + ; bound_limb2 = fresh_int 0xFFF + ; bound_limb3 = fresh_int 0xFFF + ; bound_crumb0 = fresh_int 3 + ; bound_crumb1 = fresh_int 3 + ; bound_crumb2 = fresh_int 3 + ; bound_crumb3 = fresh_int 3 + ; bound_crumb4 = fresh_int 3 + ; bound_crumb5 = fresh_int 3 + ; bound_crumb6 = fresh_int 3 + ; bound_crumb7 = fresh_int 3 + ; two_to_rot = Field.Constant.one + } ) ; + add_plonk_constraint + (Raw { kind = Zero; values = [| fresh_int 0 |]; coeffs = [||] }) + +let main_foreign_field_add () = + add_plonk_constraint + (ForeignFieldAdd + { left_input_lo = fresh_int 0 + ; left_input_mi = fresh_int 0 + ; left_input_hi = fresh_int 0 + ; right_input_lo = fresh_int 0 + ; right_input_mi = fresh_int 0 + ; right_input_hi = fresh_int 0 + ; field_overflow = fresh_int 0 + ; carry = fresh_int 0 + ; foreign_field_modulus0 = Field.Constant.of_int 1 + ; foreign_field_modulus1 = Field.Constant.of_int 0 + ; foreign_field_modulus2 = Field.Constant.of_int 0 + ; sign = Field.Constant.of_int 0 + } ) ; + add_plonk_constraint + (Raw { kind = Zero; values = [| fresh_int 0 |]; coeffs = [||] }) + +let main_foreign_field_mul () = + add_plonk_constraint + (ForeignFieldMul + { left_input0 = fresh_int 0 + ; left_input1 = fresh_int 0 + ; left_input2 = fresh_int 0 + ; right_input0 = fresh_int 0 + ; right_input1 = fresh_int 0 + ; right_input2 = fresh_int 0 + ; remainder01 = fresh_int 0 + ; remainder2 = fresh_int 0 + ; quotient0 = fresh_int 0 + ; quotient1 = fresh_int 0 + ; quotient2 = fresh_int 0 + ; quotient_hi_bound = + exists Field.typ ~compute:(fun () -> + let open Field.Constant in + let two_to_21 = of_int Int.(2 lsl 21) in + let two_to_42 = two_to_21 * two_to_21 in + let two_to_84 = two_to_42 * two_to_42 in + two_to_84 - one - one ) + ; product1_lo = fresh_int 0 + ; product1_hi_0 = 
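+(* Arithmetic note (added): [Int.(2 lsl 21)] equals 2^22, not 2^21, so
+   [two_to_21] above actually holds 2^22, hence [two_to_42] holds 2^44 and
+   [two_to_84] holds 2^88; the value bound to [quotient_hi_bound] therefore
+   works out to 2^88 - 2. *)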
fresh_int 0 + ; product1_hi_1 = fresh_int 0 + ; carry0 = fresh_int 0 + ; carry1_0 = fresh_int 0 + ; carry1_12 = fresh_int 0 + ; carry1_24 = fresh_int 0 + ; carry1_36 = fresh_int 0 + ; carry1_48 = fresh_int 0 + ; carry1_60 = fresh_int 0 + ; carry1_72 = fresh_int 0 + ; carry1_84 = fresh_int 0 + ; carry1_86 = fresh_int 0 + ; carry1_88 = fresh_int 0 + ; carry1_90 = fresh_int 0 + ; foreign_field_modulus2 = Field.Constant.one + ; neg_foreign_field_modulus0 = Field.Constant.zero + ; neg_foreign_field_modulus1 = Field.Constant.zero + ; neg_foreign_field_modulus2 = + Field.Constant.( + let two_to_22 = of_int Int.(2 lsl 22) in + let two_to_44 = two_to_22 * two_to_22 in + let two_to_88 = two_to_44 * two_to_44 in + two_to_88 - one) + } ) ; + add_plonk_constraint + (Raw { kind = Zero; values = [| fresh_int 0 |]; coeffs = [||] }) + +let main_body ~(feature_flags : _ Plonk_types.Features.t) () = + if feature_flags.rot then main_rot () ; + if feature_flags.xor then main_xor () ; + if feature_flags.range_check0 then main_range_check0 () ; + if feature_flags.range_check1 then main_range_check1 () ; + if feature_flags.foreign_field_add then main_foreign_field_add () ; + if feature_flags.foreign_field_mul then main_foreign_field_mul () diff --git a/src/lib/pickles/test/test_impls.ml b/src/lib/pickles/test/test_impls.ml new file mode 100644 index 00000000000..108639c4f8a --- /dev/null +++ b/src/lib/pickles/test/test_impls.ml @@ -0,0 +1,46 @@ +(* Testing + ------- + + Component: Pickles + Subject: Test step and wrap circuits + Invocation: dune exec src/lib/pickles/test/main.exe -- test "Impls:" +*) + +let test_step_circuit_behavior () = + let expected_list = + [ ("45560315531506369815346746415080538112", false) + ; ("45560315531506369815346746415080538113", false) + ; ( "14474011154664524427946373126085988481727088556502330059655218120611762012161" + , true ) + ; ( "14474011154664524427946373126085988481727088556502330059655218120611762012161" + , true ) + ] + in + let str_list = + List.map (Lazy.force Impls.Step.Other_field.forbidden_shifted_values) + ~f:(fun (a, b) -> (Backend.Tick.Field.to_string a, b)) + in + assert ([%equal: (string * bool) list] str_list expected_list) + +let test_wrap_circuit_behavior () = + let expected_list = + [ "91120631062839412180561524743370440705" + ; "91120631062839412180561524743370440706" + ] + in + let str_list = + List.map + (Lazy.force Impls.Wrap.Other_field.forbidden_shifted_values) + ~f:Backend.Tock.Field.to_string + in + assert ([%equal: string list] str_list expected_list) + +let tests = + let open Alcotest in + [ ( "Impls:Step" + , [ test_case "preserve circuit behavior" `Quick test_step_circuit_behavior + ] ) + ; ( "Impls:Wrap" + , [ test_case "preserve circuit behavior" `Quick test_wrap_circuit_behavior + ] ) + ] diff --git a/src/lib/pickles/test/test_opt_sponge.ml b/src/lib/pickles/test/test_opt_sponge.ml new file mode 100644 index 00000000000..d94d1ee5635 --- /dev/null +++ b/src/lib/pickles/test/test_opt_sponge.ml @@ -0,0 +1,100 @@ +(* Testing + ------- + + Component: Pickles + Subject: Test opt_sponge + Invocation: \ + dune exec src/lib/pickles/test/main.exe -- test "Opt_sponge" +*) + +module Wrap_main_inputs = Pickles__Wrap_main_inputs +module Opt_sponge = Pickles__Opt_sponge + +module Test_make + (Impl : Snarky_backendless.Snark_intf.Run) + (P : Sponge.Intf.Permutation with type Field.t = Impl.Field.t) = +struct + module O = Opt_sponge.Make (Impl) (P) + module S = Sponge.Make_sponge (P) + open Impl + + let test_correctness () = + let params : _ Sponge.Params.t = 
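+(* Added note: random MDS entries and round constants suffice here; the
+   test only checks that the optional sponge and the plain sponge agree
+   when both are instantiated with the same parameters. *)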
+ let a () = + Array.init 3 ~f:(fun _ -> Field.(constant (Constant.random ()))) + in + { mds = Array.init 3 ~f:(fun _ -> a ()) + ; round_constants = Array.init 40 ~f:(fun _ -> a ()) + } + in + let gen = + let open Quickcheck.Generator.Let_syntax in + let%bind n = Quickcheck.Generator.small_positive_int + and n_pre = Quickcheck.Generator.small_positive_int in + let%map xs = List.gen_with_length n Field.Constant.gen + and bs = List.gen_with_length n Bool.quickcheck_generator + and pre = List.gen_with_length n_pre Field.Constant.gen in + (pre, List.zip_exn bs xs) + in + Quickcheck.test gen ~trials:10 ~f:(fun (pre, ps) -> + let filtered = + List.filter_map ps ~f:(fun (b, x) -> if b then Some x else None) + in + let init () = + let pre = + exists + (Typ.list ~length:(List.length pre) Field.typ) + ~compute:(fun () -> pre) + in + let s = S.create params in + List.iter pre ~f:(S.absorb s) ; + s + in + let filtered_res = + let length = List.length filtered in + Impl.Internal_Basic.Test.checked_to_unchecked + (Typ.list ~length Field.typ) + Field.typ + (fun xs -> + make_checked (fun () -> + let s = init () in + List.iter xs ~f:(S.absorb s) ; + S.squeeze s ) ) + filtered + in + let opt_res = + let length = List.length ps in + Impl.Internal_Basic.Test.checked_to_unchecked + (Typ.list ~length (Typ.tuple2 Boolean.typ Field.typ)) + Field.typ + (fun xs -> + make_checked (fun () -> + let s = + match pre with + | [] -> + O.create params + | _ :: _ -> + O.of_sponge (init ()) + in + List.iter xs ~f:(O.absorb s) ; + O.squeeze s ) ) + ps + in + if not (Field.Constant.equal filtered_res opt_res) then + failwithf + !"hash(%{sexp:Field.Constant.t list}) = %{sexp:Field.Constant.t}\n\ + hash(%{sexp:(bool * Field.Constant.t) list}) = \ + %{sexp:Field.Constant.t}" + filtered filtered_res ps opt_res () ) +end + +module Wrap = Test_make (Impls.Wrap) (Wrap_main_inputs.Sponge.Permutation) +module Step = Test_make (Impls.Step) (Step_main_inputs.Sponge.Permutation) + +let tests = + let open Alcotest in + [ ( "Opt_sponge" + , [ test_case "wrap correct" `Quick Wrap.test_correctness + ; test_case "step correct" `Quick Step.test_correctness + ] ) + ] diff --git a/src/lib/pickles/test/test_plonk_curve_ops.ml b/src/lib/pickles/test/test_plonk_curve_ops.ml new file mode 100644 index 00000000000..3c05b4bb190 --- /dev/null +++ b/src/lib/pickles/test/test_plonk_curve_ops.ml @@ -0,0 +1,101 @@ +(* Testing + ------- + + Component: Pickles + Subject: Test Plonk curve operations + Invocation: \ + dune exec src/lib/pickles/test/main.exe -- test "Plonk curve operations" +*) + +module Test_make + (Impl : Snarky_backendless.Snark_intf.Run) + (G : Pickles__Intf.Group(Impl).S with type t = Impl.Field.t * Impl.Field.t) = +struct + open Impl + module T = Internal_Basic + include Pickles__Plonk_curve_ops.Make (Impl) (G) + + let random_point = + let rec pt x = + let y2 = G.Params.(T.Field.(b + (x * (a + (x * x))))) in + if T.Field.is_square y2 then (x, T.Field.sqrt y2) + else pt T.Field.(x + one) + in + G.Constant.of_affine (pt (T.Field.of_int 0)) + + let n = Field.size_in_bits + + let test_scale_fast_2 () = + Quickcheck.test ~trials:5 Field.Constant.gen ~f:(fun s -> + let input = + let s_odd = T.Bigint.test_bit (T.Bigint.of_field s) 0 in + Field.Constant.((if s_odd then s - one else s) / of_int 2, s_odd) + in + T.Test.test_equal ~equal:G.Constant.equal + ~sexp_of_t:G.Constant.sexp_of_t + (Typ.tuple2 G.typ (Typ.tuple2 Field.typ Boolean.typ)) + G.typ + (fun (g, s) -> + make_checked (fun () -> + scale_fast2 ~num_bits:n g (Shifted_value s) ) ) + (fun 
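+(* Added gloss on the constant-side reference below: [scale_fast2] on a
+   shifted value computes [(2^k + s) * G] with
+   k = chunks_needed * bits_per_chunk, so we rebuild that scalar and apply
+   a plain [G.Constant.scale]. *)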
(g, _) -> + let x = + let chunks_needed = chunks_needed ~num_bits:(n - 1) in + let actual_bits_used = chunks_needed * bits_per_chunk in + Pickles_types.Pcs_batch.pow ~one:G.Constant.Scalar.one + ~mul:G.Constant.Scalar.( * ) + G.Constant.Scalar.(of_int 2) + actual_bits_used + |> G.Constant.Scalar.( + ) + (G.Constant.Scalar.project (Field.Constant.unpack s)) + in + G.Constant.scale g x ) + (random_point, input) ) + + let test_scale_fast () = + let open Pickles_types in + let shift = Shifted_value.Type1.Shift.create (module G.Constant.Scalar) in + Quickcheck.test ~trials:10 + Quickcheck.Generator.( + map (list_with_length n Bool.quickcheck_generator) ~f:(fun bs -> + Field.Constant.project bs |> Field.Constant.unpack )) + ~f:(fun xs -> + try + T.Test.test_equal ~equal:G.Constant.equal + ~sexp_of_t:G.Constant.sexp_of_t + (Typ.tuple2 G.typ (Typ.list ~length:n Boolean.typ)) + G.typ + (fun (g, s) -> + make_checked (fun () -> + scale_fast ~num_bits:n g (Shifted_value (Field.project s)) ) + ) + (fun (g, s) -> + let open G.Constant.Scalar in + let s = project s in + let x = + Shifted_value.Type1.to_field + (module G.Constant.Scalar) + ~shift (Shifted_value s) + in + G.Constant.scale g x ) + (random_point, xs) + with e -> + eprintf !"Input %{sexp: bool list}\n%!" xs ; + raise e ) + + let tests = + let open Alcotest in + [ test_case "scale fast" `Quick test_scale_fast + ; test_case "scale fast 2" `Quick test_scale_fast_2 + ] +end + +module Wrap = + Test_make (Pickles__Impls.Wrap) (Pickles__Wrap_main_inputs.Inner_curve) +module Step = + Test_make (Pickles__Impls.Step) (Pickles__Step_main_inputs.Inner_curve) + +let tests = + [ ("Plonk curve operations:Wrap", Wrap.tests) + ; ("Plonk curve operations:Step", Step.tests) + ] diff --git a/src/lib/pickles/test/test_scalar_challenge.ml b/src/lib/pickles/test/test_scalar_challenge.ml new file mode 100644 index 00000000000..a8064f70bc1 --- /dev/null +++ b/src/lib/pickles/test/test_scalar_challenge.ml @@ -0,0 +1,105 @@ +(* Testing + ------- + + Component: Pickles + Subject: Test step and wrap scalar challenges + Invocation: dune exec src/lib/pickles/test/main.exe -- test "scalar challenge" +*) + +module SC = Pickles__Import.Scalar_challenge +module Scalar_challenge = Pickles__Scalar_challenge + +module Test_make + (Impl : Snarky_backendless.Snark_intf.Run) + (G : Pickles__Intf.Group(Impl).S with type t = Impl.Field.t * Impl.Field.t) + (Challenge : Pickles__Import.Challenge.S with module Impl := Impl) + (Endo : sig + val base : Impl.Field.Constant.t + + val scalar : G.Constant.Scalar.t + end) = +struct + open Impl + include Pickles__Scalar_challenge.Make (Impl) (G) (Challenge) (Endo) + module T = Internal_Basic + + let test_endo () = + let random_point = + let rec pt x = + let y2 = G.Params.(T.Field.(b + (x * (a + (x * x))))) in + if T.Field.is_square y2 then (x, T.Field.sqrt y2) + else pt T.Field.(x + one) + in + G.Constant.of_affine (pt (T.Field.random ())) + in + let n = 128 in + Quickcheck.test ~trials:10 + (Quickcheck.Generator.list_with_length n Bool.quickcheck_generator) + ~f:(fun xs -> + try + T.Test.test_equal ~equal:G.Constant.equal + ~sexp_of_t:G.Constant.sexp_of_t + (Typ.tuple2 G.typ (Typ.list ~length:n Boolean.typ)) + G.typ + (fun (g, s) -> + make_checked (fun () -> endo g (SC.create (Field.pack s))) ) + (fun (g, s) -> + let x = + Constant.to_field (SC.create (Challenge.Constant.of_bits s)) + in + G.Constant.scale g x ) + (random_point, xs) + with e -> + eprintf !"Input %{sexp: bool list}\n%!" 
xs ;
+          raise e )
+
+  let test_scalar ~endo () =
+    let n = 128 in
+    Quickcheck.test ~trials:10
+      (Quickcheck.Generator.list_with_length n Bool.quickcheck_generator)
+      ~f:(fun xs ->
+        try
+          T.Test.test_equal ~equal:Field.Constant.equal
+            ~sexp_of_t:Field.Constant.sexp_of_t
+            (Typ.list ~length:n Boolean.typ)
+            Field.typ
+            (fun s ->
+              make_checked (fun () ->
+                  Scalar_challenge.to_field_checked
+                    (module Impl)
+                    ~endo
+                    (SC.create (Impl.Field.pack s)) ) )
+            (fun s ->
+              Scalar_challenge.to_field_constant
+                (module Field.Constant)
+                ~endo
+                (SC.create (Challenge.Constant.of_bits s)) )
+            xs
+        with e ->
+          eprintf !"Input %{sexp: bool list}\n%!" xs ;
+          raise e )
+end
+
+module Endo = Pickles__Endo
+module Wrap =
+  Test_make (Impls.Wrap) (Pickles__Wrap_main_inputs.Inner_curve)
+    (Impls.Wrap.Challenge)
+    (Endo.Wrap_inner_curve)
+module Step =
+  Test_make (Impls.Step) (Pickles__Step_main_inputs.Inner_curve)
+    (Impls.Step.Challenge)
+    (Endo.Step_inner_curve)
+
+let tests =
+  let open Alcotest in
+  [ ( "Wrap scalar challenge"
+    , [ test_case "test endo" `Quick Wrap.test_endo
+      ; test_case "test scalar" `Quick
+          (Wrap.test_scalar ~endo:Endo.Step_inner_curve.scalar)
+      ] )
+  ; ( "Step scalar challenge"
+    , [ test_case "test endo" `Quick Step.test_endo
+      ; test_case "test scalar" `Quick
+          (Step.test_scalar ~endo:Endo.Wrap_inner_curve.scalar)
+      ] )
+  ]
diff --git a/src/lib/pickles/test/test_side_loaded_verification_key.ml b/src/lib/pickles/test/test_side_loaded_verification_key.ml
new file mode 100644
index 00000000000..37440f3f40f
--- /dev/null
+++ b/src/lib/pickles/test/test_side_loaded_verification_key.ml
@@ -0,0 +1,44 @@
+(* Testing
+   -------
+
+   Component: Pickles
+   Subject: Test side-loaded verification key
+   Invocation: \
+    dune exec src/lib/pickles/test/main.exe -- test "Side-loaded verification key"
+*)
+
+module SLV_key = Pickles__Side_loaded_verification_key
+open Pickles_types
+
+let input_size w =
+  (* This should be an affine function in [w].
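+     (Added gloss: [size Nat.N0.n] is the constant term,
+     [size Nat.N1.n - size Nat.N0.n] the slope, and [test_input_size]
+     below checks this linear model against the actual size of
+     [Impls.Step.input] for every width up to the maximum.)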
*) + let size proofs_verified = + let (T (Typ typ, _conv, _conv_inv)) = + Impls.Step.input ~proofs_verified ~wrap_rounds:Backend.Tock.Rounds.n + in + typ.size_in_field_elements + in + let f0 = size Nat.N0.n in + let slope = size Nat.N1.n - f0 in + f0 + (slope * w) + +let test_input_size () = + List.iter + (List.range 0 + (Nat.to_int SLV_key.Width.Max.n) + ~stop:`inclusive ~start:`inclusive ) + ~f:(fun n -> + Alcotest.(check int) + "input size" (input_size n) + (let (T a) = Pickles_types.Nat.of_int n in + let (T (Typ typ, _conv, _conv_inv)) = + Impls.Step.input ~proofs_verified:a + ~wrap_rounds:Backend.Tock.Rounds.n + in + typ.size_in_field_elements ) ) + +let tests = + let open Alcotest in + [ ( "Side-loaded verification key" + , [ test_case "test_input_size" `Quick test_input_size ] ) + ] diff --git a/src/lib/pickles/test/test_sponge.ml b/src/lib/pickles/test/test_sponge.ml new file mode 100644 index 00000000000..e10b6d0250d --- /dev/null +++ b/src/lib/pickles/test/test_sponge.ml @@ -0,0 +1,63 @@ +(* Testing + ------- + + Component: Pickles + Subject: Test sponge + Invocation: \ + dune exec src/lib/pickles/test/main.exe -- test "Sponge" +*) + +module Test + (Impl : Snarky_backendless.Snark_intf.Run) + (S_constant : Sponge.Intf.Sponge + with module Field := Impl.Field.Constant + and module State := Sponge.State + and type input := Impl.field + and type digest := Impl.field) + (S_checked : Sponge.Intf.Sponge + with module Field := Impl.Field + and module State := Sponge.State + and type input := Impl.Field.t + and type digest := Impl.Field.t) = +struct + open Impl + + let test params : unit = + let n = 10 in + let a = Array.init n ~f:(fun _ -> Field.Constant.random ()) in + Impl.Internal_Basic.Test.test_equal ~sexp_of_t:Field.Constant.sexp_of_t + ~equal:Field.Constant.equal + (Typ.array ~length:n Field.typ) + Field.typ + (fun a -> + make_checked (fun () -> + let s = + S_checked.create (Sponge.Params.map ~f:Field.constant params) + in + Array.iter a ~f:(S_checked.absorb s) ; + S_checked.squeeze s ) ) + (fun a -> + let s = S_constant.create params in + Array.iter a ~f:(S_constant.absorb s) ; + S_constant.squeeze s ) + a +end + +module Step = + Test (Impls.Step) (Pickles__Tick_field_sponge.Field) + (Pickles__Step_main_inputs.Sponge.S) +module Wrap = + Test (Impls.Wrap) (Pickles__Tock_field_sponge.Field) + (Pickles__Wrap_main_inputs.Sponge.S) + +let tests = + let open Alcotest in + [ ( "Sponge:Step" + , [ test_case "sponge" `Quick (fun () -> + Step.test Pickles__Tick_field_sponge.params ) + ] ) + ; ( "Sponge:Wrap" + , [ test_case "sponge" `Quick (fun () -> + Wrap.test Pickles__Tock_field_sponge.params ) + ] ) + ] diff --git a/src/lib/pickles/test/test_step.ml b/src/lib/pickles/test/test_step.ml new file mode 100644 index 00000000000..37353cc71ed --- /dev/null +++ b/src/lib/pickles/test/test_step.ml @@ -0,0 +1,97 @@ +(* Testing + ------- + + Component: Pickles + Subject: Test scaling functions + Invocation: \ + dune exec src/lib/pickles/test/main.exe -- test "Step curve operations" +*) + +module Impl = Pickles__Impls.Step +module Inner_curve = Pickles__Step_main_inputs.Inner_curve +module Ops = Pickles__Step_main_inputs.Ops + +let test_scale_fast_2 () = + let open Impl in + let module T = Internal_Basic in + let module G = Inner_curve in + let n = Field.size_in_bits in + let module F = struct + type t = Field.t + + let typ = Field.typ + + module Constant = struct + include Field.Constant + + let to_bigint = Impl.Bigint.of_field + end + end in + Quickcheck.test ~trials:5 Field.Constant.gen 
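+(* Added note: each random trial compares the in-circuit
+   [Ops.scale_fast2'] against a constant-side scale by [2^k + s] (with
+   k = chunks_needed * bits_per_chunk), the same shifted-scalar
+   convention exercised in test_plonk_curve_ops.ml. *)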
~f:(fun s -> + T.Test.test_equal ~equal:G.Constant.equal ~sexp_of_t:G.Constant.sexp_of_t + (Typ.tuple2 G.typ Field.typ) + G.typ + (fun (g, s) -> + make_checked (fun () -> Ops.scale_fast2' ~num_bits:n (module F) g s) + ) + (fun (g, _) -> + let x = + let chunks_needed = Ops.chunks_needed ~num_bits:(n - 1) in + let actual_bits_used = chunks_needed * Ops.bits_per_chunk in + Pickles_types.Pcs_batch.pow ~one:G.Constant.Scalar.one + ~mul:G.Constant.Scalar.( * ) + G.Constant.Scalar.(of_int 2) + actual_bits_used + |> G.Constant.Scalar.( + ) + (G.Constant.Scalar.project (Field.Constant.unpack s)) + in + G.Constant.scale g x ) + (G.Constant.random (), s) ) + +let test_scale_fast_2_small () = + let open Impl in + let module T = Internal_Basic in + let module G = Inner_curve in + let n = 8 in + let module F = struct + type t = Field.t + + let typ = Field.typ + + module Constant = struct + include Field.Constant + + let to_bigint = Impl.Bigint.of_field + end + end in + Quickcheck.test ~trials:5 Field.Constant.gen ~f:(fun s -> + let s = + Field.Constant.unpack s |> Fn.flip List.take n |> Field.Constant.project + in + T.Test.test_equal ~equal:G.Constant.equal ~sexp_of_t:G.Constant.sexp_of_t + (Typ.tuple2 G.typ Field.typ) + G.typ + (fun (g, s) -> + make_checked (fun () -> Ops.scale_fast2' ~num_bits:n (module F) g s) + ) + (fun (g, _) -> + let x = + let chunks_needed = Ops.chunks_needed ~num_bits:(n - 1) in + let actual_bits_used = chunks_needed * Ops.bits_per_chunk in + Pickles_types.Pcs_batch.pow ~one:G.Constant.Scalar.one + ~mul:G.Constant.Scalar.( * ) + G.Constant.Scalar.(of_int 2) + actual_bits_used + |> G.Constant.Scalar.( + ) + (G.Constant.Scalar.project (Field.Constant.unpack s)) + in + G.Constant.scale g x ) + (G.Constant.random (), s) ) + +let tests = + let open Alcotest in + [ ( "Step curve operations" + , [ test_case "scale fast prime" `Quick test_scale_fast_2 + ; test_case "scale fast small" `Quick test_scale_fast_2_small + ] ) + ] diff --git a/src/lib/pickles/test/test_step_verifier.ml b/src/lib/pickles/test/test_step_verifier.ml new file mode 100644 index 00000000000..9661d48ffcc --- /dev/null +++ b/src/lib/pickles/test/test_step_verifier.ml @@ -0,0 +1,51 @@ +(* Testing + ------- + + Component: Pickles + Subject: Test Step_verifier + Invocation: \ + dune exec src/lib/pickles/test/main.exe -- test "Step verifier" +*) + +module Step_main_inputs = Pickles__Step_main_inputs +open Step_main_inputs.Impl + +let run k = + let y = + run_and_check (fun () -> + let y = k () in + fun () -> As_prover.read_var y ) + |> Or_error.ok_exn + in + y + +let test_side_loaded_domains () = + let module O = One_hot_vector.Make (Impl) in + let open Pickles__Side_loaded_verification_key in + let domains = [ { Domains.h = 10 }; { h = 15 } ] in + let pt = Field.Constant.random () in + List.iter domains ~f:(fun ds -> + let d_unchecked = + Plonk_checks.domain + (module Field.Constant) + (Pow_2_roots_of_unity ds.h) ~shifts:Common.tick_shifts + ~domain_generator:Backend.Tick.Field.domain_generator + in + let checked_domain () = + Pickles__Step_verifier.For_tests_only.side_loaded_domain + ~log2_size:(Field.of_int ds.h) + in + let pp ppf cst = + Format.pp_print_string ppf (Field.Constant.to_string cst) + in + (Alcotest.check (Alcotest.testable pp Field.Constant.equal)) + "side loaded domains" + (d_unchecked#vanishing_polynomial pt) + (run (fun () -> + (checked_domain ())#vanishing_polynomial (Field.constant pt) ) ) ) + +let tests = + let open Alcotest in + [ ( "Step verifier" + , [ test_case "side loaded domains" `Quick 
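+(* Added note: [test_side_loaded_domains] compares the checked and
+   unchecked domains only through their vanishing polynomials at one
+   random point, a cheap probabilistic equality check. *)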
test_side_loaded_domains ] )
+  ]
diff --git a/src/lib/pickles/test/test_wrap.ml b/src/lib/pickles/test/test_wrap.ml
new file mode 100644
index 00000000000..2521b558a83
--- /dev/null
+++ b/src/lib/pickles/test/test_wrap.ml
@@ -0,0 +1,338 @@
+(* Testing
+   -------
+
+   Component: Pickles
+   Subject: Test Wrap
+   Invocation: \
+    dune exec src/lib/pickles/test/main.exe -- test "Gate:"
+*)
+open Pickles_types
+module Wrap = Pickles__Wrap
+module Import = Pickles__Import
+
+(* Type to list test configurations, that is, how to interpret feature flags in
+   the tests. *)
+type test_options =
+  { true_is_yes : Plonk_types.Features.options
+  ; true_is_maybe : Plonk_types.Features.options
+  ; all_maybes : Plonk_types.Features.options
+  }
+
+(* Helper function to convert actual feature flags into 3 test configurations
+   of feature flags.
+   @param actual_feature_flags The actual feature flags in terms of true/false
+
+   @return Corresponding feature flag configs composed of Yes/No/Maybe options
+   - one where true is mapped to Yes and false is mapped to No
+   - one where true is mapped to Maybe and false is mapped to No
+   - one where true and false are both mapped to Maybe *)
+let generate_test_feature_flag_configs
+    (actual_feature_flags : Plonk_types.Features.flags) : test_options =
+  (* Set up a helper to convert actual feature flags composed of booleans into
+     feature flags composed of Yes/No/Maybe options.
+     @param actual_feature_flags The actual feature flags in terms of true/false
+     @param true_opt Opt type to use for true/enabled features
+     @param false_opt Opt type to use for false/disabled features
+     @return Corresponding feature flags composed of Yes/No/Maybe values *)
+  let compute_feature_flags (actual_feature_flags : Plonk_types.Features.flags)
+      (true_opt : Opt.Flag.t) (false_opt : Opt.Flag.t) :
+      Plonk_types.Features.options =
+    Plonk_types.Features.map actual_feature_flags ~f:(function
+      | true ->
+          true_opt
+      | false ->
+          false_opt )
+  in
+
+  (* Generate the 3 configurations of the actual feature flags using the
+     helper *)
+  let open Opt.Flag in
+  { true_is_yes = compute_feature_flags actual_feature_flags Yes No
+  ; true_is_maybe = compute_feature_flags actual_feature_flags Maybe No
+  ; all_maybes = compute_feature_flags actual_feature_flags Maybe Maybe
+  }
+
+(* Run the recursive proof tests on the supplied inputs.
+
+   @param actual_feature_flags User-specified feature flags, matching those
+   required by the backend circuit
+   @param feature_flags Feature-flag configuration (Yes/No/Maybe) under test
+   @param public_input list of public inputs (can be empty)
+   @param vk Verifier index for backend circuit
+   @param proof Backend proof
+
+   @return true or throws an exception
+*)
+let run_recursive_proof_test (actual_feature_flags : Plonk_types.Features.flags)
+    (feature_flags : Plonk_types.Features.options)
+    (public_input : Pasta_bindings.Fp.t list)
+    (vk : Kimchi_bindings.Protocol.VerifierIndex.Fp.t)
+    (proof : Backend.Tick.Proof.with_public_evals) : Impls.Step.Boolean.value =
+  (* Constants helper - takes an OCaml value and converts it to a snarky
+     value, where all values here are constant literals. N.b. this should be
+     encapsulated as Snarky internals, but it never got merged. *)
+  let constant (Typ typ : _ Snarky_backendless.Typ.t) x =
+    let xs, aux = typ.value_to_fields x in
+    typ.var_of_fields (Array.map xs ~f:Impls.Step.Field.constant, aux)
+  in
+
+  (* Compute deferred values - in the Pickles recursive proof system, deferred
+     values are values from 2 proofs earlier in the recursion hierarchy. Every recursion
Every recursion + goes through a two-phase process of step and wrap, like so + + step <- wrap <- step <- ... <- wrap <- step, + `<-----------' + deferred + + where there may be multiple children at each level (but let's ignore that!). + Deferred values are values (part of the public input) that must be passed between + the two phases in order to be verified correctly-- it works like this. + + - The wrap proof is passed the deferred values for its step proof as part of its public input. + - The wrap proof starts verifying the step proof. As part of this verification it must + perform all of the group element checks (since it's over the Vesta base field); however, + at this stage it just assumes that the deferred values of its public input are correct + (i.e. it defers checking them). + - The next step proof verifies the wrap proof with a similar process, but using the other + curve (e.g. Pallas). There are two important things to note: + - Since it is using the other curve, it can compute the commitments to the public inputs + of the previous wrap circuit that were passed into it. In other words, the next step + proof receives data from the previous wrap proof about the previous step proof. Yeah, + from two proofs back! (e.g. the deferred values) + - The next step proof also computes the deferred values inside the circuit and verifies + that they match those used by the previous wrap proof. + + The code below generates the deferred values so that we can verifiy that we can actually + compute those values correctly inside the circuit. Special thanks to Matthew Ryan for + explaining this in detail. *) + let { Wrap.For_tests_only.deferred_values + ; x_hat_evals + ; sponge_digest_before_evaluations + } = + Wrap.For_tests_only.deferred_values ~actual_feature_flags ~sgs:[] + ~prev_challenges:[] ~step_vk:vk ~public_input ~proof + ~actual_proofs_verified:Nat.N0.n + in + + let full_features = + Plonk_types.Features.to_full ~or_:Opt.Flag.( ||| ) feature_flags + in + + (* Define Typ.t for Deferred_values.t -- A Type.t defines how to convert a value of some type + in OCaml into a var in circuit/Snarky. + + This complex function is called with two sets of inputs: once for the step circuit and + once for the wrap circuit. It was decided not to use a functor for this. *) + let deferred_values_typ = + let open Impls.Step in + let open Step_main_inputs in + let open Step_verifier in + Import.Types.Wrap.Proof_state.Deferred_values.In_circuit.typ + (module Impls.Step) + ~feature_flags:full_features ~challenge:Challenge.typ + ~scalar_challenge:Challenge.typ + ~dummy_scalar_challenge: + (Kimchi_backend_common.Scalar_challenge.create + Limb_vector.Challenge.Constant.zero ) + (Shifted_value.Type1.typ Field.typ) + (Import.Branch_data.typ + (module Impl) + ~assert_16_bits:(Step_verifier.assert_n_bits ~n:16) ) + in + + (* Use deferred_values_typ and the constant helper to prepare deferred_values + for use in the circuit. We change some [Opt.t] to [Option.t] because that is + what Type.t is configured to accept. *) + let deferred_values = + constant deferred_values_typ + { deferred_values with + plonk = + { deferred_values.plonk with + joint_combiner = + Opt.to_option_unsafe deferred_values.plonk.joint_combiner + } + } + (* Prepare all of the evaluations (i.e. 
all of the columns in the proof that we open)
+     for use in the circuit *)
+  and evals =
+    constant
+      (Plonk_types.All_evals.typ ~num_chunks:1
+         (module Impls.Step)
+         full_features )
+      { evals =
+          { public_input = x_hat_evals; evals = proof.proof.openings.evals }
+      ; ft_eval1 = proof.proof.openings.ft_eval1
+      }
+  in
+
+  (* Run the circuit without generating a proof using run_and_check *)
+  Impls.Step.run_and_check (fun () ->
+      (* Set up the step sponge from the wrap sponge -- we cannot use the same
+         poseidon sponge in both step and wrap because they have different
+         fields.
+
+         In order to continue the Fiat-Shamir heuristic across field
+         boundaries we use the wrap sponge for everything in the wrap proof,
+         squeeze it one final time and expose the squoze value in the public
+         input to the step proof, which absorbs said squoze value into the
+         step sponge. :-) This means the step sponge has absorbed everything
+         from the proof so far by proxy and that is also over the native
+         field! *)
+      let res, _chals =
+        let sponge =
+          let open Step_main_inputs in
+          let sponge = Sponge.create sponge_params in
+          Sponge.absorb sponge
+            (`Field (Impl.Field.constant sponge_digest_before_evaluations)) ;
+          sponge
+        in
+
+        (* Call finalisation with all of the required details *)
+        Step_verifier.finalize_other_proof
+          (module Nat.N0)
+          ~step_domains:
+            (`Known [ { h = Pow_2_roots_of_unity vk.domain.log_size_of_group } ])
+          ~zk_rows:3 ~sponge ~prev_challenges:[] deferred_values evals
+      in
+
+      (* Read the boolean result from the circuit and make it available
+         to the OCaml world. *)
+      Impls.Step.(As_prover.(fun () -> read Boolean.typ res)) )
+  |> Or_error.ok_exn
+
+(* Common srs value for all tests *)
+let srs =
+  Kimchi_bindings.Protocol.SRS.Fp.create (1 lsl Common.Max_degree.step_log2)
+
+type example =
+     Kimchi_bindings.Protocol.SRS.Fp.t
+  -> Kimchi_bindings.Protocol.Index.Fp.t
+     * Pasta_bindings.Fp.t list
+     * ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
+       , Pasta_bindings.Fp.t )
+       Kimchi_types.proof_with_public
+
+module type SETUP = sig
+  val example : example
+
+  (* Feature flags used for backend proof *)
+  val actual_feature_flags : bool Plonk_types.Features.t
+end
+
+(* [Make] is the test functor.
+
+   Given a test setup, compute different test configurations and define 3
+   tests for said configurations. *)
+module Make (S : SETUP) = struct
+  (* Generate the test backend proof using Kimchi for this setup's gate
+     example, obtaining the proof and corresponding prover index.
+
+     Note: we only want to pay the cost of generating this proof once and
+     then reuse it many times for the different recursive proof tests.
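+
+     (Added note: proof generation dominates the cost; the three Alcotest
+     cases below replay this single backend proof under the three
+     feature-flag interpretations produced by
+     [generate_test_feature_flag_configs].)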
*) + let index, public_input, proof = S.example srs + + (* Obtain verifier key from prover index and convert backend proof to + snarky proof *) + let vk = Kimchi_bindings.Protocol.VerifierIndex.Fp.create index + + let proof = Backend.Tick.Proof.of_backend_with_public_evals proof + + let test_feature_flags_configs = + generate_test_feature_flag_configs S.actual_feature_flags + + let runtest feature_flags = + run_recursive_proof_test S.actual_feature_flags feature_flags public_input + vk proof + + let test_true_is_yes () = + assert (runtest test_feature_flags_configs.true_is_yes) + + let test_true_is_maybe () = + assert (runtest test_feature_flags_configs.true_is_maybe) + + let test_all_maybes () = assert (runtest test_feature_flags_configs.all_maybes) + + let tests = + let open Alcotest in + [ test_case "true -> yes" `Quick test_true_is_yes + ; test_case "true -> maybe" `Quick test_true_is_maybe + ; test_case "all maybes" `Quick test_all_maybes + ] +end + +(* Small combinators to lift gate example signatures to the expected + signatures for the tests. This amounts to generating the list of public + inputs from either no public inputs, a single one or a pair of inputs + returned by the gate example. *) + +let without_public_input gate_example srs = + let index, proof = gate_example srs in + (index, [], proof) + +let with_one_public_input gate_example srs = + let index, public_input, proof = gate_example srs in + (index, [ public_input ], proof) + +let with_two_public_inputs gate_example srs = + let index, (public_input1, public_input2), proof = gate_example srs in + (index, [ public_input1; public_input2 ], proof) + +module Lookup = Make (struct + let example = + with_one_public_input Kimchi_bindings.Protocol.Proof.Fp.example_with_lookup + + let actual_feature_flags = + { Plonk_types.Features.none_bool with lookup = true; runtime_tables = true } +end) + +module Range_check = Make (struct + let example = + without_public_input + Kimchi_bindings.Protocol.Proof.Fp.example_with_range_check + + let actual_feature_flags = + { Plonk_types.Features.none_bool with + range_check0 = true + ; range_check1 = true + } +end) + +module Range_check_64 = Make (struct + let example = + without_public_input + Kimchi_bindings.Protocol.Proof.Fp.example_with_range_check0 + + let actual_feature_flags = + { Plonk_types.Features.none_bool with range_check0 = true } +end) + +module Xor = Make (struct + let example = + with_two_public_inputs Kimchi_bindings.Protocol.Proof.Fp.example_with_xor + + let actual_feature_flags = { Plonk_types.Features.none_bool with xor = true } +end) + +module Rot = Make (struct + let example = + with_two_public_inputs Kimchi_bindings.Protocol.Proof.Fp.example_with_rot + + let actual_feature_flags = + { Plonk_types.Features.none_bool with range_check0 = true; rot = true } +end) + +module FFAdd = Make (struct + let example = + with_one_public_input Kimchi_bindings.Protocol.Proof.Fp.example_with_ffadd + + let actual_feature_flags = + { Plonk_types.Features.none_bool with + range_check0 = true + ; range_check1 = true + ; foreign_field_add = true + } +end) + +let tests = + [ ("Gate:Lookup", Lookup.tests) + ; ("Gate:Foreign field addition", FFAdd.tests) + ; ("Gate:Rot", Rot.tests) + ; ("Gate:Xor", Xor.tests) + ; ("Gate:Range check", Range_check.tests) + ; ("Gate:Range_check 64 bits", Range_check_64.tests) + ] diff --git a/src/lib/pickles/test/test_wrap_hack.ml b/src/lib/pickles/test/test_wrap_hack.ml new file mode 100644 index 00000000000..023f951c373 --- /dev/null +++ 
b/src/lib/pickles/test/test_wrap_hack.ml @@ -0,0 +1,55 @@ +(* Testing + ------- + + Component: Pickles + Subject: Test module Wrap_hack + Invocation: \ + dune exec src/lib/pickles/test/main.exe -- test "Wrap hack" +*) + +open Pickles_types +open Backend +module Wrap_main_inputs = Pickles__Wrap_main_inputs +module Wrap_hack = Pickles__Wrap_hack + +(* Check that the pre-absorbing technique works. I.e., that it's consistent with + the actual definition of hash_messages_for_next_wrap_proof. *) +let test_hash_messages_for_next_wrap_proof (type n) (n : n Nat.t) () = + let open Pickles.Impls.Wrap in + let messages_for_next_wrap_proof : + _ Composition_types.Wrap.Proof_state.Messages_for_next_wrap_proof.t = + let g = Wrap_main_inputs.Inner_curve.Constant.random () in + { Composition_types.Wrap.Proof_state.Messages_for_next_wrap_proof + .challenge_polynomial_commitment = g + ; old_bulletproof_challenges = + Vector.init n ~f:(fun _ -> + Vector.init Backend.Tock.Rounds.n ~f:(fun _ -> + Tock.Field.random () ) ) + } + in + Internal_Basic.Test.test_equal ~sexp_of_t:Field.Constant.sexp_of_t + ~equal:Field.Constant.equal + (Composition_types.Wrap.Proof_state.Messages_for_next_wrap_proof.typ + Wrap_main_inputs.Inner_curve.typ + (Vector.typ Field.typ Backend.Tock.Rounds.n) + ~length:n ) + Field.typ + (fun t -> + make_checked (fun () -> + Wrap_hack.Checked.hash_messages_for_next_wrap_proof n t ) ) + (fun t -> + Wrap_hack.Checked.hash_constant_messages_for_next_wrap_proof n t + |> Digest.Constant.to_bits |> Impls.Wrap.Field.Constant.project ) + messages_for_next_wrap_proof + +let tests = + let open Alcotest in + [ ( "Wrap hack" + , [ test_case "hash_messages_for_next_wrap_proof correct 0" `Quick + (test_hash_messages_for_next_wrap_proof Nat.N0.n) + ; test_case "hash_messages_for_next_wrap_proof correct 1" `Quick + (test_hash_messages_for_next_wrap_proof Nat.N1.n) + ; test_case "hash_messages_for_next_wrap_proof correct 2" `Quick + (test_hash_messages_for_next_wrap_proof Nat.N2.n) + ] ) + ] diff --git a/src/lib/pickles/tests/dune b/src/lib/pickles/tests/dune new file mode 100644 index 00000000000..e74c6b1de9e --- /dev/null +++ b/src/lib/pickles/tests/dune @@ -0,0 +1,3 @@ +(tests + (names test_ro) + (libraries alcotest core_kernel pickles)) diff --git a/src/lib/pickles/tests/test_ro.ml b/src/lib/pickles/tests/test_ro.ml new file mode 100644 index 00000000000..1510ec8dafd --- /dev/null +++ b/src/lib/pickles/tests/test_ro.ml @@ -0,0 +1,20 @@ +(* Initially written to check that the output stays consistent across updates of digestif *) +let test_bits_random_oracle_consistency_check () = + let s = "BitsRandomOracle" in + let exp_output_str = + "01000001101110001111111100011000011000100010100011010010110110010011101101101000001110001110100101010100001000001110101110111010" + in + let exp_output = + List.map (fun i -> i = '1') (List.of_seq (String.to_seq exp_output_str)) + in + let output = Pickles.Ro.bits_random_oracle ~length:128 s in + assert (List.for_all2 Bool.equal exp_output output) + +let () = + let open Alcotest in + run "Pickles Random Oracle" + [ ( "bits_random_oracle" + , [ test_case "consistency_check" `Quick + test_bits_random_oracle_consistency_check + ] ) + ] diff --git a/src/lib/pickles/tick_field_sponge.ml b/src/lib/pickles/tick_field_sponge.ml index a0a5216196f..752b71d1418 100644 --- a/src/lib/pickles/tick_field_sponge.ml +++ b/src/lib/pickles/tick_field_sponge.ml @@ -1,10 +1,3 @@ include Make_sponge.Make (Backend.Tick.Field) -let params = - (* HACK *) - Sponge.Params.( - let testbit n i = Bigint.(equal (shift_right
n i land one) one) in - map pasta_p_kimchi ~f:(fun s -> - Backend.Tick.Field.of_bits - (List.init Backend.Tick.Field.size_in_bits - (testbit (Bigint.of_string s)) ) )) +let params = Kimchi_pasta_basic.poseidon_params_fp diff --git a/src/lib/pickles/tick_field_sponge.mli b/src/lib/pickles/tick_field_sponge.mli index 6bdb81932e9..c36bcae635a 100644 --- a/src/lib/pickles/tick_field_sponge.mli +++ b/src/lib/pickles/tick_field_sponge.mli @@ -1,3 +1,8 @@ +(** Implement a Sponge for the field Tick *) + include module type of Make_sponge.Make (Backend.Tick.Field) +(** Parameters for the permutation. They can be generated using the {{ + https://github.com/o1-labs/proof-systems/tree/master/poseidon } SAGE + script} *) val params : Backend.Tick.Field.t Sponge.Params.t diff --git a/src/lib/pickles/timer.mli b/src/lib/pickles/timer.mli index f9afefac5f8..e37dd47c150 100644 --- a/src/lib/pickles/timer.mli +++ b/src/lib/pickles/timer.mli @@ -1,3 +1,6 @@ +(** This module provides functions for time profiling. It must only be used for + debugging. *) + val start : string -> unit val clock : string -> unit diff --git a/src/lib/pickles/tock_field_sponge.ml b/src/lib/pickles/tock_field_sponge.ml index 35d8a3444e8..22b02f96d06 100644 --- a/src/lib/pickles/tock_field_sponge.ml +++ b/src/lib/pickles/tock_field_sponge.ml @@ -1,10 +1,3 @@ include Make_sponge.Make (Backend.Tock.Field) -let params = - (* HACK *) - Sponge.Params.( - let testbit n i = Bigint.(equal (shift_right n i land one) one) in - map pasta_q_kimchi ~f:(fun s -> - Backend.Tock.Field.of_bits - (List.init Backend.Tock.Field.size_in_bits - (testbit (Bigint.of_string s)) ) )) +let params = Kimchi_pasta_basic.poseidon_params_fq diff --git a/src/lib/pickles/tock_field_sponge.mli b/src/lib/pickles/tock_field_sponge.mli index 599e5b8697c..b2847f8e9c3 100644 --- a/src/lib/pickles/tock_field_sponge.mli +++ b/src/lib/pickles/tock_field_sponge.mli @@ -1,3 +1,8 @@ +(** Implement a Sponge for the field Tock *) + include module type of Make_sponge.Make (Backend.Tock.Field) +(** Parameters for the permutation. They can be generated using the {{ + https://github.com/o1-labs/proof-systems/tree/master/poseidon } SAGE + script}. *) val params : Backend.Tock.Field.t Sponge.Params.t diff --git a/src/lib/pickles/type.mli b/src/lib/pickles/type.mli index 39695ea3145..fa97dc8fcdc 100644 --- a/src/lib/pickles/type.mli +++ b/src/lib/pickles/type.mli @@ -1,13 +1,15 @@ type (_, _) t = - | PC : ('g1, < g1 : 'g1 ; .. >) t + | PC : ('g1, < g1 : 'g1 ; .. >) t (** Polynomial commitment *) | Scalar : ('s, < scalar : 's ; .. >) t + (** Scalar, used e.g. for challenges *) | Without_degree_bound : ( 'g1 Pickles_types.Plonk_types.Poly_comm.Without_degree_bound.t , < g1 : 'g1 ; .. > ) - t + t (** Polynomial commitment without degree bound *) | With_degree_bound : ( 'g1_opt Pickles_types.Plonk_types.Poly_comm.With_degree_bound.t , < g1_opt : 'g1_opt ; .. > ) - t - | Field : ('field, < base_field : 'field ; .. >) t + t (** Polynomial commitment with degree bound *) + | Field : ('field, < base_field : 'field ; ..
>) t (** Field element *) | ( :: ) : ('a, 'e) t * ('b, 'e) t -> ('a * 'b, 'e) t + (** Concatenate two elements of this type as a list *) diff --git a/src/lib/pickles/types_map.ml b/src/lib/pickles/types_map.ml index 68ea2e129b1..0edc83927ab 100644 --- a/src/lib/pickles/types_map.ml +++ b/src/lib/pickles/types_map.ml @@ -16,9 +16,11 @@ module Basic = struct ; public_input : ('var, 'value) Impls.Step.Typ.t ; branches : 'n2 Nat.t ; wrap_domains : Domains.t - ; wrap_key : Tick.Inner_curve.Affine.t Plonk_verification_key_evals.t + ; wrap_key : Tick.Inner_curve.Affine.t array Plonk_verification_key_evals.t ; wrap_vk : Impls.Wrap.Verification_key.t - ; feature_flags : Plonk_types.Opt.Flag.t Plonk_types.Features.t + ; feature_flags : Opt.Flag.t Plonk_types.Features.Full.t + ; num_chunks : int + ; zk_rows : int } end @@ -38,8 +40,10 @@ module Side_loaded = struct type ('var, 'value, 'n1, 'n2) t = { max_proofs_verified : (module Nat.Add.Intf with type n = 'n1) ; public_input : ('var, 'value) Impls.Step.Typ.t - ; feature_flags : Plonk_types.Opt.Flag.t Plonk_types.Features.t + ; feature_flags : Opt.Flag.t Plonk_types.Features.Full.t ; branches : 'n2 Nat.t + ; num_chunks : int + ; zk_rows : int } end @@ -53,13 +57,22 @@ module Side_loaded = struct let to_basic { permanent = - { max_proofs_verified; public_input; branches; feature_flags } + { max_proofs_verified + ; public_input + ; branches + ; feature_flags + ; num_chunks + ; zk_rows + } ; ephemeral } = let wrap_key, wrap_vk = match ephemeral with | Some { index = `In_prover i | `In_both (i, _) } -> - (i.wrap_index, i.wrap_vk) + let wrap_index = + Plonk_verification_key_evals.map i.wrap_index ~f:(fun x -> [| x |]) + in + (wrap_index, i.wrap_vk) | _ -> failwithf "Side_loaded.to_basic: Expected `In_prover (%s)" __LOC__ () in @@ -72,19 +85,21 @@ module Side_loaded = struct ; wrap_domains = Common.wrap_domains ~proofs_verified ; wrap_key ; feature_flags + ; num_chunks + ; zk_rows } end module Compiled = struct - type f = Impls.Wrap.field - type ('a_var, 'a_value, 'max_proofs_verified, 'branches) basic = { public_input : ('a_var, 'a_value) Impls.Step.Typ.t ; proofs_verifieds : (int, 'branches) Vector.t (* For each branch in this rule, how many predecessor proofs does it have? *) ; wrap_domains : Domains.t ; step_domains : (Domains.t, 'branches) Vector.t - ; feature_flags : Plonk_types.Opt.Flag.t Plonk_types.Features.t + ; feature_flags : Opt.Flag.t Plonk_types.Features.Full.t + ; num_chunks : int + ; zk_rows : int } (* This is the data associated to an inductive proof system with statement type @@ -97,26 +112,31 @@ module Compiled = struct ; proofs_verifieds : (int, 'branches) Vector.t (* For each branch in this rule, how many predecessor proofs does it have? 
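         For example, a two-branch proof system whose first branch is a base
         case verifying no predecessor proofs and whose second branch
         recursively verifies two would have proofs_verifieds = [0; 2].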
*) ; public_input : ('a_var, 'a_value) Impls.Step.Typ.t - ; wrap_key : Tick.Inner_curve.Affine.t Plonk_verification_key_evals.t Lazy.t + ; wrap_key : + Tick.Inner_curve.Affine.t array Plonk_verification_key_evals.t Lazy.t ; wrap_vk : Impls.Wrap.Verification_key.t Lazy.t ; wrap_domains : Domains.t ; step_domains : (Domains.t, 'branches) Vector.t - ; feature_flags : Plonk_types.Opt.Flag.t Plonk_types.Features.t + ; feature_flags : Opt.Flag.t Plonk_types.Features.Full.t + ; num_chunks : int + ; zk_rows : int } type packed = | T : ('var, 'value, 'n1, 'n2) Tag.id * ('var, 'value, 'n1, 'n2) t -> packed let to_basic - { branches + { branches = _ ; max_proofs_verified - ; proofs_verifieds + ; proofs_verifieds = _ ; public_input ; wrap_vk ; wrap_domains ; step_domains ; wrap_key ; feature_flags + ; num_chunks + ; zk_rows } = { Basic.max_proofs_verified ; wrap_domains @@ -125,6 +145,8 @@ module Compiled = struct ; wrap_key = Lazy.force wrap_key ; wrap_vk = Lazy.force wrap_vk ; feature_flags + ; num_chunks + ; zk_rows } end @@ -136,19 +158,27 @@ module For_step = struct ; proofs_verifieds : [ `Known of (Impls.Step.Field.t, 'branches) Vector.t | `Side_loaded ] ; public_input : ('a_var, 'a_value) Impls.Step.Typ.t - ; wrap_key : inner_curve_var Plonk_verification_key_evals.t + ; wrap_key : inner_curve_var array Plonk_verification_key_evals.t ; wrap_domain : [ `Known of Domain.t | `Side_loaded of Impls.Step.field Pickles_base.Proofs_verified.One_hot.Checked.t ] ; step_domains : [ `Known of (Domains.t, 'branches) Vector.t | `Side_loaded ] - ; feature_flags : Plonk_types.Opt.Flag.t Plonk_types.Features.t + ; feature_flags : Opt.Flag.t Plonk_types.Features.Full.t + ; num_chunks : int + ; zk_rows : int } - let of_side_loaded (type a b c d e f) + let of_side_loaded (type a b c d) ({ ephemeral ; permanent = - { branches; max_proofs_verified; public_input; feature_flags } + { branches + ; max_proofs_verified + ; public_input + ; feature_flags + ; num_chunks + ; zk_rows + } } : (a, b, c, d) Side_loaded.t ) : (a, b, c, d) t = let index = @@ -159,14 +189,19 @@ module For_step = struct failwithf "For_step.side_loaded: Expected `In_circuit (%s)" __LOC__ () in let T = Nat.eq_exn branches Side_loaded_verification_key.Max_branches.n in + let wrap_key = + Plonk_verification_key_evals.map index.wrap_index ~f:(fun x -> [| x |]) + in { branches ; max_proofs_verified ; public_input ; proofs_verifieds = `Side_loaded - ; wrap_key = index.wrap_index + ; wrap_key ; wrap_domain = `Side_loaded index.actual_wrap_domain_size ; step_domains = `Side_loaded ; feature_flags + ; num_chunks + ; zk_rows } let of_compiled @@ -178,6 +213,9 @@ module For_step = struct ; wrap_domains ; step_domains ; feature_flags + ; wrap_vk = _ + ; num_chunks + ; zk_rows } : _ Compiled.t ) = { branches @@ -187,10 +225,12 @@ module For_step = struct ; public_input ; wrap_key = Plonk_verification_key_evals.map (Lazy.force wrap_key) - ~f:Step_main_inputs.Inner_curve.constant + ~f:(Array.map ~f:Step_main_inputs.Inner_curve.constant) ; wrap_domain = `Known wrap_domains.h ; step_domains = `Known step_domains ; feature_flags + ; num_chunks + ; zk_rows } end @@ -252,7 +292,7 @@ let public_input : let feature_flags : type var value. 
- (var, value, _, _) Tag.t -> Plonk_types.Opt.Flag.t Plonk_types.Features.t = + (var, value, _, _) Tag.t -> Opt.Flag.t Plonk_types.Features.Full.t = fun tag -> match tag.kind with | Compiled -> @@ -260,13 +300,21 @@ let feature_flags : | Side_loaded -> (lookup_side_loaded tag.id).permanent.feature_flags -let value_to_field_elements : - type a. (_, a, _, _) Tag.t -> a -> Tick.Field.t array = +let num_chunks : type var value. (var, value, _, _) Tag.t -> int = + fun tag -> + match tag.kind with + | Compiled -> + (lookup_compiled tag.id).num_chunks + | Side_loaded -> + (lookup_side_loaded tag.id).permanent.num_chunks + +let _value_to_field_elements : + type a. (_, a, _, _) Tag.t -> a -> Backend.Tick.Field.t array = fun t -> let (Typ typ) = public_input t in fun x -> fst (typ.value_to_fields x) -let lookup_map (type var value c d) (t : (var, value, c, d) Tag.t) ~self +let _lookup_map (type var value c d) (t : (var, value, c, d) Tag.t) ~self ~default ~(f : [ `Compiled of (var, value, c, d) Compiled.t @@ -295,7 +343,7 @@ let add_side_loaded ~name permanent = tag let set_ephemeral { Tag.kind; id } (eph : Side_loaded.Ephemeral.t) = - (match kind with Side_loaded -> () | _ -> failwith "Expected Side_loaded") ; + assert (match kind with Side_loaded -> true | Compiled -> false) ; Hashtbl.update univ.side_loaded (Type_equal.Id.uid id) ~f:(function | None -> assert false diff --git a/src/lib/pickles/types_map.mli b/src/lib/pickles/types_map.mli index 9e28b9df48b..cd862007eb6 100644 --- a/src/lib/pickles/types_map.mli +++ b/src/lib/pickles/types_map.mli @@ -11,10 +11,12 @@ module Basic : sig ; branches : 'n2 Pickles_types.Nat.t ; wrap_domains : Import.Domains.t ; wrap_key : - Backend.Tick.Inner_curve.Affine.t + Backend.Tick.Inner_curve.Affine.t array Pickles_types.Plonk_verification_key_evals.t ; wrap_vk : Impls.Wrap.Verification_key.t - ; feature_flags : Plonk_types.Opt.Flag.t Plonk_types.Features.t + ; feature_flags : Opt.Flag.t Plonk_types.Features.Full.t + ; num_chunks : int + ; zk_rows : int } end @@ -35,8 +37,10 @@ module Side_loaded : sig { max_proofs_verified : (module Pickles_types.Nat.Add.Intf with type n = 'n1) ; public_input : ('var, 'value) Impls.Step.Typ.t - ; feature_flags : Plonk_types.Opt.Flag.t Plonk_types.Features.t + ; feature_flags : Opt.Flag.t Plonk_types.Features.Full.t ; branches : 'n2 Pickles_types.Nat.t + ; num_chunks : int + ; zk_rows : int } end @@ -58,7 +62,9 @@ module Compiled : sig (* For each branch in this rule, how many predecessor proofs does it have? *) ; wrap_domains : Import.Domains.t ; step_domains : (Import.Domains.t, 'branches) Pickles_types.Vector.t - ; feature_flags : Plonk_types.Opt.Flag.t Plonk_types.Features.t + ; feature_flags : Opt.Flag.t Plonk_types.Features.Full.t + ; num_chunks : int + ; zk_rows : int } type ('a_var, 'a_value, 'max_proofs_verified, 'branches) t = @@ -69,13 +75,15 @@ module Compiled : sig (* For each branch in this rule, how many predecessor proofs does it have? 
*) ; public_input : ('a_var, 'a_value) Impls.Step.Typ.t ; wrap_key : - Backend.Tick.Inner_curve.Affine.t + Backend.Tick.Inner_curve.Affine.t array Pickles_types.Plonk_verification_key_evals.t Lazy.t ; wrap_vk : Impls.Wrap.Verification_key.t Lazy.t ; wrap_domains : Import.Domains.t ; step_domains : (Import.Domains.t, 'branches) Pickles_types.Vector.t - ; feature_flags : Plonk_types.Opt.Flag.t Plonk_types.Features.t + ; feature_flags : Opt.Flag.t Plonk_types.Features.Full.t + ; num_chunks : int + ; zk_rows : int } end @@ -88,7 +96,8 @@ module For_step : sig [ `Known of (Impls.Step.Field.t, 'branches) Pickles_types.Vector.t | `Side_loaded ] ; public_input : ('a_var, 'a_value) Impls.Step.Typ.t - ; wrap_key : inner_curve_var Pickles_types.Plonk_verification_key_evals.t + ; wrap_key : + inner_curve_var array Pickles_types.Plonk_verification_key_evals.t ; wrap_domain : [ `Known of Import.Domain.t | `Side_loaded of @@ -96,7 +105,9 @@ module For_step : sig ; step_domains : [ `Known of (Import.Domains.t, 'branches) Pickles_types.Vector.t | `Side_loaded ] - ; feature_flags : Plonk_types.Opt.Flag.t Plonk_types.Features.t + ; feature_flags : Opt.Flag.t Plonk_types.Features.Full.t + ; num_chunks : int + ; zk_rows : int } val of_side_loaded : ('a, 'b, 'c, 'd) Side_loaded.t -> ('a, 'b, 'c, 'd) t @@ -126,7 +137,9 @@ val max_proofs_verified : ('a, 'b, 'n1, 'c) Tag.t -> (module Pickles_types.Nat.Add.Intf with type n = 'n1) -val feature_flags : _ Tag.t -> Plonk_types.Opt.Flag.t Plonk_types.Features.t +val feature_flags : _ Tag.t -> Opt.Flag.t Plonk_types.Features.Full.t + +val num_chunks : _ Tag.t -> int val add_exn : ('var, 'value, 'c, 'd) Tag.t -> ('var, 'value, 'c, 'd) Compiled.t -> unit diff --git a/src/lib/pickles/unfinalized.ml b/src/lib/pickles/unfinalized.ml index 88640bf362c..83ce3e6ec87 100644 --- a/src/lib/pickles/unfinalized.ml +++ b/src/lib/pickles/unfinalized.ml @@ -60,7 +60,8 @@ module Constant = struct } in let evals = - Plonk_types.Evals.to_in_circuit Dummy.evals_combined.evals.evals + Plonk_types.Evals.to_in_circuit + (Lazy.force Dummy.evals_combined).evals.evals in let env = let module Env_bool = struct @@ -86,11 +87,11 @@ module Constant = struct Plonk_checks.scalars_env (module Env_bool) (module Env_field) - ~srs_length_log2:Common.Max_degree.wrap_log2 + ~srs_length_log2:Common.Max_degree.wrap_log2 ~zk_rows:3 ~endo:Endo.Wrap_inner_curve.base ~mds:Tock_field_sponge.params.mds ~field_of_hex: - (Core_kernel.Fn.compose Tock.Field.of_bigint - Kimchi_pasta.Pasta.Bigint256.of_hex_string ) + (Core_kernel.Fn.compose Tock.Field.of_bigint (fun x -> + Kimchi_pasta.Pasta.Bigint256.of_hex_string x ) ) ~domain: (Plonk_checks.domain (module Tock.Field) @@ -101,14 +102,11 @@ module Constant = struct let plonk = let module Field = struct include Tock.Field - - type nonrec bool = bool end in Plonk_checks.derive_plonk (module Field) ~env ~shift chals evals |> Composition_types.Step.Proof_state.Deferred_values.Plonk.In_circuit .of_wrap - ~assert_none:(fun x -> - assert (Option.is_none (Plonk_types.Opt.to_option x)) ) + ~assert_none:(fun x -> assert (Option.is_none (Opt.to_option x))) ~assert_false:(fun x -> assert (not x)) in { deferred_values = @@ -123,12 +121,11 @@ module Constant = struct } ) end -let typ ~wrap_rounds : (t, Constant.t) Typ.t = +let typ ~wrap_rounds:_ : (t, Constant.t) Typ.t = Types.Step.Proof_state.Per_proof.typ (module Impl) (Shifted_value.typ Other_field.typ) ~assert_16_bits:(Step_verifier.assert_n_bits ~n:16) - ~zero:Common.Lookup_parameters.tick_zero let dummy : unit -> t = Memo.unit 
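    (* [Memo.unit] caches the thunk's result, so the dummy per-proof state is
       computed at most once and shared by all callers. *)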
(fun () -> diff --git a/src/lib/pickles/unix/dune b/src/lib/pickles/unix/dune index b71d708d3df..5cd1e090252 100644 --- a/src/lib/pickles/unix/dune +++ b/src/lib/pickles/unix/dune @@ -1,25 +1,28 @@ (library (name pickles_unix) (public_name pickles.unix) + (flags + (:standard + -w +a-40..42-44 + -warn-error +a) + -open Core_kernel) (inline_tests (flags -verbose -show-counts)) (libraries ;; opam libraries - graphql-async + base + core_kernel graphql - yojson - ppx_inline_test.config sexplib0 - ; result - core_kernel - base + yojson + graphql-async ;; local libraries - pasta_bindings graphql_basic_scalars + kimchi_backend.pasta + kimchi_backend.pasta.basic mina_base pickles pickles.backend - kimchi_backend.pasta - kimchi_backend.pasta.basic + pasta_bindings ) (preprocess (pps ppx_jane ppx_version)) (instrumentation (backend bisect_ppx)) diff --git a/src/lib/pickles/unix/graphql_scalars.ml b/src/lib/pickles/unix/graphql_scalars.ml index 7358a9e4856..7a6a203537d 100644 --- a/src/lib/pickles/unix/graphql_scalars.ml +++ b/src/lib/pickles/unix/graphql_scalars.ml @@ -1,5 +1,4 @@ open Graphql_basic_scalars.Utils -open Graphql_basic_scalars.Testing module Make (Schema : Schema) = struct module type Json_intf = @@ -27,32 +26,4 @@ module Make (Schema : Schema) = struct end include Make (Schema) - -let%test_module "Roundtrip tests" = - ( module struct - include Make (Test_schema) - - let%test_module "VerificationKey" = - ( module struct - module VerificationKey_gen = struct - include Pickles.Side_loaded.Verification_key - - let gen = Core_kernel.Quickcheck.Generator.return dummy - end - - include Make_test (VerificationKey) (VerificationKey_gen) - end ) - - let%test_module "VerificationKeyHash" = - ( module struct - module VerificationKeyHash_gen = struct - include Pickles.Backend.Tick.Field - - let gen = - Core_kernel.Int.quickcheck_generator - |> Core_kernel.Quickcheck.Generator.map ~f:Pasta_bindings.Fp.of_int - end - - include Make_test (VerificationKeyHash) (VerificationKeyHash_gen) - end ) - end ) +module For_tests_only = Make (Test_schema) diff --git a/src/lib/pickles/unix/graphql_scalars.mli b/src/lib/pickles/unix/graphql_scalars.mli index 64b20f7b195..1a4bae7f7d8 100644 --- a/src/lib/pickles/unix/graphql_scalars.mli +++ b/src/lib/pickles/unix/graphql_scalars.mli @@ -4,3 +4,25 @@ module VerificationKey : module VerificationKeyHash : Graphql_basic_scalars.Json_intf with type t = Pickles.Backend.Tick.Field.t + +module For_tests_only : sig + module VerificationKey : sig + type t = VerificationKey.t + + val parse : Yojson.Basic.t -> t + + val serialize : t -> Yojson.Basic.t + + val typ : unit -> ('a, t option) Graphql.Schema.typ + end + + module VerificationKeyHash : sig + type t = VerificationKeyHash.t + + val parse : Yojson.Basic.t -> t + + val serialize : t -> Yojson.Basic.t + + val typ : unit -> ('a, t option) Graphql.Schema.typ + end +end diff --git a/src/lib/pickles/unix/test/dune b/src/lib/pickles/unix/test/dune new file mode 100644 index 00000000000..e67e5c654b9 --- /dev/null +++ b/src/lib/pickles/unix/test/dune @@ -0,0 +1,23 @@ +(tests + (names test_graphql_scalars) + (flags + (:standard -warn-error +a) + -open Core_kernel + -open Pickles_unix + ) + (package pickles) + (preprocess (pps ppx_jane)) + (libraries + ; opam libraries + alcotest + core_kernel + sexplib0 + ; local libraries + kimchi_backend.pasta + kimchi_backend.pasta.basic + graphql_basic_scalars + pasta_bindings + pickles + pickles.backend + pickles.unix) + (action (run %{test}))) diff --git 
a/src/lib/pickles/unix/test/test_graphql_scalars.ml b/src/lib/pickles/unix/test/test_graphql_scalars.ml new file mode 100644 index 00000000000..ebc525aae58 --- /dev/null +++ b/src/lib/pickles/unix/test/test_graphql_scalars.ml @@ -0,0 +1,41 @@ +(* Testing + ------- + + Component: Pickles / Unix + Subject: Test Graphql scalars + Invocation: dune exec src/lib/pickles/unix/test/test_graphql_scalars.exe +*) + +include Graphql_scalars.For_tests_only +module Make_test = Graphql_basic_scalars.Testing.Produce_test + +module VerificationKey_gen = struct + include Pickles.Side_loaded.Verification_key + + let gen = Core_kernel.Quickcheck.Generator.return dummy +end + +module Vk = Make_test (VerificationKey) (VerificationKey_gen) + +let test_vk () = Vk.test_query () + +module VerificationKeyHash_gen = struct + include Pickles.Backend.Tick.Field + + let gen = + Core_kernel.Int.quickcheck_generator + |> Core_kernel.Quickcheck.Generator.map ~f:Pasta_bindings.Fp.of_int +end + +module Vk_hash = Make_test (VerificationKeyHash) (VerificationKeyHash_gen) + +let test_vk_hash () = Vk_hash.test_query () + +let () = + let open Alcotest in + run "Pickles unix" + [ ( "Graphql scalars" + , [ test_case "verification key query" `Quick test_vk + ; test_case "verification key hash query" `Quick test_vk_hash + ] ) + ] diff --git a/src/lib/pickles/util.ml b/src/lib/pickles/util.ml index 5c4c28fa3a1..40b8022919b 100644 --- a/src/lib/pickles/util.ml +++ b/src/lib/pickles/util.ml @@ -14,22 +14,27 @@ let rec absorb : -> unit = fun ~absorb_field ~absorb_scalar ~g1_to_field_elements ~mask_g1_opt ty t -> match ty with - | PC -> + | Type.PC -> List.iter ~f:absorb_field (g1_to_field_elements t) - | Field -> + | Type.Field -> absorb_field t - | Scalar -> + | Type.Scalar -> absorb_scalar t - | Without_degree_bound -> + | Type.Without_degree_bound -> Array.iter ~f:(Fn.compose (List.iter ~f:absorb_field) g1_to_field_elements) t - | With_degree_bound -> - Array.iter t.unshifted ~f:(fun t -> - absorb ~absorb_field ~absorb_scalar ~g1_to_field_elements ~mask_g1_opt - PC (mask_g1_opt t) ) ; - absorb ~absorb_field ~absorb_scalar ~g1_to_field_elements ~mask_g1_opt PC - (mask_g1_opt t.shifted) + | Type.With_degree_bound -> + let Pickles_types.Plonk_types.Poly_comm.With_degree_bound. + { unshifted; shifted } = + t + in + let absorb x = + absorb ~absorb_field ~absorb_scalar ~g1_to_field_elements ~mask_g1_opt + Type.PC (mask_g1_opt x) + in + Array.iter unshifted ~f:absorb ; + absorb shifted | ty1 :: ty2 -> let absorb t = absorb t ~absorb_field ~absorb_scalar ~g1_to_field_elements ~mask_g1_opt @@ -51,14 +56,14 @@ let ones_vector : let rec go : type m. 
Boolean.var -> int -> m Nat.t -> (Boolean.var, m) Vector.t = fun value i m -> - match m with - | Z -> - [] - | S m -> + match[@warning "-45"] m with + | Pickles_types.Nat.Z -> + Pickles_types.Vector.[] + | Pickles_types.Nat.S m -> let value = Boolean.(value && not (Field.equal first_zero (Field.of_int i))) in - value :: go value (i + 1) m + Pickles_types.Vector.(value :: go value (i + 1) m) in go Boolean.true_ 0 n diff --git a/src/lib/pickles/verification_key.ml b/src/lib/pickles/verification_key.ml index b4dda6423fb..af1cab84c57 100644 --- a/src/lib/pickles/verification_key.ml +++ b/src/lib/pickles/verification_key.ml @@ -65,6 +65,12 @@ module Verifier_index_json = struct ; mul_comm : 'polyComm ; emul_comm : 'polyComm ; endomul_scalar_comm : 'polyComm + ; xor_comm : 'polyComm option + ; range_check0_comm : 'polyComm option + ; range_check1_comm : 'polyComm option + ; foreign_field_add_comm : 'polyComm option + ; foreign_field_mul_comm : 'polyComm option + ; rot_comm : 'polyComm option } [@@deriving yojson] @@ -78,6 +84,7 @@ module Verifier_index_json = struct ; evals : 'polyComm verification_evals ; shifts : 'fr array ; lookup_index : 'polyComm Lookup.t option + ; zk_rows : int } [@@deriving yojson] @@ -94,6 +101,11 @@ module Verifier_index_json = struct verifier_index_to_yojson fp (fun _ -> `Null) (polycomm_to_yojson (or_infinity_to_yojson fq)) + + let of_yojson fp fq = + verifier_index_of_yojson fp + (fun _ -> Ok (Backend.Tock.Keypair.load_urs ())) + (polycomm_of_yojson (or_infinity_of_yojson fq)) end module Data = struct @@ -133,10 +145,13 @@ module Stable = struct (Impls.Wrap.Verification_key.t [@to_yojson Verifier_index_json.to_yojson Backend.Tock.Field.to_yojson - Backend.Tick.Field.to_yojson] ) + Backend.Tick.Field.to_yojson] + [@of_yojson + Verifier_index_json.of_yojson Backend.Tock.Field.of_yojson + Backend.Tick.Field.of_yojson] ) ; data : Data.t } - [@@deriving fields, to_yojson] + [@@deriving fields, to_yojson, of_yojson] let to_latest = Fn.id @@ -144,7 +159,9 @@ module Stable = struct let t : Impls.Wrap.Verification_key.t = let log2_size = Int.ceil_log2 d.constraints in let public = - let (T (input, conv, _conv_inv)) = Impls.Wrap.input () in + let (T (input, _conv, _conv_inv)) = + Impls.Wrap.input ~feature_flags:Plonk_types.Features.Full.maybe () + in let (Typ typ) = input in typ.size_in_field_elements in @@ -171,9 +188,16 @@ module Stable = struct ; emul_comm = g c.emul_comm ; complete_add_comm = g c.complete_add_comm ; endomul_scalar_comm = g c.endomul_scalar_comm + ; xor_comm = None + ; range_check0_comm = None + ; range_check1_comm = None + ; foreign_field_add_comm = None + ; foreign_field_mul_comm = None + ; rot_comm = None } ) ; shifts = Common.tock_shifts ~log2_size ; lookup_index = None + ; zk_rows = 3 } in { commitments = c; data = d; index = t } @@ -194,6 +218,8 @@ end] let to_yojson = Stable.Latest.to_yojson +let of_yojson = Stable.Latest.of_yojson + let dummy_commitments g = let open Plonk_types in { Plonk_verification_key_evals.sigma_comm = @@ -207,6 +233,32 @@ let dummy_commitments g = ; endomul_scalar_comm = g } +let dummy_step_commitments g = + let open Plonk_types in + { Plonk_verification_key_evals.Step.sigma_comm = + Vector.init Permuts.n ~f:(fun _ -> g) + ; coefficients_comm = Vector.init Columns.n ~f:(fun _ -> g) + ; generic_comm = g + ; psm_comm = g + ; complete_add_comm = g + ; mul_comm = g + ; emul_comm = g + ; endomul_scalar_comm = g + ; xor_comm = None + ; range_check0_comm = None + ; range_check1_comm = None + ; foreign_field_add_comm = None + ; 
foreign_field_mul_comm = None + ; rot_comm = None + ; lookup_table_comm = Vector.init Lookup_sorted_minus_1.n ~f:(fun _ -> None) + ; lookup_table_ids = None + ; runtime_tables_selector = None + ; lookup_selector_lookup = None + ; lookup_selector_xor = None + ; lookup_selector_range_check = None + ; lookup_selector_ffmul = None + } + let dummy = lazy (let rows = Domain.size (Common.wrap_domains ~proofs_verified:2).h in diff --git a/src/lib/pickles/verification_key.mli b/src/lib/pickles/verification_key.mli index 13ac615fe33..a5d8d6844f8 100644 --- a/src/lib/pickles/verification_key.mli +++ b/src/lib/pickles/verification_key.mli @@ -19,7 +19,7 @@ module Stable : sig ; index : Impls.Wrap.Verification_key.t ; data : Data.t } - [@@deriving fields, to_yojson, bin_shape, bin_io] + [@@deriving fields, to_yojson, of_yojson, bin_shape, bin_io] include Pickles_types.Sigs.VERSIONED end @@ -33,8 +33,11 @@ type t = Stable.Latest.t = ; index : Impls.Wrap.Verification_key.t ; data : Data.t } -[@@deriving fields, to_yojson] +[@@deriving fields, to_yojson, of_yojson] val dummy_commitments : 'a -> 'a Pickles_types.Plonk_verification_key_evals.t +val dummy_step_commitments : + 'a -> ('a, 'a option) Pickles_types.Plonk_verification_key_evals.Step.t + val dummy : Stable.Latest.t lazy_t diff --git a/src/lib/pickles/verify.ml b/src/lib/pickles/verify.ml index 678915e3882..7905dbeae4d 100644 --- a/src/lib/pickles/verify.ml +++ b/src/lib/pickles/verify.ml @@ -1,17 +1,16 @@ module SC = Scalar_challenge -open Core_kernel -open Async_kernel open Pickles_types open Common open Import -open Backend -open Tuple_lib module Instance = struct + type chunking_data = { num_chunks : int; domain_size : int; zk_rows : int } + type t = | T : (module Nat.Intf with type n = 'n) * (module Intf.Statement_value with type t = 'a) + * chunking_data option * Verification_key.t * 'a * ('n, 'n) Proof.t @@ -19,7 +18,6 @@ module Instance = struct end let verify_heterogenous (ts : Instance.t list) = - let module Plonk = Types.Wrap.Proof_state.Deferred_values.Plonk in let module Tick_field = Backend.Tick.Field in let logger = Internal_tracing_context_logger.get () in [%log internal] "Verify_heterogenous" @@ -40,12 +38,13 @@ let verify_heterogenous (ts : Instance.t list) = ((fun (lab, b) -> if not b then r := lab :: !r), result) in [%log internal] "Compute_plonks_and_chals" ; - let computed_bp_chals, deferred_values = + let _computed_bp_chals, deferred_values = List.map ts ~f:(fun (T ( _max_proofs_verified , _statement + , chunking_data , key , _app_state , T @@ -55,25 +54,34 @@ let verify_heterogenous (ts : Instance.t list) = { old_bulletproof_challenges; _ } } ; prev_evals = evals + ; proof = _ } ) ) -> Timer.start __LOC__ ; - let non_chunking = + let non_chunking, expected_num_chunks = + let expected_num_chunks = + Option.value_map ~default:1 chunking_data ~f:(fun x -> + x.Instance.num_chunks ) + in let exception Is_chunked in match Pickles_types.Plonk_types.Evals.map evals.evals.evals ~f:(fun (x, y) -> - if Array.length x > 1 || Array.length y > 1 then - raise Is_chunked ) + if + Array.length x > expected_num_chunks + || Array.length y > expected_num_chunks + then raise Is_chunked ) with | exception Is_chunked -> - false + (false, expected_num_chunks) | _unit_evals -> (* we do not care about _unit_evals, if we reached this point, we know all evals have length 1 for they cannot have length 0 *) - true + (true, expected_num_chunks) in - check (lazy "only uses single chunks", non_chunking) ; + check + ( lazy (sprintf "only uses %i chunks" 
expected_num_chunks) + , non_chunking ) ; check ( lazy "feature flags are consistent with evaluations" , Pickles_types.Plonk_types.Evals.validate_feature_flags @@ -84,15 +92,22 @@ let verify_heterogenous (ts : Instance.t list) = let step_domain = Branch_data.domain proof_state.deferred_values.branch_data in + let expected_domain_size = + match chunking_data with + | None -> + Nat.to_int Backend.Tick.Rounds.n + | Some { domain_size; _ } -> + domain_size + in check ( lazy "domain size is small enough" - , Domain.log2_size step_domain <= Nat.to_int Backend.Tick.Rounds.n ) ; + , Domain.log2_size step_domain <= expected_domain_size ) ; let sc = SC.to_field_constant tick_field ~endo:Endo.Wrap_inner_curve.scalar in Timer.clock __LOC__ ; let { Deferred_values.Minimal.plonk = _ - ; branch_data + ; branch_data = _ ; bulletproof_challenges } = Deferred_values.Minimal.map_challenges @@ -101,7 +116,11 @@ let verify_heterogenous (ts : Instance.t list) = in Timer.clock __LOC__ ; let deferred_values = - Wrap_deferred_values.expand_deferred ~evals + let zk_rows = + Option.value_map ~default:3 chunking_data ~f:(fun x -> + x.Instance.zk_rows ) + in + Wrap_deferred_values.expand_deferred ~evals ~zk_rows ~old_bulletproof_challenges ~proof_state in Timer.clock __LOC__ ; @@ -134,7 +153,7 @@ let verify_heterogenous (ts : Instance.t list) = [%log internal] "Accumulator_check" ; let%bind accumulator_check = Ipa.Step.accumulator_check - (List.map ts ~f:(fun (T (_, _, _, _, T t)) -> + (List.map ts ~f:(fun (T (_, _, _, _, _, T t)) -> ( t.statement.proof_state.messages_for_next_wrap_proof .challenge_polynomial_commitment , Ipa.Step.compute_challenges @@ -151,6 +170,7 @@ let verify_heterogenous (ts : Instance.t list) = (T ( (module Max_proofs_verified) , (module A_value) + , _chunking_data , key , app_state , T t ) ) @@ -161,7 +181,10 @@ let verify_heterogenous (ts : Instance.t list) = Common.hash_messages_for_next_step_proof ~app_state:A_value.to_field_elements (Reduced_messages_for_next_proof_over_same_field.Step.prepare - ~dlog_plonk_index:key.commitments + ~dlog_plonk_index: + (Plonk_verification_key_evals.map + ~f:(fun x -> [| x |]) + key.commitments ) { t.statement.messages_for_next_step_proof with app_state } ) ; proof_state = { deferred_values = @@ -186,7 +209,8 @@ let verify_heterogenous (ts : Instance.t list) = } in let input = - tock_unpadded_public_input_of_statement prepared_statement + tock_unpadded_public_input_of_statement + ~feature_flags:Plonk_types.Features.Full.maybe prepared_statement in let message = Wrap_hack.pad_accumulator @@ -203,7 +227,12 @@ let verify_heterogenous (ts : Instance.t list) = t.statement.proof_state.messages_for_next_wrap_proof .old_bulletproof_challenges ) in - (key.index, Wrap_wire_proof.to_kimchi_proof t.proof, input, Some message) ) + ( key.index + , { proof = Wrap_wire_proof.to_kimchi_proof t.proof + ; public_evals = None + } + , input + , Some message ) ) in [%log internal] "Compute_batch_verify_inputs_done" ; [%log internal] "Dlog_check_batch_verify" ; @@ -212,10 +241,11 @@ let verify_heterogenous (ts : Instance.t list) = Common.time "dlog_check" (fun () -> check (lazy "dlog_check", dlog_check)) ; result () -let verify (type a return_typ n) +let verify (type a n) ?chunking_data (max_proofs_verified : (module Nat.Intf with type n = n)) (a_value : (module Intf.Statement_value with type t = a)) (key : Verification_key.t) (ts : (a * (n, n) Proof.t) list) = verify_heterogenous (List.map ts ~f:(fun (x, p) -> - Instance.T (max_proofs_verified, a_value, key, x, p) ) ) + Instance.T 
(max_proofs_verified, a_value, chunking_data, key, x, p) ) + ) diff --git a/src/lib/pickles/verify.mli b/src/lib/pickles/verify.mli index bc90518b955..bf9fd17a973 100644 --- a/src/lib/pickles/verify.mli +++ b/src/lib/pickles/verify.mli @@ -1,10 +1,13 @@ open Core_kernel module Instance : sig + type chunking_data = { num_chunks : int; domain_size : int; zk_rows : int } + type t = | T : (module Pickles_types.Nat.Intf with type n = 'n) * (module Intf.Statement_value with type t = 'a) + * chunking_data option * Verification_key.t * 'a * ('n, 'n) Proof.t @@ -12,7 +15,8 @@ module Instance : sig end val verify : - (module Pickles_types.Nat.Intf with type n = 'n) + ?chunking_data:Instance.chunking_data + -> (module Pickles_types.Nat.Intf with type n = 'n) -> (module Intf.Statement_value with type t = 'a) -> Verification_key.t -> ('a * ('n, 'n) Proof.t) list diff --git a/src/lib/pickles/wrap.ml b/src/lib/pickles/wrap.ml index e763e610d2a..75750c17c8e 100644 --- a/src/lib/pickles/wrap.ml +++ b/src/lib/pickles/wrap.ml @@ -2,10 +2,7 @@ module SC = Scalar_challenge module P = Proof open Pickles_types open Hlist -open Tuple_lib open Common -open Core_kernel -open Async_kernel open Import open Types open Backend @@ -13,8 +10,7 @@ open Backend (* This contains the "wrap" prover *) let challenge_polynomial = - let open Backend.Tick.Field in - Wrap_verifier.challenge_polynomial ~add ~mul ~one + Wrap_verifier.challenge_polynomial (module Backend.Tick.Field) module Type1 = Plonk_checks.Make @@ -25,7 +21,7 @@ module Type1 = let index_terms = Plonk_checks.Scalars.Tick.index_terms end) -let vector_of_list (type a t) +let _vector_of_list (type a t) (module V : Snarky_intf.Vector.S with type elt = a and type t = t) (xs : a list) : t = let r = V.create () in @@ -37,7 +33,7 @@ let tick_rounds = Nat.to_int Tick.Rounds.n let combined_inner_product (type actual_proofs_verified) ~env ~domain ~ft_eval1 ~actual_proofs_verified: (module AB : Nat.Add.Intf with type n = actual_proofs_verified) - (e : _ Plonk_types.All_evals.With_public_input.t) + (e : (_ array * _ array, _) Plonk_types.All_evals.With_public_input.t) ~(old_bulletproof_challenges : (_, actual_proofs_verified) Vector.t) ~r ~plonk ~xi ~zeta ~zetaw = let combined_evals = @@ -65,7 +61,7 @@ let combined_inner_product (type actual_proofs_verified) ~env ~domain ~ft_eval1 let v : Tick.Field.t array list = List.append (List.map (Vector.to_list challenge_polys) ~f:(fun f -> [| f pt |])) - ([| f e.public_input |] :: [| ft |] :: a) + (f e.public_input :: [| ft |] :: a) in let open Tick.Field in Pcs_batch.combine_split_evaluations ~xi ~init:Fn.id @@ -76,553 +72,218 @@ let combined_inner_product (type actual_proofs_verified) ~env ~domain ~ft_eval1 combine ~which_eval:`Fst ~ft:ft_eval0 zeta + (r * combine ~which_eval:`Snd ~ft:ft_eval1 zetaw) -module Deferred_values = Types.Wrap.Proof_state.Deferred_values - -type scalar_challenge_constant = Challenge.Constant.t Scalar_challenge.t - -type deferred_values_and_hints = - { x_hat_evals : Backend.Tick.Field.t * Backend.Tick.Field.t - ; sponge_digest_before_evaluations : Tick.Field.t - ; deferred_values : - ( ( Challenge.Constant.t - , scalar_challenge_constant - , Backend.Tick.Field.t Pickles_types.Shifted_value.Type1.t - , (Tick.Field.t Shifted_value.Type1.t, bool) Opt.t - , ( scalar_challenge_constant Deferred_values.Plonk.In_circuit.Lookup.t +module For_tests_only = struct + type shifted_tick_field = + Backend.Tick.Field.t Pickles_types.Shifted_value.Type1.t + + type scalar_challenge_constant = + Import.Challenge.Constant.t 
Import.Scalar_challenge.t + + type deferred_values_and_hints = + { x_hat_evals : Backend.Tick.Field.t array * Backend.Tick.Field.t array + ; sponge_digest_before_evaluations : Backend.Tick.Field.t + ; deferred_values : + ( ( Import.Challenge.Constant.t + , scalar_challenge_constant + , shifted_tick_field + , (shifted_tick_field, bool) Opt.t + , (scalar_challenge_constant, bool) Opt.t , bool ) - Opt.t - , bool ) - Deferred_values.Plonk.In_circuit.t - , scalar_challenge_constant - , Tick.Field.t Shifted_value.Type1.t - , Challenge.Constant.t Scalar_challenge.t Bulletproof_challenge.t - Step_bp_vec.t - , Branch_data.t ) - Deferred_values.t - } - -let deferred_values (type n) ~(sgs : (Backend.Tick.Curve.Affine.t, n) Vector.t) - ~actual_feature_flags - ~(prev_challenges : ((Backend.Tick.Field.t, _) Vector.t, n) Vector.t) - ~(step_vk : Kimchi_bindings.Protocol.VerifierIndex.Fp.t) - ~(public_input : Backend.Tick.Field.t list) ~(proof : Backend.Tick.Proof.t) - ~(actual_proofs_verified : n Nat.t) : deferred_values_and_hints = - let module O = Tick.Oracles in - let o = - O.create step_vk - Vector.( - map2 sgs prev_challenges ~f:(fun commitment cs -> - { Tick.Proof.Challenge_polynomial.commitment - ; challenges = Vector.to_array cs - } ) - |> to_list) - public_input proof - in - let x_hat = O.(p_eval_1 o, p_eval_2 o) in - let scalar_chal f = - Scalar_challenge.map ~f:Challenge.Constant.of_tick_field (f o) - in - let plonk0 = - { Types.Wrap.Proof_state.Deferred_values.Plonk.Minimal.alpha = - scalar_chal O.alpha - ; beta = O.beta o - ; gamma = O.gamma o - ; zeta = scalar_chal O.zeta - ; joint_combiner = - (* TODO: Needs to be changed when lookups are fully implemented *) - Option.map (O.joint_combiner_chal o) - ~f:(Scalar_challenge.map ~f:Challenge.Constant.of_tick_field) - ; feature_flags = actual_feature_flags - } - in - let r = scalar_chal O.u in - let xi = scalar_chal O.v in - let module As_field = struct - let to_field = - SC.to_field_constant - (module Tick.Field) - ~endo:Endo.Wrap_inner_curve.scalar - - let r = to_field r - - let xi = to_field xi - - let zeta = to_field plonk0.zeta - - let alpha = to_field plonk0.alpha - - let joint_combiner = Option.map ~f:to_field plonk0.joint_combiner - end in - let domain = Domain.Pow_2_roots_of_unity step_vk.domain.log_size_of_group in - let zetaw = Tick.Field.mul As_field.zeta step_vk.domain.group_gen in - let tick_plonk_minimal = - { plonk0 with - zeta = As_field.zeta - ; alpha = As_field.alpha - ; joint_combiner = As_field.joint_combiner + Import.Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.t + , scalar_challenge_constant + , shifted_tick_field + , scalar_challenge_constant Import.Bulletproof_challenge.t + Import.Types.Step_bp_vec.t + , Import.Branch_data.t ) + Import.Types.Wrap.Proof_state.Deferred_values.t } - in - let tick_combined_evals = - Plonk_checks.evals_of_split_evals - (module Tick.Field) - proof.openings.evals ~rounds:(Nat.to_int Tick.Rounds.n) - ~zeta:As_field.zeta ~zetaw - |> Plonk_types.Evals.to_in_circuit - in - let tick_domain = - Plonk_checks.domain - (module Tick.Field) - domain ~shifts:Common.tick_shifts - ~domain_generator:Backend.Tick.Field.domain_generator - in - let tick_env = - let module Env_bool = struct - type t = bool - - let true_ = true - - let false_ = false - - let ( &&& ) = ( && ) - let ( ||| ) = ( || ) - - let any = List.exists ~f:Fn.id - end in - let module Env_field = struct - include Tick.Field - - type bool = Env_bool.t - - let if_ (b : bool) ~then_ ~else_ = if b then then_ () else else_ () - end in - 
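      (* Build the environment in which the PLONK scalars are evaluated; it is
         shared by [derive_plonk] and [combined_inner_product] below. Note
         that [zk_rows] comes from the step verifier index. *)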
Plonk_checks.scalars_env - (module Env_bool) - (module Env_field) - ~endo:Endo.Step_inner_curve.base ~mds:Tick_field_sponge.params.mds - ~srs_length_log2:Common.Max_degree.step_log2 - ~field_of_hex:(fun s -> - Kimchi_pasta.Pasta.Bigint256.of_hex_string s - |> Kimchi_pasta.Pasta.Fp.of_bigint ) - ~domain:tick_domain tick_plonk_minimal tick_combined_evals - in - let plonk = - let module Field = struct - include Tick.Field - - type nonrec bool = bool - end in - Type1.derive_plonk - (module Field) - ~shift:Shifts.tick1 ~env:tick_env tick_plonk_minimal tick_combined_evals - and new_bulletproof_challenges, b = - let prechals = - Array.map (O.opening_prechallenges o) ~f:(fun x -> - Scalar_challenge.map ~f:Challenge.Constant.of_tick_field x ) + let deferred_values (type n) + ~(sgs : (Backend.Tick.Curve.Affine.t, n) Vector.t) ~actual_feature_flags + ~(prev_challenges : ((Backend.Tick.Field.t, _) Vector.t, n) Vector.t) + ~(step_vk : Kimchi_bindings.Protocol.VerifierIndex.Fp.t) + ~(public_input : Backend.Tick.Field.t list) + ~(proof : Backend.Tick.Proof.with_public_evals) + ~(actual_proofs_verified : n Nat.t) : deferred_values_and_hints = + let module O = Tick.Oracles in + let o = + O.create_with_public_evals step_vk + Vector.( + map2 sgs prev_challenges ~f:(fun commitment cs -> + { Tick.Proof.Challenge_polynomial.commitment + ; challenges = Vector.to_array cs + } ) + |> to_list) + public_input proof in - let chals = Array.map prechals ~f:(fun x -> Ipa.Step.compute_challenge x) in - let challenge_poly = unstage (challenge_polynomial chals) in - let open As_field in - let b = - let open Tick.Field in - challenge_poly zeta + (r * challenge_poly zetaw) + let x_hat = + match proof.public_evals with + | Some x -> + x + | None -> + O.([| p_eval_1 o |], [| p_eval_2 o |]) in - let prechals = Array.map prechals ~f:Bulletproof_challenge.unpack in - (prechals, b) - in - let shift_value = - Shifted_value.Type1.of_field (module Tick.Field) ~shift:Shifts.tick1 - and chal = Challenge.Constant.of_tick_field in - { deferred_values = - { Types.Wrap.Proof_state.Deferred_values.xi - ; b = shift_value b - ; bulletproof_challenges = - Vector.of_array_and_length_exn new_bulletproof_challenges - Tick.Rounds.n - ; combined_inner_product = - shift_value - As_field.( - combined_inner_product (* Note: We do not pad here. 
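 Padding against the maximum number of proofs
                 verified happens at verification time instead; cf.
                 [Wrap_hack.pad_accumulator] in verify.ml in this same patch.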
*) - ~actual_proofs_verified:(Nat.Add.create actual_proofs_verified) - { evals = proof.openings.evals; public_input = x_hat } - ~r ~xi ~zeta ~zetaw ~old_bulletproof_challenges:prev_challenges - ~env:tick_env ~domain:tick_domain - ~ft_eval1:proof.openings.ft_eval1 ~plonk:tick_plonk_minimal) - ; branch_data = - { proofs_verified = - ( match actual_proofs_verified with - | Z -> - Branch_data.Proofs_verified.N0 - | S Z -> - N1 - | S (S Z) -> - N2 - | _ -> - assert false ) - ; domain_log2 = - Branch_data.Domain_log2.of_int_exn - step_vk.domain.log_size_of_group - } - ; plonk = - { plonk with - zeta = plonk0.zeta - ; alpha = plonk0.alpha - ; beta = chal plonk0.beta - ; gamma = chal plonk0.gamma - ; lookup = - Opt.map plonk.lookup ~f:(fun l -> - { Composition_types.Wrap.Proof_state.Deferred_values.Plonk - .In_circuit - .Lookup - .joint_combiner = Option.value_exn plonk0.joint_combiner - } ) - } + let scalar_chal f = + Scalar_challenge.map ~f:Challenge.Constant.of_tick_field (f o) + in + let plonk0 = + { Types.Wrap.Proof_state.Deferred_values.Plonk.Minimal.alpha = + scalar_chal O.alpha + ; beta = O.beta o + ; gamma = O.gamma o + ; zeta = scalar_chal O.zeta + ; joint_combiner = + (* TODO: Needs to be changed when lookups are fully implemented *) + Option.map (O.joint_combiner_chal o) + ~f:(Scalar_challenge.map ~f:Challenge.Constant.of_tick_field) + ; feature_flags = actual_feature_flags } - ; x_hat_evals = x_hat - ; sponge_digest_before_evaluations = O.digest_before_evaluations o - } + in + let r = scalar_chal O.u in + let xi = scalar_chal O.v in + let module As_field = struct + let to_field = + SC.to_field_constant + (module Tick.Field) + ~endo:Endo.Wrap_inner_curve.scalar -(* Testing - ------- + let r = to_field r - Invocation: - dune exec src/lib/pickles/.pickles.inline-tests/inline_test_runner_pickles.exe \ - --profile=dev --display short -- inline-test-runner pickles -only-test wrap.ml -*) -let%test_module "gate finalization" = - ( module struct - type test_options = - { true_is_yes : Plonk_types.Features.options - ; true_is_maybe : Plonk_types.Features.options - ; all_maybes : Plonk_types.Features.options - } + let xi = to_field xi - (* Helper function to convert actual feature flags into 3 test configurations of feature flags - @param actual_feature_flags The actual feature flags in terms of true/false + let zeta = to_field plonk0.zeta - @return Corresponding feature flags configs composed of Yes/No/Maybe options - - one where true is mapped to Yes and false is mapped to No - - one where true is mapped to Maybe and false is mapped to No - - one where true and false are both mapped to Maybe *) - let generate_test_feature_flag_configs - (actual_feature_flags : Plonk_types.Features.flags) : test_options = - (* Set up a helper to convert actual feature flags composed of booleans into - feature flags composed of Yes/No/Maybe options. 
- @param actual_feature_flags The actual feature flags in terms of true/false - @param true_opt Plonk_types.Opt type to use for true/enabled features - @param false_opt Plonk_types.Opt type to use for false/disabled features - @return Corresponding feature flags composed of Yes/No/Maybe values *) - let compute_feature_flags - (actual_feature_flags : Plonk_types.Features.flags) - (true_opt : Plonk_types.Opt.Flag.t) - (false_opt : Plonk_types.Opt.Flag.t) : Plonk_types.Features.options = - Plonk_types.Features.map actual_feature_flags ~f:(function - | true -> - true_opt - | false -> - false_opt ) - in + let alpha = to_field plonk0.alpha - (* Generate the 3 configurations of the actual feature flags using - helper *) - let open Plonk_types.Opt.Flag in - { true_is_yes = compute_feature_flags actual_feature_flags Yes No - ; true_is_maybe = compute_feature_flags actual_feature_flags Maybe No - ; all_maybes = compute_feature_flags actual_feature_flags Maybe Maybe + let joint_combiner = Option.map ~f:to_field plonk0.joint_combiner + end in + let domain = Domain.Pow_2_roots_of_unity step_vk.domain.log_size_of_group in + let zetaw = Tick.Field.mul As_field.zeta step_vk.domain.group_gen in + let tick_plonk_minimal = + { plonk0 with + zeta = As_field.zeta + ; alpha = As_field.alpha + ; joint_combiner = As_field.joint_combiner } + in + let tick_combined_evals = + Plonk_checks.evals_of_split_evals + (module Tick.Field) + proof.proof.openings.evals ~rounds:(Nat.to_int Tick.Rounds.n) + ~zeta:As_field.zeta ~zetaw + |> Plonk_types.Evals.to_in_circuit + in + let tick_domain = + Plonk_checks.domain + (module Tick.Field) + domain ~shifts:Common.tick_shifts + ~domain_generator:Backend.Tick.Field.domain_generator + in + let tick_env = + let module Env_bool = struct + type t = bool - (* Run the recursive proof tests on the supplied inputs. - - @param actual_feature_flags User-specified feature flags, matching those - required by the backend circuit - @param public_input list of public inputs (can be empty) - @param vk Verifier index for backend circuit - @param proof Backend proof + let true_ = true - @return true or throws and exception - *) - let run_recursive_proof_test - (actual_feature_flags : Plonk_types.Features.flags) - (feature_flags : Plonk_types.Features.options) - (public_input : Pasta_bindings.Fp.t list) - (vk : Kimchi_bindings.Protocol.VerifierIndex.Fp.t) - (proof : Backend.Tick.Proof.t) : Impls.Step.Boolean.value = - (* Constants helper - takes an OCaml value and converts it to a snarky value, where - all values here are constant literals. N.b. this should be - encapsulated as Snarky internals, but it never got merged. *) - let constant (Typ typ : _ Snarky_backendless.Typ.t) x = - let xs, aux = typ.value_to_fields x in - typ.var_of_fields (Array.map xs ~f:Impls.Step.Field.constant, aux) - in + let false_ = false - (* Compute deferred values - in the Pickles recursive proof system, deferred values - are values from 2 proofs earlier in the recursion hierarchy. Every recursion - goes through a two-phase process of step and wrap, like so + let ( &&& ) = ( && ) - step <- wrap <- step <- ... <- wrap <- step, - `<-----------' - deferred + let ( ||| ) = ( || ) - where there may be multiple children at each level (but let's ignore that!). - Deferred values are values (part of the public input) that must be passed between - the two phases in order to be verified correctly-- it works like this. 
+ let any = List.exists ~f:Fn.id + end in + let module Env_field = struct + include Tick.Field - - The wrap proof is passed the deferred values for its step proof as part of its public input. - - The wrap proof starts verifying the step proof. As part of this verification it must - perform all of the group element checks (since it's over the Vesta base field); however, - at this stage it just assumes that the deferred values of its public input are correct - (i.e. it defers checking them). - - The next step proof verifies the wrap proof with a similar process, but using the other - curve (e.g. Pallas). There are two important things to note: - - Since it is using the other curve, it can compute the commitments to the public inputs - of the previous wrap circuit that were passed into it. In other words, the next step - proof receives data from the previous wrap proof about the previous step proof. Yeah, - from two proofs back! (e.g. the deferred values) - - The next step proof also computes the deferred values inside the circuit and verifies - that they match those used by the previous wrap proof. + type bool = Env_bool.t - The code below generates the deferred values so that we can verifiy that we can actually - compute those values correctly inside the circuit. Special thanks to Matthew Ryan for - explaining this in detail. *) - let { deferred_values; x_hat_evals; sponge_digest_before_evaluations } = - deferred_values ~actual_feature_flags ~sgs:[] ~prev_challenges:[] - ~step_vk:vk ~public_input ~proof ~actual_proofs_verified:Nat.N0.n + let if_ (b : bool) ~then_ ~else_ = if b then then_ () else else_ () + end in + Plonk_checks.scalars_env + (module Env_bool) + (module Env_field) + ~endo:Endo.Step_inner_curve.base ~mds:Tick_field_sponge.params.mds + ~zk_rows:step_vk.zk_rows ~srs_length_log2:Common.Max_degree.step_log2 + ~field_of_hex:(fun s -> + Kimchi_pasta.Pasta.Bigint256.of_hex_string s + |> Kimchi_pasta.Pasta.Fp.of_bigint ) + ~domain:tick_domain tick_plonk_minimal tick_combined_evals + in + let plonk = + let module Field = struct + include Tick.Field + end in + Type1.derive_plonk + (module Field) + ~shift:Shifts.tick1 ~env:tick_env tick_plonk_minimal tick_combined_evals + and new_bulletproof_challenges, b = + let prechals = + Array.map (O.opening_prechallenges o) ~f:(fun x -> + Scalar_challenge.map ~f:Challenge.Constant.of_tick_field x ) in - - (* Define Typ.t for Deferred_values.t -- A Type.t defines how to convert a value of some type - in OCaml into a var in circuit/Snarky. - - This complex function is called with two sets of inputs: once for the step circuit and - once for the wrap circuit. It was decided not to use a functor for this. *) - let deferred_values_typ = - let open Impls.Step in - let open Step_main_inputs in - let open Step_verifier in - Wrap.Proof_state.Deferred_values.In_circuit.typ - (module Impls.Step) - ~feature_flags ~challenge:Challenge.typ - ~scalar_challenge:Challenge.typ - ~dummy_scalar:(Shifted_value.Type1.Shifted_value Field.Constant.zero) - ~dummy_scalar_challenge: - (Kimchi_backend_common.Scalar_challenge.create - Limb_vector.Challenge.Constant.zero ) - (Shifted_value.Type1.typ Field.typ) - (Branch_data.typ - (module Impl) - ~assert_16_bits:(Step_verifier.assert_n_bits ~n:16) ) + let chals = + Array.map prechals ~f:(fun x -> Ipa.Step.compute_challenge x) in - - (* Use deferred_values_typ and the constant helper to prepare deferred_values - for use in the circuit. We change some [Opt.t] to [Option.t] because that is - what Type.t is configured to accept. 
*) - let deferred_values = - constant deferred_values_typ - { deferred_values with - plonk = - { deferred_values.plonk with - lookup = Opt.to_option_unsafe deferred_values.plonk.lookup - } - } - (* Prepare all of the evaluations (i.e. all of the columns in the proof that we open) - for use in the circuit *) - and evals = - constant - (Plonk_types.All_evals.typ (module Impls.Step) feature_flags) - { evals = { public_input = x_hat_evals; evals = proof.openings.evals } - ; ft_eval1 = proof.openings.ft_eval1 - } + let challenge_poly = unstage (challenge_polynomial chals) in + let open As_field in + let b = + let open Tick.Field in + challenge_poly zeta + (r * challenge_poly zetaw) in + let prechals = Array.map prechals ~f:Bulletproof_challenge.unpack in + (prechals, b) + in + let shift_value = + Shifted_value.Type1.of_field (module Tick.Field) ~shift:Shifts.tick1 + and chal = Challenge.Constant.of_tick_field in + { deferred_values = + { Types.Wrap.Proof_state.Deferred_values.xi + ; b = shift_value b + ; bulletproof_challenges = + Vector.of_array_and_length_exn new_bulletproof_challenges + Tick.Rounds.n + ; combined_inner_product = + shift_value + As_field.( + combined_inner_product (* Note: We do not pad here. *) + ~actual_proofs_verified: + (Nat.Add.create actual_proofs_verified) + { evals = proof.proof.openings.evals; public_input = x_hat } + ~r ~xi ~zeta ~zetaw + ~old_bulletproof_challenges:prev_challenges ~env:tick_env + ~domain:tick_domain ~ft_eval1:proof.proof.openings.ft_eval1 + ~plonk:tick_plonk_minimal) + ; branch_data = + { proofs_verified = + ( match actual_proofs_verified with + | Z -> + Branch_data.Proofs_verified.N0 + | S Z -> + N1 + | S (S Z) -> + N2 + | S _ -> + assert false ) + ; domain_log2 = + Branch_data.Domain_log2.of_int_exn + step_vk.domain.log_size_of_group + } + ; plonk = + { plonk with + zeta = plonk0.zeta + ; alpha = plonk0.alpha + ; beta = chal plonk0.beta + ; gamma = chal plonk0.gamma + ; joint_combiner = Opt.of_option plonk0.joint_combiner + } + } + ; x_hat_evals = x_hat + ; sponge_digest_before_evaluations = O.digest_before_evaluations o + } +end - (* Run the circuit without generating a proof using run_and_check *) - Impls.Step.run_and_check (fun () -> - (* Set up the step sponge from the wrap sponge -- we cannot use the same poseidon - sponge in both step and wrap because they have different fields. - - In order to continue the Fiat-Shamir heuristic across field boundaries we use - the wrap sponge for everything in the wrap proof, squeeze it one final time and - expose the squoze value in the public input to the step proof, which absorbs - said squoze value into the step sponge. :-) This means the step sponge has absorbed - everything from the proof so far by proxy and that is also over the native field! *) - let res, _chals = - let sponge = - let open Step_main_inputs in - let sponge = Sponge.create sponge_params in - Sponge.absorb sponge - (`Field (Impl.Field.constant sponge_digest_before_evaluations)) ; - sponge - in - - (* Call finalisation with all of the required details *) - Step_verifier.finalize_other_proof - (module Nat.N0) - ~feature_flags - ~step_domains: - (`Known - [ { h = Pow_2_roots_of_unity vk.domain.log_size_of_group } ] - ) - ~sponge ~prev_challenges:[] deferred_values evals - in - - (* Read the boolean result from the circuit and make it available - to the OCaml world. 
*) - Impls.Step.(As_prover.(fun () -> read Boolean.typ res)) ) - |> Or_error.ok_exn - - (* Common srs value for all tests *) - let srs = - Kimchi_bindings.Protocol.SRS.Fp.create (1 lsl Common.Max_degree.step_log2) - - type example = - Kimchi_bindings.Protocol.SRS.Fp.t - -> Kimchi_bindings.Protocol.Index.Fp.t - * Pasta_bindings.Fp.t list - * ( Pasta_bindings.Fq.t Kimchi_types.or_infinity - , Pasta_bindings.Fp.t ) - Kimchi_types.prover_proof - - module type SETUP = sig - val example : example - - (* Feature flags tused for backend proof *) - val actual_feature_flags : bool Plonk_types.Features.t - end - - (* [Make] is the test functor. - - Given a test setup, compute different test configurations and define 3 - test for said configurations. *) - module Make (S : SETUP) = struct - (* Generate foreign field multiplication test backend proof using Kimchi, - obtaining the proof and corresponding prover index. - - Note: we only want to pay the cost of generating this proof once and - then reuse it many times for the different recursive proof tests. *) - let index, public_input, proof = S.example srs - - (* Obtain verifier key from prover index and convert backend proof to - snarky proof *) - let vk = Kimchi_bindings.Protocol.VerifierIndex.Fp.create index - - let proof = Backend.Tick.Proof.of_backend proof - - let test_feature_flags_configs = - generate_test_feature_flag_configs S.actual_feature_flags - - let runtest feature_flags = - run_recursive_proof_test S.actual_feature_flags feature_flags - public_input vk proof - - let%test "true -> yes" = runtest test_feature_flags_configs.true_is_yes - - let%test "true -> maybe" = - runtest test_feature_flags_configs.true_is_maybe - - let%test "all maybes" = runtest test_feature_flags_configs.all_maybes - end - - (* Small combinators to lift gate example signatures to the expected - signatures for the tests. This amounts to generating the list of public - inputs from either no public inputs, a single one or a pair of inputs - returned by the gate example. 
*) - - let no_public_input gate_example srs = - let index, proof = gate_example srs in - (index, [], proof) - - let public_input_1 gate_example srs = - let index, public_input, proof = gate_example srs in - (index, [ public_input ], proof) - - let public_input_2 gate_example srs = - let index, (public_input1, public_input2), proof = gate_example srs in - (index, [ public_input1; public_input2 ], proof) - - let%test_module "lookup" = - ( module Make (struct - let example = - public_input_1 (fun srs -> - Kimchi_bindings.Protocol.Proof.Fp.example_with_lookup srs true ) - - let actual_feature_flags = - { Plonk_types.Features.none_bool with - lookup = true - ; runtime_tables = true - } - end) ) - - (*let%test_module "foreign field multiplication" = - ( module Make (struct - let example = - no_public_input - Kimchi_bindings.Protocol.Proof.Fp.example_with_foreign_field_mul - - let actual_feature_flags = - { Plonk_types.Features.none_bool with - range_check0 = true - ; range_check1 = true - ; foreign_field_add = true - ; foreign_field_mul = true - } - end) )*) - - let%test_module "range check" = - ( module Make (struct - let example = - no_public_input - Kimchi_bindings.Protocol.Proof.Fp.example_with_range_check - - let actual_feature_flags = - { Plonk_types.Features.none_bool with - range_check0 = true - ; range_check1 = true - } - end) ) - - let%test_module "range check 64 bits" = - ( module Make (struct - let example = - no_public_input - Kimchi_bindings.Protocol.Proof.Fp.example_with_range_check0 - - let actual_feature_flags = - { Plonk_types.Features.none_bool with range_check0 = true } - end) ) - - let%test_module "xor" = - ( module Make (struct - let example = - public_input_2 Kimchi_bindings.Protocol.Proof.Fp.example_with_xor - - let actual_feature_flags = - { Plonk_types.Features.none_bool with xor = true } - end) ) - - let%test_module "rot" = - ( module Make (struct - let example = - public_input_2 Kimchi_bindings.Protocol.Proof.Fp.example_with_rot - - let actual_feature_flags = - { Plonk_types.Features.none_bool with - range_check0 = true - ; rot = true - } - end) ) - - let%test_module "foreign field addition" = - ( module Make (struct - let example = - public_input_1 Kimchi_bindings.Protocol.Proof.Fp.example_with_ffadd - - let actual_feature_flags = - { Plonk_types.Features.none_bool with - range_check0 = true - ; range_check1 = true - ; foreign_field_add = true - } - end) ) - end ) - +include For_tests_only module Step_acc = Tock.Inner_curve.Affine (* The prover for wrapping a proof *) @@ -637,7 +298,7 @@ let wrap Req ) : (max_proofs_verified, max_local_max_proofs_verifieds) Requests.Wrap.t ) ~dlog_plonk_index wrap_main ~(typ : _ Impls.Step.Typ.t) ~step_vk - ~actual_wrap_domains ~step_plonk_indices ~feature_flags + ~actual_wrap_domains ~step_plonk_indices:_ ~feature_flags ~actual_feature_flags ?tweak_statement pk ({ statement = prev_statement; prev_evals; proof; index = which_index } : ( _ @@ -743,9 +404,9 @@ let wrap in k (M.f messages_for_next_wrap_proof) | Messages -> - k proof.messages + k proof.proof.messages | Openings_proof -> - k proof.openings.proof + k proof.proof.openings.proof | Proof_state -> k prev_statement_with_hashes.proof_state | Which_branch -> @@ -767,10 +428,10 @@ let wrap | _ -> Snarky_backendless.Request.unhandled in - let module O = Tick.Oracles in + let public_input = tick_public_input_of_statement ~max_proofs_verified - prev_statement_with_hashes ~feature_flags + prev_statement_with_hashes in let prev_challenges = Vector.map ~f:Ipa.Step.compute_challenges @@ 
-811,7 +472,7 @@ let wrap let messages_for_next_wrap_proof : _ P.Base.Messages_for_next_proof_over_same_field.Wrap.t = { challenge_polynomial_commitment = - proof.openings.proof.challenge_polynomial_commitment + proof.proof.openings.proof.challenge_polynomial_commitment ; old_bulletproof_challenges = Vector.map prev_statement.proof_state.unfinalized_proofs ~f:(fun t -> t.deferred_values.bulletproof_challenges ) @@ -859,7 +520,7 @@ let wrap |> Wrap_hack.pad_accumulator in let%map.Promise next_proof = - let (T (input, conv, _conv_inv)) = Impls.Wrap.input () in + let (T (input, conv, _conv_inv)) = Impls.Wrap.input ~feature_flags () in Common.time "wrap proof" (fun () -> [%log internal] "Wrap_generate_witness_conv" ; Impls.Wrap.generate_witness_conv @@ -885,10 +546,12 @@ let wrap then failwith "Regenerated proof" ; let%map.Promise proof = create_proof () in Proof_cache.set_wrap_proof proof_cache ~keypair:pk - ~public_input:public_inputs proof ; + ~public_input:public_inputs proof.proof ; proof | Some proof -> - Promise.return proof ) + Promise.return + ( { proof; public_evals = None } + : Tock.Proof.with_public_evals ) ) in [%log internal] "Backend_tock_proof_create_async_done" ; proof ) @@ -908,23 +571,24 @@ let wrap { next_statement.proof_state.deferred_values with plonk = { next_statement.proof_state.deferred_values.plonk with - lookup = - (* TODO: This assumes wrap circuits do not use lookup *) - None + joint_combiner = + Opt.to_option + next_statement.proof_state.deferred_values.plonk + .joint_combiner } } } } ) in [%log internal] "Pickles_wrap_proof_done" ; - ( { proof = Wrap_wire_proof.of_kimchi_proof next_proof + ( { proof = Wrap_wire_proof.of_kimchi_proof next_proof.proof ; statement = Types.Wrap.Statement.to_minimal next_statement ~to_option:Opt.to_option_unsafe ; prev_evals = { Plonk_types.All_evals.evals = - { public_input = x_hat_evals; evals = proof.openings.evals } - ; ft_eval1 = proof.openings.ft_eval1 + { public_input = x_hat_evals; evals = proof.proof.openings.evals } + ; ft_eval1 = proof.proof.openings.ft_eval1 } } : _ P.Base.Wrap.t ) diff --git a/src/lib/pickles/wrap.mli b/src/lib/pickles/wrap.mli index 4b03afe62ed..5f47babaa4a 100644 --- a/src/lib/pickles/wrap.mli +++ b/src/lib/pickles/wrap.mli @@ -8,21 +8,19 @@ val wrap : and type ns = 'max_local_max_proofs_verifieds ) -> ('max_proofs_verified, 'max_local_max_proofs_verifieds) Requests.Wrap.t -> dlog_plonk_index: - Backend.Tock.Curve.Affine.t Pickles_types.Plonk_verification_key_evals.t + Backend.Tock.Curve.Affine.t array + Pickles_types.Plonk_verification_key_evals.t -> ( ( Impls.Wrap.Impl.Field.t , Impls.Wrap.Impl.Field.t Composition_types.Scalar_challenge.t , Impls.Wrap.Impl.Field.t Pickles_types.Shifted_value.Type1.t , ( Impls.Wrap.Impl.Field.t Pickles_types.Shifted_value.Type1.t , Impls.Wrap.Impl.field Snarky_backendless.Cvar.t Snarky_backendless.Snark_intf.Boolean0.t ) - Pickles_types.Plonk_types.Opt.t + Pickles_types.Opt.t , ( Impls.Wrap.Impl.Field.t Composition_types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit - .Lookup - .t , Impls.Wrap.Impl.field Snarky_backendless.Cvar.t Snarky_backendless.Snark_intf.Boolean0.t ) - Pickles_types.Plonk_types.Opt.t + Pickles_types.Opt.t , Impls.Wrap.Impl.Boolean.var , Impls.Wrap.Impl.field Snarky_backendless.Cvar.t , Impls.Wrap.Impl.field Snarky_backendless.Cvar.t @@ -40,7 +38,7 @@ val wrap : -> step_vk:Kimchi_bindings.Protocol.VerifierIndex.Fp.t -> actual_wrap_domains:(Core_kernel.Int.t, 'c) Pickles_types.Vector.t -> 
step_plonk_indices:'d - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t + -> feature_flags:Opt.Flag.t Plonk_types.Features.Full.t -> actual_feature_flags:bool Plonk_types.Features.t -> ?tweak_statement: ( ( Import.Challenge.Constant.t @@ -50,10 +48,6 @@ val wrap : , bool ) Import.Types.Opt.t , ( Import.Challenge.Constant.t Import.Types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk - .In_circuit - .Lookup - .t , bool ) Import.Types.Opt.t , bool @@ -83,10 +77,6 @@ val wrap : , bool ) Import.Types.Opt.t , ( Import.Challenge.Constant.t Import.Types.Scalar_challenge.t - Composition_types.Wrap.Proof_state.Deferred_values.Plonk - .In_circuit - .Lookup - .t , bool ) Import.Types.Opt.t , bool @@ -168,7 +158,7 @@ val combined_inner_product : -> ft_eval1:Backend.Tick.Field.t -> actual_proofs_verified: (module Pickles_types.Nat.Add.Intf with type n = 'actual_proofs_verified) - -> ( Backend.Tick.Field.t * Backend.Tick.Field.t + -> ( Backend.Tick.Field.t array * Backend.Tick.Field.t array , Backend.Tick.Field.t array * Backend.Tick.Field.t array ) Pickles_types.Plonk_types.All_evals.With_public_input.t -> old_bulletproof_challenges: @@ -195,3 +185,40 @@ module Type1 : Plonk_checks.Make (Pickles_types.Shifted_value.Type1) (Plonk_checks.Scalars.Tick) + +module For_tests_only : sig + type shifted_tick_field = + Backend.Tick.Field.t Pickles_types.Shifted_value.Type1.t + + type scalar_challenge_constant = + Import.Challenge.Constant.t Import.Scalar_challenge.t + + type deferred_values_and_hints = + { x_hat_evals : Backend.Tick.Field.t array * Backend.Tick.Field.t array + ; sponge_digest_before_evaluations : Backend.Tick.Field.t + ; deferred_values : + ( ( Import.Challenge.Constant.t + , scalar_challenge_constant + , shifted_tick_field + , (shifted_tick_field, bool) Opt.t + , (scalar_challenge_constant, bool) Opt.t + , bool ) + Import.Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.t + , scalar_challenge_constant + , shifted_tick_field + , scalar_challenge_constant Import.Bulletproof_challenge.t + Import.Types.Step_bp_vec.t + , Import.Branch_data.t ) + Import.Types.Wrap.Proof_state.Deferred_values.t + } + + val deferred_values : + sgs:(Kimchi_pasta_basic.Vesta.Affine.t, 'n) Vector.t + -> actual_feature_flags:bool Plonk_types.Features.t + -> prev_challenges:((Pasta_bindings.Fp.t, 'a) Vector.t, 'n) Vector.t + -> step_vk:Kimchi_bindings.Protocol.VerifierIndex.Fp.t + -> public_input:Pasta_bindings.Fp.t list + -> proof:Backend.Tick.Proof.with_public_evals + -> actual_proofs_verified:'n Nat.t + -> deferred_values_and_hints +end diff --git a/src/lib/pickles/wrap_deferred_values.ml b/src/lib/pickles/wrap_deferred_values.ml index e15ed8c41f1..e62bb875d84 100644 --- a/src/lib/pickles/wrap_deferred_values.ml +++ b/src/lib/pickles/wrap_deferred_values.ml @@ -1,22 +1,18 @@ module SC = Scalar_challenge open Core_kernel -open Async_kernel open Pickles_types open Common open Import open Backend -open Tuple_lib (* TODO: Just stick this in plonk_checks.ml *) module Plonk_checks = struct include Plonk_checks module Type1 = Plonk_checks.Make (Shifted_value.Type1) (Plonk_checks.Scalars.Tick) - module Type2 = - Plonk_checks.Make (Shifted_value.Type2) (Plonk_checks.Scalars.Tock) end -let expand_deferred (type n most_recent_width) +let expand_deferred (type n most_recent_width) ~zk_rows ~(evals : ( Backend.Tick.Field.t , Backend.Tick.Field.t array ) @@ -39,7 +35,6 @@ let expand_deferred (type n most_recent_width) , Branch_data.t ) 
Composition_types.Wrap.Proof_state.Minimal.Stable.V1.t ) : _ Types.Wrap.Proof_state.Deferred_values.t = - let module Plonk = Types.Wrap.Proof_state.Deferred_values.Plonk in let module Tick_field = Backend.Tick.Field in let tick_field : _ Plonk_checks.field = (module Tick_field) in Timer.start __LOC__ ; @@ -47,7 +42,7 @@ let expand_deferred (type n most_recent_width) let sc = SC.to_field_constant tick_field ~endo:Endo.Wrap_inner_curve.scalar in Timer.clock __LOC__ ; let plonk0 = proof_state.deferred_values.plonk in - let { Deferred_values.Minimal.branch_data; bulletproof_challenges } = + let { Deferred_values.Minimal.branch_data; bulletproof_challenges; _ } = Deferred_values.Minimal.map_challenges ~f:Challenge.Constant.to_tick_field ~scalar:sc proof_state.deferred_values in @@ -106,7 +101,7 @@ let expand_deferred (type n most_recent_width) (module Env_bool) (module Env_field) ~endo:Endo.Step_inner_curve.base ~mds:Tick_field_sponge.params.mds - ~srs_length_log2:Common.Max_degree.step_log2 + ~srs_length_log2:Common.Max_degree.step_log2 ~zk_rows ~field_of_hex:(fun s -> Kimchi_pasta.Pasta.Bigint256.of_hex_string s |> Kimchi_pasta.Pasta.Fp.of_bigint ) @@ -116,8 +111,6 @@ let expand_deferred (type n most_recent_width) let p = let module Field = struct include Tick.Field - - type nonrec bool = bool end in Plonk_checks.Type1.derive_plonk (module Field) @@ -128,11 +121,7 @@ let expand_deferred (type n most_recent_width) ; alpha = plonk0.alpha ; beta = plonk0.beta ; gamma = plonk0.gamma - ; lookup = - Option.map (Plonk_types.Opt.to_option_unsafe p.lookup) ~f:(fun l -> - { Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.Lookup - .joint_combiner = Option.value_exn plonk0.joint_combiner - } ) + ; joint_combiner = plonk0.joint_combiner } in Timer.clock __LOC__ ; @@ -167,8 +156,8 @@ let expand_deferred (type n most_recent_width) absorb evals.ft_eval1 ; let xs = Plonk_types.Evals.to_absorption_sequence evals.evals.evals in let x1, x2 = evals.evals.public_input in - absorb x1 ; - absorb x2 ; + Array.iter ~f:absorb x1 ; + Array.iter ~f:absorb x2 ; List.iter xs ~f:(fun (x1, x2) -> Array.iter ~f:absorb x1 ; Array.iter ~f:absorb x2 ) ) ; let xi_chal = squeeze () in diff --git a/src/lib/pickles/wrap_deferred_values.mli b/src/lib/pickles/wrap_deferred_values.mli new file mode 100644 index 00000000000..770321b2486 --- /dev/null +++ b/src/lib/pickles/wrap_deferred_values.mli @@ -0,0 +1,38 @@ +open Pickles_types +open Import +open Backend + +val expand_deferred : + zk_rows:int + -> evals: + (Pasta_bindings.Fp.t, Pasta_bindings.Fp.t array) Plonk_types.All_evals.t + -> old_bulletproof_challenges: + ( Challenge.Constant.t Kimchi_types.scalar_challenge + Bulletproof_challenge.t + Step_bp_vec.t + , 'most_recent_width ) + Vector.vec + -> proof_state: + ( Challenge.Constant.t + , Challenge.Constant.t Kimchi_types.scalar_challenge + , Pasta_bindings.Fp.t Shifted_value.Type1.t + , bool + , 'n Reduced_messages_for_next_proof_over_same_field.Wrap.t + , Types.Digest.Constant.t + , Challenge.Constant.t Kimchi_types.scalar_challenge + Bulletproof_challenge.t + Step_bp_vec.t + , Branch_data.t ) + Types.Wrap.Proof_state.Minimal.t + -> ( ( Challenge.Constant.t + , Challenge.Constant.t Kimchi_types.scalar_challenge + , Pasta_bindings.Fp.t Shifted_value.Type1.t + , 'a + , Challenge.Constant.t Kimchi_types.scalar_challenge option + , bool ) + Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.t + , Challenge.Constant.t Kimchi_types.scalar_challenge + , Pasta_bindings.Fp.t Shifted_value.Type1.t + , (Pasta_bindings.Fp.t, 
Tick.Rounds.n) Vector.vec + , Branch_data.t ) + Types.Wrap.Proof_state.Deferred_values.t diff --git a/src/lib/pickles/wrap_domains.ml b/src/lib/pickles/wrap_domains.ml index df2a67a4502..957c3393d52 100644 --- a/src/lib/pickles/wrap_domains.ml +++ b/src/lib/pickles/wrap_domains.ml @@ -2,9 +2,11 @@ open Core_kernel open Pickles_types open Import open Poly_types -open Hlist (* Compute the domains corresponding to wrap_main *) + +(* TODO: this functor does not depend on any of its arguments. Why? *) + module Make (A : T0) (A_value : T0) @@ -13,8 +15,8 @@ module Make (Auxiliary_var : T0) (Auxiliary_value : T0) = struct - let f_debug full_signature num_choices choices_length ~feature_flags - ~max_proofs_verified = + let f_debug full_signature _num_choices choices_length ~feature_flags + ~num_chunks ~max_proofs_verified = let num_choices = Hlist.Length.to_nat choices_length in let dummy_step_domains = Vector.init num_choices ~f:(fun _ -> Fix_domains.rough_domains) @@ -26,26 +28,31 @@ struct let dummy_step_keys = lazy (Vector.init num_choices ~f:(fun _ -> - let g = Backend.Tock.Inner_curve.(to_affine_exn one) in - Verification_key.dummy_commitments g ) ) + let num_chunks = (* TODO *) 1 in + let g = + Array.init num_chunks ~f:(fun _ -> + Backend.Tock.Inner_curve.(to_affine_exn one) ) + in + Verification_key.dummy_step_commitments g ) ) in Timer.clock __LOC__ ; let srs = Backend.Tick.Keypair.load_urs () in let _, main = - Wrap_main.wrap_main ~feature_flags ~srs full_signature choices_length - dummy_step_keys dummy_step_widths dummy_step_domains max_proofs_verified + Wrap_main.wrap_main ~feature_flags ~num_chunks ~srs full_signature + choices_length dummy_step_keys dummy_step_widths dummy_step_domains + max_proofs_verified in Timer.clock __LOC__ ; let t = Fix_domains.domains (module Impls.Wrap) - (Impls.Wrap.input ()) + (Impls.Wrap.input ~feature_flags ()) (T (Snarky_backendless.Typ.unit (), Fn.id, Fn.id)) main in Timer.clock __LOC__ ; t - let f full_signature num_choices choices_length ~feature_flags + let f full_signature num_choices choices_length ~feature_flags ~num_chunks ~max_proofs_verified = let res = Common.wrap_domains @@ -54,8 +61,9 @@ struct ( if debug then let res' = f_debug full_signature num_choices choices_length ~feature_flags - ~max_proofs_verified + ~num_chunks ~max_proofs_verified in [%test_eq: Domains.t] res res' ) ; res end +[@@warning "-60"] diff --git a/src/lib/pickles/wrap_domains.mli b/src/lib/pickles/wrap_domains.mli index 387a66f3462..ec5bcc2f3f9 100644 --- a/src/lib/pickles/wrap_domains.mli +++ b/src/lib/pickles/wrap_domains.mli @@ -1,5 +1,7 @@ open Pickles_types +(* Module names below are kept to document the functor parameters, even though + they are unused in the signature, hence the [@@warning "-67"] *) module Make (A : Poly_types.T0) (A_value : Poly_types.T0) @@ -11,7 +13,8 @@ module Make ('a, 'b, 'c) Full_signature.t -> 'd -> ('e, 'b) Hlist.Length.t - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t + -> feature_flags:Opt.Flag.t Plonk_types.Features.Full.t + -> num_chunks:int -> max_proofs_verified:(module Nat.Add.Intf with type n = 'a) -> Import.Domains.t @@ -19,7 +22,9 @@ module Make ('a, 'b, 'c) Full_signature.t -> 'd -> ('e, 'b) Hlist.Length.t - -> feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t + -> feature_flags:Opt.Flag.t Plonk_types.Features.Full.t + -> num_chunks:int -> max_proofs_verified:(module Nat.Add.Intf with type n = 'a) -> Import.Domains.Stable.V2.t end +[@@warning "-67"] diff --git a/src/lib/pickles/wrap_hack.ml 
b/src/lib/pickles/wrap_hack.ml index f99ad11f876..9bb5f8814d0 100644 --- a/src/lib/pickles/wrap_hack.ml +++ b/src/lib/pickles/wrap_hack.ml @@ -29,7 +29,7 @@ let pad_vector (type a) ~dummy (v : (a, _) Vector.t) = (* Specialized padding function. *) let pad_challenges (chalss : (_ Vector.t, _) Vector.t) = - pad_vector ~dummy:Dummy.Ipa.Wrap.challenges_computed chalss + pad_vector ~dummy:(Lazy.force Dummy.Ipa.Wrap.challenges_computed) chalss (* Specialized padding function. *) let pad_accumulator (xs : (Tock.Proof.Challenge_polynomial.t, _) Vector.t) = @@ -37,12 +37,13 @@ let pad_accumulator (xs : (Tock.Proof.Challenge_polynomial.t, _) Vector.t) = ~dummy: { Tock.Proof.Challenge_polynomial.commitment = Lazy.force Dummy.Ipa.Wrap.sg - ; challenges = Vector.to_array Dummy.Ipa.Wrap.challenges_computed + ; challenges = + Vector.to_array (Lazy.force Dummy.Ipa.Wrap.challenges_computed) } |> Vector.to_list (* Hash the me only, padding first. *) -let hash_messages_for_next_wrap_proof (type n) (max_proofs_verified : n Nat.t) +let hash_messages_for_next_wrap_proof (type n) (_max_proofs_verified : n Nat.t) (t : ( Tick.Curve.Affine.t , (_, n) Vector.t ) @@ -83,7 +84,7 @@ module Checked = struct pad_vector ~dummy: (Vector.map ~f:Impls.Wrap.Field.constant - Dummy.Ipa.Wrap.challenges_computed ) + (Lazy.force Dummy.Ipa.Wrap.challenges_computed) ) chalss let pad_commitments (commitments : _ Vector.t) = @@ -102,9 +103,10 @@ module Checked = struct let full_state s = (S.state s, s.sponge_state) in let sponge = S.create Tock_field_sponge.params in let s0 = full_state sponge in - Vector.iter ~f:(S.absorb sponge) Dummy.Ipa.Wrap.challenges_computed ; + let chals = Lazy.force Dummy.Ipa.Wrap.challenges_computed in + Vector.iter ~f:(S.absorb sponge) chals ; let s1 = full_state sponge in - Vector.iter ~f:(S.absorb sponge) Dummy.Ipa.Wrap.challenges_computed ; + Vector.iter ~f:(S.absorb sponge) chals ; let s2 = full_state sponge in [| s0; s1; s2 |] ) @@ -139,34 +141,4 @@ module Checked = struct (Composition_types.Wrap.Proof_state.Messages_for_next_wrap_proof .to_field_elements ~g1:Inner_curve.to_field_elements t ) ; Sponge.squeeze_field sponge - - (* Check that the pre-absorbing technique works. I.e., that it's consistent with - the actual definition of hash_messages_for_next_wrap_proof. 
*) - let%test_unit "hash_messages_for_next_wrap_proof correct" = - let open Impls.Wrap in - let test (type n) (n : n Nat.t) = - let messages_for_next_wrap_proof : - _ Composition_types.Wrap.Proof_state.Messages_for_next_wrap_proof.t = - let g = Wrap_main_inputs.Inner_curve.Constant.random () in - { Composition_types.Wrap.Proof_state.Messages_for_next_wrap_proof - .challenge_polynomial_commitment = g - ; old_bulletproof_challenges = - Vector.init n ~f:(fun _ -> - Vector.init Tock.Rounds.n ~f:(fun _ -> Tock.Field.random ()) ) - } - in - Internal_Basic.Test.test_equal ~sexp_of_t:Field.Constant.sexp_of_t - ~equal:Field.Constant.equal - (Composition_types.Wrap.Proof_state.Messages_for_next_wrap_proof.typ - Wrap_main_inputs.Inner_curve.typ - (Vector.typ Field.typ Backend.Tock.Rounds.n) - ~length:n ) - Field.typ - (fun t -> make_checked (fun () -> hash_messages_for_next_wrap_proof n t)) - (fun t -> - hash_constant_messages_for_next_wrap_proof n t - |> Digest.Constant.to_bits |> Impls.Wrap.Field.Constant.project ) - messages_for_next_wrap_proof - in - test Nat.N0.n ; test Nat.N1.n ; test Nat.N2.n end diff --git a/src/lib/pickles/wrap_main.ml b/src/lib/pickles/wrap_main.ml index 7ddff505c03..d32bf4b6138 100644 --- a/src/lib/pickles/wrap_main.ml +++ b/src/lib/pickles/wrap_main.ml @@ -48,7 +48,7 @@ let shifts ~log2_size = Common.tock_shifts ~log2_size let domain_generator ~log2_size = Backend.Tock.Field.domain_generator ~log2_size |> Impl.Field.constant -let split_field_typ : (Field.t * Boolean.var, Field.Constant.t) Typ.t = +let _split_field_typ : (Field.t * Boolean.var, Field.Constant.t) Typ.t = Typ.transport Typ.(field * Boolean.typ) ~there:(fun (x : Field.Constant.t) -> @@ -80,22 +80,19 @@ let split_field (x : Field.t) : Field.t * Boolean.var = Field.(Assert.equal ((of_int 2 * y) + (is_odd :> t)) x) ; res -let lookup_config_for_pack = - { Types.Wrap.Lookup_parameters.zero = Common.Lookup_parameters.tock_zero - ; use = Plonk_types.Opt.Flag.No - } - (* The SNARK function for wrapping any proof coming from the given set of keys *) let wrap_main - (type max_proofs_verified branches prev_varss prev_valuess env - max_local_max_proofs_verifieds ) ~feature_flags + (type max_proofs_verified branches prev_varss max_local_max_proofs_verifieds) + ~num_chunks ~feature_flags (full_signature : ( max_proofs_verified , branches , max_local_max_proofs_verifieds ) Full_signature.t ) (pi_branches : (prev_varss, branches) Hlist.Length.t) (step_keys : - ( Wrap_main_inputs.Inner_curve.Constant.t Wrap_verifier.index' + ( ( Wrap_main_inputs.Inner_curve.Constant.t array + , Wrap_main_inputs.Inner_curve.Constant.t array option ) + Wrap_verifier.index' , branches ) Vector.t Lazy.t ) (step_widths : (int, branches) Vector.t) @@ -128,7 +125,7 @@ let wrap_main (create () : (max_proofs_verified, max_local_max_proofs_verifieds) t)) in Timer.clock __LOC__ ; - let { Full_signature.padded; maxes = (module Max_widths_by_slot) } = + let { Full_signature.padded = _; maxes = (module Max_widths_by_slot) } = full_signature in Timer.clock __LOC__ ; @@ -204,7 +201,6 @@ let wrap_main let typ = typ (module Impl) - Common.Lookup_parameters.tock_zero ~assert_16_bits:(Wrap_verifier.assert_n_bits ~n:16) (Vector.init Max_proofs_verified.n ~f:(fun _ -> Plonk_types.Features.none ) ) @@ -216,7 +212,13 @@ let wrap_main with_label __LOC__ (fun () -> Wrap_verifier.choose_key which_branch (Vector.map (Lazy.force step_keys) - ~f:(Plonk_verification_key_evals.map ~f:Inner_curve.constant) ) ) + ~f: + (Plonk_verification_key_evals.Step.map + ~f:(Array.map 
~f:Inner_curve.constant) ~f_opt:(function + | None -> + Opt.nothing + | Some x -> + Opt.just (Array.map ~f:Inner_curve.constant x) ) ) ) in let prev_step_accs = with_label __LOC__ (fun () -> @@ -260,7 +262,9 @@ let wrap_main let evals = let ty = let ty = - Plonk_types.All_evals.typ (module Impl) feature_flags + Plonk_types.All_evals.typ + (module Impl) + ~num_chunks:1 feature_flags in Vector.typ ty Max_proofs_verified.n in @@ -316,7 +320,7 @@ let wrap_main Need to compute this value from the which_branch. *) let (T - ( max_local_max_proofs_verified + ( _max_local_max_proofs_verified , old_bulletproof_challenges ) ) = old_bulletproof_challenges in @@ -391,7 +395,7 @@ let wrap_main Inner_curve.typ ~bool:Boolean.typ feature_flags ~dummy:Inner_curve.Params.one ~commitment_lengths: - (Commitment_lengths.create ~of_int:Fn.id) ) + (Commitment_lengths.default ~num_chunks) ) ~request:(fun () -> Req.Messages) ) in let sponge = Wrap_verifier.Opt.create sponge_params in diff --git a/src/lib/pickles/wrap_main.mli b/src/lib/pickles/wrap_main.mli index c535ddd3418..498b5892cae 100644 --- a/src/lib/pickles/wrap_main.mli +++ b/src/lib/pickles/wrap_main.mli @@ -3,16 +3,23 @@ open Pickles_types (** [wrap_main] is the SNARK function for wrapping any proof coming from the given set of keys **) val wrap_main : - feature_flags:Plonk_types.Opt.Flag.t Plonk_types.Features.t + num_chunks:int + -> feature_flags:Opt.Flag.t Plonk_types.Features.Full.t -> ( 'max_proofs_verified , 'branches , 'max_local_max_proofs_verifieds ) Full_signature.t -> ('prev_varss, 'branches) Pickles_types.Hlist.Length.t - -> ( Wrap_main_inputs.Inner_curve.Constant.t Wrap_verifier.index' + -> ( ( Wrap_main_inputs.Inner_curve.Constant.t array + (* commitments *) , Wrap_main_inputs.Inner_curve.Constant.t array option + (* commitments to optional gates *) ) + Wrap_verifier.index' , 'branches ) Pickles_types.Vector.t Core_kernel.Lazy.t + (* All the commitments, including commitments to optional gates, saved in a + vector of size ['branches] *) -> (int, 'branches) Pickles_types.Vector.t -> (Import.Domains.t, 'branches) Pickles_types.Vector.t -> srs:Kimchi_bindings.Protocol.SRS.Fp.t @@ -25,8 +32,10 @@ val wrap_main : , ( Wrap_verifier.Other_field.Packed.t Pickles_types.Shifted_value.Type1.t , Wrap_main_inputs.Impl.Boolean.var ) - Pickles_types.Plonk_types.Opt.t - , 'a + Pickles_types.Opt.t + , ( Wrap_verifier.Scalar_challenge.t , Wrap_main_inputs.Impl.Boolean.var ) + Pickles_types.Opt.t , Impls.Wrap.Boolean.var , Impls.Wrap.Field.t , Impls.Wrap.Field.t diff --git a/src/lib/pickles/wrap_main_inputs.ml b/src/lib/pickles/wrap_main_inputs.ml index 94620c70150..e3831fd6d64 100644 --- a/src/lib/pickles/wrap_main_inputs.ml +++ b/src/lib/pickles/wrap_main_inputs.ml @@ -1,21 +1,18 @@ open Core_kernel -open Common open Backend module Me = Tock module Other = Tick module Impl = Impls.Wrap -open Pickles_types -open Import -let high_entropy_bits = 128 +let _high_entropy_bits = 128 -let sponge_params_constant = - Sponge.Params.(map pasta_q_kimchi ~f:Impl.Field.Constant.of_string) +let sponge_params_constant = Kimchi_pasta_basic.poseidon_params_fq let field_random_oracle ?(length = Me.Field.size_in_bits - 1) s = Me.Field.of_bits (Ro.bits_random_oracle ~length s) -let unrelated_g = +let _unrelated_g = + let open Common in let group_map = unstage (group_map @@ -88,18 +85,19 @@ let%test_unit "sponge" = let module T = Make_sponge.Test (Impl) (Tock_field_sponge.Field) (Sponge.S) in T.test Tock_field_sponge.params -module Input_domain = struct - let 
lagrange_commitments domain : Me.Inner_curve.Affine.t array = - let domain_size = Domain.size domain in - time "lagrange" (fun () -> - Array.init domain_size ~f:(fun i -> - (Kimchi_bindings.Protocol.SRS.Fp.lagrange_commitment - (Tick.Keypair.load_urs ()) domain_size i ) - .unshifted.(0) - |> Common.finite_exn ) ) - - let domain = Domain.Pow_2_roots_of_unity 7 -end +(* module Input_domain = struct + let _lagrange_commitments domain : Backend.Tock.Inner_curve.Affine.t array = + let domain_size = Import.Domain.size domain in + Common.time "lagrange" (fun () -> + Array.init domain_size ~f:(fun i -> + (Kimchi_bindings.Protocol.SRS.Fp.lagrange_commitment + (Backend.Tick.Keypair.load_urs ()) + domain_size i ) + .unshifted.(0) + |> Common.finite_exn ) ) + + let _domain = Import.Domain.Pow_2_roots_of_unity 7 + end *) module Inner_curve = struct module C = Kimchi_pasta.Pasta.Vesta diff --git a/src/lib/pickles/wrap_main_inputs.mli b/src/lib/pickles/wrap_main_inputs.mli index e5e1237cdf8..0fa83d31b7e 100644 --- a/src/lib/pickles/wrap_main_inputs.mli +++ b/src/lib/pickles/wrap_main_inputs.mli @@ -103,8 +103,6 @@ module Inner_curve : sig type t = Inputs.F.t * Inputs.F.t - val double : t -> t - val add' : div:(Inputs.F.t -> Inputs.F.t -> Inputs.F.t) -> t -> t -> t val add_exn : t -> t -> t @@ -150,7 +148,7 @@ module Inner_curve : sig end module Shifted : functor - (M : sig + (_ : sig val shift : t end) () @@ -158,12 +156,6 @@ module Inner_curve : sig val shifted : unit -> (module Shifted_intf) - val scale : - ?init:Inputs.F.t * Inputs.F.t - -> t - -> Inputs.Impl.Boolean.var Bitstring_lib.Bitstring.Lsb_first.t - -> Inputs.F.t * Inputs.F.t - module Window_table : sig type t = Inputs.Constant.t Tuple_lib.Quadruple.t array diff --git a/src/lib/pickles/wrap_proof.ml b/src/lib/pickles/wrap_proof.ml index 966287e20f7..890b46fb693 100644 --- a/src/lib/pickles/wrap_proof.ml +++ b/src/lib/pickles/wrap_proof.ml @@ -36,9 +36,9 @@ let typ : (Checked.t, Constant.t) Typ.t = ~value_to_hlist:Constant.to_hlist ~value_of_hlist:Constant.of_hlist [ Plonk_types.Messages.typ (module Impl) - Inner_curve.typ Plonk_types.Features.none ~bool:Boolean.typ + Inner_curve.typ Plonk_types.Features.Full.none ~bool:Boolean.typ ~dummy:Inner_curve.Params.one - ~commitment_lengths:(Commitment_lengths.create ~of_int:(fun x -> x)) + ~commitment_lengths:(Commitment_lengths.default ~num_chunks:1) ; Types.Step.Bulletproof.typ ~length:(Nat.to_int Tock.Rounds.n) ( Typ.transport Other_field.typ ~there:(fun x -> diff --git a/src/lib/pickles/wrap_verifier.ml b/src/lib/pickles/wrap_verifier.ml index c13c8f94274..2120c7ef7ce 100644 --- a/src/lib/pickles/wrap_verifier.ml +++ b/src/lib/pickles/wrap_verifier.ml @@ -1,5 +1,4 @@ module S = Sponge -open Backend open Core_kernel open Util module SC = Scalar_challenge @@ -8,31 +7,35 @@ open Plonk_types open Tuple_lib open Import +(* G is for Generic. This module is just to protect {!val:challenge_polynomial} + below from being hidden by the included functor application at the end of + the module, so that we can re-export it in the end. 
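+   The first export it protects is [challenge_polynomial] just below: given
+   bulletproof challenges [chals] of length k, it stages evaluation of
+     b(x) = prod_{i=0..k-1} (1 + chals.(i) * x^(2^(k-1-i))),
+   so for k = 2 it computes (1 + chals.(0) * x^2) * (1 + chals.(1) * x).
+   A hedged out-of-circuit usage sketch, assuming [Backend.Tick.Field]
+   satisfies [Pickles_types.Shifted_value.Field_intf]:
+     let eval = unstage (challenge_polynomial (module Backend.Tick.Field) chals) in
+     eval pt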
*) module G = struct - let lookup_verification_enabled = false - (* given [chals], compute \prod_i (1 + chals.(i) * x^{2^{k - 1 - i}}) *) - let challenge_polynomial ~one ~add ~mul chals = - let ( + ) = add and ( * ) = mul in + let challenge_polynomial (type a) + (module M : Pickles_types.Shifted_value.Field_intf with type t = a) chals + : (a -> a) Staged.t = stage (fun pt -> let k = Array.length chals in let pow_two_pows = let res = Array.init k ~f:(fun _ -> pt) in for i = 1 to k - 1 do let y = res.(i - 1) in - res.(i) <- y * y + res.(i) <- M.(y * y) done ; res in let prod f = let r = ref (f 0) in for i = 1 to k - 1 do - r := f i * !r + r := M.(f i * !r) done ; !r in - prod (fun i -> one + (chals.(i) * pow_two_pows.(k - 1 - i))) ) + prod (fun i -> + let idx = k - 1 - i in + M.(one + (chals.(i) * pow_two_pows.(idx))) ) ) let num_possible_domains = Nat.S Wrap_hack.Padded_length.n @@ -59,7 +62,7 @@ struct let typ = Impls.Wrap.Other_field.typ - let to_bits_unsafe (x : t) = Wrap_main_inputs.Unsafe.unpack_unboolean x + let _to_bits_unsafe (x : t) = Wrap_main_inputs.Unsafe.unpack_unboolean x let absorb_shifted sponge (x : t Shifted_value.Type1.t) = match x with Shifted_value x -> Sponge.absorb sponge x @@ -74,7 +77,7 @@ struct let typ = Impls.Wrap.Other_field.typ_unchecked - let absorb_shifted sponge (x : t Shifted_value.Type1.t) = + let _absorb_shifted sponge (x : t Pickles_types.Shifted_value.Type1.t) = match x with Shifted_value x -> Sponge.absorb sponge x end end @@ -93,15 +96,15 @@ struct %!" lab (read_var x) (read_var y)) - let print_w lab gs = - if debug then + let _print_w lab gs = + if Import.debug then Array.iteri gs ~f:(fun i (fin, g) -> as_prover As_prover.(fun () -> printf "fin=%b %!" (read Boolean.typ fin)) ; ksprintf print_g "%s[%d]" lab i g ) - let print_chal lab x = - if debug then + let _print_chal lab x = + if Import.debug then as_prover As_prover.( fun () -> @@ -121,7 +124,8 @@ struct SC.Make (Impl) (Inner_curve) (Challenge) (Endo.Wrap_inner_curve) module Ops = Plonk_curve_ops.Make (Impl) (Inner_curve) - let product m f = List.reduce_exn (List.init m ~f) ~f:Field.( * ) + let _product m f = + Core_kernel.List.reduce_exn (Core_kernel.List.init m ~f) ~f:Field.( * ) let absorb sponge ty t = absorb @@ -171,7 +175,8 @@ struct let terms, challenges = Array.map2_exn gammas prechallenges ~f:term_and_challenge |> Array.unzip in - (Array.reduce_exn terms ~f:Ops.add_fast, challenges) + + (Array.reduce_exn terms ~f:(Ops.add_fast ?check_finite:None), challenges) let equal_g g1 g2 = List.map2_exn ~f:Field.equal @@ -181,27 +186,129 @@ struct module One_hot_vector = One_hot_vector.Make (Impl) - type 'a index' = 'a Plonk_verification_key_evals.t - - type 'a index = 'a Plonk_verification_key_evals.t + type ('comm, 'comm_opt) index' = + ('comm, 'comm_opt) Plonk_verification_key_evals.Step.t (* Mask out the given vector of indices with the given one-hot vector *) let choose_key : type n. 
n One_hot_vector.t - -> (Inner_curve.t index', n) Vector.t - -> Inner_curve.t index' = + -> ( (Inner_curve.t array, (Inner_curve.t array, Boolean.var) Opt.t) index' + , n ) + Vector.t + -> (Inner_curve.t array, (Inner_curve.t array, Boolean.var) Opt.t) index' + = let open Tuple_lib in - let map = Plonk_verification_key_evals.map in - let map2 = Plonk_verification_key_evals.map2 in fun bs keys -> let open Field in Vector.map2 (bs :> (Boolean.var, n) Vector.t) keys - ~f:(fun b key -> map key ~f:(fun g -> Double.map g ~f:(( * ) (b :> t)))) - |> Vector.reduce_exn ~f:(map2 ~f:(Double.map2 ~f:( + ))) - |> map ~f:(fun g -> Double.map ~f:(Util.seal (module Impl)) g) + ~f:(fun b key -> + Plonk_verification_key_evals.Step.map key + ~f:(Array.map ~f:(fun g -> Double.map g ~f:(( * ) (b :> t)))) + ~f_opt:(function + (* Here, we split the 3 variants into 3 separate accumulators. This + allows us to only compute the 'maybe' flag when we need to, and + allows us to fall back to the basically-free `Nothing` when a + feature is entirely unused, or to the less expensive `Just` if + it is used for every circuit. + In particular, it is important that we generate exactly + `Nothing` when none of the optional gates are used, otherwise + we will change the serialization of the protocol circuits. + *) + | Opt.Nothing -> + ([], [], [ b ]) + | Opt.Maybe (b_x, x) -> + ([], [ (b, b_x, x) ], []) + | Opt.Just x -> + ([ (b, x) ], [], []) ) ) + |> Vector.reduce_exn + ~f: + (Plonk_verification_key_evals.Step.map2 + ~f:(Array.map2_exn ~f:(Double.map2 ~f:( + ))) + ~f_opt:(fun (yes_1, maybe_1, no_1) (yes_2, maybe_2, no_2) -> + (yes_1 @ yes_2, maybe_1 @ maybe_2, no_1 @ no_2) ) ) + |> Plonk_verification_key_evals.Step.map ~f:Fn.id ~f_opt:(function + | [], [], _nones -> + (* We only have `Nothing`s, so we can emit exactly `Nothing` + without further computation. + *) + Opt.Nothing + | justs, [], [] -> + (* Special case: we don't need to compute the 'maybe' bool + because we know statically that all entries are `Just`. 
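+ Concretely: [bs] is one-hot, so exactly one selector bit b_i is set, and
+ the selected commitment is the masked sum over branches,
+   sum_i (b_i :> t) * g_i,
+ taken coordinate-wise on each affine chunk. In the all-Just case that sum
+ is the whole answer, which is why we can emit [Opt.just sum] directly
+ without tracking an extra availability flag.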
+ *) + let sum = + justs + |> List.map ~f:(fun ((b : Boolean.var), g) -> + Array.map g ~f:(Double.map ~f:(( * ) (b :> t))) ) + |> List.reduce_exn + ~f:(Array.map2_exn ~f:(Double.map2 ~f:( + ))) + in + Opt.just sum + | justs, maybes, nones -> + let is_none = + List.reduce nones + ~f:(fun (b1 : Boolean.var) (b2 : Boolean.var) -> + Boolean.Unsafe.of_cvar Field.(add (b1 :> t) (b2 :> t)) ) + in + let none_sum = + let num_chunks = (* TODO *) 1 in + Option.map is_none ~f:(fun (b : Boolean.var) -> + Array.init num_chunks ~f:(fun _ -> + Double.map Inner_curve.one ~f:(( * ) (b :> t)) ) ) + in + let just_is_yes, just_sum = + justs + |> List.map ~f:(fun ((b : Boolean.var), g) -> + (b, Array.map g ~f:(Double.map ~f:(( * ) (b :> t)))) ) + |> List.reduce + ~f:(fun ((b1 : Boolean.var), g1) ((b2 : Boolean.var), g2) + -> + ( Boolean.Unsafe.of_cvar Field.(add (b1 :> t) (b2 :> t)) + , Array.map2_exn ~f:(Double.map2 ~f:( + )) g1 g2 ) ) + |> fun x -> (Option.map ~f:fst x, Option.map ~f:snd x) + in + let maybe_is_yes, maybe_sum = + maybes + |> List.map + ~f:(fun ((b : Boolean.var), (b_g : Boolean.var), g) -> + ( Boolean.Unsafe.of_cvar Field.(mul (b :> t) (b_g :> t)) + , Array.map g ~f:(Double.map ~f:(( * ) (b :> t))) ) ) + |> List.reduce + ~f:(fun ((b1 : Boolean.var), g1) ((b2 : Boolean.var), g2) + -> + ( Boolean.Unsafe.of_cvar Field.(add (b1 :> t) (b2 :> t)) + , Array.map2_exn ~f:(Double.map2 ~f:( + )) g1 g2 ) ) + |> fun x -> (Option.map ~f:fst x, Option.map ~f:snd x) + in + let is_yes = + [| just_is_yes; maybe_is_yes |] + |> Array.filter_map ~f:Fn.id + |> Array.reduce_exn + ~f:(fun (b1 : Boolean.var) (b2 : Boolean.var) -> + Boolean.Unsafe.of_cvar ((b1 :> t) + (b2 :> t)) ) + in + let sum = + [| none_sum; maybe_sum; just_sum |] + |> Array.filter_map ~f:Fn.id + |> Array.reduce_exn + ~f:(Array.map2_exn ~f:(Double.map2 ~f:( + ))) + in + Opt.Maybe (is_yes, sum) ) + |> Plonk_verification_key_evals.Step.map + ~f:(fun g -> Array.map ~f:(Double.map ~f:(Util.seal (module Impl))) g) + ~f_opt:(function + | Opt.Nothing -> + Opt.Nothing + | Opt.Maybe (b, x) -> + Opt.Maybe + ( Boolean.Unsafe.of_cvar (Util.seal (module Impl) (b :> t)) + , Array.map ~f:(Double.map ~f:(Util.seal (module Impl))) x ) + | Opt.Just x -> + Opt.Just + (Array.map ~f:(Double.map ~f:(Util.seal (module Impl))) x) ) (* TODO: Unify with the code in step_verifier *) let lagrange (type n) @@ -210,19 +317,23 @@ struct , (domains : (Domains.t, n) Vector.t) ) srs i = Vector.map domains ~f:(fun d -> let d = Int.pow 2 (Domain.log2_size d.h) in - match + let chunks = (Kimchi_bindings.Protocol.SRS.Fp.lagrange_commitment srs d i) .unshifted - with - | [| Finite g |] -> - let g = Inner_curve.Constant.of_affine g in - Inner_curve.constant g - | _ -> - assert false ) + in + Array.map chunks ~f:(function + | Finite g -> + let g = Inner_curve.Constant.of_affine g in + Inner_curve.constant g + | Infinity -> + (* Point at infinity should be impossible in the SRS *) + assert false ) ) |> Vector.map2 (which_branch :> (Boolean.var, n) Vector.t) - ~f:(fun b (x, y) -> Field.((b :> t) * x, (b :> t) * y)) - |> Vector.reduce_exn ~f:(Double.map2 ~f:Field.( + )) + ~f:(fun b pts -> + Array.map pts ~f:(fun (x, y) -> Field.((b :> t) * x, (b :> t) * y)) + ) + |> Vector.reduce_exn ~f:(Array.map2_exn ~f:(Double.map2 ~f:Field.( + ))) let scaled_lagrange (type n) c ~domain: @@ -230,24 +341,29 @@ struct , (domains : (Domains.t, n) Vector.t) ) srs i = Vector.map domains ~f:(fun d -> let d = Int.pow 2 (Domain.log2_size d.h) in - match + let chunks = 
(Kimchi_bindings.Protocol.SRS.Fp.lagrange_commitment srs d i) .unshifted - with - | [| Finite g |] -> - let g = Inner_curve.Constant.of_affine g in - Inner_curve.Constant.scale g c |> Inner_curve.constant - | _ -> - assert false ) + in + Array.map chunks ~f:(function + | Finite g -> + let g = Inner_curve.Constant.of_affine g in + Inner_curve.Constant.scale g c |> Inner_curve.constant + | Infinity -> + (* Point at infinity should be impossible in the SRS *) + assert false ) ) |> Vector.map2 (which_branch :> (Boolean.var, n) Vector.t) - ~f:(fun b (x, y) -> Field.((b :> t) * x, (b :> t) * y)) - |> Vector.reduce_exn ~f:(Double.map2 ~f:Field.( + )) + ~f:(fun b pts -> + Array.map pts ~f:(fun (x, y) -> Field.((b :> t) * x, (b :> t) * y)) + ) + |> Vector.reduce_exn ~f:(Array.map2_exn ~f:(Double.map2 ~f:Field.( + ))) let lagrange_with_correction (type n) ~input_length ~domain: ( (which_branch : n One_hot_vector.t) - , (domains : (Domains.t, n) Vector.t) ) srs i : Inner_curve.t Double.t = + , (domains : (Domains.t, n) Vector.t) ) srs i : + Inner_curve.t Double.t array = with_label __LOC__ (fun () -> let actual_shift = (* TODO: num_bits should maybe be input_length - 1. *) @@ -258,18 +374,19 @@ struct in let base_and_correction (h : Domain.t) = let d = Int.pow 2 (Domain.log2_size h) in - match + let chunks = (Kimchi_bindings.Protocol.SRS.Fp.lagrange_commitment srs d i) .unshifted - with - | [| Finite g |] -> - let open Inner_curve.Constant in - let g = of_affine g in - ( Inner_curve.constant g - , Inner_curve.constant (negate (pow2pow g actual_shift)) ) - | xs -> - failwithf "expected commitment to have length 1. got %d" - (Array.length xs) () + in + Array.map chunks ~f:(function + | Finite g -> + let open Inner_curve.Constant in + let g = of_affine g in + ( Inner_curve.constant g + , Inner_curve.constant (negate (pow2pow g actual_shift)) ) + | Infinity -> + (* Point at infinity should be impossible in the SRS *) + assert false ) in match domains with | [] -> @@ -283,13 +400,18 @@ struct |> Vector.map2 (which_branch :> (Boolean.var, n) Vector.t) ~f:(fun b pr -> - Double.map pr ~f:(fun (x, y) -> - Field.((b :> t) * x, (b :> t) * y) ) ) + Array.map pr + ~f: + (Double.map ~f:(fun (x, y) -> + Field.((b :> t) * x, (b :> t) * y) ) ) ) |> Vector.reduce_exn - ~f:(Double.map2 ~f:(Double.map2 ~f:Field.( + ))) - |> Double.map ~f:(Double.map ~f:(Util.seal (module Impl))) ) + ~f: + (Array.map2_exn + ~f:(Double.map2 ~f:(Double.map2 ~f:Field.( + ))) ) + |> Array.map + ~f:(Double.map ~f:(Double.map ~f:(Util.seal (module Impl)))) ) - let h_precomp = + let _h_precomp = Lazy.map ~f:Inner_curve.Scaling_precomputation.create Generators.h let group_map = @@ -323,12 +445,18 @@ struct [ `Finite of Inner_curve.t | `Maybe_finite of Boolean.var * Inner_curve.t ] - let finite : t -> Boolean.var = function + let _finite : t -> Boolean.var = function | `Finite _ -> Boolean.true_ | `Maybe_finite (b, _) -> b + let assert_finite : t -> unit = function + | `Finite _ -> + () + | `Maybe_finite _ -> + failwith "Not finite" + let add (p : t) (q : Inner_curve.t) = match p with | `Finite p -> @@ -344,34 +472,70 @@ struct end let combine batch ~xi without_bound with_bound = + let reduce_point p = + let point = ref (Point.underlying p.(Array.length p - 1)) in + for i = Array.length p - 2 downto 0 do + point := Point.add p.(i) (Scalar_challenge.endo !point xi) + done ; + !point + in let { Curve_opt.non_zero; point } = Pcs_batch.combine_split_commitments batch - ~scale_and_add:(fun ~(acc : Curve_opt.t) ~xi (keep, (p : Point.t)) -> + 
~reduce_with_degree_bound:(fun _ -> assert false) + ~reduce_without_degree_bound:(fun x -> [ x ]) + ~scale_and_add:(fun ~(acc : Curve_opt.t) ~xi + (p : (Point.t array, Boolean.var) Opt.t) -> (* match acc.non_zero, keep with | false, false -> acc | true, false -> acc | false, true -> { point= p; non_zero= true } | true, true -> { point= p + xi * acc; non_zero= true } *) - let point = - Inner_curve.( - if_ keep - ~then_: - (if_ acc.non_zero - ~then_:(Point.add p (Scalar_challenge.endo acc.point xi)) - ~else_: - ((* In this branch, the accumulator was zero, so there is no harm in - putting the potentially junk underlying point here. *) - Point.underlying p ) ) - ~else_:acc.point) + let point keep p = + let base_point = + let p = p.(Array.length p - 1) in + Inner_curve.( + if_ acc.non_zero + ~then_:(Point.add p (Scalar_challenge.endo acc.point xi)) + ~else_: + ((* In this branch, the accumulator was zero, so there is no harm in + putting the potentially junk underlying point here. *) + Point.underlying p )) + in + let point = ref base_point in + for i = Array.length p - 2 downto 0 do + point := Point.add p.(i) (Scalar_challenge.endo !point xi) + done ; + let point = + Inner_curve.(if_ keep ~then_:!point ~else_:acc.point) + in + Array.iter ~f:Point.assert_finite p ; + let non_zero = Boolean.(keep &&& true_ ||| acc.non_zero) in + { Curve_opt.non_zero; point } in - let non_zero = Boolean.(keep &&& Point.finite p ||| acc.non_zero) in - { Curve_opt.non_zero; point } ) + match p with + | Opt.Nothing -> + acc + | Opt.Maybe (keep, p) -> + point keep p + | Opt.Just p -> + point Boolean.true_ p ) ~xi - ~init:(fun (keep, p) -> - { non_zero = Boolean.(keep &&& Point.finite p) - ; point = Point.underlying p - } ) + ~init:(function + | Opt.Nothing -> + None + | Opt.Maybe (keep, p) -> + Array.iter ~f:Point.assert_finite p ; + Some + { non_zero = Boolean.(keep &&& true_) + ; point = reduce_point p + } + | Opt.Just p -> + Array.iter ~f:Point.assert_finite p ; + Some + { non_zero = Boolean.(true_ &&& true_) + ; point = reduce_point p + } ) without_bound with_bound in Boolean.Assert.is_true non_zero ; @@ -473,35 +637,155 @@ struct (* Just for exhaustiveness over fields *) let iter2 ~chal ~scalar_chal - { Plonk.Minimal.alpha = alpha_0 + { Plonk.Minimal.In_circuit.alpha = alpha_0 ; beta = beta_0 ; gamma = gamma_0 ; zeta = zeta_0 + ; joint_combiner = joint_combiner_0 + ; feature_flags = _ } - { Plonk.Minimal.alpha = alpha_1 + { Plonk.Minimal.In_circuit.alpha = alpha_1 ; beta = beta_1 ; gamma = gamma_1 ; zeta = zeta_1 + ; joint_combiner = joint_combiner_1 + ; feature_flags = _ } = - if G.lookup_verification_enabled then failwith "TODO" else () ; + with_label __LOC__ (fun () -> + match[@warning "-4"] (joint_combiner_0, joint_combiner_1) with + | Nothing, Nothing -> + () + | Maybe (b0, j0), Maybe (b1, j1) -> + Boolean.Assert.(b0 = b1) ; + let (Typ { var_to_fields; _ }) = Scalar_challenge.typ in + Array.iter2_exn ~f:Field.Assert.equal + (fst @@ var_to_fields j0) + (fst @@ var_to_fields j1) + | Just j0, Just j1 -> + let (Typ { var_to_fields; _ }) = Scalar_challenge.typ in + Array.iter2_exn ~f:Field.Assert.equal + (fst @@ var_to_fields j0) + (fst @@ var_to_fields j1) + | ( ((Pickles_types.Opt.Just _ | Maybe _ | Nothing) as j0) + , ((Pickles_types.Opt.Just _ | Maybe _ | Nothing) as j1) ) -> + let sexp_of t = + Sexp.to_string + @@ Types.Opt.sexp_of_t + (fun _ -> Sexp.Atom "") + (fun _ -> Sexp.Atom "") + t + in + failwithf + "incompatible optional states for joint_combiners: %s vs %s" + (sexp_of j0) (sexp_of j1) () ) ; 
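+ (* The remaining challenges are compared pairwise in a fixed order: [beta]
+    and [gamma] are plain challenges checked with [chal], while [alpha] and
+    [zeta] are scalar challenges checked with [scalar_chal]. The optional
+    [joint_combiner] was handled first, above, because its
+    Nothing/Maybe/Just shape must also agree between the two sides. *)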
with_label __LOC__ (fun () -> chal beta_0 beta_1) ; with_label __LOC__ (fun () -> chal gamma_0 gamma_1) ; with_label __LOC__ (fun () -> scalar_chal alpha_0 alpha_1) ; with_label __LOC__ (fun () -> scalar_chal zeta_0 zeta_1) let assert_eq_plonk - (m1 : (_, Field.t Import.Scalar_challenge.t, _) Plonk.Minimal.t) - (m2 : (_, Scalar_challenge.t, _) Plonk.Minimal.t) = + (m1 : (_, Field.t Import.Scalar_challenge.t, _) Plonk.Minimal.In_circuit.t) + (m2 : (_, Scalar_challenge.t, _) Plonk.Minimal.In_circuit.t) = iter2 m1 m2 ~chal:(fun c1 c2 -> Field.Assert.equal c1 c2) ~scalar_chal:(fun ({ inner = t1 } : _ Import.Scalar_challenge.t) ({ inner = t2 } : Scalar_challenge.t) -> Field.Assert.equal t1 t2 ) + let index_to_field_elements ~g (m : _ Plonk_verification_key_evals.Step.t) = + let { Plonk_verification_key_evals.Step.sigma_comm + ; coefficients_comm + ; generic_comm + ; psm_comm + ; complete_add_comm + ; mul_comm + ; emul_comm + ; endomul_scalar_comm + ; range_check0_comm + ; range_check1_comm + ; foreign_field_mul_comm + ; foreign_field_add_comm + ; xor_comm + ; rot_comm + ; lookup_table_comm + ; lookup_table_ids + ; runtime_tables_selector + ; lookup_selector_xor + ; lookup_selector_lookup + ; lookup_selector_range_check + ; lookup_selector_ffmul + } = + m + in + let open Pickles_types in + let g_opt = Opt.map ~f:g in + List.map + ( Vector.to_list sigma_comm + @ Vector.to_list coefficients_comm + @ [ generic_comm + ; psm_comm + ; complete_add_comm + ; mul_comm + ; emul_comm + ; endomul_scalar_comm + ] ) + ~f:(fun x -> Opt.just (g x)) + @ [ g_opt range_check0_comm + ; g_opt range_check1_comm + ; g_opt foreign_field_mul_comm + ; g_opt foreign_field_add_comm + ; g_opt xor_comm + ; g_opt rot_comm + ] + @ List.map ~f:g_opt (Vector.to_list lookup_table_comm) + @ [ g_opt lookup_table_ids + ; g_opt runtime_tables_selector + ; g_opt lookup_selector_xor + ; g_opt lookup_selector_lookup + ; g_opt lookup_selector_range_check + ; g_opt lookup_selector_ffmul + ] + + (** Simulate an [Opt_sponge.t] locally in a block, but without running the + expensive optional logic that is otherwise required. + + Invariant: This requires that the sponge 'state' (i.e. the state after + absorbing or squeezing) is consistent between the initial state and the + final state when using the sponge. + *) + let simulate_optional_sponge_with_alignment (sponge : Sponge.t) ~f = function + | Pickles_types.Opt.Nothing -> + Pickles_types.Opt.Nothing + | Pickles_types.Opt.Maybe (b, x) -> + (* Cache the sponge state before *) + let sponge_state_before = sponge.sponge_state in + let state_before = Array.copy sponge.state in + (* Use the sponge *) + let res = f sponge x in + (* Check that the sponge ends in a compatible state. 
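+ Both branches must leave the sponge at the same absorb/squeeze position:
+ if they do, selecting the state vector field-by-field with the Maybe bit
+ [b] (as done with [Field.if_] just below) yields exactly the state of
+ whichever branch logically ran, which is what a full [Opt_sponge.t]
+ would have produced at higher cost.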
*) + ( match (sponge_state_before, sponge.sponge_state) with + | Absorbed x, Absorbed y -> + [%test_eq: int] x y + | Squeezed x, Squeezed y -> + [%test_eq: int] x y + | Absorbed _, Squeezed _ -> + [%test_eq: string] "absorbed" "squeezed" + | Squeezed _, Absorbed _ -> + [%test_eq: string] "squeezed" "absorbed" ) ; + let state = + Array.map2_exn sponge.state state_before ~f:(fun then_ else_ -> + Field.if_ b ~then_ ~else_ ) + in + sponge.state <- state ; + Pickles_types.Opt.Maybe (b, res) + | Pickles_types.Opt.Just x -> + Pickles_types.Opt.Just (f sponge x) + let incrementally_verify_proof (type b) (module Max_proofs_verified : Nat.Add.Intf with type n = b) ~actual_proofs_verified_mask ~step_domains ~srs - ~verification_key:(m : _ Plonk_verification_key_evals.t) ~xi ~sponge + ~verification_key:(m : (_ array, _) Plonk_verification_key_evals.Step.t) + ~xi ~sponge ~(public_input : [ `Field of Field.t * Boolean.var | `Packed_bits of Field.t * int ] array ) ~(sg_old : (_, Max_proofs_verified.n) Vector.t) ~advice @@ -511,7 +795,7 @@ struct let sg_old = with_label __LOC__ (fun () -> Vector.map2 actual_proofs_verified_mask sg_old ~f:(fun keep sg -> - [| (keep, sg) |] ) ) + (keep, sg) ) ) in with_label __LOC__ (fun () -> let sample () = Opt.challenge sponge in @@ -521,21 +805,27 @@ struct let index_digest = with_label "absorb verifier index" (fun () -> let index_sponge = Sponge.create sponge_params in - Array.iter - (Types.index_to_field_elements - ~g:(fun (z : Inputs.Inner_curve.t) -> - List.to_array (Inner_curve.to_field_elements z) ) + List.iter + (index_to_field_elements + ~g: + (Array.concat_map ~f:(fun (z : Inputs.Inner_curve.t) -> + List.to_array (Inner_curve.to_field_elements z) ) ) m ) - ~f:(fun x -> Sponge.absorb index_sponge x) ; + ~f:(fun x -> + let (_ : (unit, _) Pickles_types.Opt.t) = + simulate_optional_sponge_with_alignment index_sponge x + ~f:(fun sponge x -> + Array.iter ~f:(Sponge.absorb sponge) x ) + in + () ) ; Sponge.squeeze_field index_sponge ) in - let open Plonk_types.Messages in let without = Type.Without_degree_bound in let absorb_g gs = absorb sponge without (Array.map gs ~f:(fun g -> (Boolean.true_, g))) in absorb sponge Field (Boolean.true_, index_digest) ; - Vector.iter ~f:(Array.iter ~f:(absorb sponge PC)) sg_old ; + Vector.iter ~f:(absorb sponge PC) sg_old ; let x_hat = let domain = (which_branch, step_domains) in let public_input = @@ -551,7 +841,7 @@ struct List.partition_map Array.(to_list (mapi public_input ~f:(fun i t -> (i, t)))) ~f:(fun (i, t) -> - match t with + match[@warning "-4"] t with | `Field (Constant c, _) -> First ( if Field.Constant.(equal zero) c then None @@ -586,39 +876,335 @@ struct (List.filter_map terms ~f:(function | `Cond_add _ -> None - | `Add_with_correction (_, (_, corr)) -> - Some corr ) ) - ~f:Ops.add_fast ) + | `Add_with_correction (_, chunks) -> + Some (Array.map ~f:snd chunks) ) ) + ~f:(Array.map2_exn ~f:(Ops.add_fast ?check_finite:None)) ) in with_label __LOC__ (fun () -> let init = List.fold (List.filter_map ~f:Fn.id constant_part) - ~init:correction ~f:Ops.add_fast + ~init:correction + ~f:(Array.map2_exn ~f:(Ops.add_fast ?check_finite:None)) in - List.foldi terms ~init ~f:(fun i acc term -> + List.fold terms ~init ~f:(fun acc term -> match term with | `Cond_add (b, g) -> with_label __LOC__ (fun () -> - Inner_curve.if_ b ~then_:(Ops.add_fast g acc) - ~else_:acc ) - | `Add_with_correction ((x, num_bits), (g, _)) -> - Ops.add_fast acc - (Ops.scale_fast2' - (module Other_field.With_top_bit0) - g x ~num_bits ) ) ) ) - |> 
Inner_curve.negate + Array.map2_exn acc g ~f:(fun acc g -> + Inner_curve.if_ b ~then_:(Ops.add_fast g acc) + ~else_:acc ) ) + | `Add_with_correction ((x, num_bits), chunks) -> + Array.map2_exn acc chunks ~f:(fun acc (g, _) -> + Ops.add_fast acc + (Ops.scale_fast2' + (module Other_field.With_top_bit0) + g x ~num_bits ) ) ) ) ) + |> Array.map ~f:Inner_curve.negate in let x_hat = with_label "x_hat blinding" (fun () -> - Ops.add_fast x_hat - (Inner_curve.constant (Lazy.force Generators.h)) ) + Array.map x_hat ~f:(fun x_hat -> + Ops.add_fast x_hat + (Inner_curve.constant (Lazy.force Generators.h)) ) ) in - absorb sponge PC (Boolean.true_, x_hat) ; + Array.iter x_hat ~f:(fun x_hat -> + absorb sponge PC (Boolean.true_, x_hat) ) ; let w_comm = messages.w_comm in Vector.iter ~f:absorb_g w_comm ; + let runtime_comm = + match messages.lookup with + | Nothing + | Maybe (_, { runtime = Nothing; _ }) + | Just { runtime = Nothing; _ } -> + Pickles_types.Opt.Nothing + | Maybe (b_lookup, { runtime = Maybe (b_runtime, runtime); _ }) -> + let b = Boolean.( &&& ) b_lookup b_runtime in + Pickles_types.Opt.Maybe (b, runtime) + | Maybe (b, { runtime = Just runtime; _ }) + | Just { runtime = Maybe (b, runtime); _ } -> + Pickles_types.Opt.Maybe (b, runtime) + | Just { runtime = Just runtime; _ } -> + Pickles_types.Opt.Just runtime + in + let absorb_runtime_tables () = + match runtime_comm with + | Nothing -> + () + | Maybe (b, runtime) -> + let z = Array.map runtime ~f:(fun z -> (b, z)) in + absorb sponge Without_degree_bound z + | Just runtime -> + let z = Array.map runtime ~f:(fun z -> (Boolean.true_, z)) in + absorb sponge Without_degree_bound z + in + absorb_runtime_tables () ; + let joint_combiner = + let compute_joint_combiner (l : _ Messages.Lookup.In_circuit.t) = + let absorb_sorted_1 sponge = + let (first :: _) = l.sorted in + let z = Array.map first ~f:(fun z -> (Boolean.true_, z)) in + absorb sponge Without_degree_bound z + in + let absorb_sorted_2_to_4 () = + let (_ :: rest) = l.sorted in + Vector.iter rest ~f:(fun z -> + let z = Array.map z ~f:(fun z -> (Boolean.true_, z)) in + absorb sponge Without_degree_bound z ) + in + let absorb_sorted_5 () = + match l.sorted_5th_column with + | Nothing -> + () + | Maybe (b, z) -> + let z = Array.map z ~f:(fun z -> (b, z)) in + absorb sponge Without_degree_bound z + | Just z -> + let z = Array.map z ~f:(fun z -> (Boolean.true_, z)) in + absorb sponge Without_degree_bound z + in + match[@warning "-4"] + (m.lookup_table_comm, m.runtime_tables_selector) + with + | _ :: Just _ :: _, _ | _, Just _ -> + let joint_combiner = sample_scalar () in + absorb_sorted_1 sponge ; + absorb_sorted_2_to_4 () ; + absorb_sorted_5 () ; + joint_combiner + | _ :: Nothing :: _, Nothing -> + absorb_sorted_1 sponge ; + absorb_sorted_2_to_4 () ; + absorb_sorted_5 () ; + { inner = Field.zero } + | _ :: Maybe (b1, _) :: _, Maybe (b2, _) -> + let b = Boolean.(b1 ||| b2) in + let sponge2 = Opt.copy sponge in + let joint_combiner_if_true = + let joint_combiner = sample_scalar () in + absorb_sorted_1 sponge ; joint_combiner + in + let joint_combiner_if_false : Scalar_challenge.t = + absorb_sorted_1 sponge2 ; { inner = Field.zero } + in + Opt.recombine b ~original_sponge:sponge2 sponge ; + absorb_sorted_2_to_4 () ; + absorb_sorted_5 () ; + { inner = + Field.if_ b ~then_:joint_combiner_if_true.inner + ~else_:joint_combiner_if_false.inner + } + | _ :: Maybe (b, _) :: _, _ | _, Maybe (b, _) -> + let sponge2 = Opt.copy sponge in + let joint_combiner_if_true = + let joint_combiner = sample_scalar () in 
+ absorb_sorted_1 sponge ; joint_combiner + in + let joint_combiner_if_false : Scalar_challenge.t = + absorb_sorted_1 sponge2 ; { inner = Field.zero } + in + Opt.recombine b ~original_sponge:sponge2 sponge ; + absorb_sorted_2_to_4 () ; + absorb_sorted_5 () ; + { inner = + Field.if_ b ~then_:joint_combiner_if_true.inner + ~else_:joint_combiner_if_false.inner + } + in + match messages.lookup with + | Nothing -> + Types.Opt.Nothing + | Maybe (b, l) -> + Opt.consume_all_pending sponge ; + let sponge2 = Opt.copy sponge in + let joint_combiner = compute_joint_combiner l in + Opt.consume_all_pending sponge ; + Opt.recombine b ~original_sponge:sponge2 sponge ; + (* We explicitly set this because, when we squeeze for [beta], + there will be no pending values, *but* we don't want to add a + dedicated permutation. + *) + sponge.needs_final_permute_if_empty <- false ; + Types.Opt.Maybe (b, joint_combiner) + | Just l -> + Opt.consume_all_pending sponge ; + Types.Opt.just (compute_joint_combiner l) + in + let lookup_table_comm = + let compute_lookup_table_comm (l : _ Messages.Lookup.In_circuit.t) + joint_combiner = + let (first_column :: second_column :: rest) = m.lookup_table_comm in + let second_column_with_runtime = + match (second_column, l.runtime) with + | Types.Opt.Nothing, comm | comm, Types.Opt.Nothing -> + comm + | ( Types.Opt.Maybe (has_second_column, second_column) + , Types.Opt.Maybe (has_runtime, runtime) ) -> + let second_with_runtime = + let sum = + Array.map2_exn ~f:Inner_curve.( + ) second_column runtime + in + Array.map2_exn second_column sum + ~f:(fun second_column sum -> + Inner_curve.if_ has_runtime ~then_:sum + ~else_:second_column ) + in + let res = + Array.map2_exn second_with_runtime runtime + ~f:(fun second_with_runtime runtime -> + Inner_curve.if_ has_second_column + ~then_:second_with_runtime ~else_:runtime ) + in + let b = Boolean.(has_second_column ||| has_runtime) in + Types.Opt.maybe b res + | ( Types.Opt.Maybe (has_second_column, second_column) + , Types.Opt.Just runtime ) -> + let res = + let sum = + Array.map2_exn ~f:Inner_curve.( + ) second_column runtime + in + Array.map2_exn runtime sum ~f:(fun runtime sum -> + Inner_curve.if_ has_second_column ~then_:sum + ~else_:runtime ) + in + Types.Opt.just res + | ( Types.Opt.Just second_column + , Types.Opt.Maybe (has_runtime, runtime) ) -> + let res = + let sum = + Array.map2_exn ~f:Inner_curve.( + ) second_column runtime + in + Array.map2_exn second_column sum + ~f:(fun second_column sum -> + Inner_curve.if_ has_runtime ~then_:sum + ~else_:second_column ) + in + Types.Opt.just res + | Types.Opt.Just second_column, Types.Opt.Just runtime -> + Types.Opt.just + (Array.map2_exn ~f:Inner_curve.( + ) second_column runtime) + in + let rest_rev = + Vector.rev (first_column :: second_column_with_runtime :: rest) + in + Vector.fold ~init:m.lookup_table_ids rest_rev ~f:(fun acc comm -> + match acc with + | Types.Opt.Nothing -> + comm + | Types.Opt.Maybe (has_acc, acc) -> ( + match comm with + | Types.Opt.Nothing -> + Types.Opt.maybe has_acc acc + | Types.Opt.Maybe (has_comm, comm) -> + let scaled_acc = + Array.map acc ~f:(fun acc -> + Scalar_challenge.endo acc joint_combiner ) + in + let sum = + Array.map2_exn ~f:Inner_curve.( + ) scaled_acc comm + in + let acc_with_comm = + Array.map2_exn sum comm ~f:(fun sum comm -> + Inner_curve.if_ has_acc ~then_:sum ~else_:comm ) + in + let res = + Array.map2_exn acc acc_with_comm + ~f:(fun acc acc_with_comm -> + Inner_curve.if_ has_comm ~then_:acc_with_comm + ~else_:acc ) + in + let b =
Boolean.(has_acc ||| has_comm) in + Types.Opt.maybe b res + | Types.Opt.Just comm -> + let scaled_acc = + Array.map acc ~f:(fun acc -> + Scalar_challenge.endo acc joint_combiner ) + in + let sum = + Array.map2_exn ~f:Inner_curve.( + ) scaled_acc comm + in + let res = + Array.map2_exn sum comm ~f:(fun sum comm -> + Inner_curve.if_ has_acc ~then_:sum ~else_:comm ) + in + Types.Opt.just res ) + | Types.Opt.Just acc -> ( + match comm with + | Types.Opt.Nothing -> + Types.Opt.just acc + | Types.Opt.Maybe (has_comm, comm) -> + let scaled_acc = + Array.map acc ~f:(fun acc -> + Scalar_challenge.endo acc joint_combiner ) + in + let sum = + Array.map2_exn ~f:Inner_curve.( + ) scaled_acc comm + in + let res = + Array.map2_exn sum acc ~f:(fun sum acc -> + Inner_curve.if_ has_comm ~then_:sum ~else_:acc ) + in + Types.Opt.just res + | Types.Opt.Just comm -> + let scaled_acc = + Array.map acc ~f:(fun acc -> + Scalar_challenge.endo acc joint_combiner ) + in + Types.Opt.Just + (Array.map2_exn ~f:Inner_curve.( + ) scaled_acc comm) + ) ) + in + match (messages.lookup, joint_combiner) with + | Types.Opt.Nothing, Types.Opt.Nothing -> + Types.Opt.Nothing + | ( Types.Opt.Maybe (b_l, l) + , Types.Opt.Maybe (_b_joint_combiner, joint_combiner) ) -> ( + (* NB: b_l = _b_joint_combiner by construction *) + match compute_lookup_table_comm l joint_combiner with + | Types.Opt.Nothing -> + Types.Opt.Nothing + | Types.Opt.Maybe (b_lookup_table_comm, lookup_table_comm) -> + Types.Opt.Maybe + (Boolean.(b_l &&& b_lookup_table_comm), lookup_table_comm) + | Types.Opt.Just lookup_table_comm -> + Types.Opt.Maybe (b_l, lookup_table_comm) ) + | Types.Opt.Just l, Types.Opt.Just joint_combiner -> + compute_lookup_table_comm l joint_combiner + | ( (Types.Opt.Nothing | Maybe _ | Just _) + , (Types.Opt.Nothing | Maybe _ | Just _) ) -> + assert false + in + let lookup_sorted = + let lookup_sorted_minus_1 = + Nat.to_int Plonk_types.Lookup_sorted_minus_1.n + in + Vector.init Plonk_types.Lookup_sorted.n ~f:(fun i -> + match messages.lookup with + | Types.Opt.Nothing -> + Types.Opt.Nothing + | Types.Opt.Maybe (b, l) -> + if i = lookup_sorted_minus_1 then l.sorted_5th_column + else + Types.Opt.Maybe (b, Option.value_exn (Vector.nth l.sorted i)) + | Types.Opt.Just l -> + if i = lookup_sorted_minus_1 then l.sorted_5th_column + else Types.Opt.Just (Option.value_exn (Vector.nth l.sorted i)) ) + in let beta = sample () in let gamma = sample () in + let () = + match messages.lookup with + | Nothing -> + () + | Maybe (b, l) -> + let aggreg = Array.map l.aggreg ~f:(fun z -> (b, z)) in + absorb sponge Without_degree_bound aggreg + | Just l -> + let aggreg = + Array.map l.aggreg ~f:(fun z -> (Boolean.true_, z)) + in + absorb sponge Without_degree_bound aggreg + in let z_comm = messages.z_comm in absorb_g z_comm ; let alpha = sample_scalar () in @@ -635,12 +1221,14 @@ struct *) let sponge = match sponge with - | { state; sponge_state; params } -> ( - match sponge_state with - | Squeezed n -> - S.make ~state ~sponge_state:(Squeezed n) ~params - | _ -> - assert false ) + | { state + ; sponge_state = Squeezed n + ; params + ; needs_final_permute_if_empty = _ + } -> + S.make ~state ~sponge_state:(Squeezed n) ~params + | { sponge_state = Absorbing _; _ } -> + assert false in let sponge_before_evaluations = Sponge.copy sponge in let sponge_digest_before_evaluations = Sponge.squeeze_field sponge in @@ -660,9 +1248,13 @@ struct in let ft_comm = with_label __LOC__ (fun () -> - Common.ft_comm ~add:Ops.add_fast ~scale:scale_fast - ~negate:Inner_curve.negate 
~endoscale:Scalar_challenge.endo - ~verification_key:m ~plonk ~alpha ~t_comm ) + Common.ft_comm + ~add:(Ops.add_fast ?check_finite:None) + ~scale:scale_fast ~negate:Inner_curve.negate + ~verification_key: + (Plonk_verification_key_evals.Step.forget_optional_commitments + m ) + ~plonk ~t_comm ) in let bulletproof_challenges = (* This sponge needs to be initialized with (some derivative of) @@ -673,8 +1265,20 @@ struct It should be sufficient to fork the sponge after squeezing beta_3 and then to absorb the combined inner product. *) - let num_commitments_without_degree_bound = Nat.N45.n in + let len_1, len_1_add = Plonk_types.(Columns.add Permuts_minus_1.n) in + let len_2, len_2_add = Plonk_types.(Columns.add len_1) in + let _len_3, len_3_add = Nat.N9.add len_2 in + let _len_4, len_4_add = Nat.N6.add Plonk_types.Lookup_sorted.n in + let len_5, len_5_add = + (* NB: Using explicit 11 because we can't get add on len_4 *) + Nat.N11.add Nat.N8.n + in + let len_6, len_6_add = Nat.N45.add len_5 in + let num_commitments_without_degree_bound = len_6 in let without_degree_bound = + let append_chain len second first = + Vector.append first second len + in (* sg_old x_hat ft_comm @@ -684,22 +1288,46 @@ struct w_comms all but last sigma_comm *) - Vector.append sg_old - ( [| x_hat |] :: [| ft_comm |] :: z_comm :: [| m.generic_comm |] - :: [| m.psm_comm |] :: [| m.complete_add_comm |] - :: [| m.mul_comm |] :: [| m.emul_comm |] - :: [| m.endomul_scalar_comm |] - :: Vector.append w_comm - (Vector.append - (Vector.map m.coefficients_comm ~f:(fun g -> [| g |])) - (Vector.map sigma_comm_init ~f:(fun g -> [| g |])) - (snd Plonk_types.(Columns.add Permuts_minus_1.n)) ) - (snd - Plonk_types.( - Columns.add (fst (Columns.add Permuts_minus_1.n))) ) - |> Vector.map ~f:(Array.map ~f:(fun g -> (Boolean.true_, g))) ) - (snd - (Max_proofs_verified.add num_commitments_without_degree_bound) ) + Vector.map sg_old ~f:(fun (keep, p) -> + Pickles_types.Opt.Maybe (keep, [| p |]) ) + |> append_chain + (snd (Max_proofs_verified.add len_6)) + ( [ x_hat + ; [| ft_comm |] + ; z_comm + ; m.generic_comm + ; m.psm_comm + ; m.complete_add_comm + ; m.mul_comm + ; m.emul_comm + ; m.endomul_scalar_comm + ] + |> append_chain len_3_add + (Vector.append w_comm + (Vector.append m.coefficients_comm sigma_comm_init + len_1_add ) + len_2_add ) + |> Vector.map ~f:Pickles_types.Opt.just + |> append_chain len_6_add + ( [ m.range_check0_comm + ; m.range_check1_comm + ; m.foreign_field_add_comm + ; m.foreign_field_mul_comm + ; m.xor_comm + ; m.rot_comm + ] + |> append_chain len_4_add lookup_sorted + |> append_chain len_5_add + [ Pickles_types.Opt.map messages.lookup ~f:(fun l -> + l.aggreg ) + ; lookup_table_comm + ; runtime_comm + ; m.runtime_tables_selector + ; m.lookup_selector_xor + ; m.lookup_selector_lookup + ; m.lookup_selector_range_check + ; m.lookup_selector_ffmul + ] ) ) in check_bulletproof ~pcs_batch: @@ -708,18 +1336,17 @@ struct ~sponge:sponge_before_evaluations ~xi ~advice ~openings_proof ~polynomials: ( Vector.map without_degree_bound - ~f:(Array.map ~f:(fun (keep, x) -> (keep, `Finite x))) + ~f: + (Pickles_types.Opt.map + ~f:(Array.map ~f:(fun x -> `Finite x)) ) , [] ) in - let joint_combiner = - if G.lookup_verification_enabled then failwith "TODO" else None - in assert_eq_plonk { alpha = plonk.alpha ; beta = plonk.beta ; gamma = plonk.gamma ; zeta = plonk.zeta - ; joint_combiner + ; joint_combiner = plonk.joint_combiner ; feature_flags = plonk.feature_flags } { alpha @@ -731,18 +1358,20 @@ struct } ; 
(sponge_digest_before_evaluations, bulletproof_challenges) ) - let mask_evals (type n) ~(lengths : (int, n) Vector.t Evals.t) - (choice : n One_hot_vector.t) (e : Field.t array Evals.t) : - (Boolean.var * Field.t) array Evals.t = - Evals.map2 lengths e ~f:(fun lengths e -> + let _mask_evals (type n) + ~(lengths : + (int, n) Pickles_types.Vector.t Pickles_types.Plonk_types.Evals.t ) + (choice : n One_hot_vector.t) + (e : Field.t array Pickles_types.Plonk_types.Evals.t) : + (Boolean.var * Field.t) array Pickles_types.Plonk_types.Evals.t = + Pickles_types.Plonk_types.Evals.map2 lengths e ~f:(fun lengths e -> Array.zip_exn (mask lengths choice) e ) let compute_challenges ~scalar chals = Vector.map chals ~f:(fun prechallenge -> scalar @@ Bulletproof_challenge.pack prechallenge ) - let challenge_polynomial chals = - Field.(G.challenge_polynomial ~add ~mul ~one chals) + let challenge_polynomial = G.challenge_polynomial (module Field) let pow2pow (pt : Field.t) (n : int) : Field.t = with_label __LOC__ (fun () -> @@ -782,17 +1411,14 @@ struct | [] -> failwith "empty list" ) - let shift1 = - Shifted_value.Type1.Shift.( + let _shift1 = + Pickles_types.Shifted_value.Type1.Shift.( map ~f:Field.constant (create (module Field.Constant))) let shift2 = Shifted_value.Type2.Shift.( map ~f:Field.constant (create (module Field.Constant))) - let%test_unit "endo scalar" = - SC.test (module Impl) ~endo:Endo.Step_inner_curve.scalar - let map_plonk_to_field plonk = Types.Step.Proof_state.Deferred_values.Plonk.In_circuit.map_challenges ~f:(Util.seal (module Impl)) @@ -805,9 +1431,6 @@ struct include Plonk_checks.Make (Shifted_value.Type2) (Plonk_checks.Scalars.Tock) end - let field_array_if b ~then_ ~else_ = - Array.map2_exn then_ else_ ~f:(fun x1 x2 -> Field.if_ b ~then_:x1 ~else_:x2) - (* This finalizes the "deferred values" coming from a previous proof over the same field. It 1. Checks that [xi] and [r] where sampled correctly. I.e., by absorbing all the @@ -851,8 +1474,8 @@ struct in Sponge.absorb sponge challenge_digest ; Sponge.absorb sponge ft_eval1 ; - Sponge.absorb sponge (fst evals.public_input) ; - Sponge.absorb sponge (snd evals.public_input) ; + Array.iter ~f:(Sponge.absorb sponge) (fst evals.public_input) ; + Array.iter ~f:(Sponge.absorb sponge) (snd evals.public_input) ; let xs = Evals.In_circuit.to_absorption_sequence evals.evals in (* This is a hacky, but much more efficient, version of the opt sponge. 
This uses the assumption that the sponge 'absorption state' will align @@ -863,9 +1486,9 @@ struct List.iter xs ~f:(fun opt -> let absorb = Array.iter ~f:(fun x -> Sponge.absorb sponge x) in match opt with - | None -> + | Nothing -> () - | Some (x1, x2) -> + | Just (x1, x2) -> absorb x1 ; absorb x2 | Maybe (b, (x1, x2)) -> (* Cache the sponge state before *) @@ -940,7 +1563,7 @@ struct Plonk_checks.scalars_env (module Env_bool) (module Env_field) - ~srs_length_log2:Common.Max_degree.wrap_log2 + ~srs_length_log2:Common.Max_degree.wrap_log2 ~zk_rows:3 ~endo:(Impl.Field.constant Endo.Wrap_inner_curve.base) ~mds:sponge_params.mds ~field_of_hex:(fun s -> @@ -966,15 +1589,15 @@ struct let a = Evals.In_circuit.to_list e |> List.map ~f:(function - | None -> + | Nothing -> [||] - | Some a -> - Array.map a ~f:(fun x -> Plonk_types.Opt.Some x) + | Just a -> + Array.map a ~f:Pickles_types.Opt.just | Maybe (b, a) -> - Array.map a ~f:(fun x -> Plonk_types.Opt.Maybe (b, x)) ) + Array.map a ~f:(Pickles_types.Opt.maybe b) ) in let sg_evals = - Vector.map sg_evals ~f:(fun x -> [| Plonk_types.Opt.Some x |]) + Vector.map sg_evals ~f:(fun x -> [| Pickles_types.Opt.just x |]) |> Vector.to_list (* TODO: This was the code before the wrap hack was put in match actual_proofs_verified with @@ -991,7 +1614,10 @@ struct [| Field.((b :> t) * f pt) |] ) ) *) in let v = - List.append sg_evals ([| Some x_hat |] :: [| Some ft |] :: a) + List.append sg_evals + ( Array.map ~f:Pickles_types.Opt.just x_hat + :: [| Pickles_types.Opt.just ft |] + :: a ) in Common.combined_evaluation (module Impl) ~xi v in @@ -1032,7 +1658,7 @@ struct (module Impl) ~env ~shift:shift2 (Composition_types.Step.Proof_state.Deferred_values.Plonk.In_circuit - .to_wrap ~opt_none:Plonk_types.Opt.None ~false_:Boolean.false_ + .to_wrap ~opt_none:Pickles_types.Opt.nothing ~false_:Boolean.false_ plonk ) combined_evals ) in @@ -1048,8 +1674,8 @@ struct ] , bulletproof_challenges ) - let map_challenges - { Types.Step.Proof_state.Deferred_values.plonk + let _map_challenges + { Import.Types.Step.Proof_state.Deferred_values.plonk ; combined_inner_product ; xi ; bulletproof_challenges diff --git a/src/lib/pickles/wrap_verifier.mli b/src/lib/pickles/wrap_verifier.mli index 6a2d43df2d5..baffd665ecd 100644 --- a/src/lib/pickles/wrap_verifier.mli +++ b/src/lib/pickles/wrap_verifier.mli @@ -1,12 +1,11 @@ (** Generic (polymorphic instance of [challenge_polynomial]) *) val challenge_polynomial : - one:'a - -> add:('a -> 'b -> 'b) - -> mul:('b -> 'b -> 'b) - -> 'b array - -> ('b -> 'b) Core_kernel.Staged.t + (module Pickles_types.Shifted_value.Field_intf with type t = 'a) + -> 'a array + -> ('a -> 'a) Core_kernel.Staged.t -type 'a index' = 'a Pickles_types.Plonk_verification_key_evals.t +type ('a, 'a_opt) index' = + ('a, 'a_opt) Pickles_types.Plonk_verification_key_evals.Step.t module Challenge : module type of Import.Challenge.Make (Impls.Wrap) @@ -63,8 +62,11 @@ val incrementally_verify_proof : -> step_domains:(Import.Domains.t, 'a) Pickles_types.Vector.t -> srs:Kimchi_bindings.Protocol.SRS.Fp.t -> verification_key: - Wrap_main_inputs.Inner_curve.t - Pickles_types.Plonk_verification_key_evals.t + ( Wrap_main_inputs.Inner_curve.t array + , ( Wrap_main_inputs.Inner_curve.t array + , Impls.Wrap.Boolean.var ) + Pickles_types.Opt.t ) + Pickles_types.Plonk_verification_key_evals.Step.t -> xi:Scalar_challenge.t -> sponge:Opt.t -> public_input: @@ -81,7 +83,7 @@ val incrementally_verify_proof : Import.Types.Step.Bulletproof.Advice.t -> messages: ( Wrap_main_inputs.Impl.Field.t 
* Wrap_main_inputs.Impl.Field.t - , 'c ) + , Wrap_main_inputs.Impl.Boolean.var ) Pickles_types.Plonk_types.Messages.In_circuit.t -> which_branch:'a One_hot_vector.t -> openings_proof: @@ -94,8 +96,10 @@ val incrementally_verify_proof : , Wrap_main_inputs.Impl.Field.t Pickles_types.Shifted_value.Type1.t , ( Wrap_main_inputs.Impl.Field.t Pickles_types.Shifted_value.Type1.t , Wrap_main_inputs.Impl.Boolean.var ) - Pickles_types.Plonk_types.Opt.t - , 'd + Pickles_types.Opt.t + , ( Wrap_main_inputs.Impl.Field.t Import.Scalar_challenge.t + , Wrap_main_inputs.Impl.Boolean.var ) + Pickles_types.Opt.t , Wrap_main_inputs.Impl.Boolean.var ) Import.Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.t -> Wrap_main_inputs.Impl.Field.t @@ -140,5 +144,15 @@ val finalize_other_proof : val choose_key : 'n. 'n One_hot_vector.t - -> (Wrap_main_inputs.Inner_curve.t index', 'n) Pickles_types.Vector.t - -> Wrap_main_inputs.Inner_curve.t index' + -> ( ( Wrap_main_inputs.Inner_curve.t array + , ( Wrap_main_inputs.Inner_curve.t array + , Impls.Wrap.Boolean.var ) + Pickles_types.Opt.t ) + index' + , 'n ) + Pickles_types.Vector.t + -> ( Wrap_main_inputs.Inner_curve.t array + , ( Wrap_main_inputs.Inner_curve.t array + , Impls.Wrap.Boolean.var ) + Pickles_types.Opt.t ) + index' diff --git a/src/lib/pickles/wrap_wire_proof.ml b/src/lib/pickles/wrap_wire_proof.ml index a47493efc80..e0ffba689b7 100644 --- a/src/lib/pickles/wrap_wire_proof.ml +++ b/src/lib/pickles/wrap_wire_proof.ml @@ -1,6 +1,5 @@ open Core_kernel open Pickles_types -open Plonk_types module Columns = Nat.N15 module Columns_vec = Vector.Vector_15 module Coefficients = Nat.N15 @@ -10,6 +9,8 @@ module Quotient_polynomial_vec = Vector.Vector_7 module Permuts_minus_1 = Nat.N6 module Permuts_minus_1_vec = Vector.Vector_6 +[@@@warning "-4"] + module Commitments = struct [%%versioned module Stable = struct @@ -27,6 +28,8 @@ module Commitments = struct } [@@deriving compare, sexp, yojson, hash, equal] + [@@@warning "+4"] + let to_latest = Fn.id end end] @@ -48,6 +51,8 @@ module Commitments = struct } end +[@@@warning "-4"] + module Evaluations = struct [%%versioned module Stable = struct @@ -79,6 +84,8 @@ module Evaluations = struct } [@@deriving compare, sexp, yojson, hash, equal] + [@@@warning "+4"] + let to_latest = Fn.id end end] @@ -169,6 +176,8 @@ module Evaluations = struct } end +[@@@warning "-4"] + [%%versioned module Stable = struct module V1 = struct @@ -184,6 +193,8 @@ module Stable = struct } [@@deriving compare, sexp, yojson, hash, equal] + [@@@warning "+4"] + let to_latest = Fn.id end end] diff --git a/src/lib/pickles/wrap_wire_proof.mli b/src/lib/pickles/wrap_wire_proof.mli index 68e6764b73c..9a5a85436b3 100644 --- a/src/lib/pickles/wrap_wire_proof.mli +++ b/src/lib/pickles/wrap_wire_proof.mli @@ -4,7 +4,6 @@ open Core_kernel open Pickles_types -open Plonk_types module Columns = Nat.N15 module Columns_vec = Vector.Vector_15 module Coefficients = Nat.N15 diff --git a/src/lib/pickles_base/domain.mli b/src/lib/pickles_base/domain.mli index a0750553431..3c8dee266e2 100644 --- a/src/lib/pickles_base/domain.mli +++ b/src/lib/pickles_base/domain.mli @@ -1,4 +1,4 @@ -(* Domain specification *) +(** Wrapper around the integers used to compute the NTT domain *) module Stable : sig module V1 : sig diff --git a/src/lib/pickles_base/domains.mli b/src/lib/pickles_base/domains.mli index 61da40b2f36..a799b1442c4 100644 --- a/src/lib/pickles_base/domains.mli +++ b/src/lib/pickles_base/domains.mli @@ -1,4 +1,5 @@ -(* Domains *) +(** Artifact from when Marlin was used, as Marlin
requires multiple domains. It should + not be used anymore. *) module Stable : sig module V2 : sig diff --git a/src/lib/pickles_base/proofs_verified.ml b/src/lib/pickles_base/proofs_verified.ml index 7f7dd662270..5560daf957d 100644 --- a/src/lib/pickles_base/proofs_verified.ml +++ b/src/lib/pickles_base/proofs_verified.ml @@ -40,7 +40,7 @@ let to_int : t -> int = function N0 -> 0 | N1 -> 1 | N2 -> 2 type proofs_verified = t -let of_nat (type n) (n : n Pickles_types.Nat.t) : t = +let of_nat_exn (type n) (n : n Pickles_types.Nat.t) : t = let open Pickles_types.Nat in match n with | Z -> @@ -50,9 +50,11 @@ let of_nat (type n) (n : n Pickles_types.Nat.t) : t = | S (S Z) -> N2 | S _ -> - failwithf "Proofs_verified.of_nat: got %d" (to_int n) () + raise + (Invalid_argument + (Printf.sprintf "Proofs_verified.of_nat: got %d" (to_int n)) ) -let of_int (n : int) : t = +let of_int_exn (n : int) : t = match n with | 0 -> N0 @@ -61,7 +63,8 @@ let of_int (n : int) : t = | 2 -> N2 | _ -> - failwithf "Proofs_verified.of_int: got %d" n () + raise + (Invalid_argument (Printf.sprintf "Proofs_verified.of_int: got %d" n)) type 'f boolean = 'f Snarky_backendless.Cvar.t Snarky_backendless.Boolean.t diff --git a/src/lib/pickles_base/proofs_verified.mli b/src/lib/pickles_base/proofs_verified.mli index 68cb088a454..9942ce8a191 100644 --- a/src/lib/pickles_base/proofs_verified.mli +++ b/src/lib/pickles_base/proofs_verified.mli @@ -1,3 +1,4 @@ +(** Represents how many proofs are verified. Currently only [0], [1] or [2] *) module Stable : sig module V1 : sig type t = Mina_wire_types.Pickles_base.Proofs_verified.V1.t = N0 | N1 | N2 @@ -12,10 +13,16 @@ end type t = Stable.V1.t = N0 | N1 | N2 [@@deriving sexp, compare, yojson, hash, equal] -val of_nat : 'n Pickles_types.Nat.t -> t +(** [of_nat_exn t_n] converts the type level natural [t_n] to the data type natural. + Raise an exception if [t_n] represents a value above or equal to 3 *) +val of_nat_exn : 'n Pickles_types.Nat.t -> t -val of_int : int -> t +(** [of_int_exn n] converts the runtime natural [n] to the data type natural. Raise + an exception if the value [n] is above or equal to 3 *) +val of_int_exn : int -> t +(** [to_int v] converts the value [v] to the corresponding integer, i.e [N0 -> + 0], [N1 -> 1] and [N2 -> 2] *) val to_int : t -> int module One_hot : sig @@ -35,6 +42,7 @@ end type 'f boolean = 'f Snarky_backendless.Cvar.t Snarky_backendless.Boolean.t +(** Vector of 2 elements *) type 'a vec2 = ('a, Pickles_types.Nat.N2.n) Pickles_types.Vector.t module Prefix_mask : sig diff --git a/src/lib/pickles_base/side_loaded_verification_key.mli b/src/lib/pickles_base/side_loaded_verification_key.mli index 6211fcb610b..a2090b43c33 100644 --- a/src/lib/pickles_base/side_loaded_verification_key.mli +++ b/src/lib/pickles_base/side_loaded_verification_key.mli @@ -1,4 +1,4 @@ -(* Module Side_loaded_verification_key *) +(** Homogeneous verification key representation, for use when the key is chosen dynamically according to some external logic *) module Poly : sig module Stable : sig diff --git a/src/lib/pickles_types/README.md b/src/lib/pickles_types/README.md new file mode 100644 index 00000000000..97038450370 --- /dev/null +++ b/src/lib/pickles_types/README.md @@ -0,0 +1,5 @@ +## Pickles types + +This library provides data structures encoded at the type level. The idea is to +encode runtime invariants and rely on the OCaml compiler to verify properties at +compile time instead of adding a runtime overhead. 
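+
+For example, here is a minimal, self-contained sketch of the idea (the
+definitions below are illustrative only, not this library's actual types):
+
+```ocaml
+(* Type-level Peano naturals: [z] is zero, ['n s] is the successor of ['n]. *)
+type z = Z
+
+type 'n s = S of 'n
+
+(* A list whose length is tracked in its type. *)
+type ('a, 'n) vec =
+  | Nil : ('a, z) vec
+  | Cons : 'a * ('a, 'n) vec -> ('a, 'n s) vec
+
+(* [hd] only accepts provably non-empty vectors, so the "empty list" case
+   is ruled out by the compiler rather than checked at runtime. *)
+let hd : type a n. (a, n s) vec -> a = function Cons (x, _) -> x
+
+let one = hd (Cons (1, Cons (2, Nil))) (* [hd Nil] does not typecheck *)
+```
+
+This library applies the same idea at larger scale, e.g. in its [Nat] and
+[Vector] modules.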
diff --git a/src/lib/pickles_types/abc.ml b/src/lib/pickles_types/abc.ml deleted file mode 100644 index 777a4ea7cb8..00000000000 --- a/src/lib/pickles_types/abc.ml +++ /dev/null @@ -1,17 +0,0 @@ -open Core_kernel - -[%%versioned -module Stable = struct - module V1 = struct - type 'a t = { a : 'a; b : 'a; c : 'a } - [@@deriving sexp, equal, compare, hash, yojson, hlist, fields] - - (* TODO: sexp, compare, hash, yojson, hlist and fields seem unused *) - end -end] - -module Label = struct - type t = A | B | C [@@deriving equal] - - let all = [ A; B; C ] -end diff --git a/src/lib/pickles_types/abc.mli b/src/lib/pickles_types/abc.mli deleted file mode 100644 index 98a15db4df0..00000000000 --- a/src/lib/pickles_types/abc.mli +++ /dev/null @@ -1,8 +0,0 @@ -(* Implementing triplets *) - -module Label : sig - type t = A | B | C [@@deriving equal] - - (** [all] returns the set of all elements of type {!t} as a list. *) - val all : t list -end diff --git a/src/lib/pickles_types/at_most.mli b/src/lib/pickles_types/at_most.mli index d95a6278e2f..4a80301db66 100644 --- a/src/lib/pickles_types/at_most.mli +++ b/src/lib/pickles_types/at_most.mli @@ -1,5 +1,5 @@ -(** Implementing structure with pre-defined length *) -(* TODO: Check if that's adequate *) +(** Implementing vectors with pre-defined length. For a fixed length version, see + {!Vector} *) (** {2 Type definitions} *) @@ -52,5 +52,6 @@ module With_length (N : Nat.Intf) : S with type 'a t = ('a, N.n) at_most val of_vector : 'a 'n 'm. ('a, 'n) Vector.vec -> ('n, 'm) Nat.Lte.t -> ('a, 'm) t -(** [to_vector m] transforms [m] into a vector *) +(** [to_vector m] transforms [m] into a vector whose length is forgotten at the + type level and only contains runtime data *) val to_vector : 'a 'n. ('a, 'n) t -> 'a Vector.e diff --git a/src/lib/pickles_types/dune b/src/lib/pickles_types/dune index 03b617836e9..2dbfb9890ef 100644 --- a/src/lib/pickles_types/dune +++ b/src/lib/pickles_types/dune @@ -32,6 +32,7 @@ base.caml bin_prot.shape ;; local libraries + kimchi_types snarky.backendless tuple_lib ppx_version.runtime diff --git a/src/lib/pickles_types/hlist.mli b/src/lib/pickles_types/hlist.mli index a38de1e458c..a54f601ab8b 100644 --- a/src/lib/pickles_types/hlist.mli +++ b/src/lib/pickles_types/hlist.mli @@ -300,7 +300,7 @@ module H1 : sig end (** Data type of a heterogeneous list of pairs. - + Both sides of the tuple are heterogeneous over the same type parameter. The underlying type structures are determined by the type functions in the first and second functor parameters. @@ -309,8 +309,8 @@ module H1 : sig type 'a t = 'a A.t * 'a B.t end - (** Usual zipping operation over two heterogeneous lists. - + (** Usual zipping operation over two heterogeneous lists. + The two functor parameters define the underlying contained type structures of the two lists. @@ -542,7 +542,7 @@ end (** {2 Over three type parameters} *) -(** Operations on heterogeneous lists whose content type varies over a tree +(** Operations on heterogeneous lists whose content type varies over a tree type parameters. Similar to {!H1}, with less operations. 
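+
+    As intuition for the [H1]-style lists used throughout this interface,
+    here is a minimal standalone sketch (the [Mini_h1] and [Mini_zip] names
+    below are illustrative only and are not part of this library):
+
+    {[
+      (* A list whose elements are ['a F.t] for varying ['a]; the sequence
+         of those ['a]s is tracked in the list's type parameter. *)
+      module Mini_h1 (F : sig
+        type 'a t
+      end) =
+      struct
+        type _ t = Nil : unit t | Cons : 'a F.t * 'b t -> ('a * 'b) t
+      end
+
+      (* Zipping two such lists over the same type parameters yields a list
+         of pairs, in the style of [H1.Zip]. *)
+      module Mini_zip (A : sig
+        type 'a t
+      end) (B : sig
+        type 'a t
+      end) =
+      struct
+        module P = struct
+          type 'a t = 'a A.t * 'a B.t
+        end
+
+        module LA = Mini_h1 (A)
+        module LB = Mini_h1 (B)
+        module LP = Mini_h1 (P)
+
+        let rec zip : type xs. xs LA.t -> xs LB.t -> xs LP.t =
+         fun xs ys ->
+          match (xs, ys) with
+          | LA.Nil, LB.Nil -> LP.Nil
+          | LA.Cons (x, xs), LB.Cons (y, ys) -> LP.Cons ((x, y), zip xs ys)
+      end
+    ]}
+
+    A mismatch between the two lists' type parameters is then a compile-time
+    error rather than a runtime failure of [zip].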
diff --git a/src/lib/pickles_types/nat.mli b/src/lib/pickles_types/nat.mli index 5809d6a652b..358d5f3f206 100644 --- a/src/lib/pickles_types/nat.mli +++ b/src/lib/pickles_types/nat.mli @@ -1,4 +1,4 @@ -(** Representation of naturals for Pickles *) +(** Representation of naturals for Pickles with Peano encoding *) (** {1 Type definitions} *) diff --git a/src/lib/pickles_types/opt.ml b/src/lib/pickles_types/opt.ml new file mode 100644 index 00000000000..13a8fe92650 --- /dev/null +++ b/src/lib/pickles_types/opt.ml @@ -0,0 +1,185 @@ +open Core_kernel + +[@@@warning "-4"] + +type ('a, 'bool) t = Just of 'a | Nothing | Maybe of 'bool * 'a +[@@deriving sexp, compare, yojson, hash, equal] + +let just a = Just a + +let nothing = Nothing + +let maybe b x = Maybe (b, x) + +let to_option : ('a, bool) t -> 'a option = function + | Just x -> + Some x + | Maybe (true, x) -> + Some x + | Maybe (false, _x) -> + None + | Nothing -> + None + +let to_option_unsafe : ('a, 'bool) t -> 'a option = function + | Just x -> + Some x + | Maybe (_, x) -> + Some x + | Nothing -> + None + +let value_exn = function + | Just x -> + x + | Maybe (_, x) -> + x + | Nothing -> + invalid_arg "Opt.value_exn" + +let of_option (t : 'a option) : ('a, 'bool) t = + match t with None -> Nothing | Some x -> Just x + +let lift ?on_maybe ~nothing f = function + | Nothing -> + nothing + | Just v -> + f v + | Maybe (b, v) -> ( + match on_maybe with None -> f v | Some g -> g b v ) + +module Flag = struct + type t = Yes | No | Maybe [@@deriving sexp, compare, yojson, hash, equal] + + let ( ||| ) x y = + match (x, y) with + | Yes, _ | _, Yes -> + Yes + | Maybe, _ | _, Maybe -> + Maybe + | No, No -> + No +end + +let map t ~f = + match t with + | Nothing -> + Nothing + | Just x -> + Just (f x) + | Maybe (b, x) -> + Maybe (b, f x) + +let iter t ~f = + match t with Nothing -> () | Just x -> f x | Maybe (_, x) -> f x + +open Snarky_backendless + +let some_typ (type a a_var f bool_var) (t : (a_var, a, f) Typ.t) : + ((a_var, bool_var) t, a option, f) Typ.t = + Typ.transport t ~there:(fun x -> Option.value_exn x) ~back:Option.return + |> Typ.transport_var + ~there:(function + | Just x -> + x + | Maybe _ | Nothing -> + failwith "Opt.some_typ: expected Just" ) + ~back:(fun x -> Just x) + +let none_typ (type a a_var f bool) () : ((a_var, bool) t, a option, f) Typ.t = + Typ.transport (Typ.unit ()) + ~there:(fun _ -> ()) + ~back:(fun () : _ Option.t -> None) + |> Typ.transport_var + ~there:(function + | Nothing -> + () + | Maybe _ | Just _ -> + failwith "Opt.none_typ: expected Nothing" ) + ~back:(fun () : _ t -> Nothing) + +let maybe_typ (type a a_var bool_var f) + (bool_typ : (bool_var, bool, f) Snarky_backendless.Typ.t) ~(dummy : a) + (a_typ : (a_var, a, f) Typ.t) : ((a_var, bool_var) t, a option, f) Typ.t = + Typ.transport + (Typ.tuple2 bool_typ a_typ) + ~there:(fun (t : a option) -> + match t with None -> (false, dummy) | Some x -> (true, x) ) + ~back:(fun (b, x) -> if b then Some x else None) + |> Typ.transport_var + ~there:(fun (t : (a_var, _) t) -> + match t with + | Maybe (b, x) -> + (b, x) + | Nothing | Just _ -> + failwith "Opt.maybe_typ: expected Maybe" ) + ~back:(fun (b, x) -> Maybe (b, x)) + +let constant_layout_typ (type a a_var f) (bool_typ : _ Typ.t) ~true_ ~false_ + (flag : Flag.t) (a_typ : (a_var, a, f) Typ.t) ~(dummy : a) + ~(dummy_var : a_var) = + let (Typ bool_typ) = bool_typ in + let bool_typ : _ Typ.t = + let check = + (* No need to boolean constrain in the No or Yes case *) + match flag with + | No | Yes -> + fun _ -> 
Checked_runner.Simple.return () + | Maybe -> + bool_typ.check + in + Typ { bool_typ with check } + in + Typ.transport + (Typ.tuple2 bool_typ a_typ) + ~there:(fun (t : a option) -> + match t with None -> (false, dummy) | Some x -> (true, x) ) + ~back:(fun (b, x) -> if b then Some x else None) + |> Typ.transport_var + ~there:(fun (t : (a_var, _) t) -> + match t with + | Maybe (b, x) -> + (b, x) + | Nothing -> + (false_, dummy_var) + | Just x -> + (true_, x) ) + ~back:(fun (b, x) -> + match flag with No -> Nothing | Yes -> Just x | Maybe -> Maybe (b, x) + ) + +let typ (type a a_var f) bool_typ (flag : Flag.t) (a_typ : (a_var, a, f) Typ.t) + ~(dummy : a) = + match flag with + | Yes -> + some_typ a_typ + | No -> + none_typ () + | Maybe -> + maybe_typ bool_typ ~dummy a_typ + +module Early_stop_sequence = struct + (* A sequence that should be considered to have stopped at + the first No flag *) + (* TODO: The documentation above makes it sound like the type below is too + generic: we're not guaranteed to have flags in there *) + type nonrec ('a, 'bool) t = ('a, 'bool) t list + + let fold (type a bool acc res) + (if_res : bool -> then_:res -> else_:res -> res) (t : (a, bool) t) + ~(init : acc) ~(f : acc -> a -> acc) ~(finish : acc -> res) = + let rec go acc = function + | [] -> + finish acc + | Nothing :: xs -> + go acc xs + | Just x :: xs -> + go (f acc x) xs + | Maybe (b, x) :: xs -> + (* Computing this first makes mutation in f OK. *) + let stop_res = finish acc in + let continue_res = go (f acc x) xs in + if_res b ~then_:continue_res ~else_:stop_res + in + go init t +end diff --git a/src/lib/pickles_types/opt.mli b/src/lib/pickles_types/opt.mli new file mode 100644 index 00000000000..6808a0c0eb8 --- /dev/null +++ b/src/lib/pickles_types/opt.mli @@ -0,0 +1,107 @@ +(* Implementation of an extended nullable type *) + +(** {1 Type} *) + +type ('a, 'bool) t = + | Just of 'a + | Nothing + | Maybe of 'bool * 'a + (** [Maybe (b, x)] represents a value that behaves as [Just x] or + [Nothing] depending on the runtime value of its parameter [b]. *) +[@@deriving sexp, compare, yojson, hash, equal] + +(** {1 Constructors} *) + +val just : 'a -> ('a, 'bool) t + +val nothing : ('a, 'bool) t + +val maybe : 'bool -> 'a -> ('a, 'bool) t + +(** {1 Iterators} *) + +val map : ('a, 'bool) t -> f:('a -> 'b) -> ('b, 'bool) t + +val iter : ('a, 'bool) t -> f:('a -> unit) -> unit + +(** {1 Accessors and converters} *) + +(** [value_exn o] is [v] when [o] is [Just v] or [Maybe (_, v)]. + + @raise Invalid_argument if [o] is [Nothing] + *) +val value_exn : ('a, 'bool) t -> 'a + +(** [to_option_unsafe opt] is [Some v] when [opt] is [Just v] or [Maybe (_, v)], + and [None] otherwise *) +val to_option_unsafe : ('a, 'bool) t -> 'a option + +(** [to_option bool_opt] maps {!const:Just}, resp. {!const:Nothing}, to + {!const:Option.Some}, resp. {!const:Option.None}. + + The difference with {!val:to_option_unsafe} lies in the treatment of + {!const:Maybe}, where [Maybe (false, x)] maps to [None] and + [Maybe (true, x)] to [Some x]. + *) +val to_option : ('a, bool) t -> 'a option + +(** [of_option o] is a straightforward injection of a regular {!type:Option.t} + value [o] into type {!type:t}. + + {!const:Option.Some} maps to {!const:Just} and {!const:Option.None} to + {!const:Nothing}.
+*) +val of_option : 'a option -> ('a, 'bool) t + +(** [lift ?on_maybe ~nothing f] lifts the application of function [f] to a value + of type {!type:('a, 'bool) t} as follows: + - [Just v]: apply [f] to the contained value [v] + - [Nothing]: return the value specified by [nothing] + - [Maybe (b, v)]: defaults to the [Just v] case when [on_maybe] is + unspecified, otherwise applies [on_maybe b v] +*) +val lift : + ?on_maybe:('a -> 'b -> 'c) -> nothing:'c -> ('b -> 'c) -> ('b, 'a) t -> 'c + +module Flag : sig + type t = Yes | No | Maybe [@@deriving sexp, compare, yojson, hash, equal] + + (** [( ||| )] is a commutative ternary disjunction on {!type:t} with + a similar specification to its usual Boolean [||] counterpart: + + - [Yes] is absorbing: [Yes ||| x] is [Yes] + - [No] is neutral: [No ||| x] is [x] + *) + val ( ||| ) : t -> t -> t +end + +val constant_layout_typ : + ('b, bool, 'f) Snarky_backendless.Typ.t + -> true_:'b + -> false_:'b + -> Flag.t + -> ('a_var, 'a, 'f) Snarky_backendless.Typ.t + -> dummy:'a + -> dummy_var:'a_var + -> (('a_var, 'b) t, 'a option, 'f) Snarky_backendless.Typ.t + +val typ : + ('b, bool, 'f) Snarky_backendless.Typ.t + -> Flag.t + -> ('a_var, 'a, 'f) Snarky_backendless.Typ.t + -> dummy:'a + -> (('a_var, 'b) t, 'a option, 'f) Snarky_backendless.Typ.t + +(** A sequence that should be considered to have stopped at + the first occurrence of {!Flag.No} *) +module Early_stop_sequence : sig + type nonrec ('a, 'bool) t = ('a, 'bool) t list + + val fold : + ('bool -> then_:'res -> else_:'res -> 'res) + -> ('a, 'bool) t + -> init:'acc + -> f:('acc -> 'a -> 'acc) + -> finish:('acc -> 'res) + -> 'res end diff --git a/src/lib/pickles_types/pcs_batch.ml b/src/lib/pickles_types/pcs_batch.ml index 53ac2c77e71..e115635c46e 100644 --- a/src/lib/pickles_types/pcs_batch.ml +++ b/src/lib/pickles_types/pcs_batch.ml @@ -65,22 +65,29 @@ let combine_evaluations (type f) t ~crs_max_degree ~(mul : f -> f -> f) ~add ~shifted_pow:(fun deg x -> pow x (crs_max_degree - deg)) ~mul ~add ~one ~evaluation_point ~xi -open Plonk_types.Poly_comm - -let combine_split_commitments _t ~scale_and_add ~init:i ~xi (type n) +let combine_split_commitments _t ~scale_and_add ~init:i ~xi + ~reduce_without_degree_bound ~reduce_with_degree_bound (type n) (without_degree_bound : (_, n) Vector.t) with_degree_bound = let flat = - List.concat_map (Vector.to_list without_degree_bound) ~f:Array.to_list - @ List.concat_map (Vector.to_list with_degree_bound) - ~f:(fun { With_degree_bound.unshifted; shifted } -> - Array.to_list unshifted @ [ shifted ] ) + List.concat_map + (Vector.to_list without_degree_bound) + ~f:reduce_without_degree_bound + @ List.concat_map + (Vector.to_list with_degree_bound) + ~f:reduce_with_degree_bound in - match List.rev flat with - | [] -> - failwith "combine_split_commitments: empty" - | init :: comms -> - List.fold_left comms ~init:(i init) ~f:(fun acc p -> - scale_and_add ~acc ~xi p ) + let rec go = function + | [] -> + failwith "combine_split_commitments: empty" + | init :: comms -> ( + match i init with + | None -> + go comms + | Some init -> + List.fold_left comms ~init ~f:(fun acc p -> + scale_and_add ~acc ~xi p ) ) + in + go (List.rev flat) let combine_split_evaluations (type f f') ~(mul_and_add : acc:f' -> xi:f' -> f -> f') ~init:(i : f -> f') ~(xi : f') diff --git a/src/lib/pickles_types/pcs_batch.mli b/src/lib/pickles_types/pcs_batch.mli index 9143c24335d..dee36ea90b4 100644 --- a/src/lib/pickles_types/pcs_batch.mli +++ b/src/lib/pickles_types/pcs_batch.mli @@ -1,3 +1,5 @@ +(** Batched
Polynomial Commitment Scheme *) + type ('a, 'n, 'm) t val map : ('a, 'n, 'm) t -> f:('a -> 'b) -> ('b, 'n, 'm) t @@ -44,15 +46,15 @@ val combine_evaluations' : -> ('f, 'm) Vector.t -> 'f -open Plonk_types.Poly_comm - val combine_split_commitments : (_, 'n, 'm) t -> scale_and_add:(acc:'g_acc -> xi:'f -> 'g -> 'g_acc) - -> init:('g -> 'g_acc) + -> init:('g -> 'g_acc option) -> xi:'f - -> ('g Without_degree_bound.t, 'n) Vector.t - -> ('g With_degree_bound.t, 'm) Vector.t + -> reduce_without_degree_bound:('without_degree_bound -> 'g list) + -> reduce_with_degree_bound:('with_degree_bound -> 'g list) + -> ('without_degree_bound, 'n) Vector.t + -> ('with_degree_bound, 'm) Vector.t -> 'g_acc val combine_split_evaluations : diff --git a/src/lib/pickles_types/plonk_types.ml b/src/lib/pickles_types/plonk_types.ml index 785cfb90a80..6ec1bb2ed6d 100644 --- a/src/lib/pickles_types/plonk_types.ml +++ b/src/lib/pickles_types/plonk_types.ml @@ -17,177 +17,191 @@ module Permuts_minus_1 = Nat.N6 module Permuts_minus_1_vec = Vector.Vector_6 module Permuts = Nat.N7 module Permuts_vec = Vector.Vector_7 +module Lookup_sorted_minus_1 = Nat.N4 +module Lookup_sorted_minus_1_vec = Vector.Vector_4 +module Lookup_sorted = Nat.N5 module Lookup_sorted_vec = Vector.Vector_5 -module Opt = struct - [@@@warning "-4"] +module Features = struct + module Full = struct + type 'bool t = + { range_check0 : 'bool + ; range_check1 : 'bool + ; foreign_field_add : 'bool + ; foreign_field_mul : 'bool + ; xor : 'bool + ; rot : 'bool + ; lookup : 'bool + ; runtime_tables : 'bool + ; uses_lookups : 'bool + ; table_width_at_least_1 : 'bool + ; table_width_at_least_2 : 'bool + ; table_width_3 : 'bool + ; lookups_per_row_3 : 'bool + ; lookups_per_row_4 : 'bool + ; lookup_pattern_xor : 'bool + ; lookup_pattern_range_check : 'bool + } + [@@deriving sexp, compare, yojson, hash, equal, hlist] + + let get_feature_flag (feature_flags : _ t) + (feature : Kimchi_types.feature_flag) = + match feature with + | RangeCheck0 -> + Some feature_flags.range_check0 + | RangeCheck1 -> + Some feature_flags.range_check1 + | ForeignFieldAdd -> + Some feature_flags.foreign_field_add + | ForeignFieldMul -> + Some feature_flags.foreign_field_mul + | Xor -> + Some feature_flags.xor + | Rot -> + Some feature_flags.rot + | LookupTables -> + Some feature_flags.uses_lookups + | RuntimeLookupTables -> + Some feature_flags.runtime_tables + | TableWidth 3 -> + Some feature_flags.table_width_3 + | TableWidth 2 -> + Some feature_flags.table_width_at_least_2 + | TableWidth i when i <= 1 -> + Some feature_flags.table_width_at_least_1 + | TableWidth _ -> + None + | LookupsPerRow 4 -> + Some feature_flags.lookups_per_row_4 + | LookupsPerRow i when i <= 3 -> + Some feature_flags.lookups_per_row_3 + | LookupsPerRow _ -> + None + | LookupPattern Lookup -> + Some feature_flags.lookup + | LookupPattern Xor -> + Some feature_flags.lookup_pattern_xor + | LookupPattern RangeCheck -> + Some feature_flags.lookup_pattern_range_check + | LookupPattern ForeignFieldMul -> + Some feature_flags.foreign_field_mul + + let map + { range_check0 + ; range_check1 + ; foreign_field_add + ; foreign_field_mul + ; rot + ; xor + ; lookup + ; runtime_tables + ; uses_lookups + ; table_width_at_least_1 + ; table_width_at_least_2 + ; table_width_3 + ; lookups_per_row_3 + ; lookups_per_row_4 + ; lookup_pattern_xor + ; lookup_pattern_range_check + } ~f = + { range_check0 = f range_check0 + ; range_check1 = f range_check1 + ; foreign_field_add = f foreign_field_add + ; foreign_field_mul = f foreign_field_mul + 
; xor = f xor + ; rot = f rot + ; lookup = f lookup + ; runtime_tables = f runtime_tables + ; uses_lookups = f uses_lookups + ; table_width_at_least_1 = f table_width_at_least_1 + ; table_width_at_least_2 = f table_width_at_least_2 + ; table_width_3 = f table_width_3 + ; lookups_per_row_3 = f lookups_per_row_3 + ; lookups_per_row_4 = f lookups_per_row_4 + ; lookup_pattern_xor = f lookup_pattern_xor + ; lookup_pattern_range_check = f lookup_pattern_range_check + } - type ('a, 'bool) t = Some of 'a | None | Maybe of 'bool * 'a - [@@deriving sexp, compare, yojson, hash, equal] + let map2 x1 x2 ~f = + { range_check0 = f x1.range_check0 x2.range_check0 + ; range_check1 = f x1.range_check1 x2.range_check1 + ; foreign_field_add = f x1.foreign_field_add x2.foreign_field_add + ; foreign_field_mul = f x1.foreign_field_mul x2.foreign_field_mul + ; xor = f x1.xor x2.xor + ; rot = f x1.rot x2.rot + ; lookup = f x1.lookup x2.lookup + ; runtime_tables = f x1.runtime_tables x2.runtime_tables + ; uses_lookups = f x1.uses_lookups x2.uses_lookups + ; table_width_at_least_1 = + f x1.table_width_at_least_1 x2.table_width_at_least_1 + ; table_width_at_least_2 = + f x1.table_width_at_least_2 x2.table_width_at_least_2 + ; table_width_3 = f x1.table_width_3 x2.table_width_3 + ; lookups_per_row_3 = f x1.lookups_per_row_3 x2.lookups_per_row_3 + ; lookups_per_row_4 = f x1.lookups_per_row_4 x2.lookups_per_row_4 + ; lookup_pattern_xor = f x1.lookup_pattern_xor x2.lookup_pattern_xor + ; lookup_pattern_range_check = + f x1.lookup_pattern_range_check x2.lookup_pattern_range_check + } - let to_option : ('a, bool) t -> 'a option = function - | Some x -> - Some x - | Maybe (true, x) -> - Some x - | Maybe (false, _x) -> - None - | None -> - None - - let to_option_unsafe : ('a, 'bool) t -> 'a option = function - | Some x -> - Some x - | Maybe (_, x) -> - Some x - | None -> - None - - let value_exn = function - | Some x -> - x - | Maybe (_, x) -> - x - | None -> - invalid_arg "Opt.value_exn" - - let of_option (t : 'a option) : ('a, 'bool) t = - match t with None -> None | Some x -> Some x - - module Flag = struct - type t = Yes | No | Maybe [@@deriving sexp, compare, yojson, hash, equal] - - let ( ||| ) x y = - match (x, y) with - | Yes, _ | _, Yes -> - Yes - | Maybe, _ | _, Maybe -> - Maybe - | No, No -> - No - end + let none = + { range_check0 = Opt.Flag.No + ; range_check1 = Opt.Flag.No + ; foreign_field_add = Opt.Flag.No + ; foreign_field_mul = Opt.Flag.No + ; xor = Opt.Flag.No + ; rot = Opt.Flag.No + ; lookup = Opt.Flag.No + ; runtime_tables = Opt.Flag.No + ; uses_lookups = Opt.Flag.No + ; table_width_at_least_1 = Opt.Flag.No + ; table_width_at_least_2 = Opt.Flag.No + ; table_width_3 = Opt.Flag.No + ; lookups_per_row_3 = Opt.Flag.No + ; lookups_per_row_4 = Opt.Flag.No + ; lookup_pattern_xor = Opt.Flag.No + ; lookup_pattern_range_check = Opt.Flag.No + } - let map t ~f = - match t with - | None -> - None - | Some x -> - Some (f x) - | Maybe (b, x) -> - Maybe (b, f x) - - open Snarky_backendless - - let some_typ (type a a_var f bool_var) (t : (a_var, a, f) Typ.t) : - ((a_var, bool_var) t, a option, f) Typ.t = - Typ.transport t ~there:(fun x -> Option.value_exn x) ~back:Option.return - |> Typ.transport_var - ~there:(function - | Some x -> - x - | Maybe _ | None -> - failwith "Opt.some_typ: expected Some" ) - ~back:(fun x -> Some x) - - let none_typ (type a a_var f bool) () : ((a_var, bool) t, a option, f) Typ.t = - Typ.transport (Typ.unit ()) - ~there:(fun _ -> ()) - ~back:(fun () : _ Option.t -> None) - |> Typ.transport_var - 
~there:(function - | None -> - () - | Maybe _ | Some _ -> - failwith "Opt.none_typ: expected None" ) - ~back:(fun () : _ t -> None) - - let maybe_typ (type a a_var bool_var f) - (bool_typ : (bool_var, bool, f) Snarky_backendless.Typ.t) ~(dummy : a) - (a_typ : (a_var, a, f) Typ.t) : ((a_var, bool_var) t, a option, f) Typ.t = - Typ.transport - (Typ.tuple2 bool_typ a_typ) - ~there:(fun (t : a option) -> - match t with None -> (false, dummy) | Some x -> (true, x) ) - ~back:(fun (b, x) -> if b then Some x else None) - |> Typ.transport_var - ~there:(fun (t : (a_var, _) t) -> - match t with - | Maybe (b, x) -> - (b, x) - | None | Some _ -> - failwith "Opt.maybe_typ: expected Maybe" ) - ~back:(fun (b, x) -> Maybe (b, x)) - - let constant_layout_typ (type a a_var f) (bool_typ : _ Typ.t) ~true_ ~false_ - (flag : Flag.t) (a_typ : (a_var, a, f) Typ.t) ~(dummy : a) - ~(dummy_var : a_var) = - let (Typ bool_typ) = bool_typ in - let bool_typ : _ Typ.t = - let check = - (* No need to boolean constrain in the No or Yes case *) - match flag with - | No | Yes -> - fun _ -> Checked_runner.Simple.return () - | Maybe -> - bool_typ.check - in - Typ { bool_typ with check } - in - Typ.transport - (Typ.tuple2 bool_typ a_typ) - ~there:(fun (t : a option) -> - match t with None -> (false, dummy) | Some x -> (true, x) ) - ~back:(fun (b, x) -> if b then Some x else None) - |> Typ.transport_var - ~there:(fun (t : (a_var, _) t) -> - match t with - | Maybe (b, x) -> - (b, x) - | None -> - (false_, dummy_var) - | Some x -> - (true_, x) ) - ~back:(fun (b, x) -> - match flag with No -> None | Yes -> Some x | Maybe -> Maybe (b, x) ) - - let typ (type a a_var f) bool_typ (flag : Flag.t) - (a_typ : (a_var, a, f) Typ.t) ~(dummy : a) = - match flag with - | Yes -> - some_typ a_typ - | No -> - none_typ () - | Maybe -> - maybe_typ bool_typ ~dummy a_typ - - module Early_stop_sequence = struct - (* A sequence that should be considered to have stopped at - the first No flag *) - (* TODO: The documentation above makes it sound like the type below is too - generic: we're not guaranteed to have flags in there *) - type nonrec ('a, 'bool) t = ('a, 'bool) t list - - let fold (type a bool acc res) - (if_res : bool -> then_:res -> else_:res -> res) (t : (a, bool) t) - ~(init : acc) ~(f : acc -> a -> acc) ~(finish : acc -> res) = - let rec go acc = function - | [] -> - finish acc - | None :: xs -> - go acc xs - | Some x :: xs -> - go (f acc x) xs - | Maybe (b, x) :: xs -> - (* Computing this first makes mutation in f OK. 
*) - let stop_res = finish acc in - let continue_res = go (f acc x) xs in - if_res b ~then_:continue_res ~else_:stop_res - in - go init t + let maybe = + { range_check0 = Opt.Flag.Maybe + ; range_check1 = Opt.Flag.Maybe + ; foreign_field_add = Opt.Flag.Maybe + ; foreign_field_mul = Opt.Flag.Maybe + ; xor = Opt.Flag.Maybe + ; rot = Opt.Flag.Maybe + ; lookup = Opt.Flag.Maybe + ; runtime_tables = Opt.Flag.Maybe + ; uses_lookups = Opt.Flag.Maybe + ; table_width_at_least_1 = Opt.Flag.Maybe + ; table_width_at_least_2 = Opt.Flag.Maybe + ; table_width_3 = Opt.Flag.Maybe + ; lookups_per_row_3 = Opt.Flag.Maybe + ; lookups_per_row_4 = Opt.Flag.Maybe + ; lookup_pattern_xor = Opt.Flag.Maybe + ; lookup_pattern_range_check = Opt.Flag.Maybe + } + + let none_bool = + { range_check0 = false + ; range_check1 = false + ; foreign_field_add = false + ; foreign_field_mul = false + ; xor = false + ; rot = false + ; lookup = false + ; runtime_tables = false + ; uses_lookups = false + ; table_width_at_least_1 = false + ; table_width_at_least_2 = false + ; table_width_3 = false + ; lookups_per_row_3 = false + ; lookups_per_row_4 = false + ; lookup_pattern_xor = false + ; lookup_pattern_range_check = false + } end -end -module Features = struct [%%versioned module Stable = struct module V1 = struct @@ -205,6 +219,96 @@ module Features = struct end end] + let of_full + ({ range_check0 + ; range_check1 + ; foreign_field_add + ; foreign_field_mul + ; xor + ; rot + ; lookup + ; runtime_tables + ; uses_lookups = _ + ; table_width_at_least_1 = _ + ; table_width_at_least_2 = _ + ; table_width_3 = _ + ; lookups_per_row_3 = _ + ; lookups_per_row_4 = _ + ; lookup_pattern_xor = _ + ; lookup_pattern_range_check = _ + } : + 'bool Full.t ) = + { range_check0 + ; range_check1 + ; foreign_field_add + ; foreign_field_mul + ; xor + ; rot + ; lookup + ; runtime_tables + } + + let to_full ~or_:( ||| ) ?(any = List.reduce_exn ~f:( ||| )) + { range_check0 + ; range_check1 + ; foreign_field_add + ; foreign_field_mul + ; xor + ; rot + ; lookup + ; runtime_tables + } : _ Full.t = + let lookup_pattern_range_check = + (* RangeCheck, Rot gates use RangeCheck lookup pattern *) + range_check0 ||| range_check1 ||| rot + in + let lookup_pattern_xor = + (* Xor lookup pattern *) + xor + in + (* Make sure these stay up-to-date with the layouts!! 
*) + let table_width_3 = + (* Xor have max_joint_size = 3 *) + lookup_pattern_xor + in + let table_width_at_least_2 = + (* Lookup has max_joint_size = 2 *) + table_width_3 ||| lookup + in + let table_width_at_least_1 = + (* RangeCheck, ForeignFieldMul have max_joint_size = 1 *) + any + [ table_width_at_least_2 + ; lookup_pattern_range_check + ; foreign_field_mul + ] + in + let lookups_per_row_4 = + (* Xor, RangeCheckGate, ForeignFieldMul, have max_lookups_per_row = 4 *) + any [ lookup_pattern_xor; lookup_pattern_range_check; foreign_field_mul ] + in + let lookups_per_row_3 = + (* Lookup has max_lookups_per_row = 3 *) + lookups_per_row_4 ||| lookup + in + { uses_lookups = lookups_per_row_3 + ; table_width_at_least_1 + ; table_width_at_least_2 + ; table_width_3 + ; lookups_per_row_3 + ; lookups_per_row_4 + ; lookup_pattern_xor + ; lookup_pattern_range_check + ; range_check0 + ; range_check1 + ; foreign_field_add + ; foreign_field_mul + ; xor + ; rot + ; lookup + ; runtime_tables + } + type options = Opt.Flag.t t type flags = bool t @@ -313,6 +417,17 @@ module Features = struct ; runtime_tables = Opt.Flag.No } + let maybe = + { range_check0 = Opt.Flag.Maybe + ; range_check1 = Opt.Flag.Maybe + ; foreign_field_add = Opt.Flag.Maybe + ; foreign_field_mul = Opt.Flag.Maybe + ; xor = Opt.Flag.Maybe + ; rot = Opt.Flag.Maybe + ; lookup = Opt.Flag.Maybe + ; runtime_tables = Opt.Flag.Maybe + } + let none_bool = { range_check0 = false ; range_check1 = false @@ -636,9 +751,8 @@ module Evals = struct ; range_check_lookup_selector ; foreign_field_mul_lookup_selector } = - let some x = Opt.Some x in let always_present = - List.map ~f:some + List.map ~f:Opt.just ( [ z ; generic_selector ; poseidon_selector @@ -723,8 +837,8 @@ module Evals = struct ; lookup_table ] in - let some x = Opt.Some x in - List.map ~f:some always_present + + List.map ~f:Opt.just always_present @ optional_gates @ Vector.to_list lookup_sorted @ [ runtime_lookup_table @@ -1061,32 +1175,24 @@ module Evals = struct let typ (type f a_var a) (module Impl : Snarky_backendless.Snark_intf.Run with type field = f) - ~dummy e (feature_flags : _ Features.t) : + ~dummy e + ({ uses_lookups; lookups_per_row_3; lookups_per_row_4; _ } as + feature_flags : + _ Features.Full.t ) : ((a_var, Impl.Boolean.var) In_circuit.t, a t, f) Snarky_backendless.Typ.t = let open Impl in let opt flag = Opt.typ Impl.Boolean.typ flag e ~dummy in - let uses_lookup = - let { Features.range_check0 - ; range_check1 - ; foreign_field_add = _ (* Doesn't use lookup *) - ; foreign_field_mul - ; xor - ; rot - ; lookup - ; runtime_tables = _ (* Fixme *) - } = - feature_flags - in - Array.reduce_exn ~f:Opt.Flag.( ||| ) - [| range_check0; range_check1; foreign_field_mul; xor; rot; lookup |] - in let lookup_sorted = - match uses_lookup with - | Opt.Flag.No -> - Opt.Flag.No - | Yes | Maybe -> - Opt.Flag.Maybe + let lookups_per_row_3 = opt lookups_per_row_3 in + let lookups_per_row_4 = opt lookups_per_row_4 in + Vector.typ' + [ lookups_per_row_3 + ; lookups_per_row_3 + ; lookups_per_row_3 + ; lookups_per_row_3 + ; lookups_per_row_4 + ] in Typ.of_hlistable [ Vector.typ e Columns.n @@ -1105,18 +1211,14 @@ module Evals = struct ; opt feature_flags.foreign_field_mul ; opt feature_flags.xor ; opt feature_flags.rot - ; opt uses_lookup - ; opt uses_lookup - ; Vector.typ (opt lookup_sorted) Nat.N5.n (* TODO: Fixme *) + ; opt uses_lookups + ; opt uses_lookups + ; lookup_sorted ; opt feature_flags.runtime_tables ; opt feature_flags.runtime_tables - ; opt feature_flags.xor + ; opt 
feature_flags.lookup_pattern_xor ; opt feature_flags.lookup - ; opt - Opt.Flag.( - feature_flags.range_check0 ||| feature_flags.range_check1 - ||| feature_flags.rot) - (* TODO: This logic does not belong here. *) + ; opt feature_flags.lookup_pattern_range_check ; opt feature_flags.foreign_field_mul ] ~var_to_hlist:In_circuit.to_hlist ~var_of_hlist:In_circuit.of_hlist @@ -1162,6 +1264,8 @@ module All_evals = struct [%%versioned module Stable = struct + [@@@no_toplevel_latest_type] + module V1 = struct type ('f, 'f_multi) t = { evals : ('f * 'f, 'f_multi * 'f_multi) With_public_input.Stable.V1.t @@ -1171,10 +1275,19 @@ module All_evals = struct end end] + type ('f, 'f_multi) t = + { evals : ('f_multi * 'f_multi, 'f_multi * 'f_multi) With_public_input.t + ; ft_eval1 : 'f + } + [@@deriving sexp, compare, yojson, hash, equal, hlist] + module In_circuit = struct type ('f, 'f_multi, 'bool) t = { evals : - ('f * 'f, 'f_multi * 'f_multi, 'bool) With_public_input.In_circuit.t + ( 'f_multi * 'f_multi + , 'f_multi * 'f_multi + , 'bool ) + With_public_input.In_circuit.t ; ft_eval1 : 'f } [@@deriving hlist] @@ -1184,21 +1297,22 @@ module All_evals = struct : (b1, b2) t = { evals = With_public_input.map t.evals - ~f1:(Tuple_lib.Double.map ~f:f1) + ~f1:(Tuple_lib.Double.map ~f:f2) ~f2:(Tuple_lib.Double.map ~f:f2) ; ft_eval1 = f1 t.ft_eval1 } let typ (type f) (module Impl : Snarky_backendless.Snark_intf.Run with type field = f) - feature_flags = + ~num_chunks feature_flags = let open Impl.Typ in - let single = array ~length:1 field in + let single = array ~length:num_chunks field in + let dummy = Array.init num_chunks ~f:(fun _ -> Impl.Field.Constant.zero) in let evals = With_public_input.typ (module Impl) - feature_flags (tuple2 field field) (tuple2 single single) - ~dummy:Impl.Field.Constant.([| zero |], [| zero |]) + feature_flags (tuple2 single single) (tuple2 single single) + ~dummy:(dummy, dummy) in of_hlistable [ evals; Impl.Field.typ ] ~var_to_hlist:In_circuit.to_hlist ~var_of_hlist:In_circuit.of_hlist ~value_to_hlist:to_hlist @@ -1301,6 +1415,8 @@ module Messages = struct module Lookup = struct [%%versioned module Stable = struct + [@@@no_toplevel_latest_type] + module V1 = struct type 'g t = { sorted : 'g Bounded_types.ArrayN16.Stable.V1.t @@ -1311,39 +1427,52 @@ module Messages = struct end end] + type 'g t = + { sorted : 'g Lookup_sorted_minus_1_vec.t + ; sorted_5th_column : 'g option + ; aggreg : 'g + ; runtime : 'g option + } + [@@deriving fields, sexp, compare, yojson, hash, equal, hlist] + module In_circuit = struct type ('g, 'bool) t = - { sorted : 'g array; aggreg : 'g; runtime : ('g, 'bool) Opt.t } + { sorted : 'g Lookup_sorted_minus_1_vec.t + ; sorted_5th_column : ('g, 'bool) Opt.t + ; aggreg : 'g + ; runtime : ('g, 'bool) Opt.t + } [@@deriving hlist] end - let sorted_length = 5 - - let dummy ~runtime_tables z = + let dummy z = { aggreg = z - ; sorted = Array.create ~len:sorted_length z - ; runtime = Option.some_if runtime_tables z + ; sorted = Vector.init Lookup_sorted_minus_1.n ~f:(fun _ -> z) + ; sorted_5th_column = None + ; runtime = None } - let typ bool_typ e ~runtime_tables ~dummy = + let typ bool_typ e ~lookups_per_row_4 ~runtime_tables ~dummy = Snarky_backendless.Typ.of_hlistable - [ Snarky_backendless.Typ.array ~length:sorted_length e + [ Vector.typ e Lookup_sorted_minus_1.n + ; Opt.typ bool_typ lookups_per_row_4 e ~dummy ; e ; Opt.typ bool_typ runtime_tables e ~dummy ] ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist ~var_to_hlist:In_circuit.to_hlist 
~var_of_hlist:In_circuit.of_hlist - let opt_typ bool_typ ~(lookup : Opt.Flag.t) ~(runtime_tables : Opt.Flag.t) + let opt_typ bool_typ ~(uses_lookup : Opt.Flag.t) + ~(lookups_per_row_4 : Opt.Flag.t) ~(runtime_tables : Opt.Flag.t) ~dummy:z elt = - Opt.typ bool_typ lookup - ~dummy: - (dummy z ~runtime_tables:Opt.Flag.(not (equal runtime_tables No))) - (typ bool_typ ~runtime_tables ~dummy:z elt) + Opt.typ bool_typ uses_lookup ~dummy:(dummy z) + (typ bool_typ ~lookups_per_row_4 ~runtime_tables ~dummy:z elt) end [%%versioned module Stable = struct + [@@@no_toplevel_latest_type] + module V2 = struct type 'g t = { w_comm : 'g Without_degree_bound.Stable.V1.t Columns_vec.Stable.V1.t @@ -1355,6 +1484,14 @@ module Messages = struct end end] + type 'g t = + { w_comm : 'g Without_degree_bound.t Columns_vec.t + ; z_comm : 'g Without_degree_bound.t + ; t_comm : 'g Without_degree_bound.t + ; lookup : 'g Without_degree_bound.t Lookup.t option + } + [@@deriving sexp, compare, yojson, fields, hash, equal, hlist] + module In_circuit = struct type ('g, 'bool) t = { w_comm : 'g Without_degree_bound.t Columns_vec.t @@ -1368,7 +1505,8 @@ module Messages = struct let typ (type n f) (module Impl : Snarky_backendless.Snark_intf.Run with type field = f) g - ({ lookup; runtime_tables; _ } : Opt.Flag.t Features.t) ~dummy + ({ runtime_tables; uses_lookups; lookups_per_row_4; _ } : + Opt.Flag.t Features.Full.t ) ~dummy ~(commitment_lengths : (((int, n) Vector.t as 'v), int, int) Poly.t) ~bool = let open Snarky_backendless.Typ in @@ -1381,7 +1519,8 @@ module Messages = struct ~dummy_group_element:dummy ~bool in let lookup = - Lookup.opt_typ Impl.Boolean.typ ~lookup ~runtime_tables ~dummy:[| dummy |] + Lookup.opt_typ Impl.Boolean.typ ~uses_lookup:uses_lookups + ~lookups_per_row_4 ~runtime_tables ~dummy:[| dummy |] (wo [ 1 ]) in of_hlistable @@ -1393,6 +1532,8 @@ end module Proof = struct [%%versioned module Stable = struct + [@@@no_toplevel_latest_type] + module V2 = struct type ('g, 'fq, 'fqv) t = { messages : 'g Messages.Stable.V2.t @@ -1401,6 +1542,10 @@ module Proof = struct [@@deriving sexp, compare, yojson, hash, equal] end end] + + type ('g, 'fq, 'fqv) t = + { messages : 'g Messages.t; openings : ('g, 'fq, 'fqv) Openings.t } + [@@deriving sexp, compare, yojson, hash, equal] end module Shifts = struct diff --git a/src/lib/pickles_types/plonk_types.mli b/src/lib/pickles_types/plonk_types.mli index ed7d292110d..09e0ee03f36 100644 --- a/src/lib/pickles_types/plonk_types.mli +++ b/src/lib/pickles_types/plonk_types.mli @@ -2,64 +2,42 @@ val hash_fold_array : 'a Sigs.hashable -> 'a array Sigs.hashable -module Opt : sig - type ('a, 'bool) t = Some of 'a | None | Maybe of 'bool * 'a - [@@deriving sexp, compare, yojson, hash, equal] - - val map : ('a, 'bool) t -> f:('a -> 'b) -> ('b, 'bool) t - - (** [value_exn o] is v when [o] if [Some v] or [Maybe (_, v)]. 
- - @raise Invalid_argument if [o] is [None] - **) - val value_exn : ('a, 'bool) t -> 'a +(** Features are custom gates, lookup tables or runtime tables *) +module Features : sig + module Full : sig + type 'bool t = private + { range_check0 : 'bool + ; range_check1 : 'bool + ; foreign_field_add : 'bool + ; foreign_field_mul : 'bool + ; xor : 'bool + ; rot : 'bool + ; lookup : 'bool + ; runtime_tables : 'bool + ; uses_lookups : 'bool + ; table_width_at_least_1 : 'bool + ; table_width_at_least_2 : 'bool + ; table_width_3 : 'bool + ; lookups_per_row_3 : 'bool + ; lookups_per_row_4 : 'bool + ; lookup_pattern_xor : 'bool + ; lookup_pattern_range_check : 'bool + } + [@@deriving sexp, compare, yojson, hash, equal, hlist] - (** [to_option_unsafe o] is [Some v] when [o] if [Some v] or [Maybe (_, v)], - [None] otherwise. - *) - val to_option_unsafe : ('a, 'bool) t -> 'a option + val get_feature_flag : 'bool t -> Kimchi_types.feature_flag -> 'bool option - val to_option : ('a, bool) t -> 'a option + val map : 'a t -> f:('a -> 'b) -> 'b t - module Flag : sig - type t = Yes | No | Maybe [@@deriving sexp, compare, yojson, hash, equal] + val map2 : 'a t -> 'b t -> f:('a -> 'b -> 'c) -> 'c t - val ( ||| ) : t -> t -> t - end + val none : Opt.Flag.t t - val constant_layout_typ : - ('b, bool, 'f) Snarky_backendless.Typ.t - -> true_:'b - -> false_:'b - -> Flag.t - -> ('a_var, 'a, 'f) Snarky_backendless.Typ.t - -> dummy:'a - -> dummy_var:'a_var - -> (('a_var, 'b) t, 'a option, 'f) Snarky_backendless.Typ.t + val maybe : Opt.Flag.t t - val typ : - ('b, bool, 'f) Snarky_backendless.Typ.t - -> Flag.t - -> ('a_var, 'a, 'f) Snarky_backendless.Typ.t - -> dummy:'a - -> (('a_var, 'b) t, 'a option, 'f) Snarky_backendless.Typ.t - - (** A sequence that should be considered to have stopped at - the first occurence of {!Flag.No} *) - module Early_stop_sequence : sig - type nonrec ('a, 'bool) t = ('a, 'bool) t list - - val fold : - ('bool -> then_:'res -> else_:'res -> 'res) - -> ('a, 'bool) t - -> init:'acc - -> f:('acc -> 'a -> 'acc) - -> finish:('acc -> 'res) - -> 'res + val none_bool : bool t end -end -module Features : sig [%%versioned: module Stable : sig module V1 : sig @@ -77,17 +55,28 @@ module Features : sig end end] + val to_full : + or_:('bool -> 'bool -> 'bool) + -> ?any:('bool list -> 'bool) + -> 'bool t + -> 'bool Full.t + + val of_full : 'a Full.t -> 'a t + (** {2 Type aliases} *) type options = Opt.Flag.t t type flags = bool t + (** [to_data flags] takes the record defined above and deconstructs it in a + list, in the field order *) val to_data : 'a t -> ('a * ('a * ('a * ('a * ('a * ('a * ('a * ('a * unit)))))))) Hlist.HlistId.t + (** [of_data flags_list] constructs a record from the flags list *) val of_data : ('a * ('a * ('a * ('a * ('a * ('a * ('a * ('a * unit)))))))) Hlist.HlistId.t @@ -100,6 +89,8 @@ module Features : sig val none : options + val maybe : options + val none_bool : flags val map : 'a t -> f:('a -> 'b) -> 'b t @@ -118,40 +109,77 @@ module Poly_comm : sig end end +(** The number of wires in the proving system *) module Columns_vec = Vector.Vector_15 + module Columns = Nat.N15 -module Permuts_vec = Vector.Vector_7 + +(** The number of wires that are considered in the permutation argument *) module Permuts = Nat.N7 + +module Permuts_vec = Vector.Vector_7 module Permuts_minus_1 = Nat.N6 module Permuts_minus_1_vec = Vector.Vector_6 +module Lookup_sorted_minus_1 = Nat.N4 +module Lookup_sorted_minus_1_vec = Vector.Vector_4 +module Lookup_sorted = Nat.N5 module Lookup_sorted_vec = 
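(* one sorted polynomial more than the maximum of 4 lookups per row *)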
Vector.Vector_5 +(** Messages involved in the polynomial IOP *) module Messages : sig module Poly : sig type ('w, 'z, 't) t = { w : 'w; z : 'z; t : 't } end + (** The types of lookup tables. This should stay in line with the + {{:https://o1-labs.github.io/proof-systems/rfcs/extended-lookup-tables.html} RFC4 + - Extended lookup tables} in the kimchi book *) module Lookup : sig - type 'g t = { sorted : 'g array; aggreg : 'g; runtime : 'g option } + module Stable : sig + module V1 : sig + type 'g t = { sorted : 'g array; aggreg : 'g; runtime : 'g option } + [@@deriving fields, sexp, compare, yojson, hash, equal, hlist] + end + end + + type 'g t = + { sorted : 'g Lookup_sorted_minus_1_vec.t + ; sorted_5th_column : 'g option + ; aggreg : 'g + ; runtime : 'g option + } module In_circuit : sig type ('g, 'bool) t = - { sorted : 'g array; aggreg : 'g; runtime : ('g, 'bool) Opt.t } + { sorted : 'g Lookup_sorted_minus_1_vec.t + ; sorted_5th_column : ('g, 'bool) Opt.t + ; aggreg : 'g + ; runtime : ('g, 'bool) Opt.t + } end end module Stable : sig module V2 : sig + (** Commitments to the different polynomials. + - [w_comm] is a vector containing the commitments to the wires. As + usual, the vector size is encoded at the type level using + {!Columns_vec} for compile-time verification of vector properties. + - [z_comm] is the commitment to the permutation polynomial + - [t_comm] is the commitment to the quotient polynomial + - [lookup] contains the commitments to the polynomials involved in the + lookup arguments. + *) type 'g t = { w_comm : 'g Poly_comm.Without_degree_bound.t Columns_vec.t ; z_comm : 'g Poly_comm.Without_degree_bound.t ; t_comm : 'g Poly_comm.Without_degree_bound.t - ; lookup : 'g Poly_comm.Without_degree_bound.t Lookup.t option + ; lookup : 'g Poly_comm.Without_degree_bound.t Lookup.Stable.V1.t option } end end - type 'g t = 'g Stable.V2.t = + type 'g t = { w_comm : 'g Poly_comm.Without_degree_bound.t Columns_vec.t ; z_comm : 'g Poly_comm.Without_degree_bound.t ; t_comm : 'g Poly_comm.Without_degree_bound.t @@ -168,21 +196,13 @@ module Messages : sig , 'bool ) Opt.t } - - (** Field accessors *) - - val w_comm : - ('g, 'bool) t -> 'g Poly_comm.Without_degree_bound.t Columns_vec.t - - val z_comm : ('g, 'bool) t -> 'g Poly_comm.Without_degree_bound.t - - val t_comm : ('g, 'bool) t -> 'g Poly_comm.Without_degree_bound.t + [@@deriving fields] end val typ : (module Snarky_backendless.Snark_intf.Run with type field = 'f) -> ('a, 'b, 'f) Snarky_backendless.Typ.t - -> Opt.Flag.t Features.t + -> Opt.Flag.t Features.Full.t -> dummy:'b -> commitment_lengths:((int, 'n) Vector.vec, int, int) Poly.t -> bool:('c, bool, 'f) Snarky_backendless.Typ.t @@ -341,7 +361,9 @@ module Proof : sig module Stable : sig module V2 : sig type ('g, 'fq, 'fqv) t = - { messages : 'g Messages.t; openings : ('g, 'fq, 'fqv) Openings.t } + { messages : 'g Messages.Stable.V2.t + ; openings : ('g, 'fq, 'fqv) Openings.t + } include Sigs.Full.S3 with type ('a, 'b, 'c) t := ('a, 'b, 'c) t end @@ -349,8 +371,11 @@ module Proof : sig module Latest = V2 end - type ('a, 'b, 'c) t = ('a, 'b, 'c) Stable.V2.t = + (** Represents a proof.
A proof consists of messages and openings from the + polynomial protocols *) + type ('a, 'b, 'c) t = { messages : 'a Messages.t; openings : ('a, 'b, 'c) Openings.t } + [@@deriving compare, sexp, yojson, hash, equal] end module All_evals : sig @@ -376,18 +401,23 @@ module All_evals : sig include Sigs.Full.S2 with type ('a, 'b) t := ('a, 'b) t end + + module Latest = V1 end module In_circuit : sig type ('f, 'f_multi, 'bool) t = { evals : - ('f * 'f, 'f_multi * 'f_multi, 'bool) With_public_input.In_circuit.t + ( 'f_multi * 'f_multi + , 'f_multi * 'f_multi + , 'bool ) + With_public_input.In_circuit.t ; ft_eval1 : 'f } end - type ('f, 'f_multi) t = ('f, 'f_multi) Stable.V1.t = - { evals : ('f * 'f, 'f_multi * 'f_multi) With_public_input.t + type ('f, 'f_multi) t = + { evals : ('f_multi * 'f_multi, 'f_multi * 'f_multi) With_public_input.t ; ft_eval1 : 'f } [@@deriving sexp, compare, yojson, hash, equal] @@ -396,7 +426,8 @@ module All_evals : sig val typ : (module Snarky_backendless.Snark_intf.Run with type field = 'f) - -> Opt.Flag.t Features.t + -> num_chunks:int + -> Opt.Flag.t Features.Full.t -> ( ( 'f Snarky_backendless.Cvar.t , 'f Snarky_backendless.Cvar.t array , 'f Snarky_backendless.Cvar.t Snarky_backendless.Boolean.t ) @@ -407,6 +438,7 @@ module All_evals : sig Snarky_backendless.Types.Typ.typ end +(** Shifts, related to the permutation argument in Plonk *) module Shifts : sig type 'a t = 'a array end diff --git a/src/lib/pickles_types/plonk_verification_key_evals.ml b/src/lib/pickles_types/plonk_verification_key_evals.ml index 593a46b3ae9..87d5ef5a6e9 100644 --- a/src/lib/pickles_types/plonk_verification_key_evals.ml +++ b/src/lib/pickles_types/plonk_verification_key_evals.ml @@ -65,3 +65,168 @@ let typ g = ] ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist ~value_of_hlist:of_hlist + +module Step = struct + type ('comm, 'opt_comm) t = + { sigma_comm : 'comm Plonk_types.Permuts_vec.t + ; coefficients_comm : 'comm Plonk_types.Columns_vec.t + ; generic_comm : 'comm + ; psm_comm : 'comm + ; complete_add_comm : 'comm + ; mul_comm : 'comm + ; emul_comm : 'comm + ; endomul_scalar_comm : 'comm + ; xor_comm : 'opt_comm + ; range_check0_comm : 'opt_comm + ; range_check1_comm : 'opt_comm + ; foreign_field_add_comm : 'opt_comm + ; foreign_field_mul_comm : 'opt_comm + ; rot_comm : 'opt_comm + ; lookup_table_comm : 'opt_comm Plonk_types.Lookup_sorted_minus_1_vec.t + ; lookup_table_ids : 'opt_comm + ; runtime_tables_selector : 'opt_comm + ; lookup_selector_lookup : 'opt_comm + ; lookup_selector_xor : 'opt_comm + ; lookup_selector_range_check : 'opt_comm + ; lookup_selector_ffmul : 'opt_comm + } + [@@deriving sexp, equal, compare, hash, yojson, hlist, fields] + + let map + { sigma_comm + ; coefficients_comm + ; generic_comm + ; psm_comm + ; complete_add_comm + ; mul_comm + ; emul_comm + ; endomul_scalar_comm + ; xor_comm + ; range_check0_comm + ; range_check1_comm + ; foreign_field_add_comm + ; foreign_field_mul_comm + ; rot_comm + ; lookup_table_comm + ; lookup_table_ids + ; runtime_tables_selector + ; lookup_selector_lookup + ; lookup_selector_xor + ; lookup_selector_range_check + ; lookup_selector_ffmul + } ~f ~f_opt = + { sigma_comm = Vector.map ~f sigma_comm + ; coefficients_comm = Vector.map ~f coefficients_comm + ; generic_comm = f generic_comm + ; psm_comm = f psm_comm + ; complete_add_comm = f complete_add_comm + ; mul_comm = f mul_comm + ; emul_comm = f emul_comm + ; endomul_scalar_comm = f endomul_scalar_comm + ; xor_comm = f_opt xor_comm + ; range_check0_comm = f_opt 
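(* optional commitments (custom-gate selectors and lookup data) are mapped with [f_opt], so absent features stay absent *)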
range_check0_comm + ; range_check1_comm = f_opt range_check1_comm + ; foreign_field_add_comm = f_opt foreign_field_add_comm + ; foreign_field_mul_comm = f_opt foreign_field_mul_comm + ; rot_comm = f_opt rot_comm + ; lookup_table_comm = Vector.map ~f:f_opt lookup_table_comm + ; lookup_table_ids = f_opt lookup_table_ids + ; runtime_tables_selector = f_opt runtime_tables_selector + ; lookup_selector_lookup = f_opt lookup_selector_lookup + ; lookup_selector_xor = f_opt lookup_selector_xor + ; lookup_selector_range_check = f_opt lookup_selector_range_check + ; lookup_selector_ffmul = f_opt lookup_selector_ffmul + } + + let map2 t1 t2 ~f ~f_opt = + { sigma_comm = Vector.map2 ~f t1.sigma_comm t2.sigma_comm + ; coefficients_comm = + Vector.map2 ~f t1.coefficients_comm t2.coefficients_comm + ; generic_comm = f t1.generic_comm t2.generic_comm + ; psm_comm = f t1.psm_comm t2.psm_comm + ; complete_add_comm = f t1.complete_add_comm t2.complete_add_comm + ; mul_comm = f t1.mul_comm t2.mul_comm + ; emul_comm = f t1.emul_comm t2.emul_comm + ; endomul_scalar_comm = f t1.endomul_scalar_comm t2.endomul_scalar_comm + ; xor_comm = f_opt t1.xor_comm t2.xor_comm + ; range_check0_comm = f_opt t1.range_check0_comm t2.range_check0_comm + ; range_check1_comm = f_opt t1.range_check1_comm t2.range_check1_comm + ; foreign_field_add_comm = + f_opt t1.foreign_field_add_comm t2.foreign_field_add_comm + ; foreign_field_mul_comm = + f_opt t1.foreign_field_mul_comm t2.foreign_field_mul_comm + ; rot_comm = f_opt t1.rot_comm t2.rot_comm + ; lookup_table_comm = + Vector.map2 ~f:f_opt t1.lookup_table_comm t2.lookup_table_comm + ; lookup_table_ids = f_opt t1.lookup_table_ids t2.lookup_table_ids + ; runtime_tables_selector = + f_opt t1.runtime_tables_selector t2.runtime_tables_selector + ; lookup_selector_lookup = + f_opt t1.lookup_selector_lookup t2.lookup_selector_lookup + ; lookup_selector_xor = f_opt t1.lookup_selector_xor t2.lookup_selector_xor + ; lookup_selector_range_check = + f_opt t1.lookup_selector_range_check t2.lookup_selector_range_check + ; lookup_selector_ffmul = + f_opt t1.lookup_selector_ffmul t2.lookup_selector_ffmul + } + + let typ g g_opt = + Snarky_backendless.Typ.of_hlistable + [ Vector.typ g Plonk_types.Permuts.n + ; Vector.typ g Plonk_types.Columns.n + ; g + ; g + ; g + ; g + ; g + ; g + ; g_opt + ; g_opt + ; g_opt + ; g_opt + ; g_opt + ; g_opt + ; Vector.typ g_opt Plonk_types.Lookup_sorted_minus_1.n + ; g_opt + ; g_opt + ; g_opt + ; g_opt + ; g_opt + ; g_opt + ] + ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist + ~value_of_hlist:of_hlist + + let forget_optional_commitments + { sigma_comm + ; coefficients_comm + ; generic_comm + ; psm_comm + ; complete_add_comm + ; mul_comm + ; emul_comm + ; endomul_scalar_comm + ; xor_comm = _ + ; range_check0_comm = _ + ; range_check1_comm = _ + ; foreign_field_add_comm = _ + ; foreign_field_mul_comm = _ + ; rot_comm = _ + ; lookup_table_comm = _ + ; lookup_table_ids = _ + ; runtime_tables_selector = _ + ; lookup_selector_lookup = _ + ; lookup_selector_xor = _ + ; lookup_selector_range_check = _ + ; lookup_selector_ffmul = _ + } : _ Stable.Latest.t = + { sigma_comm + ; coefficients_comm + ; generic_comm + ; psm_comm + ; complete_add_comm + ; mul_comm + ; emul_comm + ; endomul_scalar_comm + } +end diff --git a/src/lib/pickles_types/plonk_verification_key_evals.mli b/src/lib/pickles_types/plonk_verification_key_evals.mli index 0e792000ab3..553ff080b98 100644 --- a/src/lib/pickles_types/plonk_verification_key_evals.mli +++ 
b/src/lib/pickles_types/plonk_verification_key_evals.mli @@ -42,3 +42,54 @@ val map : 'a t -> f:('a -> 'b) -> 'b t (** [map2] *) val map2 : 'a t -> 'b t -> f:('a -> 'b -> 'c) -> 'c t + +module Step : sig + type ('comm, 'opt_comm) t = + { sigma_comm : 'comm Plonk_types.Permuts_vec.t + ; coefficients_comm : 'comm Plonk_types.Columns_vec.t + ; generic_comm : 'comm + ; psm_comm : 'comm + ; complete_add_comm : 'comm + ; mul_comm : 'comm + ; emul_comm : 'comm + ; endomul_scalar_comm : 'comm + ; xor_comm : 'opt_comm + ; range_check0_comm : 'opt_comm + ; range_check1_comm : 'opt_comm + ; foreign_field_add_comm : 'opt_comm + ; foreign_field_mul_comm : 'opt_comm + ; rot_comm : 'opt_comm + ; lookup_table_comm : 'opt_comm Plonk_types.Lookup_sorted_minus_1_vec.t + ; lookup_table_ids : 'opt_comm + ; runtime_tables_selector : 'opt_comm + ; lookup_selector_lookup : 'opt_comm + ; lookup_selector_xor : 'opt_comm + ; lookup_selector_range_check : 'opt_comm + ; lookup_selector_ffmul : 'opt_comm + } + [@@deriving sexp, equal, compare, hash, yojson, hlist] + + val typ : + ('comm_var, 'comm_value, 'c) Snarky_backendless.Typ.t + -> ('opt_comm_var, 'opt_comm_value, 'c) Snarky_backendless.Typ.t + -> ( ('comm_var, 'opt_comm_var) t + , ('comm_value, 'opt_comm_value) t + , 'c ) + Snarky_backendless.Typ.t + + val map : + ('comm1, 'opt_comm1) t + -> f:('comm1 -> 'comm2) + -> f_opt:('opt_comm1 -> 'opt_comm2) + -> ('comm2, 'opt_comm2) t + + val map2 : + ('comm1, 'opt_comm1) t + -> ('comm2, 'opt_comm2) t + -> f:('comm1 -> 'comm2 -> 'comm3) + -> f_opt:('opt_comm1 -> 'opt_comm2 -> 'opt_comm3) + -> ('comm3, 'opt_comm3) t + + val forget_optional_commitments : + ('comm, 'opt_comm) t -> 'comm Stable.Latest.t +end diff --git a/src/lib/pickles_types/shifted_value.mli b/src/lib/pickles_types/shifted_value.mli index 1dac854bd8f..32d9d3ea675 100644 --- a/src/lib/pickles_types/shifted_value.mli +++ b/src/lib/pickles_types/shifted_value.mli @@ -1,24 +1,43 @@ module type Field_intf = sig + (** Represents an element of the field *) type t + (** The number of bits in the field's order, i.e. + [1 + log2(field_order)] *) val size_in_bits : int + (** [negate x] returns the unique value [y] such that [x + y = zero mod p] + where [p] is the order of the field *) val negate : t -> t + (** [a - b] returns the unique value [c] such that [c + b = a mod p] where + [p] is the order of the field *) val ( - ) : t -> t -> t + (** [a + b] returns the unique value [c] such that [a + b = c mod p] where + [p] is the order of the field *) val ( + ) : t -> t -> t + (** [a * b] returns the unique value [c] such that [a * b = c mod p] where + [p] is the order of the field *) val ( * ) : t -> t -> t + (** [a / b] returns the unique value [c] such that [c * b = a mod p] where + [p] is the order of the field *) val ( / ) : t -> t -> t + (** [inv x] returns the unique value [y] such that [x * y = one mod p] + where [p] is the order of the field *) val inv : t -> t + (** The identity element for addition *) val zero : t + (** The identity element for multiplication *) val one : t + (** [of_int x] builds an element of type [t]. [x] is the canonical + representation of the field element.
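For instance, [of_int 0 = zero] and [of_int 1 = one].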
*) val of_int : int -> t end @@ -43,6 +62,7 @@ module type S = sig val create : (module Field_intf with type t = 'f) -> 'f t + (** [map x f] applies [f] to the value contained in [x] *) val map : 'a t -> f:('a -> 'b) -> 'b t end diff --git a/src/lib/pickles_types/test/main.ml b/src/lib/pickles_types/test/main.ml index 038ba05c12c..9c77e7ae4ab 100644 --- a/src/lib/pickles_types/test/main.ml +++ b/src/lib/pickles_types/test/main.ml @@ -1 +1,3 @@ -let () = Alcotest.run "Pickles types" Test_pcs_batch.tests +let tests = Test_pcs_batch.tests @ Test_vector.tests + +let () = Alcotest.run "Pickles types" tests diff --git a/src/lib/pickles_types/test/test_vector.ml b/src/lib/pickles_types/test/test_vector.ml new file mode 100644 index 00000000000..9f01d5cf232 --- /dev/null +++ b/src/lib/pickles_types/test/test_vector.ml @@ -0,0 +1,29 @@ +let test_initialize_with_correct_size () = + let v = Pickles_types.Vector.init Pickles_types.Nat.N10.n ~f:(fun i -> i) in + assert (Pickles_types.(Nat.to_int (Vector.length v)) = 10) + +let test_split () = + (* v is of length 10. We want to split it into two vectors of sizes 6 and 4 *) + let v = Pickles_types.Vector.init Pickles_types.Nat.N10.n ~f:(fun i -> i) in + (* 6 + 4 *) + let ten = snd (Pickles_types.Nat.N6.add Pickles_types.Nat.N4.n) in + let v_6, v_4 = Pickles_types.Vector.split v ten in + (* Checking the size of both splits *) + assert (Pickles_types.(Nat.to_int (Vector.length v_6)) = 6) ; + assert (Pickles_types.(Nat.to_int (Vector.length v_4)) = 4) ; + (* We now check that the elements have been split correctly: we should have + 0 to 5 in v_6 and 6 to 9 in v_4 *) + let v_6_list = Pickles_types.Vector.to_list v_6 in + assert (List.for_all2_exn v_6_list (List.init 6 ~f:(fun i -> i)) ~f:Int.equal) ; + let v_4_list = Pickles_types.Vector.to_list v_4 in + assert ( + List.for_all2_exn v_4_list (List.init 4 ~f:(fun i -> 6 + i)) ~f:Int.equal ) + +let tests = + let open Alcotest in + [ ( "Vectors" + , [ test_case "test initialize with correct size" `Quick + test_initialize_with_correct_size + ; test_case "test split" `Quick test_split + ] ) + ] diff --git a/src/lib/pickles_types/vector.ml b/src/lib/pickles_types/vector.ml index be76c5d5e59..2ca4650738c 100644 --- a/src/lib/pickles_types/vector.ml +++ b/src/lib/pickles_types/vector.ml @@ -12,6 +12,8 @@ end include T +let singleton a = [ a ] + let unsingleton (type a) ([ x ] : (a, z s) t) : a = x let rec iter : type a n. (a, n) t -> f:(a -> unit) -> unit = @@ -29,6 +31,31 @@ let iteri (type a n) (t : (a, n) t) ~(f : int -> a -> unit) : unit = in go 0 t +let rec length : type a n. (a, n) t -> n Nat.t = function + | [] -> + Z + | _ :: xs -> + S (length xs) + +let nth v i = + let rec loop : type a n. int -> (a, n) t -> a option = + fun j -> function + | [] -> + None + | x :: xs -> + if Int.equal i j then Some x else loop (j + 1) xs + in + loop 0 v + +let nth_exn v i = + match nth v i with + | None -> + invalid_argf "Vector.nth_exn %d called on a vector of length %d" i + (length v |> Nat.to_int) + () + | Some e -> + e + let rec iter2 : type a b n. (a, n) t -> (b, n) t -> f:(a -> b -> unit) -> unit = fun t1 t2 ~f -> match (t1, t2) with @@ -92,12 +119,6 @@ let sexp_of_t a _ v = List.sexp_of_t a (to_list v) let to_array t = Array.of_list (to_list t) -let rec length : type a n. (a, n) t -> n Nat.t = function - | [] -> - Z - | _ :: xs -> - S (length xs) - let rec init : type a n.
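(* [init i n ~f] builds [f i; f (i + 1); ...], peeling one successor off [n] per element *)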
int -> n Nat.t -> f:(int -> a) -> (a, n) t = fun i n ~f -> match n with Z -> [] | S n -> f i :: init (i + 1) n ~f diff --git a/src/lib/pickles_types/vector.mli b/src/lib/pickles_types/vector.mli index 38a96b6dbb7..5911761c4ab 100644 --- a/src/lib/pickles_types/vector.mli +++ b/src/lib/pickles_types/vector.mli @@ -1,7 +1,12 @@ -(** Vectors for Pickles *) +(** Typed size vectors for Pickles. The size of the vector is encoded at the + type level. + The module also provides common methods available for built-in lists ['a + list] like [map], [iter], etc. +*) (** {1 Types} *) +(** Encode a vector at the type level with its size *) module T : sig type ('a, _) t = | [] : ('a, Nat.z) t @@ -12,10 +17,15 @@ type ('a, 'b) t = ('a, 'b) T.t = | [] : ('a, Nat.z) t | ( :: ) : 'a * ('a, 'n) t -> ('a, 'n Nat.s) t +(** Simple alias for the type [t] *) type ('a, 'n) vec = ('a, 'n) t +(** A value of type ['a e] forgets the size of the vector and contains only the + elements of the list. It can be seen as an alias for ['a list] *) type _ e = T : ('a, 'n) t -> 'a e +(** ['a L.t] is nothing more than an alias for ['a list]. No type-level encoding + of the size is provided. It only transports the runtime data. *) module L : sig type 'a t = 'a list [@@deriving yojson] end @@ -24,8 +34,6 @@ end (** {2 Module types} *) -module type Nat_intf = Nat.Intf - module type S = sig type 'a t [@@deriving compare, yojson, sexp, hash, equal] val map : 'a t -> f:('a -> 'b) -> 'b t val to_list : 'a t -> 'a list end +(** Main module type to encode a typed size vector *) module type VECTOR = sig type 'a t @@ -81,29 +90,43 @@ module Vector_16 : VECTOR with type 'a t = ('a, Nat.N16.n) vec (** Vector of size 8 *) module Vector_8 : VECTOR with type 'a t = ('a, Nat.N8.n) vec +(** Vector of size 7 *) module Vector_7 : VECTOR with type 'a t = ('a, Nat.N7.n) vec +(** Vector of size 6 *) module Vector_6 : VECTOR with type 'a t = ('a, Nat.N6.n) vec +(** Vector of size 5 *) module Vector_5 : VECTOR with type 'a t = ('a, Nat.N5.n) vec +(** Vector of size 4 *) module Vector_4 : VECTOR with type 'a t = ('a, Nat.N4.n) vec +(** Vector of size 2 *) module Vector_2 : VECTOR with type 'a t = ('a, Nat.N2.n) vec -module With_length (N : Nat_intf) : S with type 'a t = ('a, N.n) vec +(** Functor to build any vector of size [N]. The parameter of the functor is a + natural number encoded at the type level. For instance, {!Vector_2} could be seen + as the output of [With_length (Nat.N2)] +*) +module With_length (N : Nat.Intf) : S with type 'a t = ('a, N.n) vec -(** {1 Functions} *) +(** {1 Snarky-related functions} *) +(** [typ v t_n] creates a snarky [Typ.t] for a vector of length [t_n] where + each cell has the snarky type [v] *) val typ : ('a, 'b, 'c) Snarky_backendless.Typ.t -> 'd Nat.nat -> (('a, 'd) vec, ('b, 'd) vec, 'c) Snarky_backendless.Typ.t +(** [typ' ts] builds a snarky [Typ.t] for a whole vector from a vector [ts] of + per-cell [Typ.t] values *) val typ' : (('var, 'value, 'f) Snarky_backendless.Typ.t, 'n) t -> (('var, 'n) t, ('value, 'n) t, 'f) Snarky_backendless.Typ.t +(** {1 Common interface of vectors} *) val of_list : 'a list -> 'a e val of_list_and_length_exn : 'a list -> 'n Nat.t -> ('a, 'n) t @@ -116,8 +139,6 @@ val sexp_of_t : -> ('a, 'c) t -> Ppx_sexp_conv_lib.Sexp.t -val nth : ('a, 'n) t -> int -> 'a option - (** [zip v1 v2] combines together vectors [v1] and [v2] of length ['b].
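Since both arguments share the type-level length ['b], zipping vectors of different lengths is rejected at compile time.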
*) val zip : ('a, 'b) t -> ('c, 'b) t -> ('a * 'c, 'b) t @@ -149,6 +170,31 @@ val iter2 : ('a, 'n) t -> ('b, 'n) t -> f:('a -> 'b -> unit) -> unit val for_all : ('a, 'n) t -> f:('a -> bool) -> bool +(** [split v n] splits the vector [v] into two vectors [v1] and [v2] such that + [v1] is of size [n] and [v2] is of size [m] where [length v = n + m] and [v1 + || v2 = v]. + + [n] must have been constructed using {!Pickles_types.Nat.I.add} whose result + is equal to the length of [v]. + More concretely: + {[ + let six = Pickles_types.Nat.N6.n in + let eleven = Pickles_types.Nat.N11.n in + let v = Pickles_types.Vector.init eleven ~f:(fun i -> i) in + (* will split v into two vectors, the first of size 5 and the second of size + 6 + *) + let v_five, v_six = + Pickles_types.Vector.split + (* built using Nat.I.add *) + v + (snd (Pickles_types.Nat.N5.add six)) + in + [...] + ]} + The reason to construct the argument [n] with {!add} is to correctly build, + at compile time, an argument that is smaller than or equal to the size of [v]. +*) val split : ('a, 'n_m) t -> ('n, 'm, 'n_m) Nat.Adds.t -> ('a, 'n) t * ('a, 'm) t val rev : ('a, 'n) t -> ('a, 'n) t @@ -158,6 +204,9 @@ val length : ('a, 'n) t -> 'n Nat.t val append : ('a, 'n) vec -> ('a, 'm) vec -> ('n, 'm, 'n_m) Nat.Adds.t -> ('a, 'n_m) vec +(** [singleton x] is the one-element vector containing [x] *) +val singleton : 'a -> ('a, Nat.z Nat.s) t val unsingleton : ('a, Nat.z Nat.s) t -> 'a val trim : 'a 'n 'm. ('a, 'm) vec -> ('n, 'm) Nat.Lte.t -> ('a, 'n) vec @@ -177,3 +226,16 @@ val extend_front : val extend_front_exn : 'a 'n 'm. ('a, 'n) vec -> 'm Nat.t -> 'a -> ('a, 'm) vec val transpose : 'a 'n 'm. (('a, 'n) vec, 'm) vec -> (('a, 'm) vec, 'n) vec + +(** [nth v i] returns the [i]-th element of vector [v]. The first element is + at position 0. + + @return [None] if [i] is not a valid index for vector [v] +*) +val nth : ('a, 'n) vec -> int -> 'a option + +(** [nth_exn v i] returns the [i]-th element of vector [v]. The first element is + at position 0.
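Access is linear in [i], since a vector is a cons list.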
+ + @raise Invalid_argument if [i] is not a valid index for vector [v] *) +val nth_exn : ('a, 'n) vec -> int -> 'a diff --git a/src/lib/ppx_dhall_type/deriving.ml b/src/lib/ppx_dhall_type/deriving.ml deleted file mode 100644 index 256017a4ce4..00000000000 --- a/src/lib/ppx_dhall_type/deriving.ml +++ /dev/null @@ -1,160 +0,0 @@ -(* deriving.ml -- deriving ppx for Dhall types *) - -(* TODO: - deriver for signatures - default values in records -*) - -open Core_kernel -open Ppxlib - -let deriver = "dhall_type" - -let field_key_attr = - Attribute.declare (deriver ^ ".key") Attribute.Context.Label_declaration - Ast_pattern.(single_expr_payload (estring __)) - Fn.id - -let make_lident_cmp items lident = - List.mem items (Longident.name lident.txt) ~equal:String.equal - -let is_bool_lident = make_lident_cmp [ "bool"; "Bool.t" ] - -let is_int_lident = make_lident_cmp [ "int"; "Int.t" ] - -let is_float_lident = make_lident_cmp [ "float"; "Float.t" ] - -let is_string_lident = make_lident_cmp [ "string"; "String.t" ] - -let is_option_lident = make_lident_cmp [ "option"; "Option.t" ] - -let is_list_lident = make_lident_cmp [ "list"; "List.t" ] - -let rec dhall_type_of_core_type core_type = - let (module Ast_builder) = Ast_builder.make core_type.ptyp_loc in - let open Ast_builder in - match core_type.ptyp_desc with - | Ptyp_constr (lident, []) when is_bool_lident lident -> - [%expr Ppx_dhall_type.Dhall_type.Bool] - | Ptyp_constr (lident, []) when is_int_lident lident -> - [%expr Ppx_dhall_type.Dhall_type.Integer] - | Ptyp_constr (lident, []) when is_float_lident lident -> - [%expr Ppx_dhall_type.Dhall_type.Double] - | Ptyp_constr (lident, []) when is_string_lident lident -> - [%expr Ppx_dhall_type.Dhall_type.Text] - | Ptyp_constr (lident, [ ty ]) when is_option_lident lident -> - [%expr Ppx_dhall_type.Dhall_type.Optional [%e dhall_type_of_core_type ty]] - | Ptyp_constr (lident, [ ty ]) when is_list_lident lident -> - [%expr Ppx_dhall_type.Dhall_type.List [%e dhall_type_of_core_type ty]] - | Ptyp_constr ({ txt = Lident id; _ }, []) -> - evar (id ^ "_dhall_type") - | Ptyp_constr ({ txt = Lident id; _ }, params) -> - let dhall_type_fun = evar (id ^ "_dhall_type") in - let args = List.map params ~f:dhall_type_of_core_type in - eapply dhall_type_fun args - | Ptyp_constr ({ txt = Ldot (prefix, nm); _ }, []) -> - let mod_path = Longident.name prefix in - if String.equal nm "t" then evar (mod_path ^ ".dhall_type") - else evar (mod_path ^ "." ^ nm ^ "_dhall_type") - | Ptyp_constr ({ txt = Ldot (prefix, nm); _ }, params) -> - let mod_path = Longident.name prefix in - let dhall_type_fun = - if String.equal nm "t" then evar (mod_path ^ ".dhall_type") - else evar (mod_path ^ "." 
^ nm ^ "_dhall_type") - in - let args = List.map params ~f:dhall_type_of_core_type in - eapply dhall_type_fun args - | Ptyp_var a -> - evar a - | _ -> - Location.raise_errorf ~loc:core_type.ptyp_loc "Unsupported type" - -let dhall_variant_from_constructor_declaration ctor_decl = - let (module Ast_builder) = Ast_builder.make ctor_decl.pcd_name.loc in - let open Ast_builder in - let name = estring @@ String.lowercase ctor_decl.pcd_name.txt in - match ctor_decl.pcd_args with - | Pcstr_tuple [] -> - [%expr [%e name], None] - | Pcstr_tuple [ ty ] -> - [%expr [%e name], Some [%e dhall_type_of_core_type ty]] - | Pcstr_tuple tys -> - let tys_expr = elist (List.map tys ~f:dhall_type_of_core_type) in - [%expr [%e name], Some (List [%e tys_expr])] - | Pcstr_record _ -> - Location.raise_errorf ~loc:ctor_decl.pcd_name.loc - "Records not yet supported" - -let dhall_field_from_label_declaration label_decl = - let (module Ast_builder) = Ast_builder.make label_decl.pld_name.loc in - let open Ast_builder in - let name = - match Attribute.get field_key_attr label_decl with - | Some name -> - estring name - | None -> - estring label_decl.pld_name.txt - in - let ty = dhall_type_of_core_type label_decl.pld_type in - [%expr [%e name], [%e ty]] - -let generate_dhall_type type_decl = - let (module Ast_builder) = Ast_builder.make type_decl.ptype_loc in - let open Ast_builder in - let dhall_type = - match type_decl.ptype_kind with - | Ptype_abstract -> ( - match type_decl.ptype_manifest with - | None -> - Location.raise_errorf ~loc:type_decl.ptype_loc - "Abstract type declaration has no manifest (right-hand side)" - | Some core_type -> - dhall_type_of_core_type core_type ) - | Ptype_variant ctor_decls -> - [%expr - Ppx_dhall_type.Dhall_type.Union - [%e - elist - (List.map ctor_decls - ~f:dhall_variant_from_constructor_declaration )]] - | Ptype_record label_decls -> - [%expr - Ppx_dhall_type.Dhall_type.Record - [%e - elist (List.map label_decls ~f:dhall_field_from_label_declaration)]] - | Ptype_open -> - Location.raise_errorf ~loc:type_decl.ptype_loc - "Open types not supported" - in - let ty_name = - match type_decl.ptype_name.txt with - | "t" -> - pvar "dhall_type" - | nm -> - pvar (nm ^ "_dhall_type") - in - match type_decl.ptype_params with - | [] -> - [%stri let [%p ty_name] = [%e dhall_type]] - | params -> - let args = - List.map params ~f:(fun (core_type, _variance) -> - match core_type.ptyp_desc with - | Ptyp_var a -> - pvar a - | _ -> - Location.raise_errorf ~loc:type_decl.ptype_loc - "Type parameter not a type variable" ) - in - let abs = eabstract args dhall_type in - [%stri let [%p ty_name] = [%e abs]] - -let generate_dhall_types ~loc:_ ~path:_ (_rec_flag, type_decls) = - List.map type_decls ~f:generate_dhall_type - -let attributes = [ Attribute.T field_key_attr ] - -let str_type_decl = - Deriving.Generator.make_noarg ~attributes generate_dhall_types - -let () = Deriving.add deriver ~str_type_decl |> Ppxlib.Deriving.ignore diff --git a/src/lib/ppx_dhall_type/dhall_type.ml b/src/lib/ppx_dhall_type/dhall_type.ml deleted file mode 100644 index 30f0f90ea60..00000000000 --- a/src/lib/ppx_dhall_type/dhall_type.ml +++ /dev/null @@ -1,48 +0,0 @@ -(* dhall_type.ml -- derive a Dhall type from an OCaml type *) - -open Core_kernel - -(* based on https://github.com/dhall-lang/dhall-lang/blob/master/standard/type-inference.md *) -type t = - | Bool - | Natural - | Text - | Integer - | Double - | Optional of t - | List of t - | Record of (string * t) list - | Union of (string * t option) list - | Function of t * t - -let 
rec to_string = function | Bool -> "Bool" | Integer -> "Integer" | Natural -> "Natural" | Text -> "Text" | Double -> "Double" | Optional t -> "Optional (" ^ to_string t ^ ")" | List t -> "List (" ^ to_string t ^ ")" | Record fields -> let field_to_string (nm, ty) = nm ^ " : " ^ to_string ty in let formatted_fields = String.concat ~sep:", " (List.map fields ~f:field_to_string) in "{ " ^ formatted_fields ^ " }" | Union alts -> let alt_to_string (nm, ty_opt) = match ty_opt with None -> nm | Some ty -> nm ^ " : " ^ to_string ty in let formatted_alts = String.concat ~sep:" | " (List.map alts ~f:alt_to_string) in "< " ^ formatted_alts ^ " >" | Function (t_in, t_out) -> to_string t_in ^ " -> " ^ to_string t_out diff --git a/src/lib/ppx_dhall_type/dune b/src/lib/ppx_dhall_type/dune deleted file mode 100644 index 7de391a373a..00000000000 --- a/src/lib/ppx_dhall_type/dune +++ /dev/null @@ -1,8 +0,0 @@ -(library - (name ppx_dhall_type) - (public_name ppx_dhall_type) - (kind ppx_deriver) - (libraries ppxlib core_kernel base) - (preprocessor_deps ../../config.mlh) - (instrumentation (backend bisect_ppx)) - (preprocess (pps ppx_version ppxlib.metaquot))) diff --git a/src/lib/ppx_version/README.md b/src/lib/ppx_version/README.md new file mode 100644 index 00000000000..3d3972c039c --- /dev/null +++ b/src/lib/ppx_version/README.md @@ -0,0 +1,178 @@ +ppx_version +=========== + +The `ppx_version` preprocessor comprises a type deriver, an annotation +for modules, and a syntax linter. + +Type deriver +------------ + +This deriver is meant to be added automatically when using the +%%versioned annotation for `Stable` modules (see below). That code is in +versioned_module.ml + +*** You should not need to add this deriver to the `deriving` list explicitly *** + +The deriver accomplishes these goals: + + 1) check that a versioned type is always in a valid module hierarchy + 2) versioned types depend only on other versioned types or OCaml built-in types + +The usage in type declarations is: + + [@@deriving version] + + or + + [@@deriving version { option }] + +where the option is one of "rpc" or "binable" (mutually +exclusive). For types within signatures, no options are used. + +Within structures, the deriver generates two definitions: + + let version = n + let __versioned__ = () + +where `n` is taken from the surrounding module Vn. + +Within signatures, the deriver generates the definition: + + val __versioned__ : unit + +The purpose of `__versioned__` is to make sure that types referred to +in versioned type definitions are themselves versioned. + +Without options (the common case), the type must be named "t", and its +definition occurs in the module hierarchy "Stable.Vn" or +"Stable.Vn.T", where n is a positive integer. + +The "binable" option asserts that the type is versioned, to allow +compilation to proceed. The types referred to in the type are not +checked for versioning with this option. It assumes that the type will +be serialized using the "Binable.Of_..." or "Make_binable" functors, +which rely on the serialization of some other type. + +If the "rpc" option is given, the type must instead be named "query", "response", +or "msg", and the type definition occurs in the hierarchy "Vn.T". + +Versioned modules +----------------- + +Modules in structures with versioned types are annotated: + + [%%versioned + module Stable = struct + module Vn = struct + type t = ... + let to_latest = ... + end + ...
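+    (* older version modules follow here in descending order, each defining its own to_latest *)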
+ end] + +Within a `Stable` module, there can be arbitrarily many versioned type +modules, which must be listed in descending numeric order (most recent +version first). A versioned module must define `to_latest`, which +takes instances of the type `t` to instances of the most recent type +version. + +Modules in signatures are annotated similarly (note the colon): + + [%%versioned: + module Stable : sig + module Vn : sig + type t = ... + end + ... + end] + +The annotation generates a deriver list for the type that includes +`bin_io` and `version`, which are added to any other deriver items +already listed. + +Just past the most recent Vn, a definition is generated: + + module Latest = Vn + +A type definition is generated just past the `Stable` module: + + type t = Stable.Latest.t + +Sometimes that causes compilation issues, which can be avoided by +adding the annotation `[@@@no_toplevel_latest_type]` at the start of +the `Stable` module, in either structures or signatures. + +For compatibility with older code, there is the annotation: + + [@@@with_all_version_tags] + +Given at the start of a `Vn` module, this annotation generates a module +`Vn.With_all_version_tags`, where the `Bin_prot` functions add the +version number as an integer at the start of the serialization of this +type, and similarly for all versioned types referred to by this type +(which means those referred-to types must also have that +annotation). That mimics the way all types were serialized in the +original Mina mainnet. The representation of some values, like public +keys, relies on the `Bin_prot` serialization, so this annotation is +required in order to maintain that representation. + +A related annotation is: + + [@@@with_top_version_tag] + +Given at the start of a `Stable` module, this annotation generates, for each contained +module `Vn`, another module `Vn.With_top_version_tag`, where the +`Bin_prot` serialization adds the version number at the start of the +type serialization, but does not change the serialization of +referred-to types. That makes it possible to know which version the remainder +of the serialization is for. For example, a transaction id is the +Base64 encoding of the `Bin_prot` serialization of a command. +Therefore, the transaction id contains the information about the +transaction type version used to create it. + +For JSON serialization, there is: + + [@@@with_versioned_json] + +When given at the start of a `Stable` module, for each module `Vn`, if +the `yojson` deriver is used in `Vn.t`, then `Vn.to_yojson` generates: + + `Assoc [("version", `Int n); ("data", ...)] + +Precomputed and extensional blocks, for example, use this version-tagged +JSON, making it possible to know which version of the code produced them. + +Using %%versioned on a `Stable` module generates code that registers a +shape, that is, an instance of `Bin_prot.Shape.t`, for each versioned +type defined in that module. That supports the CLI command +`mina internal dump-type-shapes`, which prints shapes for all versioned +types. + +Syntax linter +------------- + +The linter finds invalid syntax related to type versioning. + +The lint rules: + +- "deriving bin_io" and "deriving version" never appear in types + defined inside functor bodies, except for the `Make_str` functors + used for wire types. + +- otherwise, "bin_io" may appear in a "deriving" attribute only if + "version" also appears in that extension + +- versioned types only appear in versioned type definitions + +- versioned type definitions appear only in %%versioned...
extensions + +- packaged modules, like "(module Foo)", may not be stable-versioned + (but allowed inside %%versioned for + legitimate uses) + +- the constructs "include Stable.Latest" and "include Stable.Vn" are prohibited + + - uses of Binable.Of... and Bin_prot.Utils.Make_binable functors are + always in stable-versioned modules, and always as an argument to + "include" + +- these restrictions are not enforced in inline tests and inline test modules diff --git a/src/lib/ppx_version/lint_version_syntax.ml b/src/lib/ppx_version/lint_version_syntax.ml index b8a770dafd9..4a8f236663f 100644 --- a/src/lib/ppx_version/lint_version_syntax.ml +++ b/src/lib/ppx_version/lint_version_syntax.ml @@ -1,16 +1,4 @@ -(* lint_version_syntax.ml -- static enforcement of syntactic items relating to proper versioning - - - "deriving bin_io" and "deriving version" never appear in types defined inside functor bodies - - otherwise, "bin_io" may appear in a "deriving" attribute only if "version" also appears in that extension - - versioned types only appear in versioned type definitions - - versioned type definitions appear only in %%versioned... extensions - - packaged modules, like "(module Foo)", may not be stable-versioned (but allowed inside %%versioned for - legitimate uses) - - the constructs "include Stable.Latest" and "include Stable.Vn" are prohibited - - uses of Binable.Of... and Bin_prot.Utils.Make_binable functors are always in stable-versioned modules, - and always as an argument to "include" - - restrictions are not enforced in inline tests and inline test modules -*) +(* lint_version_syntax.ml -- static enforcement of syntactic items relating to proper versioning *) open Core_kernel open Ppxlib diff --git a/src/lib/ppx_version/test/Makefile b/src/lib/ppx_version/test/Makefile index 34022911e85..43d2df4c4ee 100644 --- a/src/lib/ppx_version/test/Makefile +++ b/src/lib/ppx_version/test/Makefile @@ -11,14 +11,13 @@ endif .PHONY: positive-tests negative-tests -# all : positive-tests negative-tests -all : negative-tests +all : positive-tests negative-tests positive-tests : # version syntax @ echo -n "Version syntax, should succeed..." - dune build good_version_syntax.cma ${REDIRECT} - echo "OK" + @ dune build good_version_syntax.cma ${REDIRECT} + @ echo "OK" # versioning @ echo -n "Versioned types, should succeed..." @ dune build versioned_good.cma ${REDIRECT} diff --git a/src/lib/ppx_version/test/README.md b/src/lib/ppx_version/test/README.md new file mode 100644 index 00000000000..09da230b3b1 --- /dev/null +++ b/src/lib/ppx_version/test/README.md @@ -0,0 +1,33 @@ +ppx_version tests +================= + +These are tests for the basic features of ppx_version. + +There are "positive" tests, where the syntax should be accepted, and +"negative" tests, where the syntax should be rejected. + +Disabling vendoring +------------------- + +*** IMPORTANT *** + +Before running these tests, *temporarily* comment out the +`vendored_dirs` clause in the dune file in the directory above this +one: + + ; (vendored_dirs test) + +If left in place, that clause would prevent the ppx_version linter warnings from +taking effect in the negative tests, so those tests would not fail as expected. + +Running the tests +----------------- + +Run `make` to run all tests. There are also separate targets +"positive-tests" and "negative-tests". + +The negative tests succeed if the dune build fails, but the failures +may occur for reasons other than the expected ones. Ordinarily, +the test output is suppressed.
By setting the VERBOSE environment +variable, the output is shown, in order to make sure the failures +are as expected. diff --git a/src/lib/ppx_version/test/dune b/src/lib/ppx_version/test/dune index 7475aa2bd9c..50762eacc8d 100644 --- a/src/lib/ppx_version/test/dune +++ b/src/lib/ppx_version/test/dune @@ -1,5 +1,6 @@ -;;; each library below has an identical preprocess clause, because of this -;;; dune bug: https://github.com/ocaml/dune/issues/1946 +(env + (_ + (flags (:standard -warn-error @22)))) ;;; should succeed @@ -7,27 +8,27 @@ (library (name good_version_syntax) (preprocess (pps ppx_jane ppx_version ppx_deriving_yojson)) - (libraries base.caml core_kernel bin_prot.shape) + (libraries base.caml core_kernel bin_prot.shape ppx_version.runtime) (modules good_version_syntax)) ;; versioning (library (name versioned_good) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_good)) ;; module versioning (executable (name versioned_module_good) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_module_good)) (executable (name versioned_sig_good) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_sig_good)) ;;; should fail @@ -37,25 +38,25 @@ (library (name bad_version_syntax_missing_versioned) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules bad_version_syntax_missing_versioned)) (library (name bad_versioned_in_functor) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules bad_versioned_in_functor)) (library (name bad_versioned_in_nested_functor) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules bad_versioned_in_nested_functor)) (library (name bad_version_syntax_multiple_errors) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules bad_version_syntax_multiple_errors)) ;; versioning @@ -63,66 +64,66 @@ (library (name versioned_bad_module_name) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_bad_module_name)) (library (name versioned_bad_version_name) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_bad_version_name)) (library (name versioned_bad_type_name) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml 
core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_bad_type_name)) (library (name versioned_bad_option) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_bad_option)) (library (name versioned_bad_contained_types) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_bad_contained_types)) (library (name versioned_bad_arrow_type) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_bad_arrow_type)) ;; module versioning (library (name versioned_module_bad_stable_name) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_module_bad_stable_name)) (library (name versioned_module_bad_version_name) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_module_bad_version_name)) (library (name versioned_module_bad_missing_type) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_module_bad_missing_type)) (library (name versioned_module_bad_version_order) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_module_bad_version_order)) (library (name versioned_module_bad_missing_to_latest) (preprocess (pps ppx_jane ppx_deriving_yojson ppx_version)) - (libraries base.caml core_kernel bin_prot.shape sexplib0) + (libraries base.caml core_kernel bin_prot.shape sexplib0 ppx_version.runtime) (modules versioned_module_bad_missing_to_latest)) diff --git a/src/lib/ppx_version/versioned_module.ml b/src/lib/ppx_version/versioned_module.ml index 62a7763f45f..e63e0fecd19 100644 --- a/src/lib/ppx_version/versioned_module.ml +++ b/src/lib/ppx_version/versioned_module.ml @@ -1,3 +1,5 @@ +(* versioned_module.ml -- modules with versioned types *) + open Core_kernel open Ppxlib open Versioned_util diff --git a/src/lib/ppx_version/versioned_type.ml b/src/lib/ppx_version/versioned_type.ml index b817967fd51..3808c43d30c 100644 --- a/src/lib/ppx_version/versioned_type.ml +++ b/src/lib/ppx_version/versioned_type.ml @@ -1,51 +1,4 @@ -(* versioned_types.ml -- static enforcement of versioned types via ppx *) - -(* If the dune profile defines "print_versioned_types" to be true, this deriver - prints a representation of each versioned type to stdout. The driver "print_versioned_types" - can be used to print the types from a particular OCaml source file. This facility is - meant to be used in CI to detect changes to versioned types. 
- - Otherwise, we use this deriver as follows: - - 1) check that versioned type always in valid module hierarchy - 2) versioned types depend only on other versioned types or OCaml built-in types - - to use, add coda_ppx to the dune pps list, and annotate a type declaration with - either - - [@@deriving version] - - or - - [@@deriving version { option }] - - where option is one of "rpc" or "binable". - - Without options (the common case), the type must be named "t", and its definition - occurs in the module hierarchy "Stable.Vn" or "Stable.Vn.T", where n is a positive integer. - - The "binable" option asserts that the type is versioned, to allow compilation - to proceed. The types referred to in the type are not checked for versioning - with this option. It assumes that the type will be serialized using a - "Binable.Of_..." or "Make_binable" functors, which relies on the serialization of - some other type. - - If "rpc" is true, again, the type must be named "query", "response", or "msg", - and the type definition occurs in the hierarchy "Vn.T". - - All these options are available for types within structures. - - Within signatures, the declaration - - val __versioned__ : unit - - is generated. If the "numbered" option is given, then - - val version : int - - is also generated. This option should be needed only by the internal versioning - machinery, and not in ordinary code. No other options are available within signatures. -*) +(* versioned_types.ml -- deriver for versioned types *) open Core_kernel open Ppxlib @@ -53,535 +6,399 @@ open Versioned_util let deriver = "version" -let printing_ref = ref false - -let set_printing () = printing_ref := true - -let unset_printing () = printing_ref := false - (* path is filename.ml.M1.M2.... *) let module_path_list path = List.drop (String.split path ~on:'.') 2 -(* print versioned types *) -module Printing = struct - let contains_deriving_bin_io (attrs : attributes) = - let derivers = - Ast_pattern.( - attribute ~name:(string "deriving") ~payload:(single_expr_payload __)) - in - match - List.find_map attrs ~f:(fun attr -> - parse_opt derivers Location.none attr (fun l -> Some l) ) - with - | Some derivers -> - let derivers = - match derivers.pexp_desc with - | Pexp_tuple derivers -> - derivers - | _ -> - [ derivers ] - in - let bin_io_pattern = - Ast_pattern.(pexp_ident (lident (string "bin_io"))) - in - List.exists derivers ~f:(fun deriver -> - Option.is_some - @@ parse_opt bin_io_pattern Location.none deriver (Some ()) ) - | None -> - false - - (* singleton attribute *) - let just_bin_io = - let module E = Ppxlib.Ast_builder.Make (struct - let loc = Location.none - end) in - let open E in - { attr_name = { txt = "deriving"; loc } - ; attr_payload = PStr [%str bin_io] - ; attr_loc = Location.none - } - - (* remove internal attributes, on core type in manifest and in records or variants in kind *) - let type_decl_remove_internal_attributes type_decl = - let removed_in_kind = - match type_decl.ptype_kind with - | Ptype_variant ctors -> - Ptype_variant - (List.map ctors ~f:(fun ctor -> { ctor with pcd_attributes = [] })) - | Ptype_record labels -> - Ptype_record - (List.map labels ~f:(fun label -> - { label with pld_attributes = [] } ) ) - | kind -> - kind - in - let removed_in_manifest = - Option.map type_decl.ptype_manifest ~f:(fun core_type -> - { core_type with ptyp_attributes = [] } ) - in - { type_decl with - ptype_manifest = removed_in_manifest - ; ptype_kind = removed_in_kind - } - - (* filter attributes from types, except for bin_io, don't 
care about changes to others *) - let filter_type_decls_attrs type_decl = - (* retain only `deriving bin_io` in deriving list *) - let ptype_attributes = - if contains_deriving_bin_io type_decl.ptype_attributes then - [ just_bin_io ] - else [] - in - let type_decl_no_attrs = type_decl_remove_internal_attributes type_decl in - { type_decl_no_attrs with ptype_attributes } - - (* remove manifests from non-abstract types, so these print the same: - type t = Quux.t = Foo | Bar - type t = Foo | Bar - *) - let filter_type_manifests type_decl = - match type_decl.ptype_kind with - | Ptype_abstract | Ptype_open -> - type_decl - | Ptype_variant _ | Ptype_record _ -> - { type_decl with ptype_manifest = None } - - (* convert type_decls to structure item so we can print it *) - let type_decls_to_stri type_decls = - (* type derivers only work with recursive types *) - { pstr_desc = Pstr_type (Ast.Recursive, type_decls) - ; pstr_loc = Location.none - } - - (* prints path_to_type:type_definition *) - let print_type ~loc:_ ~path (_rec_flag, type_decls) _rpc _binable = - let module_path = module_path_list path in - let path_len = List.length module_path in - List.iteri module_path ~f:(fun i s -> - printf "%s" s ; - if i < path_len - 1 then printf "." ) ; - printf ".%s" (List.hd_exn type_decls).ptype_name.txt ; - printf ":%!" ; - let type_decls_filtered_attrs = - List.map type_decls ~f:filter_type_decls_attrs - in - let type_decls_filtered_manifests = - List.map type_decls_filtered_attrs ~f:filter_type_manifests - in - let stri = type_decls_to_stri type_decls_filtered_manifests in - let formatter = Versioned_util.diff_formatter Format.std_formatter in - Pprintast.structure_item formatter stri ; - Format.pp_print_flush formatter () ; - printf "\n%!" ; - [] +type generation_kind = Plain | Rpc - (* we're worried about changes to the serialization of types, which can occur via changes to implementations, - so nothing to do for signatures - *) - let gen_empty_sig ~loc:_ ~path:_ (_rec_flag, _type_decls) = [] -end - -(* real derivers *) -module Deriving = struct - type generation_kind = Plain | Rpc - - let validate_rpc_type_decl inner3_modules type_decl = - match List.take inner3_modules 2 with - | [ "T"; module_version ] -> - validate_module_version module_version type_decl.ptype_loc - | _ -> - Location.raise_errorf ~loc:type_decl.ptype_loc - "Versioned RPC type must be contained in module path Vn.T, for some \ - number n" - - let validate_plain_type_decl inner3_modules type_decl = - match inner3_modules with - | [ "T"; module_version; "Stable" ] | module_version :: "Stable" :: _ -> - validate_module_version module_version type_decl.ptype_loc - | _ -> - Location.raise_errorf ~loc:type_decl.ptype_loc - "Versioned type must be contained in module path Stable.Vn or \ - Stable.Vn.T, for some number n" +let validate_rpc_type_decl inner3_modules type_decl = + match List.take inner3_modules 2 with + | [ "T"; module_version ] -> + validate_module_version module_version type_decl.ptype_loc + | _ -> + Location.raise_errorf ~loc:type_decl.ptype_loc + "Versioned RPC type must be contained in module path Vn.T, for some \ + number n" + +let validate_plain_type_decl inner3_modules type_decl = + match inner3_modules with + | [ "T"; module_version; "Stable" ] | module_version :: "Stable" :: _ -> + validate_module_version module_version type_decl.ptype_loc + | _ -> + Location.raise_errorf ~loc:type_decl.ptype_loc + "Versioned type must be contained in module path Stable.Vn or \ + Stable.Vn.T, for some number n" - (* check that a 
versioned type occurs in valid module hierarchy and is named "t" - (for RPC types, the name can be "query", "response", or "msg") - *) - let validate_type_decl inner3_modules generation_kind type_decl = - let name = type_decl.ptype_name.txt in - let loc = type_decl.ptype_name.loc in +(* check that a versioned type occurs in valid module hierarchy and is named "t" + (for RPC types, the name can be "query", "response", or "msg") +*) +let validate_type_decl inner3_modules generation_kind type_decl = + let name = type_decl.ptype_name.txt in + let loc = type_decl.ptype_name.loc in + match generation_kind with + | Rpc -> + let rpc_valid_names = [ "query"; "response"; "msg" ] in + if + List.find rpc_valid_names ~f:(fun ty -> String.equal ty name) + |> Option.is_none + then + Location.raise_errorf ~loc + "RPC versioned type must be named one of \"%s\", got: \"%s\"" + (String.concat ~sep:"," rpc_valid_names) + name ; + validate_rpc_type_decl inner3_modules type_decl + | Plain -> + let valid_name = "t" in + if not (String.equal name valid_name) then + Location.raise_errorf ~loc + "Versioned type must be named \"%s\", got: \"%s\"" valid_name name ; + validate_plain_type_decl inner3_modules type_decl + +(* module structure in this case validated by linter *) + +let module_name_from_plain_path inner3_modules = + match inner3_modules with + | [ "T"; module_version; "Stable" ] | module_version :: "Stable" :: _ -> + module_version + | _ -> + failwith "module_name_from_plain_path: unexpected module path" + +let module_name_from_rpc_path inner3_modules = + match List.take inner3_modules 2 with + | [ "T"; module_version ] -> + module_version + | _ -> + failwith "module_name_from_rpc_path: unexpected module path" + +(* generate "let version = n", when version module is Vn *) +let generate_version_number_decl inner3_modules loc generation_kind = + (* invariant: we've checked module name already *) + let module E = Ppxlib.Ast_builder.Make (struct + let loc = loc + end) in + let open E in + let module_name = match generation_kind with - | Rpc -> - let rpc_valid_names = [ "query"; "response"; "msg" ] in - if - List.find rpc_valid_names ~f:(fun ty -> String.equal ty name) - |> Option.is_none - then - Location.raise_errorf ~loc - "RPC versioned type must be named one of \"%s\", got: \"%s\"" - (String.concat ~sep:"," rpc_valid_names) - name ; - validate_rpc_type_decl inner3_modules type_decl | Plain -> - let valid_name = "t" in - if not (String.equal name valid_name) then - Location.raise_errorf ~loc - "Versioned type must be named \"%s\", got: \"%s\"" valid_name name ; - validate_plain_type_decl inner3_modules type_decl - - (* module structure in this case validated by linter *) - - let module_name_from_plain_path inner3_modules = - match inner3_modules with - | [ "T"; module_version; "Stable" ] | module_version :: "Stable" :: _ -> - module_version - | _ -> - failwith "module_name_from_plain_path: unexpected module path" - - let module_name_from_rpc_path inner3_modules = - match List.take inner3_modules 2 with - | [ "T"; module_version ] -> - module_version + module_name_from_plain_path inner3_modules + | Rpc -> + module_name_from_rpc_path inner3_modules + in + let version = version_of_versioned_module_name module_name in + [%str + let version = [%e eint version] + + (* to prevent unused value warnings *) + let (_ : _) = version] + +let ocaml_builtin_types = + [ "bytes" + ; "int" + ; "int32" + ; "int64" + ; "float" + ; "char" + ; "string" + ; "bool" + ; "unit" + ] + +let ocaml_builtin_type_constructors = [ "list"; 
"array"; "option"; "ref" ] + +(* true iff module_path is of form M. ... .Stable.Vn, where M is Core or Core_kernel, and n is integer *) +let is_jane_street_stable_module module_path = + let hd_elt = List.hd_exn module_path in + List.mem jane_street_modules hd_elt ~equal:String.equal + && + match List.rev module_path with + | vn :: "Stable" :: _ -> + Versioned_util.is_version_module vn + | vn :: label :: "Stable" :: "Time" :: _ + when List.mem [ "Span"; "With_utc_sexp" ] label ~equal:String.equal -> + (* special cases, maybe improper module structure *) + is_version_module vn + | _ -> + false + +let trustlisted_prefix prefix ~loc = + match prefix with + | Lident id -> + String.equal id "Bitstring" + | Ldot _ -> + let module_path = Longident.flatten_exn prefix in + is_jane_street_stable_module module_path + | Lapply _ -> + Location.raise_errorf ~loc "Type name contains unexpected application" + +(* disallow Stable.Latest types in versioned types *) + +let is_stable_latest = + let is_longident_with_id id = function + | Lident s when String.equal id s -> + true + | Ldot (_lident, s) when String.equal id s -> + true | _ -> - failwith "module_name_from_rpc_path: unexpected module path" - - (* generate "let version = n", when version module is Vn *) - let generate_version_number_decl inner3_modules loc generation_kind = - (* invariant: we've checked module name already *) - let module E = Ppxlib.Ast_builder.Make (struct - let loc = loc - end) in - let open E in - let module_name = - match generation_kind with - | Plain -> - module_name_from_plain_path inner3_modules - | Rpc -> - module_name_from_rpc_path inner3_modules - in - let version = version_of_versioned_module_name module_name in - [%str - let version = [%e eint version] - - (* to prevent unused value warnings *) - let (_ : _) = version] - - let ocaml_builtin_types = - [ "bytes" - ; "int" - ; "int32" - ; "int64" - ; "float" - ; "char" - ; "string" - ; "bool" - ; "unit" - ] - - let ocaml_builtin_type_constructors = [ "list"; "array"; "option"; "ref" ] - - (* true iff module_path is of form M. ... 
.Stable.Vn, where M is Core or Core_kernel, and n is integer *) - let is_jane_street_stable_module module_path = - let hd_elt = List.hd_exn module_path in - List.mem jane_street_modules hd_elt ~equal:String.equal + false + in + let is_stable = is_longident_with_id "Stable" in + let is_latest = is_longident_with_id "Latest" in + fun prefix -> + is_latest prefix && - match List.rev module_path with - | vn :: "Stable" :: _ -> - Versioned_util.is_version_module vn - | vn :: label :: "Stable" :: "Time" :: _ - when List.mem [ "Span"; "With_utc_sexp" ] label ~equal:String.equal -> - (* special cases, maybe improper module structure *) - is_version_module vn + match prefix with + | Ldot (lident, _) when is_stable lident -> + true | _ -> false - let trustlisted_prefix prefix ~loc = - match prefix with - | Lident id -> - String.equal id "Bitstring" - | Ldot _ -> - let module_path = Longident.flatten_exn prefix in - is_jane_street_stable_module module_path - | Lapply _ -> - Location.raise_errorf ~loc "Type name contains unexpected application" - - (* disallow Stable.Latest types in versioned types *) - - let is_stable_latest = - let is_longident_with_id id = function - | Lident s when String.equal id s -> - true - | Ldot (_lident, s) when String.equal id s -> - true - | _ -> - false - in - let is_stable = is_longident_with_id "Stable" in - let is_latest = is_longident_with_id "Latest" in - fun prefix -> - is_latest prefix - && - match prefix with - | Ldot (lident, _) when is_stable lident -> - true - | _ -> - false - - let rec generate_core_type_version_decls type_name core_type = - let version_asserted_str = "version_asserted" in - match core_type.ptyp_desc with - | Ptyp_constr ({ txt; _ }, core_types) -> ( - match txt with - | Lident id -> - (* type t = id *) - if String.equal id type_name (* recursion *) then [] - else if - List.is_empty core_types - && List.mem ocaml_builtin_types id ~equal:String.equal - then (* no versioning to worry about *) - [] - else if - List.mem ocaml_builtin_type_constructors id ~equal:String.equal - then - match core_types with - | [ _ ] -> - generate_version_lets_for_core_types type_name core_types +let rec generate_core_type_version_decls type_name core_type = + let version_asserted_str = "version_asserted" in + match core_type.ptyp_desc with + | Ptyp_constr ({ txt; _ }, core_types) -> ( + match txt with + | Lident id -> + (* type t = id *) + if String.equal id type_name (* recursion *) then [] + else if + List.is_empty core_types + && List.mem ocaml_builtin_types id ~equal:String.equal + then (* no versioning to worry about *) + [] + else if + List.mem ocaml_builtin_type_constructors id ~equal:String.equal + then + match core_types with + | [ _ ] -> + generate_version_lets_for_core_types type_name core_types + | _ -> + Location.raise_errorf ~loc:core_type.ptyp_loc + "Type constructor \"%s\" expects one type argument, got %d" id + (List.length core_types) + else + Location.raise_errorf ~loc:core_type.ptyp_loc + "\"%s\" is neither an OCaml type constructor nor a versioned type" + id + | Ldot (prefix, "t") -> + (* type t = A.B.t + if prefix not trustlisted, generate: let _ = A.B.__versioned__ + disallow Stable.Latest.t + *) + if is_stable_latest prefix then + Location.raise_errorf ~loc:core_type.ptyp_loc + "Cannot use type of the form Stable.Latest.t within a versioned \ + type" ; + let core_type_decls = + generate_version_lets_for_core_types type_name core_types + in + (* type t = M.t [@version_asserted] *) + let version_asserted = + List.find core_type.ptyp_attributes 
~f:(fun attr -> + String.equal attr.attr_name.txt version_asserted_str ) + |> Option.is_some + in + if + version_asserted + || trustlisted_prefix prefix ~loc:core_type.ptyp_loc + then core_type_decls + else + let loc = core_type.ptyp_loc in + let pexp_loc = loc in + let new_prefix = + (* allow types within stable-versioned modules generated + by Hashable.Make_binable, like M.Stable.Vn.Table.t; + generate "let _ = M.Stable.Vn.__versioned__" + *) + match prefix with + | Ldot ((Ldot (_, vn) as longident), label) + when is_version_module vn + && List.mem + [ "Table"; "Hash_set"; "Hash_queue" ] + label ~equal:String.equal -> + longident | _ -> - Location.raise_errorf ~loc:core_type.ptyp_loc - "Type constructor \"%s\" expects one type argument, got %d" - id (List.length core_types) - else - Location.raise_errorf ~loc:core_type.ptyp_loc - "\"%s\" is neither an OCaml type constructor nor a versioned \ - type" - id - | Ldot (prefix, "t") -> - (* type t = A.B.t - if prefix not trustlisted, generate: let _ = A.B.__versioned__ - disallow Stable.Latest.t - *) - if is_stable_latest prefix then - Location.raise_errorf ~loc:core_type.ptyp_loc - "Cannot use type of the form Stable.Latest.t within a \ - versioned type" ; - let core_type_decls = - generate_version_lets_for_core_types type_name core_types + prefix in - (* type t = M.t [@version_asserted] *) - let version_asserted = - List.find core_type.ptyp_attributes ~f:(fun attr -> - String.equal attr.attr_name.txt version_asserted_str ) - |> Option.is_some + let versioned_ident = + { pexp_desc = + Pexp_ident { txt = Ldot (new_prefix, "__versioned__"); loc } + ; pexp_loc + ; pexp_loc_stack = [] + ; pexp_attributes = [] + } in - if - version_asserted - || trustlisted_prefix prefix ~loc:core_type.ptyp_loc - then core_type_decls - else - let loc = core_type.ptyp_loc in - let pexp_loc = loc in - let new_prefix = - (* allow types within stable-versioned modules generated - by Hashable.Make_binable, like M.Stable.Vn.Table.t; - generate "let _ = M.Stable.Vn.__versioned__" - *) - match prefix with - | Ldot ((Ldot (_, vn) as longident), label) - when is_version_module vn - && List.mem - [ "Table"; "Hash_set"; "Hash_queue" ] - label ~equal:String.equal -> - longident - | _ -> - prefix - in - let versioned_ident = - { pexp_desc = - Pexp_ident { txt = Ldot (new_prefix, "__versioned__"); loc } - ; pexp_loc - ; pexp_loc_stack = [] - ; pexp_attributes = [] - } - in - [%str let (_ : _) = [%e versioned_ident]] @ core_type_decls - | _ -> - Location.raise_errorf ~loc:core_type.ptyp_loc - "Unrecognized type constructor for versioned type" ) - | Ptyp_tuple core_types -> - (* type t = t1 * t2 * t3 *) - generate_version_lets_for_core_types type_name core_types - | Ptyp_variant _ -> - (* type t = [ `A | `B ] *) - [] - | Ptyp_var _ -> - (* type variable *) - [] - | Ptyp_any -> - (* underscore *) - [] - | _ -> - Location.raise_errorf ~loc:core_type.ptyp_loc - "Can't determine versioning for contained type" - - and generate_version_lets_for_core_types type_name core_types = - List.fold_right core_types ~init:[] ~f:(fun core_type accum -> - generate_core_type_version_decls type_name core_type @ accum ) - - let generate_version_lets_for_label_decls type_name label_decls = - generate_version_lets_for_core_types type_name - (List.map label_decls ~f:(fun lab_decl -> lab_decl.pld_type)) - - let generate_constructor_decl_decls type_name ctor_decl = - let result_lets = - match ctor_decl.pcd_res with - | None -> - [] - | Some res -> - (* for GADTs, check versioned-ness of parameters to 
result type *) - let ty_params = - match res.ptyp_desc with - | Ptyp_constr (_, params) -> - params - | _ -> - failwith - "generate_constructor_decl_decls: expected type parameter \ - list" - in - generate_version_lets_for_core_types type_name ty_params - in - match ctor_decl.pcd_args with - | Pcstr_tuple core_types -> - (* C of T1 * ... * Tn, or GADT C : T1 -> T2 *) - let arg_lets = - generate_version_lets_for_core_types type_name core_types - in - arg_lets @ result_lets - | Pcstr_record label_decls -> - (* C of { ... }, or GADT C : { ... } -> T *) - let arg_lets = - generate_version_lets_for_label_decls type_name label_decls - in - arg_lets @ result_lets + [%str let (_ : _) = [%e versioned_ident]] @ core_type_decls + | _ -> + Location.raise_errorf ~loc:core_type.ptyp_loc + "Unrecognized type constructor for versioned type" ) + | Ptyp_tuple core_types -> + (* type t = t1 * t2 * t3 *) + generate_version_lets_for_core_types type_name core_types + | Ptyp_variant _ -> + (* type t = [ `A | `B ] *) + [] + | Ptyp_var _ -> + (* type variable *) + [] + | Ptyp_any -> + (* underscore *) + [] + | _ -> + Location.raise_errorf ~loc:core_type.ptyp_loc + "Can't determine versioning for contained type" - let generate_constraint_type_decls type_name cstrs = - let gen_for_constraint (ty1, ty2, _loc) = - List.concat_map [ ty1; ty2 ] - ~f:(generate_core_type_version_decls type_name) - in - List.concat_map cstrs ~f:gen_for_constraint +and generate_version_lets_for_core_types type_name core_types = + List.fold_right core_types ~init:[] ~f:(fun core_type accum -> + generate_core_type_version_decls type_name core_type @ accum ) - let generate_contained_type_version_decls type_decl = - let type_name = type_decl.ptype_name.txt in - let constraint_type_version_decls = - generate_constraint_type_decls type_decl.ptype_name.txt - type_decl.ptype_cstrs - in - let main_type_version_decls = - match type_decl.ptype_kind with - | Ptype_abstract -> ( - match type_decl.ptype_manifest with - | Some manifest -> - generate_core_type_version_decls type_name manifest - | None -> - Location.raise_errorf ~loc:type_decl.ptype_loc - "Versioned type, not a label or variant, must have manifest \ - (right-hand side)" ) - | Ptype_variant ctor_decls -> - List.fold ctor_decls ~init:[] ~f:(fun accum ctor_decl -> - generate_constructor_decl_decls type_name ctor_decl @ accum ) - | Ptype_record label_decls -> - generate_version_lets_for_label_decls type_name label_decls - | Ptype_open -> - Location.raise_errorf ~loc:type_decl.ptype_loc - "Versioned type may not be open" - in - constraint_type_version_decls @ main_type_version_decls - - let generate_versioned_decls ~binable generation_kind type_decl = - let module E = Ppxlib.Ast_builder.Make (struct - let loc = type_decl.ptype_loc - end) in - let open E in - let versioned_current = [%stri let __versioned__ = ()] in - if binable then [ versioned_current ] - else - match generation_kind with - | Rpc -> - (* check whether contained types are versioned, - but don't assert versioned-ness of this type *) - generate_contained_type_version_decls type_decl - | Plain -> - (* check contained types, assert this type is versioned *) - versioned_current :: generate_contained_type_version_decls type_decl - - let get_type_decl_representative type_decls = - match type_decls with - | [ type_decl1 ] -> - type_decl1 - | type_decl1 :: type_decls -> - let type_decl2 = List.hd_exn (List.rev type_decls) in - let loc = - { loc_start = type_decl1.ptype_loc.loc_start - ; loc_end = type_decl2.ptype_loc.loc_end - ; 
loc_ghost = true - } +let generate_version_lets_for_label_decls type_name label_decls = + generate_version_lets_for_core_types type_name + (List.map label_decls ~f:(fun lab_decl -> lab_decl.pld_type)) + +let generate_constructor_decl_decls type_name ctor_decl = + let result_lets = + match ctor_decl.pcd_res with + | None -> + [] + | Some res -> + (* for GADTs, check versioned-ness of parameters to result type *) + let ty_params = + match res.ptyp_desc with + | Ptyp_constr (_, params) -> + params + | _ -> + failwith + "generate_constructor_decl_decls: expected type parameter list" in - Location.raise_errorf ~loc - "Versioned type must be just one type \"t\", not a sequence of types" - | [] -> - assert false - (* assumed to not be possible *) - - let generate_let_bindings_for_type_decl_str ~loc ~path (_rec_flag, type_decls) - rpc binable = - let type_decl = get_type_decl_representative type_decls in - if binable && rpc then - Location.raise_errorf ~loc:type_decl.ptype_loc - "Options \"binable\" and \"rpc\" cannot be combined" ; - let generation_kind = if rpc then Rpc else Plain in - let module_path = module_path_list path in - let inner3_modules = List.take (List.rev module_path) 3 in - (* TODO: when Module_version.Registration goes away, remove - the empty list special case - *) - if List.is_empty inner3_modules then - (* module path doesn't seem to be tracked inside test module *) - [] - else ( - validate_type_decl inner3_modules generation_kind type_decl ; - let versioned_decls = - generate_versioned_decls ~binable generation_kind type_decl + generate_version_lets_for_core_types type_name ty_params + in + match ctor_decl.pcd_args with + | Pcstr_tuple core_types -> + (* C of T1 * ... * Tn, or GADT C : T1 -> T2 *) + let arg_lets = + generate_version_lets_for_core_types type_name core_types in - let type_name = type_decl.ptype_name.txt in - (* generate version number for Rpc response, but not for query, so we - don't get an unused value - *) - match generation_kind with - | Rpc when String.equal type_name "query" -> - versioned_decls - | _ -> - generate_version_number_decl inner3_modules loc generation_kind - @ versioned_decls ) + arg_lets @ result_lets + | Pcstr_record label_decls -> + (* C of { ... }, or GADT C : { ... 
} -> T *) + let arg_lets = + generate_version_lets_for_label_decls type_name label_decls + in + arg_lets @ result_lets + +let generate_constraint_type_decls type_name cstrs = + let gen_for_constraint (ty1, ty2, _loc) = + List.concat_map [ ty1; ty2 ] ~f:(generate_core_type_version_decls type_name) + in + List.concat_map cstrs ~f:gen_for_constraint - let generate_val_decls_for_type_decl ~loc type_decl = +let generate_contained_type_version_decls type_decl = + let type_name = type_decl.ptype_name.txt in + let constraint_type_version_decls = + generate_constraint_type_decls type_decl.ptype_name.txt + type_decl.ptype_cstrs + in + let main_type_version_decls = match type_decl.ptype_kind with - (* the structure of the type doesn't affect what we generate for signatures *) - | Ptype_abstract | Ptype_variant _ | Ptype_record _ -> - [ [%sigi: val __versioned__ : unit] ] + | Ptype_abstract -> ( + match type_decl.ptype_manifest with + | Some manifest -> + generate_core_type_version_decls type_name manifest + | None -> + Location.raise_errorf ~loc:type_decl.ptype_loc + "Versioned type, not a label or variant, must have manifest \ + (right-hand side)" ) + | Ptype_variant ctor_decls -> + List.fold ctor_decls ~init:[] ~f:(fun accum ctor_decl -> + generate_constructor_decl_decls type_name ctor_decl @ accum ) + | Ptype_record label_decls -> + generate_version_lets_for_label_decls type_name label_decls | Ptype_open -> - (* but the type can't be open, else it might vary over time *) - Location.raise_errorf ~loc - "Versioned type in a signature must not be open" - - let generate_val_decls_for_type_decl_sig ~loc ~path:_ (_rec_flag, type_decls) - = - (* in a signature, the module path may vary *) - let type_decl = get_type_decl_representative type_decls in - generate_val_decls_for_type_decl ~loc type_decl -end - -(* at preprocessing time, choose between printing, deriving derivers *) -let choose_deriver ~printing ~deriving = - if !printing_ref then printing else deriving + Location.raise_errorf ~loc:type_decl.ptype_loc + "Versioned type may not be open" + in + constraint_type_version_decls @ main_type_version_decls + +let generate_versioned_decls ~binable generation_kind type_decl = + let module E = Ppxlib.Ast_builder.Make (struct + let loc = type_decl.ptype_loc + end) in + let open E in + let versioned_current = [%stri let __versioned__ = ()] in + if binable then [ versioned_current ] + else + match generation_kind with + | Rpc -> + (* check whether contained types are versioned, + but don't assert versioned-ness of this type *) + generate_contained_type_version_decls type_decl + | Plain -> + (* check contained types, assert this type is versioned *) + versioned_current :: generate_contained_type_version_decls type_decl + +let get_type_decl_representative type_decls = + match type_decls with + | [ type_decl1 ] -> + type_decl1 + | type_decl1 :: type_decls -> + let type_decl2 = List.hd_exn (List.rev type_decls) in + let loc = + { loc_start = type_decl1.ptype_loc.loc_start + ; loc_end = type_decl2.ptype_loc.loc_end + ; loc_ghost = true + } + in + Location.raise_errorf ~loc + "Versioned type must be just one type \"t\", not a sequence of types" + | [] -> + (* assumed not possible *) + assert false + +let generate_let_bindings_for_type_decl_str ~loc ~path (_rec_flag, type_decls) + rpc binable = + let type_decl = get_type_decl_representative type_decls in + if binable && rpc then + Location.raise_errorf ~loc:type_decl.ptype_loc + "Options \"binable\" and \"rpc\" cannot be combined" ; + let generation_kind = if rpc 
then Rpc else Plain in + let module_path = module_path_list path in + let inner3_modules = List.take (List.rev module_path) 3 in + (* TODO: when Module_version.Registration goes away, remove + the empty list special case + *) + if List.is_empty inner3_modules then + (* module path doesn't seem to be tracked inside test module *) + [] + else ( + validate_type_decl inner3_modules generation_kind type_decl ; + let versioned_decls = + generate_versioned_decls ~binable generation_kind type_decl + in + let type_name = type_decl.ptype_name.txt in + (* generate version number for Rpc response, but not for query, so we + don't get an unused value + *) + match generation_kind with + | Rpc when String.equal type_name "query" -> + versioned_decls + | _ -> + generate_version_number_decl inner3_modules loc generation_kind + @ versioned_decls ) + +let generate_val_decls_for_type_decl ~loc type_decl = + match type_decl.ptype_kind with + (* the structure of the type doesn't affect what we generate for signatures *) + | Ptype_abstract | Ptype_variant _ | Ptype_record _ -> + [ [%sigi: val __versioned__ : unit] ] + | Ptype_open -> + (* but the type can't be open, else it might vary over time *) + Location.raise_errorf ~loc + "Versioned type in a signature must not be open" + +let generate_val_decls_for_type_decl_sig ~loc ~path:_ (_rec_flag, type_decls) = + (* in a signature, the module path may vary *) + let type_decl = get_type_decl_representative type_decls in + generate_val_decls_for_type_decl ~loc type_decl let str_type_decl : (structure, rec_flag * type_declaration list) Ppxlib.Deriving.Generator.t = @@ -590,18 +407,15 @@ let str_type_decl : empty +> flag "rpc" +> flag "binable" in let deriver ~loc ~path (rec_flag, type_decls) rpc binable = - (choose_deriver ~printing:Printing.print_type - ~deriving:Deriving.generate_let_bindings_for_type_decl_str ) - ~loc ~path (rec_flag, type_decls) rpc binable + generate_let_bindings_for_type_decl_str ~loc ~path (rec_flag, type_decls) + rpc binable in Ppxlib.Deriving.Generator.make args deriver let sig_type_decl : (signature, rec_flag * type_declaration list) Ppxlib.Deriving.Generator.t = let deriver ~loc ~path (rec_flag, type_decls) = - (choose_deriver ~printing:Printing.gen_empty_sig - ~deriving:Deriving.generate_val_decls_for_type_decl_sig ) - ~loc ~path (rec_flag, type_decls) + generate_val_decls_for_type_decl_sig ~loc ~path (rec_flag, type_decls) in Ppxlib.Deriving.Generator.make_noarg deriver diff --git a/src/lib/precomputed_values/gen_values/gen_values.ml b/src/lib/precomputed_values/gen_values/gen_values.ml index 5be91a9731f..0f75d0f78cb 100644 --- a/src/lib/precomputed_values/gen_values/gen_values.ml +++ b/src/lib/precomputed_values/gen_values/gen_values.ml @@ -84,7 +84,7 @@ module Dummy = struct let base_proof_expr = if generate_genesis_proof then - Some (Async.return [%expr Mina_base.Proof.blockchain_dummy]) + Some (Async.return [%expr Lazy.force Mina_base.Proof.blockchain_dummy]) else None let compiled_values = diff --git a/src/lib/prover/prover.ml b/src/lib/prover/prover.ml index 91cf5add8ac..19ef29032fa 100644 --- a/src/lib/prover/prover.ml +++ b/src/lib/prover/prover.ml @@ -80,7 +80,7 @@ module Worker_state = struct with sok_digest = Sok_message.Digest.default } - , Proof.transaction_dummy ) + , Lazy.force Proof.transaction_dummy ) let create { logger; proof_level; constraint_constants; _ } : t Deferred.t = Deferred.return @@ -175,9 +175,11 @@ module Worker_state = struct ~constraint_constants { transition = block ; prev_state = 
Blockchain_snark.Blockchain.state chain - ; prev_state_proof = Mina_base.Proof.blockchain_dummy + ; prev_state_proof = + Lazy.force Mina_base.Proof.blockchain_dummy ; txn_snark = t - ; txn_snark_proof = Mina_base.Proof.transaction_dummy + ; txn_snark_proof = + Lazy.force Mina_base.Proof.transaction_dummy } ~handler: (Consensus.Data.Prover_state.handler state_for_handler @@ -185,7 +187,7 @@ module Worker_state = struct next_state |> Or_error.map ~f:(fun () -> Blockchain_snark.Blockchain.create ~state:next_state - ~proof:Mina_base.Proof.blockchain_dummy ) + ~proof:(Lazy.force Mina_base.Proof.blockchain_dummy) ) in Or_error.iter_error res ~f:(fun e -> [%log error] @@ -208,7 +210,7 @@ module Worker_state = struct Deferred.return @@ Ok (Blockchain_snark.Blockchain.create - ~proof:Mina_base.Proof.blockchain_dummy + ~proof:(Lazy.force Mina_base.Proof.blockchain_dummy) ~state:next_state ) let verify _ _ = Deferred.return (Ok ()) diff --git a/src/lib/random_oracle/permutation/external/random_oracle_permutation.ml b/src/lib/random_oracle/permutation/external/random_oracle_permutation.ml index c72162f3707..deb96da5b70 100644 --- a/src/lib/random_oracle/permutation/external/random_oracle_permutation.ml +++ b/src/lib/random_oracle/permutation/external/random_oracle_permutation.ml @@ -17,7 +17,7 @@ let block_cipher _params (s : Field.t array) = let%test_unit "check rust implementation of block-cipher" = let params' : Field.t Sponge.Params.t = - Sponge.Params.(map pasta_p_kimchi ~f:Field.of_string) + Kimchi_pasta_basic.poseidon_params_fp in let open Pickles.Impls.Step in let module T = Internal_Basic in diff --git a/src/lib/random_oracle/random_oracle.ml b/src/lib/random_oracle/random_oracle.ml index f474727b83d..c976871a4cb 100644 --- a/src/lib/random_oracle/random_oracle.ml +++ b/src/lib/random_oracle/random_oracle.ml @@ -16,12 +16,15 @@ module State = struct include Array let map2 = map2_exn + + let to_array t = t + + let of_array t = t end module Input = Random_oracle_input -let params : Field.t Sponge.Params.t = - Sponge.Params.(map pasta_p_kimchi ~f:Field.of_string) +let params : Field.t Sponge.Params.t = Kimchi_pasta_basic.poseidon_params_fp module Operations = struct let add_assign ~state i x = Field.(state.(i) <- state.(i) + x) @@ -143,7 +146,7 @@ module Legacy = struct module State = State let params : Field.t Sponge.Params.t = - Sponge.Params.(map pasta_p_legacy ~f:Field.of_string) + Sponge.Params.(map pasta_p_legacy ~f:Kimchi_pasta_basic.Fp.of_string) module Rounds = struct let rounds_full = 63 diff --git a/src/lib/random_oracle/random_oracle.mli b/src/lib/random_oracle/random_oracle.mli index c865afe9465..3fe30566dd8 100644 --- a/src/lib/random_oracle/random_oracle.mli +++ b/src/lib/random_oracle/random_oracle.mli @@ -18,6 +18,10 @@ module State : sig val map : 'a t -> f:('a -> 'b) -> 'b t val map2 : 'a t -> 'b t -> f:('a -> 'b -> 'c) -> 'c t + + val to_array : 'a t -> 'a array + + val of_array : 'a array -> 'a t end include @@ -71,6 +75,10 @@ module Legacy : sig val map : 'a t -> f:('a -> 'b) -> 'b t val map2 : 'a t -> 'b t -> f:('a -> 'b -> 'c) -> 'c t + + val to_array : 'a t -> 'a array + + val of_array : 'a array -> 'a t end include diff --git a/src/lib/rocksdb/database.ml b/src/lib/rocksdb/database.ml index 9b9c580701e..b7302b6d844 100644 --- a/src/lib/rocksdb/database.ml +++ b/src/lib/rocksdb/database.ml @@ -1,6 +1,4 @@ -(* rocksdb.ml -- expose RocksDB operations for Coda *) - -open Core +(* rocksdb.ml -- expose RocksDB operations for Mina *) type t = { uuid : Uuid.Stable.V1.t; db : 
(Rocks.t[@sexp.opaque]) } [@@deriving sexp] @@ -33,7 +31,7 @@ let set t ~(key : Bigstring.t) ~(data : Bigstring.t) : unit = Rocks.put ?key_pos:None ?key_len:None ?value_pos:None ?value_len:None ?opts:None t.db key data -let set_batch t ?(remove_keys = []) +let[@warning "-16"] set_batch t ?(remove_keys : Bigstring.t list = []) ~(key_data_pairs : (Bigstring.t * Bigstring.t) list) : unit = let batch = Rocks.WriteBatch.create () in (* write to batch *) @@ -57,31 +55,68 @@ module Batch = struct Rocks.write t.db batch ; result end -let copy _t = failwith "copy: not implemented" - let remove t ~(key : Bigstring.t) : unit = Rocks.delete ?pos:None ?len:None ?opts:None t.db key +let copy_bigstring t : Bigstring.t = + let tlen = Bigstring.length t in + let new_t = Bigstring.create tlen in + Bigstring.blit ~src:t ~dst:new_t ~src_pos:0 ~dst_pos:0 ~len:tlen ; + new_t + let to_alist t : (Bigstring.t * Bigstring.t) list = let iterator = Rocks.Iterator.create t.db in Rocks.Iterator.seek_to_last iterator ; (* iterate backwards and cons, to build list sorted by key *) - let copy t = - let tlen = Bigstring.length t in - let new_t = Bigstring.create tlen in - Bigstring.blit ~src:t ~dst:new_t ~src_pos:0 ~dst_pos:0 ~len:tlen ; - new_t - in let rec loop accum = if Rocks.Iterator.is_valid iterator then ( - let key = copy (Rocks.Iterator.get_key iterator) in - let value = copy (Rocks.Iterator.get_value iterator) in + let key = copy_bigstring (Rocks.Iterator.get_key iterator) in + let value = copy_bigstring (Rocks.Iterator.get_value iterator) in Rocks.Iterator.prev iterator ; loop ((key, value) :: accum) ) else accum in loop [] +let foldi : + t + -> init:'a + -> f:(int -> 'a -> key:Bigstring.t -> data:Bigstring.t -> 'a) + -> 'a = + fun t ~init ~f -> + let iterator = Rocks.Iterator.create t.db in + let rec loop i accum = + if Rocks.Iterator.is_valid iterator then ( + let key = copy_bigstring (Rocks.Iterator.get_key iterator) in + let data = copy_bigstring (Rocks.Iterator.get_value iterator) in + Rocks.Iterator.next iterator ; + loop (i + 1) (f i accum ~key ~data) ) + else accum + in + loop 0 init + +let fold_until : + t + -> init:'a + -> f: + ( 'a + -> key:Bigstring.t + -> data:Bigstring.t + -> ('a, 'b) Continue_or_stop.t ) + -> finish:('a -> 'b) + -> 'b = + fun t ~init ~f ~finish -> + let iterator = Rocks.Iterator.create t.db in + let rec loop accum = + if Rocks.Iterator.is_valid iterator then ( + let key = copy_bigstring (Rocks.Iterator.get_key iterator) in + let data = copy_bigstring (Rocks.Iterator.get_value iterator) in + Rocks.Iterator.next iterator ; + match f accum ~key ~data with Stop _ -> accum | Continue v -> loop v ) + else accum + in + finish @@ loop init + let to_bigstring = Bigstring.of_string let%test_unit "get_batch" = @@ -89,7 +124,7 @@ let%test_unit "get_batch" = File_system.with_temp_dir "/tmp/mina-rocksdb-test" ~f:(fun db_dir -> let db = create db_dir in let[@warning "-8"] [ key1; key2; key3 ] = - List.map ~f:Bigstring.of_string [ "a"; "b"; "c" ] + List.map ~f:(fun s -> Bigstring.of_string s) [ "a"; "b"; "c" ] in let data = Bigstring.of_string "test" in set db ~key:key1 ~data ; @@ -141,6 +176,7 @@ let%test_unit "checkpoint read" = | `Duplicate_key _ -> Deferred.unit | `Ok db_hashtbl -> ( + let open Core in let cp_hashtbl = Hashtbl.copy db_hashtbl in let db_dir = Filename.temp_dir "test_db" "" in let cp_dir = diff --git a/src/lib/rocksdb/database.mli b/src/lib/rocksdb/database.mli new file mode 100644 index 00000000000..8b66cf176d8 --- /dev/null +++ b/src/lib/rocksdb/database.mli @@ -0,0 +1,64 @@ 
+type uuid := Uuid.Stable.V1.t + +type key := Bigstring.t + +type data := Bigstring.t + +type t = { uuid : uuid; db : (Rocks.t[@sexp.opaque]) } [@@deriving sexp] + +type db := t + +(** [create dirname] creates a database contained in [dirname]. + + @param dirname will be created if it does not exist + *) +val create : string -> t + +val get : t -> key:key -> data option + +val get_batch : t -> keys:key list -> data option list + +val set : t -> key:key -> data:data -> unit + +(** Any key present both in [remove_keys] and [key_data_pairs] will be absent + from the database. + + @param remove_keys defaults to [[]] +*) +val set_batch : + t -> ?remove_keys:key list -> key_data_pairs:(key * data) list -> unit + +val remove : t -> key:key -> unit + +val close : t -> unit + +val to_alist : t -> (key * data) list + +val make_checkpoint : t -> string -> unit + +val create_checkpoint : t -> string -> t + +val get_uuid : t -> uuid + +val foldi : + t + -> init:'a + -> f:(int -> 'a -> key:Bigstring.t -> data:Bigstring.t -> 'a) + -> 'a + +val fold_until : + t + -> init:'a + -> f:('a -> key:Bigstring.t -> data:Bigstring.t -> ('a, 'b) Continue_or_stop.t) + -> finish:('a -> 'b) + -> 'b + +module Batch : sig + type t = Rocks.WriteBatch.t + + val remove : t -> key:key -> unit + + val set : t -> key:key -> data:data -> unit + + val with_batch : db -> f:(t -> 'a) -> 'a +end diff --git a/src/lib/rocksdb/dune b/src/lib/rocksdb/dune index 92c40bcda65..66bfcc0b8f7 100644 --- a/src/lib/rocksdb/dune +++ b/src/lib/rocksdb/dune @@ -2,24 +2,25 @@ (name rocksdb) (public_name rocksdb) (library_flags -linkall) -(libraries + (flags (:standard -w +a-40..42-44 -warn-error +a-70) -open Core_kernel) + (libraries ;; opam libraries - async_kernel - async - base.caml - core.uuid - core_kernel - rocks - core - core_kernel.uuid - sexplib0 - ppx_inline_test.config - async_unix - base.base_internalhash_types - ;; local libraries - file_system - key_value_database -) + async + async_kernel + async_unix + base.base_internalhash_types + base.caml + core + core.uuid + core_kernel + core_kernel.uuid + ppx_inline_test.config + rocks + sexplib0 + ;; local libraries + file_system + key_value_database + ) (preprocess (pps ppx_version ppx_jane)) (instrumentation (backend bisect_ppx)) diff --git a/src/lib/rocksdb/intf.ml b/src/lib/rocksdb/intf.ml new file mode 100644 index 00000000000..3c8d177787d --- /dev/null +++ b/src/lib/rocksdb/intf.ml @@ -0,0 +1,67 @@ +module Key = struct + module type S = sig + type 'a t + + val to_string : 'a t -> string + + val binable_key_type : 'a t -> 'a t Bin_prot.Type_class.t + + val binable_data_type : 'a t -> 'a Bin_prot.Type_class.t + end + + module type Intf = sig + type 'a unwrapped_t + + type t = Some_key : 'a unwrapped_t -> t + + type with_value = Some_key_value : 'a unwrapped_t * 'a -> with_value + end + + module Some (K : sig + type 'a t + end) : Intf with type 'a unwrapped_t := 'a K.t = struct + type t = Some_key : 'a K.t -> t + + type with_value = Some_key_value : 'a K.t * 'a -> with_value + end +end + +module Database = struct + module type Intf = sig + type t + + type 'a g + + val set : t -> key:'a g -> data:'a -> unit + + val set_raw : t -> key:'a g -> data:Bigstring.t -> unit + + val remove : t -> key:'a g -> unit + end + + module type S = sig + include Intf + + module Key : Key.Intf with type 'a unwrapped_t := 'a g + + module T : sig + type nonrec t = t + end + + val create : string -> t + + val close : t -> unit + + val get : t -> key:'a g -> 'a option + + val get_raw : t -> key:'a g -> 
Bigstring.t option + + val get_batch : t -> keys:Key.t list -> Key.with_value option list + + module Batch : sig + include Intf with type 'a g := 'a g + + val with_batch : T.t -> f:(t -> 'a) -> 'a + end + end +end diff --git a/src/lib/rocksdb/key_intf.ml b/src/lib/rocksdb/key_intf.ml deleted file mode 100644 index dd76b8acb87..00000000000 --- a/src/lib/rocksdb/key_intf.ml +++ /dev/null @@ -1,25 +0,0 @@ -module type S = sig - type 'a t - - val to_string : 'a t -> string - - val binable_key_type : 'a t -> 'a t Bin_prot.Type_class.t - - val binable_data_type : 'a t -> 'a Bin_prot.Type_class.t -end - -module type Some_key_intf = sig - type 'a unwrapped_t - - type t = Some_key : 'a unwrapped_t -> t - - type with_value = Some_key_value : 'a unwrapped_t * 'a -> with_value -end - -module Some_key (K : sig - type 'a t -end) : Some_key_intf with type 'a unwrapped_t := 'a K.t = struct - type t = Some_key : 'a K.t -> t - - type with_value = Some_key_value : 'a K.t * 'a -> with_value -end diff --git a/src/lib/rocksdb/serializable.ml b/src/lib/rocksdb/serializable.ml index 569cee975ff..4e5c5ff93bd 100644 --- a/src/lib/rocksdb/serializable.ml +++ b/src/lib/rocksdb/serializable.ml @@ -1,5 +1,3 @@ -open Core_kernel - module Make (Key : Binable.S) (Value : Binable.S) : Key_value_database.Intf.S with module M := Key_value_database.Monad.Ident @@ -29,7 +27,7 @@ module Make (Key : Binable.S) (Value : Binable.S) : ~key:(Binable.to_bigstring (module Key) key) ~data:(Binable.to_bigstring (module Value) data) - let set_batch t ?(remove_keys = []) ~update_pairs = + let[@warning "-16"] set_batch t ?(remove_keys = []) ~update_pairs = let key_data_pairs = List.map update_pairs ~f:(fun (key, data) -> ( Binable.to_bigstring (module Key) key @@ -52,45 +50,8 @@ end (** Database Interface for storing heterogeneous key-value pairs. Similar to Janestreet's Core.Univ_map *) module GADT = struct - module type Database_intf = sig - type t - - type 'a g - - val set : t -> key:'a g -> data:'a -> unit - - val set_raw : t -> key:'a g -> data:Bigstring.t -> unit - - val remove : t -> key:'a g -> unit - end - - module type S = sig - include Database_intf - - module Some_key : Key_intf.Some_key_intf with type 'a unwrapped_t := 'a g - - module T : sig - type nonrec t = t - end - - val create : string -> t - - val close : t -> unit - - val get : t -> key:'a g -> 'a option - - val get_raw : t -> key:'a g -> Bigstring.t option - - val get_batch : t -> keys:Some_key.t list -> Some_key.with_value option list - - module Batch : sig - include Database_intf with type 'a g := 'a g - - val with_batch : T.t -> f:(t -> 'a) -> 'a - end - end - - module Make (Key : Key_intf.S) : S with type 'a g := 'a Key.t = struct + module Make (Key : Intf.Key.S) : Intf.Database.S with type 'a g := 'a Key.t = + struct let bin_key_dump (key : 'a Key.t) = Bin_prot.Utils.bin_dump (Key.binable_key_type key).writer key @@ -132,19 +93,20 @@ module GADT = struct let bin_data = Key.binable_data_type key in bin_data.reader.read serialized_value ~pos_ref:(ref 0) - module Some_key = Key_intf.Some_key (Key) + (* This one's re-exported as Key at the end. 
The name is chosen to avoid
+     hiding the Key module parameter *)
+  module K = Intf.Key.Some (Key)

   let get_batch t ~keys =
-    let open Some_key in
-    let skeys = List.map keys ~f:(fun (Some_key k) -> bin_key_dump k) in
+    let skeys = List.map keys ~f:(fun (K.Some_key k) -> bin_key_dump k) in
     let serialized_value_opts = Database.get_batch ~keys:skeys t in
-    let f (Some_key k) =
+    let f (K.Some_key k) =
       Option.map ~f:(fun serialized_value ->
           let bin_data = Key.binable_data_type k in
           let value =
             bin_data.reader.read serialized_value ~pos_ref:(ref 0)
           in
-          Some_key_value (k, value) )
+          K.Some_key_value (k, value) )
     in
     List.map2_exn keys serialized_value_opts ~f
@@ -153,5 +115,7 @@ module GADT = struct

     let with_batch = Database.Batch.with_batch
   end
+
+  module Key = K
 end
 end
diff --git a/src/lib/rocksdb/serializable.mli b/src/lib/rocksdb/serializable.mli
new file mode 100644
index 00000000000..36fe5ee75f7
--- /dev/null
+++ b/src/lib/rocksdb/serializable.mli
@@ -0,0 +1,10 @@
+module Make (Key : Binable.S) (Value : Binable.S) :
+  Key_value_database.Intf.S
+    with module M := Key_value_database.Monad.Ident
+     and type key := Key.t
+     and type value := Value.t
+     and type config := string
+
+module GADT : sig
+  module Make (Key : Intf.Key.S) : Intf.Database.S with type 'a g := 'a Key.t
+end
diff --git a/src/lib/runtime_config/dune b/src/lib/runtime_config/dune
index 402333b992c..df586545ce3 100644
--- a/src/lib/runtime_config/dune
+++ b/src/lib/runtime_config/dune
@@ -28,7 +28,6 @@
   mina_base.import
   mina_numbers
   mina_wire_types
-  ppx_dhall_type
   mina_state
   snark_params
   unsigned_extended
@@ -44,4 +43,4 @@
 )
 (instrumentation (backend bisect_ppx))
 (preprocess (pps ppx_custom_printf ppx_sexp_conv ppx_let ppx_deriving_yojson
-  ppx_fields_conv ppx_dhall_type ppx_version ppx_compare)))
+  ppx_fields_conv ppx_version ppx_compare)))
diff --git a/src/lib/runtime_config/runtime_config.ml b/src/lib/runtime_config/runtime_config.ml
index d032bc6785c..236d8a0db1d 100644
--- a/src/lib/runtime_config/runtime_config.ml
+++ b/src/lib/runtime_config/runtime_config.ml
@@ -8,7 +8,7 @@ module Fork_config = struct
     ; blockchain_length : int (* number of blocks produced since genesis *)
     ; global_slot_since_genesis : int (* global slot since genesis *)
     }
-  [@@deriving yojson, dhall_type, bin_io_unversioned]
+  [@@deriving yojson, bin_io_unversioned]

   let gen =
     let open Quickcheck.Generator.Let_syntax in
@@ -77,7 +77,7 @@
       ; vesting_period : Mina_numbers.Global_slot_span.t
       ; vesting_increment : Currency.Amount.t
       }
-    [@@deriving yojson, fields, dhall_type, sexp]
+    [@@deriving yojson, fields, sexp]

     let fields = Fields.names |> Array.of_list
@@ -87,7 +87,7 @@ module Json_layout = struct
   module Permissions = struct
     module Auth_required = struct
       type t = None | Either | Proof | Signature | Impossible
-      [@@deriving dhall_type, sexp, bin_io_unversioned]
+      [@@deriving sexp, bin_io_unversioned]

       let to_yojson = function
         | None ->
@@ -156,7 +156,7 @@ module Json_layout = struct

     module Verification_key_perm = struct
       type t = { auth : Auth_required.t; txn_version : Txn_version.t }
-      [@@deriving dhall_type, sexp, yojson, bin_io_unversioned]
+      [@@deriving sexp, yojson, bin_io_unversioned]
     end

     type t =
@@ -220,7 +220,7 @@
             Auth_required.of_account_perm
               Mina_base.Permissions.user_default.set_timing]
       }
-    [@@deriving yojson, fields, dhall_type, sexp, bin_io_unversioned]
+    [@@deriving yojson, fields, sexp, bin_io_unversioned]

     let fields = Fields.names |> Array.of_list
@@ -270,9 +270,6 @@ module
Json_layout = struct type t = Snark_params.Tick.Field.t [@@deriving sexp, bin_io_unversioned] - (* can't be automatically derived *) - let dhall_type = Ppx_dhall_type.Dhall_type.Text - let to_yojson t = `String (Snark_params.Tick.Field.to_string t) let of_yojson = function @@ -288,9 +285,6 @@ module Json_layout = struct type t = Pickles.Side_loaded.Verification_key.Stable.Latest.t [@@deriving sexp, bin_io_unversioned] - (* can't be automatically derived *) - let dhall_type = Ppx_dhall_type.Dhall_type.Text - let to_yojson t = `String (Pickles.Side_loaded.Verification_key.to_base64 t) @@ -324,7 +318,7 @@ module Json_layout = struct ; proved_state : bool ; zkapp_uri : string } - [@@deriving sexp, fields, dhall_type, yojson, bin_io_unversioned] + [@@deriving sexp, fields, yojson, bin_io_unversioned] let fields = Fields.names |> Array.of_list @@ -362,7 +356,7 @@ module Json_layout = struct ; permissions : Permissions.t option [@default None] ; token_symbol : string option [@default None] } - [@@deriving sexp, fields, yojson, dhall_type] + [@@deriving sexp, fields, yojson] let fields = Fields.names |> Array.of_list @@ -384,13 +378,13 @@ module Json_layout = struct } end - type t = Single.t list [@@deriving yojson, dhall_type] + type t = Single.t list [@@deriving yojson] end module Ledger = struct module Balance_spec = struct type t = { number : int; balance : Currency.Balance.t } - [@@deriving yojson, dhall_type] + [@@deriving yojson] end type t = @@ -402,7 +396,7 @@ module Json_layout = struct ; name : string option [@default None] ; add_genesis_winner : bool option [@default None] } - [@@deriving yojson, fields, dhall_type] + [@@deriving yojson, fields] let fields = Fields.names |> Array.of_list @@ -412,11 +406,10 @@ module Json_layout = struct module Proof_keys = struct module Transaction_capacity = struct type t = - { log_2 : int option - [@default None] [@key "2_to_the"] [@dhall_type.key "two_to_the"] + { log_2 : int option [@default None] [@key "2_to_the"] ; txns_per_second_x10 : int option [@default None] } - [@@deriving yojson, dhall_type] + [@@deriving yojson] (* we don't deriving the field names here, because the first one differs from the field in the record type @@ -444,7 +437,7 @@ module Json_layout = struct ; account_creation_fee : Currency.Fee.t option [@default None] ; fork : Fork_config.t option [@default None] } - [@@deriving yojson, fields, dhall_type] + [@@deriving yojson, fields] let fields = Fields.names |> Array.of_list @@ -460,7 +453,7 @@ module Json_layout = struct ; grace_period_slots : int option [@default None] ; genesis_state_timestamp : string option [@default None] } - [@@deriving yojson, fields, dhall_type] + [@@deriving yojson, fields] let fields = Fields.names |> Array.of_list @@ -481,7 +474,7 @@ module Json_layout = struct ; slot_tx_end : int option [@default None] ; slot_chain_end : int option [@default None] } - [@@deriving yojson, fields, dhall_type] + [@@deriving yojson, fields] let fields = Fields.names |> Array.of_list @@ -496,7 +489,7 @@ module Json_layout = struct ; s3_data_hash : string option [@default None] ; hash : string option [@default None] } - [@@deriving yojson, fields, dhall_type] + [@@deriving yojson, fields] let fields = Fields.names |> Array.of_list @@ -507,7 +500,7 @@ module Json_layout = struct { staking : Data.t ; next : (Data.t option[@default None]) (*If None then next = staking*) } - [@@deriving yojson, fields, dhall_type] + [@@deriving yojson, fields] let fields = Fields.names |> Array.of_list @@ -521,7 +514,7 @@ module Json_layout 
= struct ; ledger : Ledger.t option [@default None] ; epoch_data : Epoch_data.t option [@default None] } - [@@deriving yojson, fields, dhall_type] + [@@deriving yojson, fields] let fields = Fields.names |> Array.of_list diff --git a/src/lib/snark_keys_header/snark_keys_header.ml b/src/lib/snark_keys_header/snark_keys_header.ml index c774f34fa05..f64df384e78 100644 --- a/src/lib/snark_keys_header/snark_keys_header.ml +++ b/src/lib/snark_keys_header/snark_keys_header.ml @@ -127,6 +127,12 @@ module Commits = struct (** Commit identifiers *) type t = { mina : string; marlin : string } [@@deriving yojson, sexp, ord, equal] + + module With_date = struct + type commits = t + + type t = { commits : commits; commit_date : string } + end end let header_version = 1 diff --git a/src/lib/snark_worker/debug.ml b/src/lib/snark_worker/debug.ml index d5130a4323e..f1e329001bc 100644 --- a/src/lib/snark_worker/debug.ml +++ b/src/lib/snark_worker/debug.ml @@ -29,6 +29,6 @@ module Inputs = struct let sok_digest = Sok_message.digest message in Deferred.Or_error.return @@ ( Transaction_snark.create ~statement:{ stmt with sok_digest } - ~proof:Proof.transaction_dummy + ~proof:(Lazy.force Proof.transaction_dummy) , Time.Span.zero ) end diff --git a/src/lib/snark_worker/prod.ml b/src/lib/snark_worker/prod.ml index 03a0ed74fd6..ae6551d3280 100644 --- a/src/lib/snark_worker/prod.ml +++ b/src/lib/snark_worker/prod.ml @@ -284,6 +284,6 @@ module Inputs = struct in Deferred.Or_error.return @@ ( Transaction_snark.create ~statement:{ stmt with sok_digest } - ~proof:Proof.transaction_dummy + ~proof:(Lazy.force Proof.transaction_dummy) , Time.Span.zero ) end diff --git a/src/lib/snarky b/src/lib/snarky index 7edf1362887..94b2df82129 160000 --- a/src/lib/snarky +++ b/src/lib/snarky @@ -1 +1 @@ -Subproject commit 7edf13628872081fd7cad154de257dad8b9ba621 +Subproject commit 94b2df82129658d505b612806a5804bc192f13f0 diff --git a/src/lib/staged_ledger/README.md b/src/lib/staged_ledger/README.md new file mode 100644 index 00000000000..1b7fb39593e --- /dev/null +++ b/src/lib/staged_ledger/README.md @@ -0,0 +1,126 @@ +# Staged Ledger + +A staged ledger is a state that is the result of applying a block, specifically the transactions and snark work from a block. The transactions included in a block don't have proofs yet. They are added to the staged ledger as pending work for which snark workers generate proofs. The snarks included in a block are for transactions from previous blocks and correspond to pending work in the staged ledger. + +Staged ledger mainly consists of- + + 1. Ledger + 2. Scan state + 3. Pending coinbase collection + +## Glossary +| Name | Description | +|------|-------------| +|Snarked ledger | A ledger state that can be verified by a snark| +|Proof | Used interchangeably with snark, refers to transaction snark| +| Snark work | A bundle of at most two proofs along with the prover pk and fees. Also referred to as completed work and is defined in transaction_snark_work.ml| +| Ledger proof | A transaction snark that certifies a ledger state. 
A ledger proof is emitted from the scan state when all the proofs for a set of transactions are included in blocks, certifying the ledger that is obtained from applying those transactions |
+| Work statement/ Statement | A fact about the ledger state that is proven by a transaction snark |
+| Snark worker | A node in Mina that generates transaction snarks for a fee|
+| Protocol state | Representation of the state in a chain, defined in `src/lib/mina_state/protocol_state.ml`|
+| Protocol state view | A selected few fields from the protocol state that are required to update the staged ledger |
+| User command | User transactions, namely payments, stake delegations, snapp transactions |
+| Fee transfer | A transaction created by block producers to pay transaction fees or snark fees|
+| Coinbase | A transaction created by block producers to pay themselves the coinbase amount for winning the block|
+
+## Ledger
+
+A merkle ledger (src/lib/mina_base/ledger.ml) that has all the transactions (both snarked and unsnarked) in the chain that ends at a given block.
+
+## Scan state
+
+Scan state is a data structure that keeps track of all the partially proven and unproven ledger states in the chain. The scan state is a queue of trees where each tree holds all the data required to prove a set of transactions and thereby prove a ledger state resulting from applying those transactions. A proof can either be of a single transaction or of merging proofs of transactions. A ledger proof is a special case of merging proofs of transactions where it proves all the transactions added to a tree. The ledger state certified by a ledger proof is also called a snarked ledger.
+
+When a new block arrives and the staged ledger from the previous block is updated, all the data required to prove the included transactions is added to the leaves of a tree in the scan state as pending work. All the snark work in the block that corresponds to pending work from previous blocks is also added to the scan state and creates new pending work for merging proofs of transactions, unless it is the final proof, aka the ledger proof, in which case it is simply returned as a result of updating the scan state.
+
+Scan state has a maximum size that determines how many transactions can be included in a block. This is currently defined at compile time as `scan_state_transaction_capacity_log_2` or `scan_state_tps_goal_x10`. The scan state also specifies the order and the number of proofs required for every set of new transactions or pending work added. The goal is to complete existing pending work before adding new work.
+The abstract structure of the scan state is defined in src/lib/parallel_scan/ and instantiated in src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml with the values that are stored in it. The data structure itself is described in detail in [this](https://minaprotocol.com/blog/scanning-for-scans) blog post and in src/lib/parallel_scan/scan_state.md.
+
+## Pending coinbase collection
+
+It is a collection of the coinbase reward recipients and the protocol state of each block in the chain. TODO: readme for pending coinbase
+
+## Staged ledger functions
+
+A staged-ledger-diff consists of all the user transactions, fee transfers that pay snark fees and transaction fees, the coinbase transaction, and snark work included in a block.
+
+The two main functions in this module are-
+
+1. Generate a staged-ledger-diff for a block (`create_diff`)
+2. Apply a staged-ledger-diff from a block (`apply` and `apply_diff_unchecked`)
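+
+To make the shape of these two operations concrete, here is a minimal, hypothetical OCaml sketch; the names and signatures below are illustrative only, and the real interface in `staged_ledger.mli` carries many more parameters, constants, and error cases:
+
+    (* Hypothetical sketch -- not the real Staged_ledger API. *)
+    module type STAGED_LEDGER_SKETCH = sig
+      type t          (* ledger + scan state + pending coinbase collection *)
+      type transaction
+      type statement  (* fact about a ledger state that a snark proves *)
+      type work       (* bundle of at most two proofs, a prover and a fee *)
+      type diff       (* transactions, fee transfers, coinbase, snark work *)
+
+      (* Select mempool transactions (highest fees first) and matching
+         snark work to build a diff for a new block. *)
+      val create_diff :
+           t
+        -> txs_by_fee:transaction list
+        -> get_work:(statement -> work option)
+        -> diff
+
+      (* Validate a block's diff and produce the next staged ledger. *)
+      val apply : t -> diff -> (t, string) result
+    end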
+
+### Generating a staged-ledger-diff
+
+The `create_diff` function in this module creates a staged-ledger-diff for a new block that is valid against a given staged ledger. The user transactions passed here are retrieved from the mempool and are in descending order of fees. Also passed is a function to retrieve the required snark work given a statement.
+
+The generated diff should pass the following checks:
+
+1. The number of transactions included in a diff (user or otherwise) should not exceed the max size set at compile time.
+2. All the transactions must be valid against the ledger in the given staged ledger.
+3. For all the transactions included there should be an equal amount of snark work included. For each spot taken by a transaction on the scan state ~2 proofs are required. This is encapsulated in the term snark work, which represents a bundle of at most two proofs.
+4. Snark fees, if any, are paid using the transaction fees, and therefore the diff can only include snark work that can be paid for. Snark work required to add a coinbase transaction is paid for using the coinbase amount.
+    a) Total transaction fees - snark fees (except the ones paid for using the coinbase) = transaction fees to the block producer
+    b) Total coinbase amount - snark fee to include the coinbase = coinbase amount to the block producer
+5. The transaction fees from all the user transactions included in the diff are settled in the same diff.
+
+Next, there are some invariants of the scan state that affect the diff creation process and are worth noting.
+
+#### fee-excess
+
+Since transaction fees or snark fees are paid to the recipients using separate transactions, a separate field in the statement of a transaction snark, called fee-excess, keeps track of fees resulting from the transaction that should be accounted for in another transaction (a fee transfer). The total fee excess from the transactions within a block should be zero, i.e., fees debited from the fee payer accounts are credited to the fee recipient accounts. This is ensured when creating the diff.
+However, zero fee excess is also required for the transactions in a tree in the scan state. In other words, the statement of the ledger proof emitted by the scan state should have zero fee excess.
+
+For example, consider the following tree of depth 3 showing only the fee excess when all the transactions included in a block fit in one tree. A positive fee excess is from user transactions and a negative fee excess is from fee transfers. Since the total fee excess from the transactions within a block is zero, the total fee excess certified by the ledger proof for that tree is also zero (at the root of the tree).
+
+                 0
+            4        -4
+          2   2    2   -6
+         1 1 1 1  1 1 -2 -4
+
+Say a block had only a few transactions and therefore filled a tree partially. The fee excess at each level would look something like this (`?` since some leaves are empty):
+
+                 ?
+            1        ?
+          2   -1   ?    ?
+         1 1 1 -2 -1 _ _ _
+
+Now when a new block arrives that has more than three transactions, some of the transactions are added to the first tree and the rest are added to the next tree. Adding to the tree above, we'll get:
+
+                 4
+            1        3
+          2   -1   1    2
+         1 1 1 -2 -1 2 1 1    -2 -2 _ _ _ _ _ _
+
+In this case, although the fee excess from the transactions within a block is zero, the total fee excess at the root of the first tree is not zero (the total fee excess in the second tree is also not zero).
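+
+The merge rule in these diagrams is plain addition: each parent's fee excess is the sum of its children's, so the root equals the sum of all the leaves. A small self-contained sketch (leaf excesses are modeled as `int`s here for illustration; the real scan state uses signed currency amounts):
+
+    (* Fold leaf fee excesses up a binary tree: parent = sum of children,
+       so the root is the sum of all the leaves. *)
+    let rec root_excess = function
+      | [] -> 0
+      | [ x ] -> x
+      | leaves ->
+          let rec pair_sums = function
+            | a :: b :: rest -> (a + b) :: pair_sums rest
+            | rest -> rest
+          in
+          root_excess (pair_sums leaves)
+
+    (* First example tree: the leaves sum to zero, so the root is 0. *)
+    let () = assert (root_excess [ 1; 1; 1; 1; 1; 1; -2; -4 ] = 0)
+
+    (* Combined tree: the first tree's root is 4, not 0. *)
+    let () = assert (root_excess [ 1; 1; 1; -2; -1; 2; 1; 1 ] = 4)
+
+This non-zero root is exactly the imbalance that the prediff mechanism described next is designed to avoid.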
+
+To ensure every tree has a fee excess of zero, the diff is split into two parts (prediffs): the first part has transactions and fee transfers that'll occupy the empty leaves of the first tree, and the second part has transactions and fee transfers that'll occupy a new tree. With prediffs, the example above where the fee excess was non-zero will instead look like:
+
+                 0
+            1        -1
+          2   -1   1   -2
+         1 1 1 -2 -1 2 -1 -1    2 -1 -1 _ _ _ _ _
+
+## Coinbase splitting
+
+A coinbase is included in a block if there is enough work available for one slot.
+A coinbase transaction includes a fee transfer to pay for the snark work (deducted from the coinbase amount) and therefore needs only one spot on the scan state tree. However, to include a user transaction, three spots are needed in the worst case (when every snark work is from a different prover: one slot for the user transaction and two slots for fee transfers).
+
+When the diff is split into two prediffs, and if after adding user transactions and fee transfers the first prediff has two spots remaining that cannot accommodate user transactions, then those spots are filled by two coinbase transactions that split the coinbase amount.
+If it has one spot, then we simply add one coinbase transaction. It is also possible that the first prediff has no slots left after adding transactions (for example, when there are three slots and all the required snark work is from a different prover), in which case we simply add one coinbase as part of the second prediff.
+
+It could also happen that after adding transactions to the first prediff, there is not enough work to add a coinbase transaction to the second prediff. In this case, the two prediffs are discarded and a single prediff is created with a coinbase transaction and as many user transactions as possible. A schematic sketch of the coinbase placement logic is given at the end of this document.
+
+### Applying a staged-ledger-diff
+
+Given a staged ledger `s` corresponding to a block `X` and a staged-ledger-diff from a new block generated off of `X`, the `apply` function applies the staged-ledger-diff to `s` to produce a staged ledger `s'` corresponding to the new block.
+
+In the resulting staged ledger `s'`,
+
+1. The ledger has all the transactions from the new block applied.
+2. Snark work from the diff is added to the scan state, which marks some pending jobs as `done` and creates new pending jobs for the newly added transactions and for merging the newly added proofs.
+3. The pending coinbase collection is updated with the coinbase for the new block and the protocol state of block `X`.
+
+Along with the conditions mentioned [above](#generating-a-staged-ledger-diff), the `apply` function also checks that the snark work is valid and that the invariants of the scan state are maintained.
+If any of the validations fail, the block consisting of the staged-ledger-diff is rejected.
+
+`apply_diff_unchecked` is the same as `apply` but is called for diffs generated by the node itself. It skips verification of the snark work, since that work is verified before entering the snark pool. If any of the validations fail (which suggests there is a bug in `create_diff`), the diff is dropped and no block gets created.
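+
+As referenced in the coinbase splitting section above, here is a schematic sketch of the placement decision, keyed on how many slots remain in the first prediff after transactions are added. The type and function names are hypothetical; the real logic in `create_diff` also accounts for the available snark work and fees:
+
+    (* Hypothetical sketch of coinbase placement in the two prediffs. *)
+    type coinbase_placement =
+      | One_in_second      (* first prediff full: coinbase goes to prediff 2 *)
+      | One_in_first       (* one slot left: a single full coinbase *)
+      | Two_split_in_first (* two slots left: two coinbases split the amount *)
+
+    let place_coinbase ~slots_left_in_first =
+      match slots_left_in_first with
+      | 0 -> One_in_second
+      | 1 -> One_in_first
+      | _ -> Two_split_in_first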
+
+#### Coinbase splitting
+
+A coinbase is included in a block if there is enough work available for one slot.
+A coinbase transaction includes a fee transfer to pay for the snark work (deducted from the coinbase amount) and therefore only needs one spot on the scan state tree. However, to include a user transaction, three spots are needed in the worst case (when every snark work is from a different prover): one slot for the user transaction and two slots for fee transfers.
+
+When the diff is split into two prediffs and, after adding user transactions and fee transfers, the first prediff has two spots remaining that cannot accommodate any more user transactions, those spots are filled by two coinbase transactions that split the coinbase amount.
+If it has one spot, then we simply add one coinbase transaction. It is also possible that the first prediff has no slots left after adding transactions (for example, when there are three slots and all the required snark work is from different provers), in which case we simply add one coinbase as part of the second prediff.
+
+It could also happen that after adding transactions to the first prediff, there is not enough work to add a coinbase transaction to the second prediff. In this case, the two prediffs are discarded and a single prediff is created with a coinbase transaction and as many user transactions as possible.
+
+### Applying a staged-ledger-diff
+
+Given a staged ledger `s` corresponding to a block `X` and a staged-ledger-diff from a new block generated off of `X`, the `apply` function applies the staged-ledger-diff to `s` to produce a staged ledger `s'` corresponding to the new block.
+
+In the resulting staged ledger `s'`,
+
+1. The ledger has all the transactions from the new block applied.
+2. Snark work from the diff is added to the scan state; this marks some pending jobs as `done` and creates new pending jobs for the newly added transactions and for merging the newly added proofs.
+3. The pending coinbase collection is updated with the coinbase for the new block and the protocol state of block `X`.
+
+Along with the conditions mentioned [above](#generating-a-staged-ledger-diff), the `apply` function also checks that the snark work is valid and that the invariants of the scan state are maintained.
+If any of the validations fail, the block containing the staged-ledger-diff is rejected.
+
+`apply_diff_unchecked` is the same as `apply` but is called for diffs generated by the node itself. It skips verification of the snark work, since the work is verified before it gets into the snark pool. If any of the validations fail (which suggests there is a bug in `create_diff`), the diff is dropped and no block gets created.
diff --git a/src/lib/staged_ledger/staged_ledger.ml b/src/lib/staged_ledger/staged_ledger.ml index 494abd4366f..d5d4e597a7b 100644 --- a/src/lib/staged_ledger/staged_ledger.ml +++ b/src/lib/staged_ledger/staged_ledger.ml @@ -2589,7 +2589,7 @@ let%test_module "staged ledger tests" = let sok_digest = Sok_message.Digest.default in One_or_two.map stmts ~f:(fun statement -> Ledger_proof.create ~statement ~sok_digest - ~proof:Proof.transaction_dummy ) + ~proof:(Lazy.force Proof.transaction_dummy) ) let stmt_to_work_random_prover (stmts : Transaction_snark_work.Statement.t) : Transaction_snark_work.Checked.t option = diff --git a/src/lib/staged_ledger/staged_ledger.mli b/src/lib/staged_ledger/staged_ledger.mli index 147bceca8d3..90181dea908 100644 --- a/src/lib/staged_ledger/staged_ledger.mli +++ b/src/lib/staged_ledger/staged_ledger.mli @@ -21,6 +21,11 @@ module Scan_state : sig type t [@@deriving sexp, to_yojson] end + (** Space available and number of jobs required to enqueue transactions in the scan state. + + first = space on the latest tree and number of proofs required + + second = if the space on the latest tree is less than max size (defined at compile time) then the remaining number of slots for a new tree and the corresponding number of proofs required *) module Space_partition : sig type t = { first : int * int; second : (int * int) option } [@@deriving sexp] @@ -46,6 +51,7 @@ module Scan_state : sig val empty : constraint_constants:Genesis_constants.Constraint_constants.t -> unit -> t + (** Statements of the required snark work *) val snark_job_list_json : t -> string (** All the transactions with hash of the parent block in which they were included in the order in which they were applied*) @@ -57,6 +63,7 @@ module Scan_state : sig Transactions_ordered.Poly.t list + (** Statements of all the pending work. Fails if there are any invalid statements in the scan state [t] *) val all_work_statements_exn : t -> Transaction_snark_work.Statement.t list (** Hashes of the protocol states required for proving pending transactions*) @@ -165,6 +172,7 @@ val create_exn : val replace_ledger_exn : t -> Ledger.t -> t +(** Transactions corresponding to the most recent ledger proof in t *) val proof_txns_with_state_hashes : t -> ( Transaction.t With_status.t @@ -233,6 +241,7 @@ val apply_diff_unchecked : , Staged_ledger_error.t ) Deferred.Result.t +(** Most recent ledger proof in t *) val current_ledger_proof : t -> Ledger_proof.t option (* Internals of the txn application. This is only exposed to facilitate @@ -282,6 +291,7 @@ val create_diff : , Pre_diff_info.Error.t ) Result.t +(** A block producer is eligible for a supercharged coinbase if the account that won the slot, [winner], has no locked tokens at slot [global_slot] in the staking ledger [epoch_ledger] *) val can_apply_supercharged_coinbase_exn : winner:Public_key.Compressed.t -> epoch_ledger:Mina_ledger.Sparse_ledger.t @@ -310,6 +320,7 @@ val of_scan_state_pending_coinbases_and_snarked_ledger_unchecked : -> get_state:(State_hash.t -> Mina_state.Protocol_state.value Or_error.t) -> t Or_error.t Deferred.t +(** All the pending work in t and the data required to generate proofs.
*) val all_work_pairs : t -> get_state:(State_hash.t -> Mina_state.Protocol_state.value Or_error.t) @@ -318,6 +329,7 @@ val all_work_pairs : list Or_error.t +(** Statements of all the pending work in t*) val all_work_statements_exn : t -> Transaction_snark_work.Statement.t list val check_commands : diff --git a/src/lib/syncable_ledger/syncable_ledger.ml b/src/lib/syncable_ledger/syncable_ledger.ml index a5db250cda5..44f1f972064 100644 --- a/src/lib/syncable_ledger/syncable_ledger.ml +++ b/src/lib/syncable_ledger/syncable_ledger.ml @@ -69,7 +69,7 @@ module type Inputs_intf = sig end module MT : - Merkle_ledger.Syncable_intf.S + Merkle_ledger.Intf.SYNCABLE with type hash := Hash.t and type root_hash := Root_hash.t and type addr := Addr.t diff --git a/src/lib/syncable_ledger/test.ml b/src/lib/syncable_ledger/test.ml index 4f32857daea..0d076706b12 100644 --- a/src/lib/syncable_ledger/test.ml +++ b/src/lib/syncable_ledger/test.ml @@ -4,7 +4,7 @@ open Pipe_lib open Network_peer module type Ledger_intf = sig - include Merkle_ledger.Syncable_intf.S + include Merkle_ledger.Intf.SYNCABLE type account_id diff --git a/src/lib/transaction/transaction_hash.ml b/src/lib/transaction/transaction_hash.ml index 0cceb768bd6..2ee2bd7a047 100644 --- a/src/lib/transaction/transaction_hash.ml +++ b/src/lib/transaction/transaction_hash.ml @@ -86,7 +86,7 @@ let ( hash_signed_command_v1 let dummy_auth = match acct_update.authorization with | Control.Proof _ -> - Control.Proof Proof.transaction_dummy + Control.Proof (Lazy.force Proof.transaction_dummy) | Control.Signature _ -> Control.Signature Signature.dummy | Control.None_given -> @@ -291,7 +291,7 @@ let%test_module "Transaction hashes" = * Run dune in this library's directory dune utop src/lib/transaction * Generate a zkapp transaction: - let txn = let txn = Mina_base.Zkapp_command.dummy in {txn with account_updates = Mina_base.Zkapp_command.Call_forest.map txn.account_updates ~f:(fun x -> {x with Mina_base.Account_update.authorization= Proof Mina_base.Proof.blockchain_dummy})};; + let txn = let txn = (Lazy.force Mina_base.Zkapp_command.dummy) in {txn with account_updates = Mina_base.Zkapp_command.Call_forest.map txn.account_updates ~f:(fun x -> {x with Mina_base.Account_update.authorization= Proof (Lazy.force Mina_base.Proof.blockchain_dummy)})};; * Print the transaction: Core_kernel.Out_channel.with_file "txn_id" ~f:(fun file -> Out_channel.output_string file (Core_kernel.Binable.to_string (module Mina_base.User_command.Stable.V2) (Zkapp_command txn) |> Base64.encode |> (function Ok x -> x | Error _ -> "")));; * Get the hash: diff --git a/src/lib/transaction_consistency_tests/transaction_consistency_tests.ml b/src/lib/transaction_consistency_tests/transaction_consistency_tests.ml index 254edf0711f..b95368a27a1 100644 --- a/src/lib/transaction_consistency_tests/transaction_consistency_tests.ml +++ b/src/lib/transaction_consistency_tests/transaction_consistency_tests.ml @@ -11,7 +11,7 @@ module Hash = struct let hash_account = Fn.compose Ledger_hash.of_digest Account.digest - let empty_account = Ledger_hash.of_digest Account.empty_digest + let empty_account = Ledger_hash.of_digest (Lazy.force Account.empty_digest) end let%test_module "transaction logic consistency" = diff --git a/src/lib/transaction_logic/mina_transaction_logic.ml b/src/lib/transaction_logic/mina_transaction_logic.ml index 8eb116d3631..e6b9631a847 100644 --- a/src/lib/transaction_logic/mina_transaction_logic.ml +++ b/src/lib/transaction_logic/mina_transaction_logic.ml @@ -545,6 +545,24 @@ module 
type S = sig -> bool Or_error.t module For_tests : sig + module Stack (Elt : sig + type t + end) : sig + type t = Elt.t list + + val if_ : bool -> then_:t -> else_:t -> t + + val empty : unit -> t + + val is_empty : t -> bool + + val pop_exn : t -> Elt.t * t + + val pop : t -> (Elt.t * t) option + + val push : Elt.t -> onto:t -> t + end + val validate_timing_with_min_balance : account:Account.t -> txn_amount:Amount.t @@ -2522,6 +2540,8 @@ module Make (L : Ledger_intf.S) : >>= Mina_stdlib.Result.List.map ~f:(apply_transaction_second_pass ledger) module For_tests = struct + module Stack = Inputs.Stack + let validate_timing_with_min_balance = validate_timing_with_min_balance let validate_timing = validate_timing @@ -2779,7 +2799,7 @@ module For_tests = struct } ; authorization = ( if use_full_commitment then Signature Signature.dummy - else Proof Mina_base.Proof.transaction_dummy ) + else Proof (Lazy.force Mina_base.Proof.transaction_dummy) ) } ; { body = { public_key = receiver diff --git a/src/lib/transaction_logic/test/zkapp_logic.ml b/src/lib/transaction_logic/test/zkapp_logic.ml index 30891040418..a0cc7f57faf 100644 --- a/src/lib/transaction_logic/test/zkapp_logic.ml +++ b/src/lib/transaction_logic/test/zkapp_logic.ml @@ -866,3 +866,46 @@ let%test_module "Test transaction logic." = ) (run_zkapp_cmd ~fee_payer ~fee ~accounts txns) ) end ) + +(* This module tests Inputs.Stack *) +let%test_module "Test stack module" = + ( module struct + module Stack = Transaction_logic.For_tests.Stack (Int) + + let%test_unit "Ensure pop works on non-empty list." = + Quickcheck.test ~trials + (let open Quickcheck in + let open Generator.Let_syntax in + let%map stack = Generator.list_non_empty Generator.size in + let top = List.hd_exn stack in + let tail = List.tl_exn stack in + (stack, top, tail)) + ~f:(fun (stack, top, tail) -> + match Stack.pop stack with + | Some (x, xs) -> + assert (Int.equal x top && List.equal Int.equal xs tail) + | None -> + assert false ) + + let%test_unit "Ensure pop works on empty list." = + match Stack.pop (Stack.empty ()) with + | Some _ -> + assert false + | None -> + assert true + + let%test_unit "Ensure push functionality works." = + Quickcheck.test ~trials + (let open Quickcheck in + let open Generator.Let_syntax in + let%bind stack = Generator.list_non_empty Generator.size in + let%bind stack' = Generator.list_non_empty Generator.size in + let pushed = + List.fold_right stack' ~init:stack ~f:(fun x s -> + Stack.push x ~onto:s ) + in + let pushed' = List.append stack' stack in + return (pushed, pushed')) + ~f:(fun (pushed, pushed') -> + assert (List.equal Int.equal pushed pushed') ) + end ) diff --git a/src/lib/transaction_snark/dune b/src/lib/transaction_snark/dune index 425f689bfca..d458bb68d29 100644 --- a/src/lib/transaction_snark/dune +++ b/src/lib/transaction_snark/dune @@ -67,6 +67,8 @@ ppx_version.runtime logger zkapp_command_builder + snark_keys_header + mina_version ) (preprocess (pps ppx_snarky ppx_version ppx_mina ppx_jane ppx_deriving.std ppx_deriving_yojson h_list.ppx)) diff --git a/src/lib/transaction_snark/test/multisig_account/multisig_account.ml b/src/lib/transaction_snark/test/multisig_account/multisig_account.ml index 413baffc0f9..73e5892d449 100644 --- a/src/lib/transaction_snark/test/multisig_account/multisig_account.ml +++ b/src/lib/transaction_snark/test/multisig_account/multisig_account.ml @@ -375,7 +375,8 @@ let%test_module "multisig_account" = ; may_use_token = No ; authorization_kind = Proof (With_hash.hash vk) } - ; authorization = Proof Mina_base.Proof.transaction_dummy + ; authorization = + Proof (Lazy.force Mina_base.Proof.transaction_dummy) } in let memo = Signed_command_memo.empty in diff --git a/src/lib/transaction_snark/test/ring_sig.ml b/src/lib/transaction_snark/test/ring_sig.ml index 4d81d96e3cc..49787c15b4d 100644 --- a/src/lib/transaction_snark/test/ring_sig.ml +++ b/src/lib/transaction_snark/test/ring_sig.ml @@ -233,7 +233,8 @@ let%test_unit "ring-signature zkapp tx with 3 zkapp_command" = ; use_full_commitment = false ; authorization_kind = Proof (With_hash.hash vk) } - ; authorization = Proof Mina_base.Proof.transaction_dummy + ; authorization = + Proof (Lazy.force Mina_base.Proof.transaction_dummy) } in let protocol_state = Zkapp_precondition.Protocol_state.accept in diff --git a/src/lib/transaction_snark/transaction_snark.ml b/src/lib/transaction_snark/transaction_snark.ml index 3b85bc475c1..8dc3b4661cd 100644 --- a/src/lib/transaction_snark/transaction_snark.ml +++ b/src/lib/transaction_snark/transaction_snark.ml @@ -2098,14 +2098,14 @@ module Make_str (A : Wire_types.Concrete) = struct let open Basic in let module M = H4.T (Pickles.Tag) in let s = Basic.spec t in - let prev_should_verify = + let prev_must_verify = match proof_level with | Genesis_constants.Proof_level.Full -> true | _ -> false in - let b = Boolean.var_of_value prev_should_verify in + let b = Boolean.var_of_value prev_must_verify in match t with | Proved -> { identifier = "proved" @@ -3270,14 +3270,14 @@ module Make_str (A : Wire_types.Concrete) = struct (s1, s2) let rule ~proof_level self : _ Pickles.Inductive_rule.t = - let prev_should_verify = + let prev_must_verify = match proof_level with | Genesis_constants.Proof_level.Full -> true | _ -> false in - let b = Boolean.var_of_value prev_should_verify in + let b = Boolean.var_of_value prev_must_verify in { identifier = "merge" ; prevs = [ self; self ] ; main = @@ -3318,6 +3318,13 @@ module Make_str (A : Wire_types.Concrete) = struct ~constraint_constants: (Genesis_constants.Constraint_constants.to_snark_keys_header constraint_constants ) + ~commits: + { commits = + { mina = Mina_version.commit_id + ; marlin = Mina_version.marlin_commit_id + } + ;
commit_date = Mina_version.commit_date + } ~choices:(fun ~self -> let zkapp_command x = Base.Zkapp_command_snark.rule ~constraint_constants ~proof_level x @@ -4740,7 +4747,8 @@ module Make_str (A : Wire_types.Concrete) = struct ; may_use_token = No ; authorization_kind = Proof (With_hash.hash vk) } - ; authorization = Control.Proof Mina_base.Proof.blockchain_dummy + ; authorization = + Control.Proof (Lazy.force Mina_base.Proof.blockchain_dummy) } in let account_update_digest_with_selected_chain = @@ -5135,7 +5143,7 @@ module Make_str (A : Wire_types.Concrete) = struct ; may_use_token = No ; authorization_kind = Proof (With_hash.hash vk) } - ; authorization = Proof Mina_base.Proof.transaction_dummy + ; authorization = Proof (Lazy.force Mina_base.Proof.transaction_dummy) } in let memo = Signed_command_memo.empty in diff --git a/src/lib/transition_frontier/README.md b/src/lib/transition_frontier/README.md new file mode 100644 index 00000000000..a5b88070b69 --- /dev/null +++ b/src/lib/transition_frontier/README.md @@ -0,0 +1,156 @@
+# Transition Frontier
+
+The transition frontier is a hybrid in-memory/on-disk data structure which represents all known states on the network up to the point of finality. This data structure plays an essential role in tracking states produced by the consensus protocol in a way that automatically garbage collects orphaned states which will never become part of the canonical chain, and does so in a way that optimizes for high read/write performance while also persisting information in the background for future recovery. As such, the transition frontier can be thought of as both an in-memory data structure and a concurrent subsystem which maintains a persistent on-disk copy of the data structure. To help maintain this viewpoint, the implementation is split into two main parts: a full frontier, which stores the entire expanded state of each block in-memory, and a persistent frontier, which asynchronously processes state transitions to the full frontier, proxying those operations to a RocksDB representation of the frontier in the background.
+
+In terms of the data structure, the transition frontier can be thought of as a combination of the following pieces of information:
+
+1) A rose tree that contains all blockchains (including forks) up to `k` in length from the most recently finalized block.
+2) A history of recently finalized blocks.
+3) A snarked ledger for the most recently finalized block.
+4) A series of ledger masks, chained off of the aforementioned snarked ledger, to represent intermediate staged ledger states achieved by blocks tracked past the most recently finalized block.
+5) The auxiliary scan state information associated with each block tracked past the most recently finalized block.
+
+Importantly, the transition frontier can also identify which of the states it is tracking is the strongest state, which is referred to as the "best tip". The consensus mechanism informs the transition frontier of how to compare blocks for strength, as the sketch below illustrates.
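+
+As a rough illustration of this shape, the following OCaml sketch models the frontier as a rose tree and selects a best tip with a caller-supplied comparison. The names here (`node`, `best_tip`) are hypothetical, and the real implementation is hash-indexed rather than a literal tree.
+
+```ocaml
+(* A rose tree of states: each node is a block plus its successors. *)
+type 'block node = { block : 'block; successors : 'block node list }
+
+(* Fold over all nodes, keeping the strongest block according to
+   [compare], which is supplied by the consensus mechanism. In practice
+   the best tip is always a leaf, because extending a chain never makes
+   it weaker. *)
+let best_tip (type b) ~(compare : b -> b -> int) (root : b node) : b =
+  let rec go acc { block; successors } =
+    let acc = if compare block acc > 0 then block else acc in
+    List.fold_left go acc successors
+  in
+  go root.block root
+```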
+
+## Formal Spec
+
+TODO
+
+## Glossary
+
+| Name | Description |
+|--------------------------|-------------|
+| Best Tip | The best state in a frontier. This is always a tip (leaf) of the frontier, by nature of the consensus selection rules. |
+| Breadcrumb | A fully expanded state for a block. Contains a validated block and a mask-chained staged ledger, with some metadata. |
+| Frontier Diff | A representation of a state transition to perform on a frontier data structure. |
+| Frontier Root Data | Fat, auxiliary information stored for the root of a persistent frontier. Stored in a single file, it contains a serialized scan state and pending coinbase state, which can be used to reconstruct a staged ledger at the root of the persistent frontier. |
+| Frontier Root Identifier | Light, auxiliary information stored for the persistent root. Stored in a single file, it identifies the root state hash currently associated with a persistent root. |
+| Full Frontier | The in-memory representation of the transition frontier, with fully expanded states at every node (breadcrumbs). |
+| Persistent Frontier | An on-disk representation of the transition frontier. Stores block information in RocksDB, which is asynchronously updated by processing frontier diffs applied to the in-memory full frontier representation. |
+| Persistent Root | An on-disk ledger where the root snarked ledger of the transition frontier is stored. The ledger serves as the root ledger in the full frontier's mask chain, and is actively mutated by the full frontier as new roots are committed. |
+| Root Snarked Ledger | Synonymous with persistent root (in the case of the full frontier), this is the fully snarked ledger at the root of a frontier. |
+
+## Architecture
+
+### Frontier Spec
+
+#### Frontier Invariants
+
+All frontiers must hold the following invariants at every state:
+
+- all paths leading from the root of the frontier are no more than `k` in length
+- the best tip of the frontier is stronger than all other nodes in the frontier (selected via consensus)
+- (for the full frontier) all masks contained in breadcrumb staged ledgers are sequentially chained in a topology that matches the frontier's structure, ultimately rooted back to the snarked ledger stored in the root data
+
+#### Frontier Read Interface
+
+Each frontier must expose the following operations:
+
+- find the root node in O(1)
+- find the best tip node in O(1)
+- find a node by block hash in O(1)
+- access the successor hashes of a node in O(1)
+
+Some other operations are also required, but those are more or less helpers on top of the operations described above; a sketch of this interface is given below.
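+
+A sketch of the read interface as an OCaml signature. The module type name and the concrete types here are illustrative, not the library's actual interface (which lives in `frontier_base/frontier_intf.ml`):
+
+```ocaml
+module type Frontier_read_intf = sig
+  type t
+
+  type node
+
+  (* Each of these is expected to be an O(1) lookup. *)
+  val root : t -> node
+
+  val best_tip : t -> node
+
+  val find : t -> state_hash:string -> node option
+
+  val successor_hashes : t -> state_hash:string -> string list
+end
+```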
+
+#### Frontier State Transitions (Frontier Diffs)
+
+There are three types of state transitions that can be performed on a frontier. Frontier diffs serve as a data representation format for these three types of state transitions, and do so in a way that allows the diffs to specify state transitions on different types of frontier nodes (as not all frontiers store the same node type). The supported frontier state transitions are:
+
+- add a node to the frontier
+- transition the root to a successor
+- update the best tip
+
+When these diffs are applied to frontiers, they are applied in a more-or-less blind fashion (as in, the frontier applying the diffs does not check that the state reached after the application will still hold all of the frontier's invariants). Instead, the responsibility for maintaining frontier invariants is on the function which computes the diffs to apply.
+
+### Persistent Root
+
+The persistent root stores and maintains the root snarked ledger of a frontier. This is the oldest ledger maintained by a frontier, and is persisted on-disk in the form of a RocksDB ledger. This ledger is loaded into the full frontier upon initialization and serves as the basis for ledger information for all ledgers maintained by the full frontier.
+
+### Root Data
+
+TODO
+
+### Full Frontier
+
+The full frontier is a fully expanded in-memory frontier implementation. It is created from frontier root data and the root snarked ledger. The full frontier maintains a hash-indexed k-tree of breadcrumbs, implemented as a hashtable. Each breadcrumb in the frontier, including the root breadcrumb, consists of a block and a staged ledger associated with that block. The staged ledger's ledger state is built using ledger masks, where each ledger mask is chained off of the preceding breadcrumb's staged ledger mask, and the root's staged ledger mask is chained off of the root's snarked ledger (the persistent root).
+
+TODO: mask maintenance on root transition & mask chaining diagram
+
+### Persistent Frontier
+
+TODO: add the new rules here for izzy's root hack
+
+The persistent frontier is an on-disk, limited representation of a frontier, along with a concurrent subsystem for synchronizing it with the full frontier's state. To maintain a reasonable level of disk I/O, the persistent frontier stores only blocks and not fully expanded breadcrumbs. It maintains neither the auxiliary scan state nor the ledger required to construct the staged ledger. Instead, it relies on additional auxiliary information, "minimal root data", to also be available. This information is more expensive to write (larger) than the normal database synchronization operations, but occurs less often. Because the root data in the database is not necessarily kept in sync with the other information for the persistent frontier, and there is no guarantee that the persisted root (which is required for building the root staged ledger) will be in sync, it is important that the persistent frontier can recover from desynchronizations. The daemon attempts to always synchronize this data when it shuts down, but in the case of a crash, sometimes this will not happen.
+
+The persistent frontier receives a notification every time diffs are applied to the full frontier. When this notification is received, the persistent frontier writes any diffs that were applied into a diff buffer. At a later point in time, this diff buffer is flushed, and all of the recorded diffs are performed against the persistent frontier's database. All diffs are processed in the buffer, but the auxiliary root data stored in the persistent frontier is only updated once per flush.
+
+![](./res/persistent_frontier_concurrency.dot.png)
+
+#### Diff Buffer Flush Rules
+
+The diff buffer is parameterized by three values: the preferred flush capacity, the maximum capacity, and a maximum latency. The diff buffer will attempt to flush as soon as the flush capacity is exceeded, so long as there is not an active flush job. If there is an active flush job, the diff buffer will continue accumulating diffs until that job has succeeded, up until it reaches the maximum capacity, at which point the daemon will crash. To ensure that the persistent frontier is still updated even when there is a low amount of activity on the network, the diff buffer will also be flushed after the maximum latency has been exceeded. These rules are sketched below.
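+
+A minimal sketch of the flush decision under these rules. The record fields and the constant values are hypothetical; the actual logic lives in `persistent_frontier/diff_buffer.ml`:
+
+```ocaml
+type buffer_state =
+  { pending : int        (* diffs accumulated since the last flush *)
+  ; since_flush : float  (* seconds elapsed since the last flush *)
+  ; flushing : bool      (* whether a flush job is currently running *)
+  }
+
+let preferred_capacity = 30
+
+let max_capacity = 200
+
+let max_latency_sec = 5.0
+
+let step (b : buffer_state) =
+  if b.pending >= max_capacity then
+    `Crash (* the buffer overran while a flush job was stuck *)
+  else if b.flushing then
+    `Accumulate (* keep buffering until the active job finishes *)
+  else if b.pending >= preferred_capacity || b.since_flush >= max_latency_sec
+  then `Flush
+  else `Accumulate
+```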
+
+#### Database Representation
+
+The database supports the following schema:
+
+| Key | Args | Value | Description |
+|---------------------------------------|----------------|-----------------------------|-------------|
+| `Db_version` | `()` | `int` | The current schema version stored in the database. |
+| `Root` | `()` | `Root_data.Minimal.t` | The auxiliary root data. |
+| `Best_tip` | `()` | `State_hash.t` | Pointer to the current best tip. |
+| `Protocol_states_for_root_scan_state` | `()` | `Protocol_state.value list` | Auxiliary block headers required for constructing the scan state at the root. |
+| `Transition` | `State_hash.t` | `External_transition.t` | Block storage by state hash. |
+| `Arcs` | `State_hash.t` | `State_hash.t list` | Successor hash storage by predecessor hash. |
+
+#### Resynchronization
+
+TODO
+
+### Root History
+
+TODO
+
+TODO: note about the fact it is currently an extension
+
+### Extensions
+
+TODO
+
+### Consensus Hooks
+
+TODO
+
+### Transition Frontier
+
+The transition frontier combines together the full frontier, persistent frontier, root history, and extensions into a single interface. It configures the root history to contain at most `2*k` previous roots, giving a total span of `3*k` blocks in the chains stored at any given time. This is done so that nodes can serve bootstrap requests (proofs of finality) to nodes within `2*k` blocks of the best tip.
+
+![](./res/transition_frontier_diagram.conv.tex.png)
+
+## Code Directory
+
+TODO: extensions
+
+| Name | File | Description |
+|-------------------------------------|--------------------------------------------------------------------------|-------------|
+| Breadcrumb | [frontier\_base/breadcrumb.ml](./frontier_base/breadcrumb.ml) | The breadcrumb data structure. |
+| Frontier Interface | [frontier\_base/frontier\_intf.ml](./frontier_base/frontier_intf.ml) | The external interface which frontiers must provide. |
+| Diff | [frontier\_base/diff.ml](./frontier_base/diff.ml) | The representation of frontier diffs. |
+| Root Data | [frontier\_base/root\_data.ml](./frontier_base/root_data.ml) | The representation of frontier root data, at varying levels of detail/size. |
+| Root Identifier | [frontier\_base/root\_identifier.ml](./frontier_base/root_identifier.ml) | The representation of frontier root identifiers. |
+| Full Frontier | [full\_frontier/full\_frontier.ml](./full_frontier/full_frontier.ml) | The in-memory, fully expanded frontier data structure. |
+| Persistent Frontier Database | [database.ml](./persistent_frontier/database.ml) | The RocksDB database that the persistent frontier is stored in. |
+| Persistent Frontier Diff Buffer | [diff\_buffer.ml](./persistent_frontier/diff_buffer.ml) | The diff buffer used as part of the persistent frontier synchronization subsystem. |
+| Persistent Frontier Synchronization | [sync.ml](./persistent_frontier/sync.ml) | The persistent frontier synchronization subsystem. |
+| Persistent Frontier Worker | [worker.ml](./persistent_frontier/worker.ml) | The persistent frontier synchronization subsystem worker. Responsible for applying diffs flushed from the diff buffer to the persistent frontier database. |
+| Persistent Frontier | [persistent\_frontier.ml](./persistent_frontier/persistent_frontier.ml) | The persistent frontier instance and singleton factory. |
+| Transition Frontier | [transition\_frontier.ml](./transition_frontier.ml) | The library entrypoint which ties together all of the transition frontier concepts. |
+
+## Future Plans
+
+[RFC 0028](../../../rfcs/0028-frontier-synchronization.md) describes a long-term solution to a class of async race-conditions that are possible when consuming transition frontier extensions. We plan on implementing this work in the transition frontier at some point in the future.
+
+As mentioned in the root history section, there is some tech debt to refactor the root history as an extension or rip it out of extensions altogether.
+ +TODO: dump the state of desync recovery here diff --git a/src/lib/transition_frontier/extensions/extensions.ml b/src/lib/transition_frontier/extensions/extensions.ml index 14fe3323b04..b18e50e56dd 100644 --- a/src/lib/transition_frontier/extensions/extensions.ml +++ b/src/lib/transition_frontier/extensions/extensions.ml @@ -2,20 +2,16 @@ open Async_kernel open Core_kernel open Pipe_lib module Best_tip_diff = Best_tip_diff -module Identity = Identity module Root_history = Root_history module Snark_pool_refcount = Snark_pool_refcount module Transition_registry = Transition_registry module New_breadcrumbs = New_breadcrumbs -module Ledger_table = Ledger_table type t = { root_history : Root_history.Broadcasted.t ; snark_pool_refcount : Snark_pool_refcount.Broadcasted.t ; best_tip_diff : Best_tip_diff.Broadcasted.t ; transition_registry : Transition_registry.Broadcasted.t - ; ledger_table : Ledger_table.Broadcasted.t - ; identity : Identity.Broadcasted.t ; new_breadcrumbs : New_breadcrumbs.Broadcasted.t } [@@deriving fields] @@ -34,20 +30,14 @@ let create ~logger frontier : t Deferred.t = let%bind transition_registry = Transition_registry.(Broadcasted.create (create ~logger frontier)) in - let%bind identity = Identity.(Broadcasted.create (create ~logger frontier)) in let%bind new_breadcrumbs = New_breadcrumbs.(Broadcasted.create (create ~logger frontier)) in - let%bind ledger_table = - Ledger_table.(Broadcasted.create (create ~logger frontier)) - in return { root_history ; snark_pool_refcount ; best_tip_diff ; transition_registry - ; identity - ; ledger_table ; new_breadcrumbs } @@ -64,8 +54,6 @@ let close t : unit = ~best_tip_diff:(close_extension (module Best_tip_diff.Broadcasted)) ~transition_registry: (close_extension (module Transition_registry.Broadcasted)) - ~ledger_table:(close_extension (module Ledger_table.Broadcasted)) - ~identity:(close_extension (module Identity.Broadcasted)) ~new_breadcrumbs:(close_extension (module New_breadcrumbs.Broadcasted)) let notify (t : t) ~logger ~frontier ~diffs_with_mutants = @@ -84,9 +72,7 @@ let notify (t : t) ~logger ~frontier ~diffs_with_mutants = ~snark_pool_refcount:(update (module Snark_pool_refcount.Broadcasted)) ~best_tip_diff:(update (module Best_tip_diff.Broadcasted)) ~transition_registry:(update (module Transition_registry.Broadcasted)) - ~ledger_table:(update (module Ledger_table.Broadcasted)) - ~new_breadcrumbs:(update (module New_breadcrumbs.Broadcasted)) - ~identity:(update (module Identity.Broadcasted)) ) + ~new_breadcrumbs:(update (module New_breadcrumbs.Broadcasted)) ) type ('ext, 'view) access = | Root_history : (Root_history.t, Root_history.view) access @@ -95,8 +81,6 @@ type ('ext, 'view) access = | Best_tip_diff : (Best_tip_diff.t, Best_tip_diff.view) access | Transition_registry : (Transition_registry.t, Transition_registry.view) access - | Ledger_table : (Ledger_table.t, Ledger_table.view) access - | Identity : (Identity.t, Identity.view) access | New_breadcrumbs : (New_breadcrumbs.t, New_breadcrumbs.view) access type ('ext, 'view) broadcasted_extension = @@ -115,9 +99,7 @@ let get : ; snark_pool_refcount ; best_tip_diff ; transition_registry - ; ledger_table ; new_breadcrumbs - ; identity } -> function | Root_history -> Broadcasted_extension ((module Root_history.Broadcasted), root_history) @@ -129,10 +111,6 @@ let get : | Transition_registry -> Broadcasted_extension ((module Transition_registry.Broadcasted), transition_registry) - | Ledger_table -> - Broadcasted_extension ((module Ledger_table.Broadcasted), ledger_table) - | 
Identity -> - Broadcasted_extension ((module Identity.Broadcasted), identity) | New_breadcrumbs -> Broadcasted_extension ((module New_breadcrumbs.Broadcasted), new_breadcrumbs) diff --git a/src/lib/transition_frontier/extensions/extensions.mli b/src/lib/transition_frontier/extensions/extensions.mli index 3d28ed64ae2..e91c40094ab 100644 --- a/src/lib/transition_frontier/extensions/extensions.mli +++ b/src/lib/transition_frontier/extensions/extensions.mli @@ -13,12 +13,10 @@ open Async_kernel open Pipe_lib open Frontier_base module Best_tip_diff = Best_tip_diff -module Identity = Identity module Root_history = Root_history module Snark_pool_refcount = Snark_pool_refcount module Transition_registry = Transition_registry module New_breadcrumbs = New_breadcrumbs -module Ledger_table = Ledger_table type t @@ -40,8 +38,6 @@ type ('ext, 'view) access = | Best_tip_diff : (Best_tip_diff.t, Best_tip_diff.view) access | Transition_registry : (Transition_registry.t, Transition_registry.view) access - | Ledger_table : (Ledger_table.t, Ledger_table.view) access - | Identity : (Identity.t, Identity.view) access | New_breadcrumbs : (New_breadcrumbs.t, New_breadcrumbs.view) access val get_extension : t -> ('ext, _) access -> 'ext diff --git a/src/lib/transition_frontier/extensions/identity.ml b/src/lib/transition_frontier/extensions/identity.ml deleted file mode 100644 index 83ca53ea8b7..00000000000 --- a/src/lib/transition_frontier/extensions/identity.ml +++ /dev/null @@ -1,18 +0,0 @@ -open Frontier_base - -(* TODO: refactor out, this is a leak in the abstraction of extensions *) -module T = struct - type t = unit - - type view = Diff.Full.With_mutant.t list - - let name = "identity" - - let create ~logger:_ _ = ((), []) - - let handle_diffs () _ diffs_with_mutants : view option = - Some diffs_with_mutants -end - -include T -module Broadcasted = Functor.Make_broadcasted (T) diff --git a/src/lib/transition_frontier/extensions/identity.mli b/src/lib/transition_frontier/extensions/identity.mli deleted file mode 100644 index 86d8ec911e7..00000000000 --- a/src/lib/transition_frontier/extensions/identity.mli +++ /dev/null @@ -1,3 +0,0 @@ -open Frontier_base - -include Intf.Extension_intf with type view = Diff.Full.With_mutant.t list diff --git a/src/lib/transition_frontier/extensions/ledger_table.ml b/src/lib/transition_frontier/extensions/ledger_table.ml deleted file mode 100644 index 4dcba985c0b..00000000000 --- a/src/lib/transition_frontier/extensions/ledger_table.ml +++ /dev/null @@ -1,85 +0,0 @@ -open Core_kernel -open Mina_base -open Frontier_base - -(* WARNING: don't use this code until @nholland has landed a PR that - synchronize the read/write of transition frontier -*) - -module T = struct - (* a pair of hash tables - the first maps ledger hashes to ledgers - the second maps ledger hashes to reference counts - *) - type t = - { ledgers : Mina_ledger.Ledger.t Ledger_hash.Table.t - ; counts : int Ledger_hash.Table.t - } - - type view = unit - - let name = "ledger_table" - - let add_entry t ~ledger_hash ~ledger = - (* add ledger, increment ref count *) - ignore - ( Hashtbl.add t.ledgers ~key:ledger_hash ~data:ledger - : [ `Duplicate | `Ok ] ) ; - ignore (Hashtbl.incr t.counts ledger_hash : view) - - let remove_entry t ~ledger_hash = - (* decrement ref count, remove ledger if count is 0 *) - Hashtbl.decr t.counts ledger_hash ~remove_if_zero:true ; - if not (Hashtbl.mem t.counts ledger_hash) then - Hashtbl.remove t.ledgers ledger_hash - - let create ~logger:_ frontier = - (* populate ledger table from 
breadcrumbs *) - let t = - { ledgers = Ledger_hash.Table.create () - ; counts = Ledger_hash.Table.create () - } - in - let breadcrumbs = Full_frontier.all_breadcrumbs frontier in - List.iter breadcrumbs ~f:(fun bc -> - let ledger = Staged_ledger.ledger @@ Breadcrumb.staged_ledger bc in - let ledger_hash = Mina_ledger.Ledger.merkle_root ledger in - add_entry t ~ledger_hash ~ledger ) ; - (t, ()) - - let lookup t ledger_hash = Ledger_hash.Table.find t.ledgers ledger_hash - - let handle_diffs t _frontier diffs_with_mutants = - let open Diff.Full.With_mutant in - List.iter diffs_with_mutants ~f:(function - | E (New_node (Full breadcrumb), _) -> - let ledger = - Staged_ledger.ledger @@ Breadcrumb.staged_ledger breadcrumb - in - let ledger_hash = Mina_ledger.Ledger.merkle_root ledger in - add_entry t ~ledger_hash ~ledger - | E (Root_transitioned transition, _) -> ( - match transition.garbage with - | Full nodes -> - let open Mina_state in - List.iter nodes ~f:(fun node -> - let blockchain_state = - Protocol_state.blockchain_state - @@ Mina_block.( - Header.protocol_state - @@ Validated.header node.transition) - in - let staged_ledger = - Blockchain_state.staged_ledger_hash blockchain_state - in - let ledger_hash = - Staged_ledger_hash.ledger_hash staged_ledger - in - remove_entry t ~ledger_hash ) ) - | E (Best_tip_changed _, _) -> - () ) ; - None -end - -module Broadcasted = Functor.Make_broadcasted (T) -include T diff --git a/src/lib/transition_frontier/extensions/ledger_table.mli b/src/lib/transition_frontier/extensions/ledger_table.mli deleted file mode 100644 index c0d0731c6c4..00000000000 --- a/src/lib/transition_frontier/extensions/ledger_table.mli +++ /dev/null @@ -1,9 +0,0 @@ -(** A ledger table maps ledger hashes to ledgers *) - -open Mina_base - -type t - -include Intf.Extension_intf with type t := t and type view = unit - -val lookup : t -> Ledger_hash.t -> Mina_ledger.Ledger.t option diff --git a/src/lib/transition_frontier/frontier_base/breadcrumb.ml b/src/lib/transition_frontier/frontier_base/breadcrumb.ml index 56393bf28d0..2af84b6f168 100644 --- a/src/lib/transition_frontier/frontier_base/breadcrumb.ml +++ b/src/lib/transition_frontier/frontier_base/breadcrumb.ml @@ -363,7 +363,7 @@ module For_tests = struct One_or_two.map stmts ~f:(fun statement -> Ledger_proof.create ~statement ~sok_digest:Sok_message.Digest.default - ~proof:Proof.transaction_dummy ) + ~proof:(Lazy.force Proof.transaction_dummy) ) ; prover } in @@ -466,7 +466,7 @@ module For_tests = struct let next_block = let header = Mina_block.Header.create ~protocol_state - ~protocol_state_proof:Proof.blockchain_dummy + ~protocol_state_proof:(Lazy.force Proof.blockchain_dummy) ~delta_block_chain_proof:(previous_state_hashes.state_hash, []) () in diff --git a/src/lib/transition_frontier/persistent_frontier/database.ml b/src/lib/transition_frontier/persistent_frontier/database.ml index 8bb5036c4bd..41ad0a1b0d9 100644 --- a/src/lib/transition_frontier/persistent_frontier/database.ml +++ b/src/lib/transition_frontier/persistent_frontier/database.ml @@ -321,14 +321,14 @@ let initialize t ~root_data = let find_arcs_and_root t ~(arcs_cache : State_hash.t list State_hash.Table.t) ~parent_hashes = - let f h = Some_key.Some_key (Arcs h) in + let f h = Rocks.Key.Some_key (Arcs h) in let values = get_batch t.db ~keys:(Some_key Root :: List.map parent_hashes ~f) in let populate res parent_hash arc_opt = let%bind.Result () = res in match arc_opt with - | Some (Some_key.Some_key_value (Arcs _, (data : State_hash.t list))) -> + | Some 
(Key.Some_key_value (Arcs _, (data : State_hash.t list))) -> State_hash.Table.set arcs_cache ~key:parent_hash ~data ; Result.return () | _ -> diff --git a/src/lib/transition_frontier/res/persistent_frontier_concurrency.dot b/src/lib/transition_frontier/res/persistent_frontier_concurrency.dot new file mode 100644 index 00000000000..08d79bd9406 --- /dev/null +++ b/src/lib/transition_frontier/res/persistent_frontier_concurrency.dot @@ -0,0 +1,9 @@ +digraph G { + node [shape=box]; + + Breadcrumb -> Frontier_diffs [label="Full_frontier.calculate_diffs"]; + Frontier_diffs -> Full_frontier [label="Full_frontier.apply_diffs"]; + Frontier_diffs -> Persistent_frontier [label="Persistent_frontier.notify_sync"]; + Persistent_frontier -> "Persistent_frontier.Diff_buffer" [label="Persistent_frontier.Diff_buffer.write"]; + "Persistent_frontier.Diff_buffer" -> "Persistent_frontier.Database" [label="Persistent_frontier.Diff_buffer.flush",style=dashed]; +} diff --git a/src/lib/transition_frontier/res/persistent_frontier_concurrency.dot.png b/src/lib/transition_frontier/res/persistent_frontier_concurrency.dot.png new file mode 100644 index 00000000000..19bb7af6329 --- /dev/null +++ b/src/lib/transition_frontier/res/persistent_frontier_concurrency.dot.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:002fd1840aa4d1bda5bf8ed92c36fdad6675e09612925a0bff09eff544b40f88 +size 34174 diff --git a/docs/res/transition_frontier_diagram.tex b/src/lib/transition_frontier/res/transition_frontier_diagram.conv.tex similarity index 92% rename from docs/res/transition_frontier_diagram.tex rename to src/lib/transition_frontier/res/transition_frontier_diagram.conv.tex index 466e5ecd9c4..123e0dff3d6 100644 --- a/docs/res/transition_frontier_diagram.tex +++ b/src/lib/transition_frontier/res/transition_frontier_diagram.conv.tex @@ -1,8 +1,7 @@ -% This document can be rendered with a texlive-full installation by executing: - -\documentclass[ - border=1pt -]{standalone} +\documentclass[convert={outext=.tex.png}]{standalone} +%\documentclass[ +% border=1pt +%]{standalone} \usepackage{units} \usepackage{tikz} @@ -59,9 +58,9 @@ [,circle,draw]] [,circle,draw]]] ]]]]]]] - \measurexdistance[$k=2160$] + \measurexdistance[$k=290$] {(locked.north)}{(best.north)}{(.north)+(0,6mm)}{above} - \measurexdistance[$k=2160$] + \measurexdistance[$2k=580$] {(oldest.north)}{(locked.south)}{(.south)+(0,-4mm)}{below} \end{forest} diff --git a/src/lib/transition_frontier/res/transition_frontier_diagram.conv.tex.png b/src/lib/transition_frontier/res/transition_frontier_diagram.conv.tex.png new file mode 100644 index 00000000000..10238094d6c --- /dev/null +++ b/src/lib/transition_frontier/res/transition_frontier_diagram.conv.tex.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be62ccc188f329d88a7a1af752ad52185da999b380c20395e0c5e1506b5dd872 +size 12821 diff --git a/src/lib/transition_router/transition_router.ml b/src/lib/transition_router/transition_router.ml index e0c9951fcec..5f49c4daf8e 100644 --- a/src/lib/transition_router/transition_router.ml +++ b/src/lib/transition_router/transition_router.ml @@ -302,6 +302,11 @@ let wait_for_high_connectivity ~logger ~network ~is_seed = [ ( high_connectivity >>| fun () -> [%log info] "Already connected to enough peers, start initialization" ) + ; ( if is_seed then ( + [%log info] + "We are seed, not waiting for peers to show up, start initialization" ; + Deferred.unit ) + else Deferred.never () ) ; ( after (Time_ns.Span.of_sec connectivity_time_upperbound) >>= fun 
() -> Mina_networking.peers network diff --git a/src/lib/unsigned_extended/dune b/src/lib/unsigned_extended/dune index 523ce94bb22..5c2fdd57601 100644 --- a/src/lib/unsigned_extended/dune +++ b/src/lib/unsigned_extended/dune @@ -18,7 +18,6 @@ ;; local libraries bignum_bigint snark_params - ppx_dhall_type test_util ppx_version.runtime ) diff --git a/src/lib/unsigned_extended/unsigned_extended.ml b/src/lib/unsigned_extended/unsigned_extended.ml index 4c34a6d7d7b..3e7812f240b 100644 --- a/src/lib/unsigned_extended/unsigned_extended.ml +++ b/src/lib/unsigned_extended/unsigned_extended.ml @@ -128,8 +128,6 @@ module UInt64 = struct include M - let dhall_type = Ppx_dhall_type.Dhall_type.Text - let to_uint64 : t -> uint64 = Fn.id let of_uint64 : uint64 -> t = Fn.id diff --git a/src/lib/unsigned_extended/unsigned_extended.mli b/src/lib/unsigned_extended/unsigned_extended.mli index c636e32283a..c1877320b4a 100644 --- a/src/lib/unsigned_extended/unsigned_extended.mli +++ b/src/lib/unsigned_extended/unsigned_extended.mli @@ -23,8 +23,6 @@ module UInt64 : sig include S with type t = Stable.Latest.t - val dhall_type : Ppx_dhall_type.Dhall_type.t - val to_uint64 : t -> uint64 val of_uint64 : uint64 -> t diff --git a/src/lib/zkapp_command_builder/zkapp_command_builder.ml b/src/lib/zkapp_command_builder/zkapp_command_builder.ml index c48cffdec78..8e32f2b6d25 100644 --- a/src/lib/zkapp_command_builder/zkapp_command_builder.ml +++ b/src/lib/zkapp_command_builder/zkapp_command_builder.ml @@ -70,7 +70,7 @@ let mk_zkapp_command ?memo ~fee ~fee_payer_pk ~fee_payer_nonce account_updates : | None_given -> Control.None_given | Proof _ -> - Control.Proof Mina_base.Proof.blockchain_dummy + Control.Proof (Lazy.force Mina_base.Proof.blockchain_dummy) | Signature -> Control.Signature Signature.dummy in diff --git a/src/mina_stdlib.opam b/src/mina_stdlib.opam deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/src/nonconsensus/currency/dune b/src/nonconsensus/currency/dune index 5bc3acbdf4e..91851e4d413 100644 --- a/src/nonconsensus/currency/dune +++ b/src/nonconsensus/currency/dune @@ -19,7 +19,6 @@ mina_wire_types codable bignum_bigint - ppx_dhall_type random_oracle_input snark_params_nonconsensus random_oracle_nonconsensus diff --git a/src/nonconsensus/mina_numbers/dune b/src/nonconsensus/mina_numbers/dune index cbbfdacc0f3..c10dda74686 100644 --- a/src/nonconsensus/mina_numbers/dune +++ b/src/nonconsensus/mina_numbers/dune @@ -15,7 +15,6 @@ result ;;local libraries mina_wire_types - ppx_dhall_type bignum_bigint fold_lib tuple_lib diff --git a/src/nonconsensus/unsigned_extended/dune b/src/nonconsensus/unsigned_extended/dune index 7c7eece1bcf..63eea74cf27 100644 --- a/src/nonconsensus/unsigned_extended/dune +++ b/src/nonconsensus/unsigned_extended/dune @@ -17,7 +17,6 @@ ppx_inline_test.config ;;local libraries bignum_bigint - ppx_dhall_type snark_params_nonconsensus ) (enabled_if