diff --git a/Cargo.toml b/Cargo.toml index 86d40ddb..d57ea966 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,5 +3,6 @@ members = [ "air-script", "parser", "ir", - "codegen/winterfell" + "codegen/winterfell", + "codegen/gce" ] \ No newline at end of file diff --git a/air-script/Cargo.toml b/air-script/Cargo.toml index d4f5591e..0c4b9b4a 100644 --- a/air-script/Cargo.toml +++ b/air-script/Cargo.toml @@ -18,6 +18,7 @@ path = "src/main.rs" [dependencies] codegen-winter = { package = "air-codegen-winter", path = "../codegen/winterfell", version = "0.1.0" } +codegen-gce = { package = "air-codegen-gce", path = "../codegen/gce", version = "0.1.0" } env_logger = "0.9" ir = { package = "air-ir", path = "../ir", version = "0.1.0" } log = { version = "0.4", default-features = false } diff --git a/air-script/src/cli/transpile.rs b/air-script/src/cli/transpile.rs index 27a16cb0..b3b5d2a7 100644 --- a/air-script/src/cli/transpile.rs +++ b/air-script/src/cli/transpile.rs @@ -1,7 +1,7 @@ use std::{fs, path::PathBuf}; use structopt::StructOpt; -use codegen_winter::CodeGenerator; +use codegen_winter::WinterfellCodeGenerator; use ir::AirIR; use parser::parse; @@ -64,7 +64,7 @@ impl TranspileCmd { let ir = ir.unwrap(); // generate Rust code targeting Winterfell - let codegen = CodeGenerator::new(&ir); + let codegen = WinterfellCodeGenerator::new(&ir); // write transpiled output to the output path let result = fs::write(output_path.clone(), codegen.generate()); diff --git a/air-script/src/lib.rs b/air-script/src/lib.rs index e04db514..c0b0913d 100644 --- a/air-script/src/lib.rs +++ b/air-script/src/lib.rs @@ -8,4 +8,4 @@ pub use parser::parse; pub use ir::AirIR; /// Code generation targeting Rust for the Winterfell prover -pub use codegen_winter::CodeGenerator; +pub use codegen_winter::WinterfellCodeGenerator; diff --git a/air-script/tests/aux_trace/aux_trace.json b/air-script/tests/aux_trace/aux_trace.json new file mode 100644 index 00000000..809ab47a --- /dev/null +++ b/air-script/tests/aux_trace/aux_trace.json @@ -0,0 +1,28 @@ +{ + "num_polys": 7, + "num_variables": 18, + "constants": [1, 1, 1, 1, 1], + "expressions": [ + {"op": "ADD", "lhs": {"type": "POL", "index": 1}, "rhs": {"type": "POL", "index": 2}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 0}, "rhs": {"type": "EXPR", "index": 0}}, + {"op": "ADD", "lhs": {"type": "POL", "index": 2}, "rhs": {"type": "POL_NEXT", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 1}, "rhs": {"type": "EXPR", "index": 2}}, + {"op": "ADD", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "POL", "index": 1}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 2}, "rhs": {"type": "EXPR", "index": 4}}, + {"op": "ADD", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "VAR", "index": 16}}, + {"op": "ADD", "lhs": {"type": "EXPR", "index": 6}, "rhs": {"type": "POL", "index": 1}}, + {"op": "ADD", "lhs": {"type": "EXPR", "index": 7}, "rhs": {"type": "VAR", "index": 17}}, + {"op": "MUL", "lhs": {"type": "POL", "index": 3}, "rhs": {"type": "EXPR", "index": 8}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 3}, "rhs": {"type": "EXPR", "index": 9}}, + {"op": "ADD", "lhs": {"type": "POL", "index": 2}, "rhs": {"type": "VAR", "index": 16}}, + {"op": "MUL", "lhs": {"type": "POL_NEXT", "index": 4}, "rhs": {"type": "EXPR", "index": 11}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "EXPR", "index": 12}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", 
"index": 1}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 3}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "VAR", "index": 16}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 3}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "CONST", "index": 0}} + ], + "outputs": [1, 3, 5, 10, 13, 14, 15, 16, 17, 18, 19] +} diff --git a/air-script/tests/aux_trace/generated_aux_trace.json b/air-script/tests/aux_trace/generated_aux_trace.json new file mode 100644 index 00000000..809ab47a --- /dev/null +++ b/air-script/tests/aux_trace/generated_aux_trace.json @@ -0,0 +1,28 @@ +{ + "num_polys": 7, + "num_variables": 18, + "constants": [1, 1, 1, 1, 1], + "expressions": [ + {"op": "ADD", "lhs": {"type": "POL", "index": 1}, "rhs": {"type": "POL", "index": 2}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 0}, "rhs": {"type": "EXPR", "index": 0}}, + {"op": "ADD", "lhs": {"type": "POL", "index": 2}, "rhs": {"type": "POL_NEXT", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 1}, "rhs": {"type": "EXPR", "index": 2}}, + {"op": "ADD", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "POL", "index": 1}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 2}, "rhs": {"type": "EXPR", "index": 4}}, + {"op": "ADD", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "VAR", "index": 16}}, + {"op": "ADD", "lhs": {"type": "EXPR", "index": 6}, "rhs": {"type": "POL", "index": 1}}, + {"op": "ADD", "lhs": {"type": "EXPR", "index": 7}, "rhs": {"type": "VAR", "index": 17}}, + {"op": "MUL", "lhs": {"type": "POL", "index": 3}, "rhs": {"type": "EXPR", "index": 8}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 3}, "rhs": {"type": "EXPR", "index": 9}}, + {"op": "ADD", "lhs": {"type": "POL", "index": 2}, "rhs": {"type": "VAR", "index": 16}}, + {"op": "MUL", "lhs": {"type": "POL_NEXT", "index": 4}, "rhs": {"type": "EXPR", "index": 11}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "EXPR", "index": 12}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 1}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 3}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "VAR", "index": 16}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 3}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "CONST", "index": 0}} + ], + "outputs": [1, 3, 5, 10, 13, 14, 15, 16, 17, 18, 19] +} diff --git a/air-script/tests/binary/binary.json b/air-script/tests/binary/binary.json new file mode 100644 index 00000000..e8d3bc2d --- /dev/null +++ b/air-script/tests/binary/binary.json @@ -0,0 +1,15 @@ +{ + "num_polys": 2, + "num_variables": 16, + "constants": [0, 2, 0, 2], + "expressions": [ + {"op": "MUL", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "POL", "index": 0}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 0}, "rhs": {"type": "POL", "index": 0}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 1}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "MUL", "lhs": {"type": "POL", "index": 1}, "rhs": {"type": "POL", "index": 1}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 3}, "rhs": {"type": "POL", "index": 1}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 4}, "rhs": {"type": "CONST", "index": 
0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "CONST", "index": 0}} + ], + "outputs": [2, 5, 6] +} diff --git a/air-script/tests/binary/generated_binary.json b/air-script/tests/binary/generated_binary.json new file mode 100644 index 00000000..e8d3bc2d --- /dev/null +++ b/air-script/tests/binary/generated_binary.json @@ -0,0 +1,15 @@ +{ + "num_polys": 2, + "num_variables": 16, + "constants": [0, 2, 0, 2], + "expressions": [ + {"op": "MUL", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "POL", "index": 0}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 0}, "rhs": {"type": "POL", "index": 0}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 1}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "MUL", "lhs": {"type": "POL", "index": 1}, "rhs": {"type": "POL", "index": 1}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 3}, "rhs": {"type": "POL", "index": 1}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 4}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "CONST", "index": 0}} + ], + "outputs": [2, 5, 6] +} diff --git a/air-script/tests/constants/constants.json b/air-script/tests/constants/constants.json new file mode 100644 index 00000000..ebfb7f5e --- /dev/null +++ b/air-script/tests/constants/constants.json @@ -0,0 +1,42 @@ +{ + "num_polys": 10, + "num_variables": 32, + "constants": [1, 0, 1, 1, 2, 2, 0], + "expressions": [ + {"op": "ADD", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 0}, "rhs": {"type": "EXPR", "index": 0}}, + {"op": "MUL", "lhs": {"type": "CONST", "index": 1}, "rhs": {"type": "POL", "index": 1}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 1}, "rhs": {"type": "EXPR", "index": 2}}, + {"op": "ADD", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "MUL", "lhs": {"type": "EXPR", "index": 4}, "rhs": {"type": "POL", "index": 2}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 2}, "rhs": {"type": "EXPR", "index": 5}}, + {"op": "ADD", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "MUL", "lhs": {"type": "CONST", "index": 1}, "rhs": {"type": "CONST", "index": 4}}, + {"op": "ADD", "lhs": {"type": "EXPR", "index": 7}, "rhs": {"type": "EXPR", "index": 8}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 4}, "rhs": {"type": "EXPR", "index": 9}}, + {"op": "MUL", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "ADD", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "EXPR", "index": 11}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "EXPR", "index": 12}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "MUL", "lhs": {"type": "CONST", "index": 1}, "rhs": {"type": "CONST", "index": 4}}, + {"op": "ADD", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "EXPR", "index": 15}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 1}, "rhs": {"type": "EXPR", "index": 16}}, + {"op": "SUB", "lhs": {"type": "CONST", "index": 1}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "MUL", "lhs": {"type": "EXPR", "index": 18}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 2}, "rhs": {"type": "EXPR", "index": 19}}, + {"op": "ADD", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 21}, "rhs": {"type": 
"CONST", "index": 0}}, + {"op": "ADD", "lhs": {"type": "EXPR", "index": 22}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 23}, "rhs": {"type": "CONST", "index": 4}}, + {"op": "ADD", "lhs": {"type": "EXPR", "index": 24}, "rhs": {"type": "CONST", "index": 4}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 25}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 3}, "rhs": {"type": "EXPR", "index": 26}}, + {"op": "MUL", "lhs": {"type": "CONST", "index": 1}, "rhs": {"type": "CONST", "index": 4}}, + {"op": "ADD", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "EXPR", "index": 28}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "EXPR", "index": 29}}, + {"op": "MUL", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "EXPR", "index": 31}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "EXPR", "index": 32}} + ], + "outputs": [1, 3, 6, 10, 13, 14, 17, 20, 27, 30, 33] +} diff --git a/air-script/tests/constants/generated_constants.json b/air-script/tests/constants/generated_constants.json new file mode 100644 index 00000000..ebfb7f5e --- /dev/null +++ b/air-script/tests/constants/generated_constants.json @@ -0,0 +1,42 @@ +{ + "num_polys": 10, + "num_variables": 32, + "constants": [1, 0, 1, 1, 2, 2, 0], + "expressions": [ + {"op": "ADD", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 0}, "rhs": {"type": "EXPR", "index": 0}}, + {"op": "MUL", "lhs": {"type": "CONST", "index": 1}, "rhs": {"type": "POL", "index": 1}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 1}, "rhs": {"type": "EXPR", "index": 2}}, + {"op": "ADD", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "MUL", "lhs": {"type": "EXPR", "index": 4}, "rhs": {"type": "POL", "index": 2}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 2}, "rhs": {"type": "EXPR", "index": 5}}, + {"op": "ADD", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "MUL", "lhs": {"type": "CONST", "index": 1}, "rhs": {"type": "CONST", "index": 4}}, + {"op": "ADD", "lhs": {"type": "EXPR", "index": 7}, "rhs": {"type": "EXPR", "index": 8}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 4}, "rhs": {"type": "EXPR", "index": 9}}, + {"op": "MUL", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "ADD", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "EXPR", "index": 11}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "EXPR", "index": 12}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "MUL", "lhs": {"type": "CONST", "index": 1}, "rhs": {"type": "CONST", "index": 4}}, + {"op": "ADD", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "EXPR", "index": 15}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 1}, "rhs": {"type": "EXPR", "index": 16}}, + {"op": "SUB", "lhs": {"type": "CONST", "index": 1}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "MUL", "lhs": {"type": "EXPR", "index": 18}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 2}, "rhs": {"type": "EXPR", "index": 19}}, + {"op": "ADD", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 21}, "rhs": 
{"type": "CONST", "index": 0}}, + {"op": "ADD", "lhs": {"type": "EXPR", "index": 22}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 23}, "rhs": {"type": "CONST", "index": 4}}, + {"op": "ADD", "lhs": {"type": "EXPR", "index": 24}, "rhs": {"type": "CONST", "index": 4}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 25}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 3}, "rhs": {"type": "EXPR", "index": 26}}, + {"op": "MUL", "lhs": {"type": "CONST", "index": 1}, "rhs": {"type": "CONST", "index": 4}}, + {"op": "ADD", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "EXPR", "index": 28}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "EXPR", "index": 29}}, + {"op": "MUL", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "CONST", "index": 0}}, + {"op": "SUB", "lhs": {"type": "CONST", "index": 0}, "rhs": {"type": "EXPR", "index": 31}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 4}, "rhs": {"type": "EXPR", "index": 32}} + ], + "outputs": [1, 3, 6, 10, 13, 14, 17, 20, 27, 30, 33] +} diff --git a/air-script/tests/exponentiation/exponentiation.air b/air-script/tests/exponentiation/exponentiation.air new file mode 100644 index 00000000..16a1c747 --- /dev/null +++ b/air-script/tests/exponentiation/exponentiation.air @@ -0,0 +1,14 @@ +def ExponentiationAir + +trace_columns: + main: [a, b] + +public_inputs: + stack_inputs: [16] + +boundary_constraints: + enf a.first = stack_inputs[0]^3 + +transition_constraints: + enf a^15 - a = 0 + enf b^2 - b = 0 \ No newline at end of file diff --git a/air-script/tests/exponentiation/generated_exponentiation.json b/air-script/tests/exponentiation/generated_exponentiation.json new file mode 100644 index 00000000..b2bb25b5 --- /dev/null +++ b/air-script/tests/exponentiation/generated_exponentiation.json @@ -0,0 +1,20 @@ +{ + "num_polys": 2, + "num_variables": 16, + "constants": [15, 0, 2], + "expressions": [ + {"op": "MUL", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "POL", "index": 0}}, + {"op": "MUL", "lhs": {"type": "EXPR", "index": 0}, "rhs": {"type": "EXPR", "index": 0}}, + {"op": "MUL", "lhs": {"type": "EXPR", "index": 1}, "rhs": {"type": "EXPR", "index": 1}}, + {"op": "MUL", "lhs": {"type": "EXPR", "index": 2}, "rhs": {"type": "EXPR", "index": 1}}, + {"op": "MUL", "lhs": {"type": "EXPR", "index": 3}, "rhs": {"type": "EXPR", "index": 0}}, + {"op": "MUL", "lhs": {"type": "EXPR", "index": 4}, "rhs": {"type": "POL", "index": 0}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 5}, "rhs": {"type": "POL", "index": 0}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 6}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "MUL", "lhs": {"type": "POL", "index": 1}, "rhs": {"type": "POL", "index": 1}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 8}, "rhs": {"type": "POL", "index": 1}}, + {"op": "SUB", "lhs": {"type": "EXPR", "index": 9}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "VAR", "index": 0}} + ], + "outputs": [7, 10, 11] +} diff --git a/air-script/tests/helpers.rs b/air-script/tests/helpers.rs index 7b071ad1..aaf8e93c 100644 --- a/air-script/tests/helpers.rs +++ b/air-script/tests/helpers.rs @@ -1,4 +1,5 @@ -use codegen_winter::CodeGenerator; +use codegen_gce::GCECodeGenerator; +use codegen_winter::WinterfellCodeGenerator; use ir::AirIR; use parser::parse; use std::fs; @@ -8,23 +9,20 @@ pub enum TestError { IO(String), Parse(String), IR(String), + Gce(String), } pub 
struct Test { - input_path: String, + ir: AirIR, } impl Test { - pub fn new(input_path: String) -> Self { - Test { input_path } - } - - pub fn transpile(&self) -> Result<String, TestError> { + pub fn new(input_path: String) -> Result<Self, TestError> { // load source input from file - let source = fs::read_to_string(&self.input_path).map_err(|err| { + let source = fs::read_to_string(&input_path).map_err(|err| { TestError::IO(format!( "Failed to open input file `{:?}` - {}", - self.input_path, err + input_path, err )) })?; @@ -32,19 +30,33 @@ impl Test { let parsed = parse(source.as_str()).map_err(|_| { TestError::Parse(format!( "Failed to parse the input air file at {}", - &self.input_path + input_path )) })?; let ir = AirIR::from_source(&parsed).map_err(|_| { TestError::IR(format!( "Failed to convert the input air file at {} to IR representation", - &self.input_path + input_path )) })?; + Ok(Test { ir }) + } + + pub fn generate_winterfell(&self) -> String { // generate Rust code targeting Winterfell - let codegen = CodeGenerator::new(&ir); - Ok(codegen.generate()) + let codegen = WinterfellCodeGenerator::new(&self.ir); + codegen.generate() + } + + pub fn generate_gce(&self, extension_degree: u8, path: &str) -> Result<(), TestError> { + // generate JSON in the generic constraint evaluation format + let codegen = GCECodeGenerator::new(&self.ir, extension_degree).map_err(|err| { + TestError::Gce(format!("Failed to create GCECodeGenerator: {:?}", err)) + })?; + codegen + .generate(path) + .map_err(|err| TestError::Gce(format!("Failed to generate JSON file: {:?}", err))) + } } diff --git a/air-script/tests/main.rs b/air-script/tests/main.rs index ad1a8e90..e5644573 100644 --- a/air-script/tests/main.rs +++ b/air-script/tests/main.rs @@ -1,5 +1,7 @@ -use expect_test::expect_file; +use std::fs::File; +use std::io::prelude::*; +use expect_test::expect_file; mod helpers; use helpers::Test; @@ -7,80 +9,213 @@ use helpers::Test; // ================================================================================================ #[test] -fn aux_trace() { +fn winterfell_aux_trace() { let generated_air = Test::new("tests/aux_trace/aux_trace.air".to_string()) - .transpile() - .unwrap(); + .unwrap() + .generate_winterfell(); let expected = expect_file!["aux_trace/aux_trace.rs"]; expected.assert_eq(&generated_air); } #[test] -fn binary() { - let generated_air = Test::new("tests/binary/binary.air".to_string()) - .transpile() +fn gce_aux_trace() { + Test::new("tests/aux_trace/aux_trace.air".to_string()) + .unwrap() + .generate_gce(2, "tests/aux_trace/generated_aux_trace.json") .unwrap(); + let expected = expect_file!["aux_trace/aux_trace.json"]; + + let mut file = File::open("tests/aux_trace/generated_aux_trace.json").unwrap(); + let mut contents = String::new(); + file.read_to_string(&mut contents).unwrap(); + + expected.assert_eq(&contents); +} + +#[test] +fn winterfell_binary() { + let generated_air = Test::new("tests/binary/binary.air".to_string()) + .unwrap() + .generate_winterfell(); + let expected = expect_file!["binary/binary.rs"]; expected.assert_eq(&generated_air); } #[test] -fn periodic_columns() { - let generated_air = Test::new("tests/periodic_columns/periodic_columns.air".to_string()) - .transpile() +fn gce_binary() { + Test::new("tests/binary/binary.air".to_string()) + .unwrap() + .generate_gce(2, "tests/binary/generated_binary.json") .unwrap(); + let expected = expect_file!["binary/binary.json"]; + + let mut file = File::open("tests/binary/generated_binary.json").unwrap(); + let mut contents = String::new(); + file.read_to_string(&mut
contents).unwrap(); + + expected.assert_eq(&contents); +} + +#[test] +fn winterfell_periodic_columns() { + let generated_air = Test::new("tests/periodic_columns/periodic_columns.air".to_string()) + .unwrap() + .generate_winterfell(); + let expected = expect_file!["periodic_columns/periodic_columns.rs"]; expected.assert_eq(&generated_air); } +// not yet implemented (periodic columns) #[test] -fn pub_inputs() { - let generated_air = Test::new("tests/pub_inputs/pub_inputs.air".to_string()) - .transpile() +#[ignore] +fn gce_periodic_columns() { + Test::new("tests/periodic_columns/periodic_columns.air".to_string()) + .unwrap() + .generate_gce(2, "tests/periodic_columns/generated_periodic_columns.json") .unwrap(); + let expected = expect_file!["periodic_columns/periodic_columns.json"]; + + let mut file = File::open("tests/periodic_columns/generated_periodic_columns.json").unwrap(); + let mut contents = String::new(); + file.read_to_string(&mut contents).unwrap(); + + expected.assert_eq(&contents); +} + +#[test] +fn winterfell_pub_inputs() { + let generated_air = Test::new("tests/pub_inputs/pub_inputs.air".to_string()) + .unwrap() + .generate_winterfell(); + let expected = expect_file!["pub_inputs/pub_inputs.rs"]; expected.assert_eq(&generated_air); } #[test] -fn system() { - let generated_air = Test::new("tests/system/system.air".to_string()) - .transpile() +fn gce_pub_inputs() { + Test::new("tests/pub_inputs/pub_inputs.air".to_string()) + .unwrap() + .generate_gce(2, "tests/pub_inputs/generated_pub_inputs.json") .unwrap(); + let expected = expect_file!["pub_inputs/pub_inputs.json"]; + + let mut file = File::open("tests/pub_inputs/generated_pub_inputs.json").unwrap(); + let mut contents = String::new(); + file.read_to_string(&mut contents).unwrap(); + + expected.assert_eq(&contents); +} + +#[test] +fn winterfell_system() { + let generated_air = Test::new("tests/system/system.air".to_string()) + .unwrap() + .generate_winterfell(); + let expected = expect_file!["system/system.rs"]; expected.assert_eq(&generated_air); } #[test] -fn bitwise() { - let generated_air = Test::new("tests/bitwise/bitwise.air".to_string()) - .transpile() +fn gce_system() { + Test::new("tests/system/system.air".to_string()) + .unwrap() + .generate_gce(2, "tests/system/generated_system.json") .unwrap(); + let expected = expect_file!["system/system.json"]; + + let mut file = File::open("tests/system/generated_system.json").unwrap(); + let mut contents = String::new(); + file.read_to_string(&mut contents).unwrap(); + + expected.assert_eq(&contents); +} + +#[test] +fn winterfell_bitwise() { + let generated_air = Test::new("tests/bitwise/bitwise.air".to_string()) + .unwrap() + .generate_winterfell(); + let expected = expect_file!["bitwise/bitwise.rs"]; expected.assert_eq(&generated_air); } +// not yet implemented (periodic columns) #[test] -fn constants() { - let generated_air = Test::new("tests/constants/constants.air".to_string()) - .transpile() +#[ignore] +fn gce_bitwise() { + Test::new("tests/bitwise/bitwise.air".to_string()) + .unwrap() + .generate_gce(2, "tests/bitwise/generated_bitwise.json") .unwrap(); + let expected = expect_file!["bitwise/bitwise.json"]; + + let mut file = File::open("tests/bitwise/generated_bitwise.json").unwrap(); + let mut contents = String::new(); + file.read_to_string(&mut contents).unwrap(); + + expected.assert_eq(&contents); +} + +#[test] +fn winterfell_constants() { + let generated_air = Test::new("tests/constants/constants.air".to_string()) + .unwrap() + .generate_winterfell(); + let expected 
= expect_file!["constants/constants.rs"]; expected.assert_eq(&generated_air); } +#[test] +fn gce_constants() { + Test::new("tests/constants/constants.air".to_string()) + .unwrap() + .generate_gce(2, "tests/constants/generated_constants.json") + .unwrap(); + + let expected = expect_file!["constants/constants.json"]; + + let mut file = File::open("tests/constants/generated_constants.json").unwrap(); + let mut contents = String::new(); + file.read_to_string(&mut contents).unwrap(); + + expected.assert_eq(&contents); +} + +#[test] +#[ignore] // exponentiation for boundary constraints is not ready +fn gce_exponentiation() { + Test::new("tests/exponentiation/exponentiation.air".to_string()) + .unwrap() + .generate_gce(2, "tests/exponentiation/generated_exponentiation.json") + .unwrap(); + + let expected = expect_file!["exponentiation/exponentiation.json"]; + + let mut file = File::open("tests/exponentiation/generated_exponentiation.json").unwrap(); + let mut contents = String::new(); + file.read_to_string(&mut contents).unwrap(); + + expected.assert_eq(&contents); +} + #[test] fn variables() { let generated_air = Test::new("tests/variables/variables.air".to_string()) - .transpile() - .unwrap(); + .unwrap() + .generate_winterfell(); let expected = expect_file!["variables/variables.rs"]; expected.assert_eq(&generated_air); @@ -89,8 +224,8 @@ fn variables() { #[test] fn trace_col_groups() { let generated_air = Test::new("tests/trace_col_groups/trace_col_groups.air".to_string()) - .transpile() - .unwrap(); + .unwrap() + .generate_winterfell(); let expected = expect_file!["trace_col_groups/trace_col_groups.rs"]; expected.assert_eq(&generated_air); diff --git a/air-script/tests/pub_inputs/generated_pub_inputs.json b/air-script/tests/pub_inputs/generated_pub_inputs.json new file mode 100644 index 00000000..86dc592e --- /dev/null +++ b/air-script/tests/pub_inputs/generated_pub_inputs.json @@ -0,0 +1,18 @@ +{ + "num_polys": 4, + "num_variables": 32, + "constants": [], + "expressions": [ + {"op": "ADD", "lhs": {"type": "POL", "index": 1}, "rhs": {"type": "POL", "index": 2}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 0}, "rhs": {"type": "EXPR", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "VAR", "index": 4}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 1}, "rhs": {"type": "VAR", "index": 5}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 2}, "rhs": {"type": "VAR", "index": 6}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 3}, "rhs": {"type": "VAR", "index": 7}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "VAR", "index": 8}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 1}, "rhs": {"type": "VAR", "index": 9}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 2}, "rhs": {"type": "VAR", "index": 10}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 3}, "rhs": {"type": "VAR", "index": 11}} + ], + "outputs": [1, 2, 3, 4, 5, 6, 7, 8, 9] +} diff --git a/air-script/tests/pub_inputs/pub_inputs.json b/air-script/tests/pub_inputs/pub_inputs.json new file mode 100644 index 00000000..86dc592e --- /dev/null +++ b/air-script/tests/pub_inputs/pub_inputs.json @@ -0,0 +1,18 @@ +{ + "num_polys": 4, + "num_variables": 32, + "constants": [], + "expressions": [ + {"op": "ADD", "lhs": {"type": "POL", "index": 1}, "rhs": {"type": "POL", "index": 2}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 0}, "rhs": {"type": "EXPR", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "VAR", "index": 4}}, + {"op": "SUB", 
"lhs": {"type": "POL", "index": 1}, "rhs": {"type": "VAR", "index": 5}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 2}, "rhs": {"type": "VAR", "index": 6}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 3}, "rhs": {"type": "VAR", "index": 7}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "VAR", "index": 8}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 1}, "rhs": {"type": "VAR", "index": 9}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 2}, "rhs": {"type": "VAR", "index": 10}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 3}, "rhs": {"type": "VAR", "index": 11}} + ], + "outputs": [1, 2, 3, 4, 5, 6, 7, 8, 9] +} diff --git a/air-script/tests/system/generated_system.json b/air-script/tests/system/generated_system.json new file mode 100644 index 00000000..06dbeda4 --- /dev/null +++ b/air-script/tests/system/generated_system.json @@ -0,0 +1,11 @@ +{ + "num_polys": 3, + "num_variables": 16, + "constants": [0, 1], + "expressions": [ + {"op": "ADD", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 0}, "rhs": {"type": "EXPR", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "CONST", "index": 0}} + ], + "outputs": [1, 2] +} diff --git a/air-script/tests/system/system.json b/air-script/tests/system/system.json new file mode 100644 index 00000000..06dbeda4 --- /dev/null +++ b/air-script/tests/system/system.json @@ -0,0 +1,11 @@ +{ + "num_polys": 3, + "num_variables": 16, + "constants": [0, 1], + "expressions": [ + {"op": "ADD", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "CONST", "index": 1}}, + {"op": "SUB", "lhs": {"type": "POL_NEXT", "index": 0}, "rhs": {"type": "EXPR", "index": 0}}, + {"op": "SUB", "lhs": {"type": "POL", "index": 0}, "rhs": {"type": "CONST", "index": 0}} + ], + "outputs": [1, 2] +} diff --git a/codegen/gce/Cargo.toml b/codegen/gce/Cargo.toml new file mode 100644 index 00000000..b6a5c00a --- /dev/null +++ b/codegen/gce/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "air-codegen-gce" +version = "0.1.0" +description="Code generation for the generic constraint evaluation format." 
+authors = ["miden contributors"] +readme="README.md" +license = "MIT" +repository = "https://github.com/0xPolygonMiden/air-script" +categories = ["compilers", "cryptography"] +keywords = ["air", "stark", "winterfell", "zero-knowledge", "zkp"] +edition = "2021" +rust-version = "1.65" + +[dependencies] +codegen = "0.2.0" +ir = {package = "air-ir", path="../../ir", version="0.1.0" } diff --git a/codegen/gce/README.md b/codegen/gce/README.md new file mode 100644 index 00000000..26d5766f --- /dev/null +++ b/codegen/gce/README.md @@ -0,0 +1,2 @@ +# Generic Constraint Evaluation Generator + diff --git a/codegen/gce/src/boundary_constraints.rs b/codegen/gce/src/boundary_constraints.rs new file mode 100644 index 00000000..e724a9c0 --- /dev/null +++ b/codegen/gce/src/boundary_constraints.rs @@ -0,0 +1,359 @@ +use crate::helpers::get_random_value_index; + +use super::error::ConstraintEvaluationError; +use super::helpers::{ + get_constant_index_by_matrix_access, get_constant_index_by_name, get_constant_index_by_value, + get_constant_index_by_vector_access, get_public_input_index, Expression, NodeReference, + NodeType, +}; +use ir::{AirIR, BoundaryExpr}; +use std::collections::BTreeMap; + +pub fn set_boundary_expressions_and_outputs( + ir: &AirIR, + boundary_constraints_vec: [&Vec<(usize, &BoundaryExpr)>; 4], + expressions: &mut Vec, + outputs: &mut Vec, + constants: &Vec, + const_public_type_map: &BTreeMap<&str, NodeType>, +) -> Result<(), ConstraintEvaluationError> { + for (i, &constraints) in boundary_constraints_vec.iter().enumerate() { + for constraint in constraints { + if (0..2).contains(&i) { + handle_boundary_operation( + ir, + constraint, + expressions, + outputs, + constants, + const_public_type_map, + )?; + } else { + handle_boundary_operation( + ir, + &(constraint.0 + ir.num_polys()[0] as usize, constraint.1), + expressions, + outputs, + constants, + const_public_type_map, + )?; + } + } + } + Ok(()) +} + +// Problems: +// #3: I need to be able to create NodeReferences to the last row of the table (to create reference +// to the .last boundary constraints) +// #4: Need to be able to reuse boundary expressions if it has already been created +/// Parses boundary operation, creates related [Expression] instance, pushes it to the `expressions` +/// array and adds its index to the `outputs` array +fn handle_boundary_operation( + ir: &AirIR, + expr: &(usize, &BoundaryExpr), + expressions: &mut Vec, + outputs: &mut Vec, + constants: &Vec, + const_public_type_map: &BTreeMap<&str, NodeType>, +) -> Result<(), ConstraintEvaluationError> { + use BoundaryExpr::*; + match expr.1 { + Const(v) => { + let constant_index = get_constant_index_by_value(*v, constants)?; + push_boundary_value( + expressions, + outputs, + NodeType::CONST, + constant_index, + expr.0, + ); + Ok(()) + } + Elem(id) => { + let constant_index = get_constant_index_by_name(ir, &id.0, constants)?; + push_boundary_value( + expressions, + outputs, + NodeType::CONST, + constant_index, + expr.0, + ); + Ok(()) + } + VectorAccess(vector_access) => { + let node_type = const_public_type_map + .get(vector_access.name()) + .ok_or_else(|| { + ConstraintEvaluationError::identifier_not_found(vector_access.name()) + })?; + match node_type { + NodeType::CONST => { + let constant_index = + get_constant_index_by_vector_access(ir, vector_access, constants)?; + push_boundary_value( + expressions, + outputs, + NodeType::CONST, + constant_index, + expr.0, + ); + Ok(()) + } + NodeType::VAR => { + let public_input_index = get_public_input_index(ir, 
vector_access)?; + push_boundary_value( + expressions, + outputs, + NodeType::VAR, + public_input_index, + expr.0, + ); + Ok(()) + } + _ => Err(ConstraintEvaluationError::InvalidOperation( + "Invalid node type: only CONST and VAR allowed".to_string(), + )), + } + } + MatrixAccess(matrix_access) => { + let constant_index = get_constant_index_by_matrix_access(ir, matrix_access, constants)?; + push_boundary_value( + expressions, + outputs, + NodeType::CONST, + constant_index, + expr.0, + ); + Ok(()) + } + Rand(rand_index) => { + let index = get_random_value_index(ir, *rand_index); + push_boundary_value(expressions, outputs, NodeType::VAR, index, expr.0); + Ok(()) + } + Add(l, r) => parse_boundary_expression( + ir, + (expr.0, l, r), + "ADD".to_string(), + constants, + const_public_type_map, + expressions, + outputs, + ), + Sub(l, r) => parse_boundary_expression( + ir, + (expr.0, l, r), + "SUB".to_string(), + constants, + const_public_type_map, + expressions, + outputs, + ), + Mul(l, r) => parse_boundary_expression( + ir, + (expr.0, l, r), + "MUL".to_string(), + constants, + const_public_type_map, + expressions, + outputs, + ), + + Exp(_i, _degree) => todo!(), + } +} + +/// Creates an [Expression] instance on an equation of the form `boundary_constraint = expression`, +/// pushes it to the `expressions` array and adds its index to the `outputs` array +fn push_boundary_value( + expressions: &mut Vec, + outputs: &mut Vec, + node_type: NodeType, + value_index: usize, + column_index: usize, +) { + let lhs = NodeReference { + node_type: NodeType::POL, + index: column_index, + }; + let rhs = NodeReference { + node_type, + index: value_index, + }; + let result = Expression { + op: "SUB".to_string(), + lhs, + rhs, + }; + expressions.push(result); + outputs.push(expressions.len() - 1); +} + +/// Parses boundary operation in case it is an expression. Creates [Expression], pushes it to the +/// `expressions` array and adds its index to the `outputs` array +fn parse_boundary_expression( + ir: &AirIR, + boundary_expr: (usize, &BoundaryExpr, &BoundaryExpr), + op_type: String, + constants: &Vec, + const_public_type_map: &BTreeMap<&str, NodeType>, + expressions: &mut Vec, + output: &mut Vec, +) -> Result<(), ConstraintEvaluationError> { + let node_reference = parse_recursive_boundary_expression( + ir, + (boundary_expr.1, boundary_expr.2), + op_type, + constants, + const_public_type_map, + expressions, + )?; + + let lhs = NodeReference { + node_type: NodeType::POL, + index: boundary_expr.0, + }; + let rhs = node_reference; + let result = Expression { + op: "SUB".to_string(), + lhs, + rhs, + }; + expressions.push(result); + output.push(expressions.len() - 1); + Ok(()) +} + +/// Recursively parses boundary expression. +/// Returns [NodeReference] to the parsed expression +fn parse_recursive_boundary_expression( + ir: &AirIR, + boundary_expr: (&BoundaryExpr, &BoundaryExpr), + op_type: String, + constants: &Vec, + const_public_type_map: &BTreeMap<&str, NodeType>, + expressions: &mut Vec, +) -> Result { + let lhs = parse_boundary_limb( + ir, + boundary_expr.0, + constants, + const_public_type_map, + expressions, + )?; + let rhs = parse_boundary_limb( + ir, + boundary_expr.1, + constants, + const_public_type_map, + expressions, + )?; + + let result = Expression { + op: op_type, + lhs, + rhs, + }; + expressions.push(result); + + Ok(NodeReference { + node_type: NodeType::EXPR, + index: expressions.len() - 1, + }) +} + +/// Parses boundary expression limb. 
+/// Returns [NodeReference] to the parsed expression +fn parse_boundary_limb( + ir: &AirIR, + i: &BoundaryExpr, + constants: &Vec, + const_public_type_map: &BTreeMap<&str, NodeType>, + expressions: &mut Vec, +) -> Result { + use BoundaryExpr::*; + match i { + Const(v) => { + let constant_index = get_constant_index_by_value(*v, constants)?; + Ok(NodeReference { + node_type: NodeType::CONST, + index: constant_index, + }) + } + Elem(id) => { + let constant_index = get_constant_index_by_name(ir, &id.0, constants)?; + Ok(NodeReference { + node_type: NodeType::CONST, + index: constant_index, + }) + } + VectorAccess(vector_access) => { + let node_type = const_public_type_map + .get(vector_access.name()) + .ok_or_else(|| { + ConstraintEvaluationError::identifier_not_found(vector_access.name()) + })?; + match node_type { + NodeType::CONST => { + let constant_index = + get_constant_index_by_vector_access(ir, vector_access, constants)?; + Ok(NodeReference { + node_type: NodeType::CONST, + index: constant_index, + }) + } + NodeType::VAR => { + let public_input_index = get_public_input_index(ir, vector_access)?; + Ok(NodeReference { + node_type: NodeType::VAR, + index: public_input_index, + }) + } + _ => Err(ConstraintEvaluationError::InvalidOperation( + "Invalid node type: only CONST and VAR allowed".to_string(), + )), + } + } + MatrixAccess(matrix_access) => { + let constant_index = get_constant_index_by_matrix_access(ir, matrix_access, constants)?; + Ok(NodeReference { + node_type: NodeType::CONST, + index: constant_index, + }) + } + Rand(rand_index) => { + let index = get_random_value_index(ir, *rand_index); + Ok(NodeReference { + node_type: NodeType::VAR, + index, + }) + } + Add(l, r) => parse_recursive_boundary_expression( + ir, + (l, r), + "ADD".to_string(), + constants, + const_public_type_map, + expressions, + ), + Sub(l, r) => parse_recursive_boundary_expression( + ir, + (l, r), + "SUB".to_string(), + constants, + const_public_type_map, + expressions, + ), + Mul(l, r) => parse_recursive_boundary_expression( + ir, + (l, r), + "MUL".to_string(), + constants, + const_public_type_map, + expressions, + ), + + Exp(_i, _degree) => todo!(), + } +} diff --git a/codegen/gce/src/error.rs b/codegen/gce/src/error.rs new file mode 100644 index 00000000..b14da5c3 --- /dev/null +++ b/codegen/gce/src/error.rs @@ -0,0 +1,51 @@ +#[derive(Debug)] +pub enum ConstraintEvaluationError { + InvalidTraceSegment(String), + InvalidOperation(String), + IdentifierNotFound(String), + ConstantNotFound(String), + PublicInputNotFound(String), + OperationNotFound(String), + InvalidConstantType(String), +} + +impl ConstraintEvaluationError { + pub fn invalid_trace_segment(segment: u8) -> Self { + ConstraintEvaluationError::InvalidTraceSegment(format!( + "Trace segment {} is invalid", + segment + )) + } + + pub fn identifier_not_found(name: &str) -> Self { + ConstraintEvaluationError::IdentifierNotFound(format!( + "Identifier {} not found in JSON arrays", + name + )) + } + + pub fn constant_not_found(name: &str) -> Self { + ConstraintEvaluationError::ConstantNotFound(format!("Constant \"{}\" not found", name)) + } + + pub fn public_input_not_found(name: &str) -> Self { + ConstraintEvaluationError::PublicInputNotFound(format!( + "Public Input \"{}\" not found", + name + )) + } + + pub fn invalid_constant_type(name: &str, constant_type: &str) -> Self { + ConstraintEvaluationError::InvalidConstantType(format!( + "Invalid type of constant \"{}\". 
{} expected.", + name, constant_type + )) + } + + pub fn operation_not_found(index: usize) -> Self { + ConstraintEvaluationError::OperationNotFound(format!( + "Operation with index {} does not match the expression in the expressions JSON array", + index + )) + } +} diff --git a/codegen/gce/src/helpers.rs b/codegen/gce/src/helpers.rs new file mode 100644 index 00000000..aa0c1cc4 --- /dev/null +++ b/codegen/gce/src/helpers.rs @@ -0,0 +1,196 @@ +use super::error::ConstraintEvaluationError; +use ir::{ + ast::{constants::ConstantType, MatrixAccess, VectorAccess}, + AirIR, BoundaryExpr, +}; +use std::fmt::Display; + +// I think we can allow non camel case type since we translate it directly to string in node +// reference type, where we don't use camel case +/// Stores node type required in [NodeReference] struct +#[allow(non_camel_case_types, clippy::upper_case_acronyms)] +#[derive(Debug, Clone)] +pub enum NodeType { + POL, + POL_NEXT, + VAR, + CONST, + EXPR, +} + +/// Stores data used in JSON generation +#[derive(Debug, Clone)] +pub struct NodeReference { + pub node_type: NodeType, + pub index: usize, +} + +// TODO: change String to &str (Or should I create another enum?) +/// Stores data used in JSON generation +#[derive(Debug)] +pub struct Expression { + pub op: String, + pub lhs: NodeReference, + pub rhs: NodeReference, +} + +impl Display for NodeReference { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{{\"type\": \"{:?}\", \"index\": {}}}", + self.node_type, self.index + ) + } + } + } +} + +impl Display for Expression { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{{\"op\": \"{}\", \"lhs\": {}, \"rhs\": {}}}", + self.op, self.lhs, self.rhs + ) + } +} + +/// Counts overall number of random values in boundary constraints +pub fn count_boundary_rand_values<'a>( + expr: &'a BoundaryExpr, + max_random_values_index: &'a mut usize, +) { + use BoundaryExpr::*; + match expr { + Rand(i) => *max_random_values_index = *max_random_values_index.max(&mut (*i + 1)), + Add(l, r) | Sub(l, r) | Mul(l, r) => { + count_boundary_rand_values(l, max_random_values_index); + count_boundary_rand_values(r, max_random_values_index); + } + _ => {} + } +} + +/// Pushes inline constants found in boundary expression to the `constants` vector +pub fn acumulate_constants(expr: &BoundaryExpr, constants: &mut Vec<u64>) { + use BoundaryExpr::*; + match expr { + Const(v) => constants.push(*v), + Add(l, r) | Sub(l, r) | Mul(l, r) => { + acumulate_constants(l, constants); + acumulate_constants(r, constants); + } + Exp(i, degree) => { + if *degree == 0 { + constants.push(1); // constant needed for optimization, since node^0 is Const(1) + } else { + constants.push(*degree); + } + acumulate_constants(i, constants); + } + _ => {} + } +} + +/// Returns index of the constant found in the `constants` array by its value +pub fn get_constant_index_by_value( + v: u64, + constants: &[u64], +) -> Result<usize, ConstraintEvaluationError> { + constants + .iter() + .position(|&x| x == v) + .ok_or_else(|| ConstraintEvaluationError::constant_not_found(&v.to_string())) +} + +/// Returns index of the constant found in the `constants` array by its `name` +pub fn get_constant_index_by_name( + ir: &AirIR, + name: &String, + constants: &[u64], +) -> Result<usize, ConstraintEvaluationError> { + let constant = ir + .constants() + .iter() + .find(|v| v.name().name() == name) + .ok_or_else(|| ConstraintEvaluationError::constant_not_found(name))?; + let value = match constant.value() { + ConstantType::Scalar(s) => Ok(*s), + _ =>
Err(ConstraintEvaluationError::invalid_constant_type( + name, "Scalar", + )), + }?; + get_constant_index_by_value(value, constants) +} + +/// Returns index of the constant found in the `constants` array by its vector access (name and +/// index) +pub fn get_constant_index_by_vector_access( + ir: &AirIR, + vector_access: &VectorAccess, + constants: &[u64], +) -> Result<usize, ConstraintEvaluationError> { + let constant = ir + .constants() + .iter() + .find(|v| v.name().name() == vector_access.name()) + .ok_or_else(|| ConstraintEvaluationError::constant_not_found(vector_access.name()))?; + let value = match constant.value() { + ConstantType::Vector(v) => Ok(v[vector_access.idx()]), + _ => Err(ConstraintEvaluationError::invalid_constant_type( + vector_access.name(), + "Vector", + )), + }?; + get_constant_index_by_value(value, constants) +} + +/// Returns index of the constant found in the `constants` array by its matrix access (name and +/// indexes) +pub fn get_constant_index_by_matrix_access( + ir: &AirIR, + matrix_access: &MatrixAccess, + constants: &[u64], +) -> Result<usize, ConstraintEvaluationError> { + let constant = ir + .constants() + .iter() + .find(|v| v.name().name() == matrix_access.name()) + .ok_or_else(|| ConstraintEvaluationError::constant_not_found(matrix_access.name()))?; + + let value = match constant.value() { + ConstantType::Matrix(m) => Ok(m[matrix_access.row_idx()][matrix_access.col_idx()]), + _ => Err(ConstraintEvaluationError::invalid_constant_type( + matrix_access.name(), + "Matrix", + )), + }?; + get_constant_index_by_value(value, constants) +} + +/// Returns index of the public input value found in the merged public inputs array by its vector +/// access (name and index) +pub fn get_public_input_index( + ir: &AirIR, + vector_access: &VectorAccess, +) -> Result<usize, ConstraintEvaluationError> { + let mut accumulative_index = 0; + for public_input in ir.public_inputs() { + if vector_access.name() == public_input.0 { + accumulative_index += vector_access.idx(); + break; + } + accumulative_index += public_input.1; + } + + if accumulative_index == ir.public_inputs().iter().map(|v| v.1).sum() { + return Err(ConstraintEvaluationError::public_input_not_found( + vector_access.name(), + )); + } + + Ok(accumulative_index) +} + +pub fn get_random_value_index(ir: &AirIR, rand_index: usize) -> usize { + ir.public_inputs().iter().map(|v| v.1).sum::<usize>() + rand_index +} diff --git a/codegen/gce/src/lib.rs b/codegen/gce/src/lib.rs new file mode 100644 index 00000000..b89169a2 --- /dev/null +++ b/codegen/gce/src/lib.rs @@ -0,0 +1,200 @@ +use ir::{ + ast::constants::ConstantType::*, + transition_stmts::{ConstantValue, Operation}, + AirIR, BoundaryExpr, +}; + +mod boundary_constraints; +use boundary_constraints::set_boundary_expressions_and_outputs; + +mod error; +use error::ConstraintEvaluationError; + +mod helpers; +use helpers::{acumulate_constants, count_boundary_rand_values, Expression, NodeType}; + +mod transition_constraints; +use transition_constraints::{set_transition_expressions, set_transition_outputs}; + +use std::collections::BTreeMap; +use std::fs::File; +use std::io::Write; + +/// Holds data for JSON generation +#[derive(Default, Debug)] +pub struct GCECodeGenerator { + num_polys: u16, + num_variables: usize, + constants: Vec<u64>, + expressions: Vec<Expression>, + outputs: Vec<usize>, +} + +impl GCECodeGenerator { + pub fn new(ir: &AirIR, extension_degree: u8) -> Result<Self, ConstraintEvaluationError> { + // vector of all boundary constraints vectors + let boundary_constraints_vec = [ + &ir.main_first_boundary_constraints(), + &ir.main_last_boundary_constraints(), + &ir.aux_first_boundary_constraints(), +
&ir.aux_last_boundary_constraints(), + ]; + + // maps indexes in Node vector in AlgebraicGraph and in `expressions` JSON array + let mut expressions_map = BTreeMap::new(); + + // maps names of named constants and public inputs to their NodeType + // Only CONST or VAR allowed + let mut const_public_type_map = BTreeMap::new(); + + // vector of expression nodes + let mut expressions = Vec::new(); + + // vector of `expressions` indexes + let mut outputs = Vec::new(); + + let num_polys = set_num_polys(ir, extension_degree); + let num_variables = + set_num_variables(ir, &mut const_public_type_map, boundary_constraints_vec); + + // TODO #1: get rid of the vector and push values directly into result string + // constants from Constants AirIR field + // TODO #2: currently I add all found constants in the vector. Should I add only unique ones, + // since I'll get constants by their value, not index? + let constants = set_constants(ir, &mut const_public_type_map, boundary_constraints_vec); + + set_transition_expressions(ir, &mut expressions, &constants, &mut expressions_map)?; + set_transition_outputs(ir, &mut outputs, &expressions_map)?; + set_boundary_expressions_and_outputs( + ir, + boundary_constraints_vec, + &mut expressions, + &mut outputs, + &constants, + &const_public_type_map, + )?; + + Ok(GCECodeGenerator { + num_polys, + num_variables, + constants, + expressions, + outputs, + }) + } + + /// Generates constraint evaluation JSON file + pub fn generate(&self, path: &str) -> std::io::Result<()> { + let mut file = File::create(path)?; + file.write_all("{\n".as_bytes())?; + file.write_all(format!("\t\"num_polys\": {},\n", self.num_polys).as_bytes())?; + file.write_all(format!("\t\"num_variables\": {},\n", self.num_variables).as_bytes())?; + file.write_all(format!("\t\"constants\": {:?},\n", self.constants).as_bytes())?; + file.write_all(format!("\t\"expressions\": [\n\t\t{}", self.expressions[0]).as_bytes())?; + for expr in self.expressions.iter().skip(1) { + file.write_all(format!(",\n\t\t{}", expr).as_bytes())?; + } + file.write_all("\n\t],\n".as_bytes())?; + file.write_all(format!("\t\"outputs\": {:?}\n", self.outputs).as_bytes())?; + + file.write_all("}\n".as_bytes())?; + Ok(()) + } +} + +// HELPER FUNCTIONS +// ================================================================================================ + +fn set_num_polys(ir: &AirIR, extension_degree: u8) -> u16 { + // TODO: Should all aux columns be extended to be quadratic or cubic? + let num_polys_vec = ir.num_polys(); + num_polys_vec + .iter() + .skip(1) + .fold(num_polys_vec[0], |acc, &x| { + acc + x * extension_degree as u16 + }) +} + +fn set_num_variables<'a>( + ir: &'a AirIR, + const_public_type_map: &mut BTreeMap<&'a str, NodeType>, + boundary_constraints_vec: [&Vec<(usize, &BoundaryExpr)>; 4], +) -> usize { + let mut num_variables = 0; + // public inputs + for input in ir.public_inputs() { + num_variables += input.1; + const_public_type_map.insert(input.0.as_str(), NodeType::VAR); + } + + // TODO: how many random values can we have? Would them fit in u8? 
+ let mut max_random_values_index = 0; + // random values from boundary constraints + for constraints in boundary_constraints_vec { + for (_, expr) in constraints { + count_boundary_rand_values(expr, &mut max_random_values_index); + } + } + + // random values from transition constraints + for expr in ir.transition_graph().nodes() { + if let Operation::RandomValue(i) = expr.op() { + max_random_values_index = max_random_values_index.max(*i + 1) + } + } + + num_variables + max_random_values_index +} + +fn set_constants<'a>( + ir: &'a AirIR, + const_public_type_map: &mut BTreeMap<&'a str, NodeType>, + boundary_constraints_vec: [&Vec<(usize, &BoundaryExpr)>; 4], +) -> Vec<u64> { + let mut constants = Vec::new(); + for constant in ir.constants() { + match constant.value() { + Scalar(value) => { + constants.push(*value); + } + Vector(values) => { + for elem in values { + constants.push(*elem); + } + // not sure that this approach is better + // let mut local_values = values.clone(); + // constants.append(&mut local_values); + } + Matrix(values) => { + for elem in values.iter().flatten() { + constants.push(*elem); + } + } + } + const_public_type_map.insert(constant.name().name(), NodeType::CONST); + } + // constants from boundary_constraints + for constraints in boundary_constraints_vec { + for (_, expr) in constraints { + acumulate_constants(expr, &mut constants); + } + } + + // constants and exponentiation degrees from transition_constraints + for node in ir.transition_graph().nodes() { + match node.op() { + Operation::Constant(ConstantValue::Inline(v)) => constants.push(*v), + Operation::Exp(_, degree) => { + if *degree == 0 { + constants.push(1); // constant needed for optimization, since node^0 is Const(1) + } else { + constants.push(*degree as u64) + } + } + _ => {} + } + } + + constants +} diff --git a/codegen/gce/src/transition_constraints.rs b/codegen/gce/src/transition_constraints.rs new file mode 100644 index 00000000..dbbf7ae6 --- /dev/null +++ b/codegen/gce/src/transition_constraints.rs @@ -0,0 +1,317 @@ +use crate::helpers::get_random_value_index; + +use super::error::ConstraintEvaluationError; +use super::helpers::{ + get_constant_index_by_matrix_access, get_constant_index_by_name, get_constant_index_by_value, + get_constant_index_by_vector_access, Expression, NodeReference, NodeType, +}; +use ir::{ + transition_stmts::{ConstantValue, Operation}, + AirIR, NodeIndex, +}; +use std::collections::BTreeMap; + +const MAIN_TRACE_SEGMENT_INDEX: u8 = 0; + +pub fn set_transition_expressions( + ir: &AirIR, + expressions: &mut Vec<Expression>, + constants: &[u64], + expressions_map: &mut BTreeMap<usize, usize>, +) -> Result<(), ConstraintEvaluationError> { + // TODO: currently we can't create a node reference to the last row (which is required for + // main.last and aux.last boundary constraints).
Working in assumption that first reference to + // the column is .first constraint and second is .last constraint (in the boundary section, not + // entire array) + for (index, node) in ir.transition_graph().nodes().iter().enumerate() { + match node.op() { + Operation::Add(l, r) => { + expressions.push(handle_transition_expression( + ir, + "ADD".to_string(), + *l, + *r, + constants, + expressions_map, + )?); + expressions_map.insert(index, expressions.len() - 1); + } + Operation::Sub(l, r) => { + expressions.push(handle_transition_expression( + ir, + "SUB".to_string(), + *l, + *r, + constants, + expressions_map, + )?); + expressions_map.insert(index, expressions.len() - 1); + } + Operation::Mul(l, r) => { + expressions.push(handle_transition_expression( + ir, + "MUL".to_string(), + *l, + *r, + constants, + expressions_map, + )?); + expressions_map.insert(index, expressions.len() - 1); + } + Operation::Exp(i, degree) => { + match degree { + 0 => { + // I decided that node^0 could be emulated using the product of 1*1, but perhaps there are better ways + let index_of_1 = get_constant_index_by_value(1, constants)?; + let const_1_node = NodeReference { + node_type: NodeType::CONST, + index: index_of_1, + }; + expressions.push(Expression { + op: "MUL".to_string(), + lhs: const_1_node.clone(), + rhs: const_1_node, + }); + } + 1 => { + let lhs = handle_node_reference(ir, *i, constants, expressions_map)?; + let degree_index = get_constant_index_by_value(1, constants)?; + let rhs = NodeReference { + node_type: NodeType::CONST, + index: degree_index, + }; + expressions.push(Expression { + op: "MUL".to_string(), + lhs, + rhs, + }); + } + _ => handle_exponentiation( + ir, + expressions, + expressions_map, + *i, + *degree, + constants, + )?, + } + expressions_map.insert(index, expressions.len() - 1); + } + _ => {} + } + } + Ok(()) +} + +/// Fills the `outputs` vector according to the indexes from `expressions_map` +pub fn set_transition_outputs( + ir: &AirIR, + outputs: &mut Vec, + expressions_map: &BTreeMap, +) -> Result<(), ConstraintEvaluationError> { + for i in 0..ir.num_polys().len() { + for root in ir.transition_constraints(i as u8) { + let index = expressions_map + .get(&root.index()) + .ok_or_else(|| ConstraintEvaluationError::operation_not_found(root.index()))?; + outputs.push(*index); + } + } + Ok(()) +} + +/// Parses expression in transition graph Node vector and returns related [Expression] instance +fn handle_transition_expression( + ir: &AirIR, + op: String, + l: NodeIndex, + r: NodeIndex, + constants: &[u64], + expressions_map: &BTreeMap, +) -> Result { + let lhs = handle_node_reference(ir, l, constants, expressions_map)?; + let rhs = handle_node_reference(ir, r, constants, expressions_map)?; + Ok(Expression { op, lhs, rhs }) +} + +/// Parses expression in transition graph Node vector by [NodeIndex] and returns related +/// [NodeReference] instance +fn handle_node_reference( + ir: &AirIR, + i: NodeIndex, + constants: &[u64], + expressions_map: &BTreeMap, +) -> Result { + use Operation::*; + match ir.transition_graph().node(&i).op() { + Add(_, _) | Sub(_, _) | Mul(_, _) | Exp(_, _) => { + let index = expressions_map + .get(&i.index()) + .ok_or_else(|| ConstraintEvaluationError::operation_not_found(i.index()))?; + Ok(NodeReference { + node_type: NodeType::EXPR, + index: *index, + }) + } + Constant(constant_value) => { + match constant_value { + ConstantValue::Inline(v) => { + let index = get_constant_index_by_value(*v, constants)?; + Ok(NodeReference { + node_type: NodeType::CONST, + index, 
+                    })
+                }
+                ConstantValue::Scalar(name) => {
+                    let index = get_constant_index_by_name(ir, name, constants)?;
+                    Ok(NodeReference {
+                        node_type: NodeType::CONST,
+                        index,
+                    })
+                }
+                ConstantValue::Vector(vector_access) => {
+                    // Note: Constant::name() returns an Identifier, while VectorAccess::name()
+                    // and MatrixAccess::name() return a &str (equivalent to VectorAccess.name.name())
+                    let index = get_constant_index_by_vector_access(ir, vector_access, constants)?;
+                    Ok(NodeReference {
+                        node_type: NodeType::CONST,
+                        index,
+                    })
+                }
+                ConstantValue::Matrix(matrix_access) => {
+                    let index = get_constant_index_by_matrix_access(ir, matrix_access, constants)?;
+                    Ok(NodeReference {
+                        node_type: NodeType::CONST,
+                        index,
+                    })
+                }
+            }
+        }
+        TraceElement(trace_access) => {
+            // Working under the assumption that segment 0 holds the main trace columns and all
+            // other segments hold auxiliary columns
+            match trace_access.trace_segment() {
+                MAIN_TRACE_SEGMENT_INDEX => {
+                    // TODO: handle other offsets (not only 1)
+                    if trace_access.row_offset() == 0 {
+                        Ok(NodeReference {
+                            node_type: NodeType::POL,
+                            index: trace_access.col_idx(),
+                        })
+                    } else {
+                        Ok(NodeReference {
+                            node_type: NodeType::POL_NEXT,
+                            index: trace_access.col_idx(),
+                        })
+                    }
+                }
+                i if i < ir.num_polys().len() as u8 => {
+                    let col_index = ir.num_polys()[0..i as usize].iter().sum::<u16>() as usize
+                        + trace_access.col_idx();
+                    if trace_access.row_offset() == 0 {
+                        Ok(NodeReference {
+                            node_type: NodeType::POL,
+                            index: col_index,
+                        })
+                    } else {
+                        Ok(NodeReference {
+                            node_type: NodeType::POL_NEXT,
+                            index: col_index,
+                        })
+                    }
+                }
+                _ => Err(ConstraintEvaluationError::invalid_trace_segment(
+                    trace_access.trace_segment(),
+                )),
+            }
+        }
+        RandomValue(rand_index) => {
+            let index = get_random_value_index(ir, *rand_index);
+            Ok(NodeReference {
+                node_type: NodeType::VAR,
+                index,
+            })
+        }
+
+        PeriodicColumn(_column, _length) => todo!(),
+
+        // Currently this can only be `Neg`
+        _ => Err(ConstraintEvaluationError::InvalidOperation(
+            "Invalid transition constraint operation".to_string(),
+        )),
+    }
+}
+
+/// Replaces the exponentiation operation with multiplication operations, adding them to the
+/// expressions vector
+fn handle_exponentiation(
+    ir: &AirIR,
+    expressions: &mut Vec<Expression>,
+    expressions_map: &BTreeMap<usize, usize>,
+    i: NodeIndex,
+    degree: usize,
+    constants: &[u64],
+) -> Result<(), ConstraintEvaluationError> {
+    // base node that we want to raise to the given degree
+    let base_node = handle_node_reference(ir, i, constants, expressions_map)?;
+    // push the node^2 expression
+    expressions.push(Expression {
+        op: "MUL".to_string(),
+        lhs: base_node.clone(),
+        rhs: base_node.clone(),
+    });
+    let square_node_index = expressions.len() - 1;
+
+    // keep squaring the previous expression while the squared degree still fits into the target
+    let mut cur_degree_of_2 = 1; // currently we have node^(2^cur_degree_of_2) = node^(2^1) = node^2
+    while 2_usize.pow(cur_degree_of_2) <= degree / 2 {
+        let last_node = NodeReference {
+            node_type: NodeType::EXPR,
+            index: expressions.len() - 1,
+        };
+        expressions.push(Expression {
+            op: "MUL".to_string(),
+            lhs: last_node.clone(),
+            rhs: last_node,
+        });
+        cur_degree_of_2 += 1;
+    }
+
+    // multiply in the largest remaining powers of two until the target degree is reached
+    let mut cur_max_degree = 2_usize.pow(cur_degree_of_2); // currently we have node^cur_max_degree
+    while cur_max_degree != degree {
+        let diff = degree - cur_max_degree;
+        if diff == 1 {
+            // only the first power (the base node) remains to be multiplied in
+            let last_node = NodeReference {
+                node_type: NodeType::EXPR,
+                index: expressions.len() - 1,
+            };
+            expressions.push(Expression {
+                op: "MUL".to_string(),
+                lhs: last_node,
+                rhs: base_node,
+            });
+            break;
+        }
+        if 2_usize.pow(cur_degree_of_2 - 1) <= diff {
+            let last_node = NodeReference {
+                node_type: NodeType::EXPR,
+                index: expressions.len() - 1,
+            };
+            let fitting_degree_of_2_node = NodeReference {
+                node_type: NodeType::EXPR,
+                // index of the expression holding node^(2^(cur_degree_of_2 - 1)), the largest
+                // power of two that still fits into the remaining degree
+                index: square_node_index + cur_degree_of_2 as usize - 2,
+            };
+            expressions.push(Expression {
+                op: "MUL".to_string(),
+                lhs: last_node,
+                rhs: fitting_degree_of_2_node,
+            });
+            cur_max_degree += 2_usize.pow(cur_degree_of_2 - 1);
+        }
+        cur_degree_of_2 -= 1;
+    }
+
+    Ok(())
+}
diff --git a/codegen/winterfell/Cargo.toml b/codegen/winterfell/Cargo.toml
index ab1aaf35..fd73a01f 100644
--- a/codegen/winterfell/Cargo.toml
+++ b/codegen/winterfell/Cargo.toml
@@ -14,3 +14,6 @@ rust-version = "1.65"
 [dependencies]
 codegen = "0.2.0"
 ir = {package = "air-ir", path="../../ir", version="0.1.0" }
+
+[dev-dependencies]
+parser = { package = "air-parser", path = "../../parser", version = "0.1.0" }
\ No newline at end of file
diff --git a/codegen/winterfell/src/lib.rs b/codegen/winterfell/src/lib.rs
index dbf7ecb5..3253ccff 100644
--- a/codegen/winterfell/src/lib.rs
+++ b/codegen/winterfell/src/lib.rs
@@ -13,11 +13,11 @@ use air::add_air;
 /// CodeGenerator is used to generate a Rust implementation of the Winterfell STARK prover library's
 /// Air trait. The generated Air expresses the constraints specified by the AirIR used to build the
 /// CodeGenerator.
-pub struct CodeGenerator {
+pub struct WinterfellCodeGenerator {
     scope: Scope,
 }
 
-impl CodeGenerator {
+impl WinterfellCodeGenerator {
     // --- CONSTRUCTOR ----------------------------------------------------------------------------
 
     /// Builds a new Rust scope that represents a Winterfell Air trait implementation for the
diff --git a/ir/src/lib.rs b/ir/src/lib.rs
index 7b21d22b..cd6f66e6 100644
--- a/ir/src/lib.rs
+++ b/ir/src/lib.rs
@@ -46,6 +46,10 @@ const NEXT_ROW: usize = 1;
 pub struct AirIR {
     air_name: String,
     constants: Constants,
+    // I don't think that adding a new field just for generic constraint evaluation is worth it.
+    // TODO: get rid of this field
+    // number of named trace columns per segment: (main, aux)
+    num_polys: Vec<u16>,
     public_inputs: PublicInputs,
     periodic_columns: PeriodicColumns,
     boundary_stmts: BoundaryStmts,
@@ -121,7 +125,8 @@
             }
         }
 
-        let (constants, public_inputs, periodic_columns) = symbol_table.into_declarations();
+        let (constants, public_inputs, periodic_columns, num_polys) =
+            symbol_table.into_declarations();
 
         // validate sections
         validator.check()?;
@@ -129,6 +134,7 @@
         Ok(Self {
            air_name: air_name.to_string(),
            constants,
+           num_polys,
            public_inputs,
            periodic_columns,
            boundary_stmts,
@@ -146,6 +152,10 @@
        &self.constants
     }
 
+    pub fn num_polys(&self) -> &Vec<u16> {
+        &self.num_polys
+    }
+
     pub fn public_inputs(&self) -> &PublicInputs {
         &self.public_inputs
     }
diff --git a/ir/src/symbol_table.rs b/ir/src/symbol_table.rs
index 8f35a445..281df257 100644
--- a/ir/src/symbol_table.rs
+++ b/ir/src/symbol_table.rs
@@ -46,6 +46,9 @@
 pub(super) struct SymbolTable {
     /// The number of trace segments in the AIR.
     num_trace_segments: usize,
 
+    /// A vector of segment lengths (each value is the number of trace columns in the i-th segment)
+    num_polys: Vec<u16>,
+
     /// A map of all declared identifiers from their name (the key) to their type.
     identifiers: BTreeMap<String, IdentifierType>,
@@ -121,6 +124,11 @@
             col_idx += trace_cols.size() as usize;
         }
 
+        if trace_segment >= self.num_polys.len() as u8 {
+            self.num_polys.resize(trace_segment as usize + 1, 0);
+        }
+        self.num_polys[trace_segment as usize] = col_idx as u16;
+
         Ok(())
     }
 
@@ -172,8 +180,13 @@
     /// Consumes this symbol table and returns the information required for declaring constants,
     /// public inputs and periodic columns for the AIR.
-    pub(super) fn into_declarations(self) -> (Constants, PublicInputs, PeriodicColumns) {
-        (self.constants, self.public_inputs, self.periodic_columns)
+    pub(super) fn into_declarations(self) -> (Constants, PublicInputs, PeriodicColumns, Vec<u16>) {
+        (
+            self.constants,
+            self.public_inputs,
+            self.periodic_columns,
+            self.num_polys,
+        )
     }
 
     // --- ACCESSORS ------------------------------------------------------------------------------
diff --git a/ir/src/transition_stmts/graph.rs b/ir/src/transition_stmts/graph.rs
index 7bc2b19b..a006a91d 100644
--- a/ir/src/transition_stmts/graph.rs
+++ b/ir/src/transition_stmts/graph.rs
@@ -36,6 +36,10 @@ impl AlgebraicGraph {
         &self.nodes[index.0]
     }
 
+    pub fn nodes(&self) -> &Vec<Node> {
+        &self.nodes
+    }
+
     /// Returns the degree of the subgraph which has the specified node as its tip.
     pub fn degree(&self, index: &NodeIndex) -> TransitionConstraintDegree {
         let mut cycles: BTreeMap<usize, usize> = BTreeMap::new();
@@ -377,6 +381,12 @@
 #[derive(Debug, Default, Clone, Copy, Eq, PartialEq)]
 pub struct NodeIndex(usize);
 
+impl NodeIndex {
+    pub fn index(&self) -> usize {
+        self.0
+    }
+}
+
 #[derive(Debug)]
 pub struct Node {
     /// The operation represented by this node
@@ -448,6 +458,10 @@
     pub fn row_offset(&self) -> usize {
         self.row_offset
     }
+
+    pub fn trace_segment(&self) -> u8 {
+        self.trace_segment
+    }
 }
 
 #[derive(Debug, Eq, PartialEq)]
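For illustration, the decomposition performed by `handle_exponentiation` can be sketched on plain integers: square repeatedly (node^2, node^4, ..., node^(2^k)), then multiply in the largest remaining powers of two, and finally the base itself if one degree is left. The snippet below is a standalone sketch under those assumptions, not part of this diff; the name `pow_by_squaring` is hypothetical and does not exist in the codebase.

// Standalone sketch of the square-and-multiply scheme used by `handle_exponentiation`,
// expressed on plain integers instead of `Expression` nodes.
fn pow_by_squaring(base: u64, degree: u32) -> u64 {
    assert!(degree >= 2, "degrees 0 and 1 are handled separately in the codegen");

    // mirror of the `expressions` vector: powers[k] holds base^(2^(k + 1))
    let mut powers = vec![base * base];
    let mut cur_degree_of_2 = 1u32;
    while 2u32.pow(cur_degree_of_2) <= degree / 2 {
        let last = *powers.last().unwrap();
        powers.push(last * last);
        cur_degree_of_2 += 1;
    }

    // multiply in the largest remaining powers of two until the target degree is reached
    let mut acc = *powers.last().unwrap();
    let mut cur_max_degree = 2u32.pow(cur_degree_of_2);
    while cur_max_degree != degree {
        let diff = degree - cur_max_degree;
        if diff == 1 {
            // only the base itself is left to multiply in
            acc *= base;
            break;
        }
        if 2u32.pow(cur_degree_of_2 - 1) <= diff {
            // powers[cur_degree_of_2 - 2] is base^(2^(cur_degree_of_2 - 1))
            acc *= powers[cur_degree_of_2 as usize - 2];
            cur_max_degree += 2u32.pow(cur_degree_of_2 - 1);
        }
        cur_degree_of_2 -= 1;
    }
    acc
}

fn main() {
    // degree 13 = 8 + 4 + 1: build node^2, node^4, node^8, then node^8 * node^4, then node^12 * node
    assert_eq!(pow_by_squaring(3, 13), 3u64.pow(13));
    assert_eq!(pow_by_squaring(2, 6), 64);
}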