diff --git a/CHANGELOG.md b/CHANGELOG.md index da570a2677..0311a9e05a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,8 @@ #### VM Internals - Simplified range checker and removed 1 main and 1 auxiliary trace column (#949). +- Migrated range checker lookups to use LogUp and reduced the number of trace columns to 2 main and + 1 auxiliary (#1027). - Added `get_mapped_values()` and `get_store_subset()` methods to the `AdviceProvider` trait (#987). - [BREAKING] Added options to specify maximum number of cycles and expected number of cycles for a program (#998). - Improved handling of invalid/incomplete parameters in `StackOutputs` constructors (#1010). diff --git a/air/src/constraints/chiplets/memory/mod.rs b/air/src/constraints/chiplets/memory/mod.rs index 94643a3a9c..e0120aa758 100644 --- a/air/src/constraints/chiplets/memory/mod.rs +++ b/air/src/constraints/chiplets/memory/mod.rs @@ -355,32 +355,3 @@ impl EvaluationFrameExt for &EvaluationFrame { self.selector_next(1) } } - -// EXTERNAL ACCESSORS -// ================================================================================================ -/// Trait to allow other processors to easily access the memory column values they need for -/// constraint calculations. -pub trait MemoryFrameExt { - // --- Column accessors ----------------------------------------------------------------------- - - /// The value of the lower 16-bits of the delta value being tracked between two consecutive - /// context IDs, addresses, or clock cycles in the current row. - fn memory_d0(&self) -> E; - /// The value of the upper 16-bits of the delta value being tracked between two consecutive - /// context IDs, addresses, or clock cycles in the current row. - fn memory_d1(&self) -> E; -} - -impl MemoryFrameExt for &EvaluationFrame { - // --- Column accessors ----------------------------------------------------------------------- - - #[inline(always)] - fn memory_d0(&self) -> E { - self.current()[MEMORY_D0_COL_IDX] - } - - #[inline(always)] - fn memory_d1(&self) -> E { - self.current()[MEMORY_D1_COL_IDX] - } -} diff --git a/air/src/constraints/chiplets/mod.rs b/air/src/constraints/chiplets/mod.rs index 7c960148ad..9810f91cec 100644 --- a/air/src/constraints/chiplets/mod.rs +++ b/air/src/constraints/chiplets/mod.rs @@ -6,7 +6,6 @@ use crate::utils::{are_equal, binary_not, is_binary}; mod bitwise; mod hasher; mod memory; -pub use memory::MemoryFrameExt; // CONSTANTS // ================================================================================================ diff --git a/air/src/constraints/mod.rs b/air/src/constraints/mod.rs index 5d55b79a6c..0b4330ca0e 100644 --- a/air/src/constraints/mod.rs +++ b/air/src/constraints/mod.rs @@ -1,3 +1,106 @@ +use super::{EvaluationFrame, ExtensionOf, Felt, FieldElement}; +use crate::trace::{ + chiplets::{MEMORY_D0_COL_IDX, MEMORY_D1_COL_IDX}, + decoder::{DECODER_OP_BITS_OFFSET, DECODER_USER_OP_HELPERS_OFFSET}, +}; +use crate::utils::binary_not; + pub mod chiplets; pub mod range; pub mod stack; + +// ACCESSORS +// ================================================================================================ +/// Trait to allow other processors to easily access the column values they need for constraint +/// calculations. +pub trait MainFrameExt +where + F: FieldElement, + E: FieldElement + ExtensionOf, +{ + /// Returns true when a u32 stack operation that requires range checks is being performed. 
+ fn u32_rc_op(&self) -> F; + + // --- Range check lookup accessors ----------------------------------------------------------------------- + + /// The value required for the first memory lookup when the memory chiplet requests range + /// checks. The value returned is the denominator used for including the value into the LogUp + /// lookup: (alpha - d0). The value d0 which is being range-checked is the lower 16-bits of the + /// delta value being tracked between two consecutive context IDs, addresses, or clock cycles in + /// the current row. + fn lookup_mv0(&self, alpha: E) -> E; + /// The value required for the second memory lookup when the memory chiplet requests range + /// checks. The value returned is the denominator used for including the value into the LogUp + /// lookup: (alpha - d1). The value d1 which is being range-checked is the upper 16-bits of the + /// delta value being tracked between two consecutive context IDs, addresses, or clock cycles in + /// the current row. + fn lookup_mv1(&self, alpha: E) -> E; + /// The value required for the first stack lookup when the stack requests range checks. The + /// value returned is the denominator used for including the value into the LogUp lookup: + /// (alpha - h0). The value h0 which is being range checked by the stack operation is stored in + /// the helper columns of the decoder section of the trace. + fn lookup_sv0(&self, alpha: E) -> E; + /// The value required for the second stack lookup when the stack requests range checks. The + /// value returned is the denominator used for including the value into the LogUp lookup: + /// (alpha - h1). The value h1 which is being range checked by the stack operation is stored in + /// the helper columns of the decoder section of the trace. + fn lookup_sv1(&self, alpha: E) -> E; + /// The value required for the third stack lookup when the stack requests range checks. The + /// value returned is the denominator used for including the value into the LogUp lookup: + /// (alpha - h2). The value h2 which is being range checked by the stack operation is stored in + /// the helper columns of the decoder section of the trace. + fn lookup_sv2(&self, alpha: E) -> E; + /// The value required for the fourth stack lookup when the stack requests range checks. The + /// value returned is the denominator used for including the value into the LogUp lookup: + /// (alpha - h3). The value h3 which is being range checked by the stack operation is stored in + /// the helper columns of the decoder section of the trace. + fn lookup_sv3(&self, alpha: E) -> E; +} + +impl MainFrameExt for EvaluationFrame +where + F: FieldElement, + E: FieldElement + ExtensionOf, +{ + /// Returns true when the stack operation is a u32 operation that requires range checks. + /// TODO: this is also defined in the op flags. It's redefined here to avoid computing all of + /// the op flags when this is the only one needed, but ideally this should only be defined once. 
+ #[inline(always)] + fn u32_rc_op(&self) -> F { + let not_4 = binary_not(self.current()[DECODER_OP_BITS_OFFSET + 4]); + let not_5 = binary_not(self.current()[DECODER_OP_BITS_OFFSET + 5]); + self.current()[DECODER_OP_BITS_OFFSET + 6].mul(not_5).mul(not_4) + } + + // --- Intermediate values for LogUp lookups -------------------------------------------------- + + #[inline(always)] + fn lookup_mv0(&self, alpha: E) -> E { + alpha - self.current()[MEMORY_D0_COL_IDX].into() + } + + #[inline(always)] + fn lookup_mv1(&self, alpha: E) -> E { + alpha - self.current()[MEMORY_D1_COL_IDX].into() + } + + #[inline(always)] + fn lookup_sv0(&self, alpha: E) -> E { + alpha - self.current()[DECODER_USER_OP_HELPERS_OFFSET].into() + } + + #[inline(always)] + fn lookup_sv1(&self, alpha: E) -> E { + alpha - self.current()[DECODER_USER_OP_HELPERS_OFFSET + 1].into() + } + + #[inline(always)] + fn lookup_sv2(&self, alpha: E) -> E { + alpha - self.current()[DECODER_USER_OP_HELPERS_OFFSET + 2].into() + } + + #[inline(always)] + fn lookup_sv3(&self, alpha: E) -> E { + alpha - self.current()[DECODER_USER_OP_HELPERS_OFFSET + 3].into() + } +} diff --git a/air/src/constraints/range.rs b/air/src/constraints/range.rs index 6d28b07317..c0ba0c6bf1 100644 --- a/air/src/constraints/range.rs +++ b/air/src/constraints/range.rs @@ -1,7 +1,8 @@ use crate::{ - chiplets::{ChipletsFrameExt, MemoryFrameExt}, - trace::range::{B_RANGE_COL_IDX, Q_COL_IDX, S0_COL_IDX, S1_COL_IDX, V_COL_IDX}, - utils::{are_equal, binary_not, is_binary}, + chiplets::ChipletsFrameExt, + constraints::MainFrameExt, + trace::range::{B_RANGE_COL_IDX, M_COL_IDX, V_COL_IDX}, + utils::are_equal, Assertion, EvaluationFrame, Felt, FieldElement, TransitionConstraintDegree, }; use vm_core::{utils::collections::Vec, ExtensionOf}; @@ -15,11 +16,10 @@ use winter_air::AuxTraceRandElements; /// The number of boundary constraints required by the Range Checker pub const NUM_ASSERTIONS: usize = 2; /// The number of transition constraints required by the Range Checker. -pub const NUM_CONSTRAINTS: usize = 3; +pub const NUM_CONSTRAINTS: usize = 1; /// The degrees of the range checker's constraints, in the order they'll be added to the the result /// array when a transition is evaluated. pub const CONSTRAINT_DEGREES: [usize; NUM_CONSTRAINTS] = [ - 2, 2, // Selector flags must be binary: s0, s1. 9, // Enforce values of column v transition. ]; @@ -30,7 +30,7 @@ pub const NUM_AUX_ASSERTIONS: usize = 2; /// The number of transition constraints required by multiset checks for the Range Checker. pub const NUM_AUX_CONSTRAINTS: usize = 1; /// The degrees of the Range Checker's auxiliary column constraints, used for multiset checks. -pub const AUX_CONSTRAINT_DEGREES: [usize; NUM_AUX_CONSTRAINTS] = [7]; +pub const AUX_CONSTRAINT_DEGREES: [usize; NUM_AUX_CONSTRAINTS] = [9]; // BOUNDARY CONSTRAINTS // ================================================================================================ @@ -81,11 +81,16 @@ pub fn get_transition_constraint_count() -> usize { /// Enforces constraints for the range checker. pub fn enforce_constraints(frame: &EvaluationFrame, result: &mut [E]) { - // Constrain the selector flags. - let index = enforce_flags(frame, result); - - // Constrain the transition between rows of the range checker table. - enforce_delta(frame, &mut result[index..]); + // Constrain the transition of the value column between rows in the range checker table. 
+ result[0] = frame.change(V_COL_IDX) + * (frame.change(V_COL_IDX) - E::ONE) + * (frame.change(V_COL_IDX) - E::from(3_u8)) + * (frame.change(V_COL_IDX) - E::from(9_u8)) + * (frame.change(V_COL_IDX) - E::from(27_u8)) + * (frame.change(V_COL_IDX) - E::from(81_u8)) + * (frame.change(V_COL_IDX) - E::from(243_u8)) + * (frame.change(V_COL_IDX) - E::from(729_u16)) + * (frame.change(V_COL_IDX) - E::from(2187_u16)); } // --- AUXILIARY COLUMNS (FOR MULTISET CHECKS) ---------------------------------------------------- @@ -113,48 +118,40 @@ pub fn enforce_aux_constraints( let alpha = aux_rand_elements.get_segment_elements(0)[0]; // Enforce b_range. - enforce_running_product_b_range(main_frame, aux_frame, alpha, &mut result[..]); + enforce_b_range(main_frame, aux_frame, alpha, result); } // TRANSITION CONSTRAINT HELPERS // ================================================================================================ -// --- MAIN TRACE --------------------------------------------------------------------------------- - -/// Constrain the selector flags to binary values. -fn enforce_flags(frame: &EvaluationFrame, result: &mut [E]) -> usize { - let constraint_count = 2; - - result[0] = is_binary(frame.s0()); - result[1] = is_binary(frame.s1()); - - constraint_count -} - -/// Constrain the transition between rows in the range checker table. -fn enforce_delta(frame: &EvaluationFrame, result: &mut [E]) -> usize { - let constraint_count = 1; - - result[0] = frame.change(V_COL_IDX) - * (frame.change(V_COL_IDX) - E::ONE) - * (frame.change(V_COL_IDX) - E::from(3_u8)) - * (frame.change(V_COL_IDX) - E::from(9_u8)) - * (frame.change(V_COL_IDX) - E::from(27_u8)) - * (frame.change(V_COL_IDX) - E::from(81_u8)) - * (frame.change(V_COL_IDX) - E::from(243_u8)) - * (frame.change(V_COL_IDX) - E::from(729_u16)) - * (frame.change(V_COL_IDX) - E::from(2187_u16)); - - constraint_count -} - // --- AUXILIARY COLUMNS (FOR MULTISET CHECKS) ---------------------------------------------------- -/// Ensures that the running product is computed correctly in the column `b_range`. It enforces -/// that the value only changes after the padded rows, where the value of `z` is included at each -/// step, ensuring that the values in the range checker table are multiplied into `b_range` 0, 1, -/// 2, or 4 times, according to the selector flags. -fn enforce_running_product_b_range( +/// Ensures that the range checker bus is computed correctly. It enforces an implementation of the +/// LogUp lookup as a running sum "bus" column. All values in the range checker trace are saved +/// with their lookup multiplicity and the logarithmic derivatives are added to b_range. Values +/// for which lookups are requested from the stack and memory are each looked up with multiplicity +/// one, and the logarithmic derivatives are subtracted from b_range. +/// +/// Define the following variables: +/// - rc_value: the range checker value +/// - rc_multiplicity: the range checker multiplicity value +/// - flag_s: boolean flag indicating a stack operation with range checks. This flag is degree 3. +/// - sv0-sv3: stack value 0-3, the 4 values range-checked from the stack +/// - flag_m: boolean flag indicating the memory chiplet is active (i.e. range checks are required). +/// This flag is degree 3. 
+/// - mv0-mv1: memory value 0-1, the 2 values range-checked from the memory chiplet +/// +/// The constraint expression looks as follows: +/// b' = b + rc_multiplicity / (alpha - rc_value) +/// - flag_s / (alpha - sv0) - flag_s / (alpha - sv1) +/// - flag_s / (alpha - sv2) - flag_s / (alpha - sv3) +/// - flag_m / (alpha - mv0) - flag_m / (alpha - mv1) +/// +/// However, to enforce the constraint, all denominators are multiplied so that no divisions are +/// included in the actual constraint expression. +/// +/// Constraint degree: 9 +fn enforce_b_range( main_frame: &EvaluationFrame, aux_frame: &EvaluationFrame, alpha: E, @@ -163,53 +160,43 @@ fn enforce_running_product_b_range( F: FieldElement, E: FieldElement + ExtensionOf, { - // The running product column must enforce that the next step has the values from the range - // checker multiplied in (z) and the values from the stack (q) and the memory divided out. This - // is enforced by ensuring that b_range_next multiplied by the stack and memory lookups at this step - // is equal to the combination of b_range and the range checker's values for this step. - let lookups = aux_frame.q() * get_memory_lookups(main_frame, alpha); - let range_checks = get_z(main_frame, alpha); - - result[0] = are_equal(aux_frame.b_range_next() * lookups, aux_frame.b_range() * range_checks); -} - -/// The value to be included in the running product column for memory lookups at this row. These are -/// only included for steps in the memory section of the trace (when the memory_flag is one). -fn get_memory_lookups(main_frame: &EvaluationFrame, alpha: E) -> E -where - F: FieldElement, - E: FieldElement + ExtensionOf, -{ - let memory_flag: E = main_frame.chiplets_memory_flag().into(); - let d0: E = main_frame.memory_d0().into(); - let d1: E = main_frame.memory_d1().into(); - - E::ONE + memory_flag * ((d0 + alpha) * (d1 + alpha) - E::ONE) -} - -/// Returns the value `z` which is included in the running product columns at each step. `z` causes -/// the row's value to be included 0, 1, 2, or 4 times, according to the row's selector flags row. -fn get_z(main_frame: &EvaluationFrame, alpha: E) -> E -where - F: FieldElement, - E: FieldElement + ExtensionOf, -{ - // Get the selectors and the value from the main frame. - let s0: E = main_frame.s0().into(); - let s1: E = main_frame.s1().into(); - let v: E = main_frame.v().into(); - - // Define the flags. - let f0: E = binary_not(s0) * binary_not(s1); - let f1: E = s0 * binary_not(s1); - let f2: E = binary_not(s0) * s1; - let f3: E = s0 * s1; - - // Compute z. - let v_alpha = v + alpha; - let v_alpha2 = v_alpha.square(); - let v_alpha4 = v_alpha2.square(); - f3 * v_alpha4 + f2 * v_alpha2 + f1 * v_alpha + f0 + // The denominator values for the LogUp lookup. + let mv0: E = main_frame.lookup_mv0(alpha); + let mv1: E = main_frame.lookup_mv1(alpha); + let sv0: E = main_frame.lookup_sv0(alpha); + let sv1: E = main_frame.lookup_sv1(alpha); + let sv2: E = main_frame.lookup_sv2(alpha); + let sv3: E = main_frame.lookup_sv3(alpha); + let range_check: E = alpha - main_frame.v().into(); + let memory_lookups: E = mv0.mul(mv1); // degree 2 + let stack_lookups: E = sv0.mul(sv1).mul(sv2).mul(sv3); // degree 4 + let lookups = range_check.mul(stack_lookups).mul(memory_lookups); // degree 7 + + // An intermediate value required by all stack terms that includes the flag indicating a stack + // operation with range checks. This value has degree 6. 
+ let sflag_rc_mem: E = range_check + .mul(memory_lookups) + .mul_base( as MainFrameExt>::u32_rc_op(main_frame)); + // An intermediate value required by all memory terms that includes the flag indicating the + // memory portion of the chiplets trace. This value has degree 8. + let mflag_rc_stack: E = + range_check.mul(stack_lookups).mul_base(main_frame.chiplets_memory_flag()); + + // The terms for the LogUp check after all denominators have been multiplied in. + let b_next_term = aux_frame.b_range_next().mul(lookups); // degree 8 + let b_term = aux_frame.b_range().mul(lookups); // degree 8 + let rc_term = stack_lookups.mul(memory_lookups).mul_base(main_frame.multiplicity()); // degree 7 + let s0_term = sflag_rc_mem.mul(sv1).mul(sv2).mul(sv3); // degree 9 + let s1_term = sflag_rc_mem.mul(sv0).mul(sv2).mul(sv3); // degree 9 + let s2_term = sflag_rc_mem.mul(sv0).mul(sv1).mul(sv3); // degree 9 + let s3_term = sflag_rc_mem.mul(sv0).mul(sv1).mul(sv2); // degree 9 + let m0_term = mflag_rc_stack.mul(mv1); // degree 9 + let m1_term = mflag_rc_stack.mul(mv0); // degree 9 + + result[0] = are_equal( + b_next_term, + b_term + rc_term - s0_term - s1_term - s2_term - s3_term - m0_term - m1_term, + ); } // RANGE CHECKER FRAME EXTENSION TRAIT @@ -220,22 +207,15 @@ where trait EvaluationFrameExt { // --- Column accessors ----------------------------------------------------------------------- - fn s0(&self) -> E; - /// The current value in column s1. - fn s1(&self) -> E; + /// The current value in the lookup multiplicity column. + fn multiplicity(&self) -> E; /// The current value in column V. fn v(&self) -> E; - /// The next value in column V. - fn v_next(&self) -> E; /// The current value in auxiliary column b_range. fn b_range(&self) -> E; - /// The next value in auxiliary column b_range. fn b_range_next(&self) -> E; - /// The current value in auxiliary column q. - fn q(&self) -> E; - // --- Intermediate variables & helpers ------------------------------------------------------- /// The change between the current value in the specified column and the next value, calculated @@ -247,13 +227,8 @@ impl EvaluationFrameExt for &EvaluationFrame { // --- Column accessors ----------------------------------------------------------------------- #[inline(always)] - fn s0(&self) -> E { - self.current()[S0_COL_IDX] - } - - #[inline(always)] - fn s1(&self) -> E { - self.current()[S1_COL_IDX] + fn multiplicity(&self) -> E { + self.current()[M_COL_IDX] } #[inline(always)] @@ -261,11 +236,6 @@ impl EvaluationFrameExt for &EvaluationFrame { self.current()[V_COL_IDX] } - #[inline(always)] - fn v_next(&self) -> E { - self.next()[V_COL_IDX] - } - #[inline(always)] fn b_range(&self) -> E { self.current()[B_RANGE_COL_IDX] @@ -276,11 +246,6 @@ impl EvaluationFrameExt for &EvaluationFrame { self.next()[B_RANGE_COL_IDX] } - #[inline(always)] - fn q(&self) -> E { - self.current()[Q_COL_IDX] - } - // --- Intermediate variables & helpers ------------------------------------------------------- #[inline(always)] diff --git a/air/src/constraints/stack/op_flags/mod.rs b/air/src/constraints/stack/op_flags/mod.rs index 1dc0c0958f..9a684326a7 100644 --- a/air/src/constraints/stack/op_flags/mod.rs +++ b/air/src/constraints/stack/op_flags/mod.rs @@ -989,7 +989,7 @@ impl OpFlags { self.control_flow } - /// Returns the flag when the stack operation is a u32 operation. + /// Returns true when the stack operation is a u32 operation that requires range checks. 
#[inline(always)] pub fn u32_rc_op(&self) -> E { self.u32_rc_op diff --git a/air/src/trace/decoder/mod.rs b/air/src/trace/decoder/mod.rs index d0e8f54f79..2795cc9646 100644 --- a/air/src/trace/decoder/mod.rs +++ b/air/src/trace/decoder/mod.rs @@ -97,3 +97,8 @@ pub const P2_COL_IDX: usize = DECODER_AUX_TRACE_OFFSET + 1; /// Running product column representing op group table. pub const P3_COL_IDX: usize = DECODER_AUX_TRACE_OFFSET + 2; + +// --- GLOBALLY-INDEXED DECODER COLUMN ACCESSORS -------------------------------------------------- +pub const DECODER_OP_BITS_OFFSET: usize = super::DECODER_TRACE_OFFSET + OP_BITS_OFFSET; +pub const DECODER_USER_OP_HELPERS_OFFSET: usize = + super::DECODER_TRACE_OFFSET + USER_OP_HELPERS_OFFSET; diff --git a/air/src/trace/mod.rs b/air/src/trace/mod.rs index 81c45b2fc5..fa908671f4 100644 --- a/air/src/trace/mod.rs +++ b/air/src/trace/mod.rs @@ -42,7 +42,7 @@ pub const STACK_TRACE_RANGE: Range = range(STACK_TRACE_OFFSET, STACK_TRAC // Range check trace pub const RANGE_CHECK_TRACE_OFFSET: usize = STACK_TRACE_RANGE.end; -pub const RANGE_CHECK_TRACE_WIDTH: usize = 3; +pub const RANGE_CHECK_TRACE_WIDTH: usize = 2; pub const RANGE_CHECK_TRACE_RANGE: Range = range(RANGE_CHECK_TRACE_OFFSET, RANGE_CHECK_TRACE_WIDTH); @@ -57,7 +57,7 @@ pub const TRACE_WIDTH: usize = CHIPLETS_OFFSET + CHIPLETS_WIDTH; // ------------------------------------------------------------------------------------------------ // decoder stack range checks hasher chiplets -// (3 columns) (1 column) (2 columns) (1 column) (1 column) +// (3 columns) (1 column) (1 column) (1 column) (1 column) // ├───────────────┴──────────────┴──────────────┴───────────────┴───────────────┤ // Decoder auxiliary columns @@ -74,7 +74,7 @@ pub const STACK_AUX_TRACE_RANGE: Range = // Range check auxiliary columns pub const RANGE_CHECK_AUX_TRACE_OFFSET: usize = STACK_AUX_TRACE_RANGE.end; -pub const RANGE_CHECK_AUX_TRACE_WIDTH: usize = 2; +pub const RANGE_CHECK_AUX_TRACE_WIDTH: usize = 1; pub const RANGE_CHECK_AUX_TRACE_RANGE: Range = range(RANGE_CHECK_AUX_TRACE_OFFSET, RANGE_CHECK_AUX_TRACE_WIDTH); diff --git a/air/src/trace/range.rs b/air/src/trace/range.rs index c86fffde31..3081bbe3a8 100644 --- a/air/src/trace/range.rs +++ b/air/src/trace/range.rs @@ -5,21 +5,13 @@ use super::{RANGE_CHECK_AUX_TRACE_OFFSET, RANGE_CHECK_TRACE_OFFSET}; // --- Column accessors in the main trace --------------------------------------------------------- -/// A binary selector column to help specify whether or not the value should be included in the -/// running product. -pub const S0_COL_IDX: usize = RANGE_CHECK_TRACE_OFFSET; -/// A binary selector column to help specify whether or not the value should be included in the -/// running product. -pub const S1_COL_IDX: usize = RANGE_CHECK_TRACE_OFFSET + 1; +/// A column to hold the multiplicity of how many times the value is being range-checked. +pub const M_COL_IDX: usize = RANGE_CHECK_TRACE_OFFSET; /// A column to hold the values being range-checked. -pub const V_COL_IDX: usize = RANGE_CHECK_TRACE_OFFSET + 2; +pub const V_COL_IDX: usize = RANGE_CHECK_TRACE_OFFSET + 1; // --- Column accessors in the auxiliary columns -------------------------------------------------- /// The running product column used for verifying that the range check lookups performed in the /// Stack and the Memory chiplet match the values checked in the Range Checker. 
pub const B_RANGE_COL_IDX: usize = RANGE_CHECK_AUX_TRACE_OFFSET; - -/// An auxiliary trace column of intermediate values used to enforce AIR constraints on `b_range`. -/// It contains the product of the lookups performed by the Stack processor at each cycle. -pub const Q_COL_IDX: usize = B_RANGE_COL_IDX + 1; diff --git a/processor/src/chiplets/memory/mod.rs b/processor/src/chiplets/memory/mod.rs index 41c254dfe1..2b87904fda 100644 --- a/processor/src/chiplets/memory/mod.rs +++ b/processor/src/chiplets/memory/mod.rs @@ -176,7 +176,7 @@ impl Memory { }; let (delta_hi, delta_lo) = split_u32_into_u16(delta); - range.add_mem_checks(row, &[delta_lo, delta_hi]); + range.add_range_checks(row, &[delta_lo, delta_hi]); // update values for the next iteration of the loop prev_ctx = ctx; diff --git a/processor/src/operations/u32_ops.rs b/processor/src/operations/u32_ops.rs index 28d2077c50..7c14d768ec 100644 --- a/processor/src/operations/u32_ops.rs +++ b/processor/src/operations/u32_ops.rs @@ -210,7 +210,7 @@ where let (t3, t2) = split_u32_into_u16(hi.as_int()); // add lookup values to the range checker. - self.range.add_stack_checks(self.system.clk(), &[t0, t1, t2, t3]); + self.range.add_range_checks(self.system.clk(), &[t0, t1, t2, t3]); // save the range check lookups to the decoder's user operation helper columns. let mut helper_values = diff --git a/processor/src/range/aux_trace.rs b/processor/src/range/aux_trace.rs index 548b112ef4..a0d9c258bd 100644 --- a/processor/src/range/aux_trace.rs +++ b/processor/src/range/aux_trace.rs @@ -1,8 +1,6 @@ -use super::{ - build_lookup_table_row_values, uninit_vector, BTreeMap, ColMatrix, CycleRangeChecks, Felt, - FieldElement, RangeCheckFlag, Vec, NUM_RAND_ROWS, -}; -use miden_air::trace::range::V_COL_IDX; +use super::{uninit_vector, BTreeMap, ColMatrix, Felt, FieldElement, Vec, NUM_RAND_ROWS}; +use miden_air::trace::range::{M_COL_IDX, V_COL_IDX}; +use vm_core::StarkField; // AUXILIARY TRACE BUILDER // ================================================================================================ @@ -10,16 +8,11 @@ use miden_air::trace::range::V_COL_IDX; /// Describes how to construct the execution trace of columns related to the range checker in the /// auxiliary segment of the trace. These are used in multiset checks. pub struct AuxTraceBuilder { - // Range check lookups performed by all user operations, grouped and sorted by clock cycle. Each - // cycle is mapped to a single CycleRangeChecks instance which includes lookups from the stack, - // memory, or both. - // TODO: once we switch to backfilling memory range checks this approach can change to tracking - // vectors of hints and rows like in the Stack and Hasher AuxTraceBuilders, and the - // CycleRangeChecks struct can be removed. - cycle_range_checks: BTreeMap, - // A trace-length vector of RangeCheckFlags which indicate how many times the range check value - // at that row should be included in the trace. - row_flags: Vec, + /// A list of the unique values for which range checks are performed. + lookup_values: Vec, + /// Range check lookups performed by all user operations, grouped and sorted by the clock cycle + /// at which they are requested. + cycle_lookups: BTreeMap>, // The index of the first row of Range Checker's trace when the padded rows end and values to // be range checked start. 
values_start: usize, @@ -29,40 +22,31 @@ impl AuxTraceBuilder { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- pub fn new( - cycle_range_checks: BTreeMap, - row_flags: Vec, + lookup_values: Vec, + cycle_lookups: BTreeMap>, values_start: usize, ) -> Self { Self { - cycle_range_checks, - row_flags, + lookup_values, + cycle_lookups, values_start, } } - // ACCESSORS - // -------------------------------------------------------------------------------------------- - pub fn cycle_range_check_values(&self) -> Vec { - self.cycle_range_checks.values().cloned().collect() - } - // AUX COLUMN BUILDERS // -------------------------------------------------------------------------------------------- /// Builds and returns range checker auxiliary trace columns. Currently this consists of two /// columns: /// - `b_range`: ensures that the range checks performed by the Range Checker match those - /// requested - /// by the Stack and Memory processors. - /// - `q`: a helper column of intermediate values to reduce the degree of the constraints for - /// `b_range`. It contains the product of the lookups performed by the Stack at each row. + /// requested by the Stack and Memory processors. pub fn build_aux_columns>( &self, main_trace: &ColMatrix, rand_elements: &[E], ) -> Vec> { - let (b_range, q) = self.build_aux_col_b_range(main_trace, rand_elements); - vec![b_range, q] + let b_range = self.build_aux_col_b_range(main_trace, rand_elements); + vec![b_range] } /// Builds the execution trace of the range check `b_range` and `q` columns which ensure that the @@ -70,47 +54,39 @@ impl AuxTraceBuilder { fn build_aux_col_b_range>( &self, main_trace: &ColMatrix, - alphas: &[E], - ) -> (Vec, Vec) { - // compute the inverses for range checks performed by operations. - let (_, inv_row_values) = - build_lookup_table_row_values(&self.cycle_range_check_values(), main_trace, alphas); - - // allocate memory for the running product column and set the initial value to ONE - let mut q = unsafe { uninit_vector(main_trace.num_rows()) }; + rand_elements: &[E], + ) -> Vec { + // run batch inversion on the lookup values + let divisors = get_divisors(&self.lookup_values, rand_elements[0]); + + // allocate memory for the running sum column and set the initial value to ONE let mut b_range = unsafe { uninit_vector(main_trace.num_rows()) }; - q[0] = E::ONE; b_range[0] = E::ONE; - // keep track of the last updated row in the `b_range` running product column. the `q` - // column index is always one row behind, since `q` is filled with intermediate values in - // the same row as the operation is executed, whereas `b_range` is filled with result - // values that are added to the next row after the operation's execution. + // keep track of the last updated row in the `b_range` running sum column. `b_range` is + // filled with result values that are added to the next row after the operation's execution. let mut b_range_idx = 0_usize; - // keep track of the next row to be included from the user op range check values. - let mut rc_user_op_idx = 0; // the first half of the trace only includes values from the operations. - for (clk, range_checks) in self.cycle_range_checks.range(0..self.values_start as u32) { + for (clk, range_checks) in self.cycle_lookups.range(0..self.values_start as u32) { let clk = *clk as usize; // if we skipped some cycles since the last update was processed, values in the last - // updated row should by copied over until the current cycle. 
+ // updated row should be copied over until the current cycle. if b_range_idx < clk { let last_value = b_range[b_range_idx]; b_range[(b_range_idx + 1)..=clk].fill(last_value); - q[b_range_idx..clk].fill(E::ONE); } - // move the column pointers to the next row. + // move the column pointer to the next row. b_range_idx = clk + 1; - // update the intermediate values in the q column. - q[clk] = range_checks.to_stack_value(main_trace, alphas); - - // include the operation lookups in the running product. - b_range[b_range_idx] = b_range[clk] * inv_row_values[rc_user_op_idx]; - rc_user_op_idx += 1; + b_range[b_range_idx] = b_range[clk]; + // include the operation lookups + for lookup in range_checks.iter() { + let value = divisors.get(lookup).expect("invalid lookup value {}"); + b_range[b_range_idx] -= *value; + } } // if we skipped some cycles since the last update was processed, values in the last @@ -118,13 +94,13 @@ impl AuxTraceBuilder { if b_range_idx < self.values_start { let last_value = b_range[b_range_idx]; b_range[(b_range_idx + 1)..=self.values_start].fill(last_value); - q[b_range_idx..self.values_start].fill(E::ONE); } - // after the padded section of the range checker table, include `z` in the running product - // at each step and remove lookups from user ops at any step where user ops were executed. - for (row_idx, (hint, lookup)) in self - .row_flags + // after the padded section of the range checker table, include the lookup value specified + // by the range checker into the running sum at each step, and remove lookups from user ops + // at any step where user ops were executed. + for (row_idx, (multiplicity, lookup)) in main_trace + .get_column(M_COL_IDX) .iter() .zip(main_trace.get_column(V_COL_IDX).iter()) .enumerate() @@ -133,32 +109,64 @@ impl AuxTraceBuilder { { b_range_idx = row_idx + 1; - b_range[b_range_idx] = b_range[row_idx] * hint.to_value(*lookup, alphas); - - if let Some(range_check) = self.cycle_range_checks.get(&(row_idx as u32)) { - // update the intermediate values in the q column. - q[row_idx] = range_check.to_stack_value(main_trace, alphas); - - // include the operation lookups in the running product. - b_range[b_range_idx] *= inv_row_values[rc_user_op_idx]; - rc_user_op_idx += 1; + if multiplicity.as_int() != 0 { + // add the value in the range checker: multiplicity / (alpha - lookup) + let value = divisors.get(&(lookup.as_int() as u16)).expect("invalid lookup value"); + b_range[b_range_idx] = b_range[row_idx] + value.mul_base(*multiplicity); } else { - q[row_idx] = E::ONE; + b_range[b_range_idx] = b_range[row_idx]; + } + + // subtract the range checks requested by operations + if let Some(range_checks) = self.cycle_lookups.get(&(row_idx as u32)) { + for lookup in range_checks.iter() { + let value = divisors.get(lookup).expect("invalid lookup value"); + b_range[b_range_idx] -= *value; + } } } // at this point, all range checks from user operations and the range checker should be // matched - so, the last value must be ONE; - assert_eq!(q[b_range_idx - 1], E::ONE); assert_eq!(b_range[b_range_idx], E::ONE); - if (b_range_idx - 1) < b_range.len() - 1 { - q[b_range_idx..].fill(E::ONE); - } if b_range_idx < b_range.len() - 1 { b_range[(b_range_idx + 1)..].fill(E::ONE); } - (b_range, q) + b_range + } +} + +/// Runs batch inversion on all range check lookup values and returns a map which maps of each value +/// to the divisor used for including it in the LogUp lookup. In other words, the map contains +/// mappings of x to 1/(alpha - x). 
+fn get_divisors>( + lookup_values: &[u16], + alpha: E, +) -> BTreeMap { + // run batch inversion on the lookup values + let mut values = unsafe { uninit_vector(lookup_values.len()) }; + let mut inv_values = unsafe { uninit_vector(lookup_values.len()) }; + let mut log_values = BTreeMap::new(); + + let mut acc = E::ONE; + for (i, (value, inv_value)) in values.iter_mut().zip(inv_values.iter_mut()).enumerate() { + *inv_value = acc; + *value = alpha - E::from(lookup_values[i]); + acc *= *value; } + + // invert the accumulated product + acc = acc.inv(); + + // multiply the accumulated product by the original values to compute the inverses, then + // build a map of inverses for the lookup values + for i in (0..lookup_values.len()).rev() { + inv_values[i] *= acc; + acc *= values[i]; + log_values.insert(lookup_values[i], inv_values[i]); + } + + log_values } diff --git a/processor/src/range/mod.rs b/processor/src/range/mod.rs index 14c5e58ba5..b22b2169a8 100644 --- a/processor/src/range/mod.rs +++ b/processor/src/range/mod.rs @@ -1,15 +1,11 @@ use super::{ - trace::{build_lookup_table_row_values, LookupTableRow, NUM_RAND_ROWS}, - utils::uninit_vector, - BTreeMap, ColMatrix, Felt, FieldElement, RangeCheckTrace, Vec, ONE, ZERO, + trace::NUM_RAND_ROWS, utils::uninit_vector, BTreeMap, ColMatrix, Felt, FieldElement, + RangeCheckTrace, Vec, ZERO, }; mod aux_trace; pub use aux_trace::AuxTraceBuilder; -mod request; -use request::CycleRangeChecks; - #[cfg(test)] mod tests; @@ -23,35 +19,31 @@ mod tests; /// into 16-bits, but rather keeps track of all 16-bit range checks performed by the VM. /// /// ## Execution trace -/// Execution trace generated by the range checker consists of 3 columns. Conceptually, the table -/// starts with value 0 and end with value 65535. +/// The execution trace generated by the range checker consists of 2 columns. Conceptually, the +/// table starts with value 0 and ends with value 65535. /// -/// The layout illustrated below. +/// The layout is illustrated below. /// -/// s0 s1 v -/// ├─────┴──────┴─────┤ +/// m v +/// ├─────┴─────┤ /// /// In the above, the meaning of the columns is as follows: /// - Column `v` contains the value being range-checked where `v` must be a 16-bit value. The /// values must be in increasing order and the jump allowed between two values should be a power /// of 3 less than or equal to 3^7, and duplicates are allowed. -/// - Column `s0` and `s1` specify how many lookups are to be included for a given value. -/// Specifically: (0, 0) means no lookups, (1, 0) means one lookup, (0, 1), means two lookups, -/// and (1, 1) means four lookups. +/// - Column `m` specifies the lookup multiplicity, which is how many lookups are to be included for +/// a given value. /// /// Thus, for example, if a value was range-checked just once, we'll need to add a single row to -/// the table with (s0, s1, v) set to (1, 0, v), where v is the value. -/// -/// If, on the other hand, the value was range-checked 5 times, we'll need two rows in the table: -/// (1, 1, v) and (1, 0, v). The first row specifies that there were 4 lookups and the second -/// row add the fifth lookup. +/// the table with (m, v) set to (1, v), where v is the value. If the value was range-checked 5 +/// times, we'll need to specify the row (5, v). pub struct RangeChecker { /// Tracks lookup count for each checked value. lookups: BTreeMap, - // Range check lookups performed by all user operations, grouped and sorted by clock cycle. 
Each - // cycle is mapped to a single CycleRangeChecks instance which includes lookups from the stack, - // memory, or both. - cycle_range_checks: BTreeMap, + /// Range check lookups performed by all user operations, grouped and sorted by clock cycle. + /// Each cycle is mapped to a vector of the range checks requested at that cycle, which can come + /// from the stack, memory, or both. + cycle_lookups: BTreeMap>, } impl RangeChecker { @@ -66,47 +58,45 @@ impl RangeChecker { lookups.insert(u16::MAX, 0); Self { lookups, - cycle_range_checks: BTreeMap::new(), + cycle_lookups: BTreeMap::new(), } } // TRACE MUTATORS // -------------------------------------------------------------------------------------------- + /// Adds the specified value to the trace of this range checker's lookups. pub fn add_value(&mut self, value: u16) { self.lookups.entry(value).and_modify(|v| *v += 1).or_insert(1); } - /// Adds range check lookups from the [Stack] to this [RangeChecker] instance. Stack lookups are - /// guaranteed to be added at unique clock cycles, since operations are sequential and no range - /// check lookups are added before or during the stack operation processing. - pub fn add_stack_checks(&mut self, clk: u32, values: &[u16; 4]) { - self.add_value(values[0]); - self.add_value(values[1]); - self.add_value(values[2]); - self.add_value(values[3]); - - // Stack operations are added before memory operations at unique clock cycles. - self.cycle_range_checks.insert(clk, CycleRangeChecks::new_from_stack(values)); - } + /// Adds range check lookups from the stack or memory to this [RangeChecker] instance. + pub fn add_range_checks(&mut self, clk: u32, values: &[u16]) { + // range checks requests only come from memory or from the stack, which always request 2 or + // 4 lookups respectively. + debug_assert!(values.len() == 2 || values.len() == 4); - /// Adds range check lookups from [Memory] to this [RangeChecker] instance. Memory lookups are - /// always added after all stack lookups have completed, since they are processed during trace - /// finalization. - pub fn add_mem_checks(&mut self, clk: u32, values: &[u16; 2]) { - self.add_value(values[0]); - self.add_value(values[1]); + for value in values.iter() { + // add the specified value to the trace of this range checker's lookups. + self.add_value(*value); + } - self.cycle_range_checks + // track the range check requests at each cycle + // TODO: optimize this to use a struct instead of vectors, e.g.: + // struct MemoryLookupValues { + // num_lookups: u8, + // lookup_values: [u16; 6], + // } + self.cycle_lookups .entry(clk) - .and_modify(|entry| entry.add_memory_checks(values)) - .or_insert_with(|| CycleRangeChecks::new_from_memory(values)); + .and_modify(|entry| entry.append(&mut values.to_vec())) + .or_insert_with(|| values.to_vec()); } // EXECUTION TRACE GENERATION (INTERNAL) // -------------------------------------------------------------------------------------------- - /// Converts this [RangeChecker] into an execution trace with 3 columns and the number of rows + /// Converts this [RangeChecker] into an execution trace with 2 columns and the number of rows /// specified by the `target_len` parameter. /// /// If the number of rows need to represent execution trace of this range checker is smaller @@ -133,28 +123,19 @@ impl RangeChecker { // allocated memory for the trace; this memory is un-initialized but this is not a problem // because we'll overwrite all values in it anyway. 
- let mut trace = unsafe { - [uninit_vector(target_len), uninit_vector(target_len), uninit_vector(target_len)] - }; - // Allocate uninitialized memory for accumulating the precomputed auxiliary column hints. - let mut row_flags = unsafe { uninit_vector(target_len) }; + let mut trace = unsafe { [uninit_vector(target_len), uninit_vector(target_len)] }; // determine the number of padding rows needed to get to target trace length and pad the // table with the required number of rows. let num_padding_rows = target_len - trace_len - num_rand_rows; trace[0][..num_padding_rows].fill(ZERO); trace[1][..num_padding_rows].fill(ZERO); - trace[2][..num_padding_rows].fill(ZERO); - - // Initialize the padded rows of the auxiliary column hints with the default flag, F0, - // indicating s0 = s1 = ZERO. - row_flags[..num_padding_rows].fill(RangeCheckFlag::F0); // build the trace table let mut i = num_padding_rows; let mut prev_value = 0u16; for (&value, &num_lookups) in self.lookups.iter() { - write_rows(&mut trace, &mut i, num_lookups, value, prev_value, &mut row_flags); + write_rows(&mut trace, &mut i, num_lookups, value, prev_value); prev_value = value; } @@ -163,11 +144,15 @@ impl RangeChecker { // (When there is data at the end of the main trace, auxiliary bus columns always need to be // one row longer than the main trace, since values in the bus column are based on data from // the "current" row of the main trace but placed into the "next" row of the bus column.) - write_value(&mut trace, &mut i, 0, (u16::MAX).into(), &mut row_flags); + write_trace_row(&mut trace, &mut i, 0, (u16::MAX).into()); RangeCheckTrace { trace, - aux_builder: AuxTraceBuilder::new(self.cycle_range_checks, row_flags, num_padding_rows), + aux_builder: AuxTraceBuilder::new( + self.lookups.keys().cloned().collect(), + self.cycle_lookups, + num_padding_rows, + ), } } @@ -181,16 +166,16 @@ impl RangeChecker { let mut num_rows = 1; let mut prev_value = 0u16; - for (&value, &num_lookups) in self.lookups.iter() { - // determine how many lookup rows we need for this value - num_rows += lookups_to_rows(num_lookups); + for value in self.lookups.keys() { + // add one row for each value in the range checker table + num_rows += 1; // determine the delta between this and the previous value. we need to know this delta // to determine if we need to insert any "bridge" rows to the table, this is needed // since the gap between two values in the range checker can only be a power of 3 less // than or equal to 3^7. let delta = value - prev_value; num_rows += get_num_bridge_rows(delta); - prev_value = value; + prev_value = *value; } num_rows } @@ -222,58 +207,9 @@ impl Default for RangeChecker { } } -// RANGE CHECKER ROWS -// ================================================================================================ - -/// A precomputed hint value that can be used to help construct the execution trace for the -/// auxiliary column b_range used for multiset checks. The hint is a precomputed flag value based -/// on the selectors s0 and s1 in the trace. -#[derive(Debug, PartialEq, Eq, Clone)] -pub enum RangeCheckFlag { - F0, - F1, - F2, - F3, -} - -impl RangeCheckFlag { - /// Reduces this row to a single field element in the field specified by E. This requires - /// at least 1 alpha value. 
- pub fn to_value>(&self, value: Felt, alphas: &[E]) -> E { - let alpha: E = alphas[0]; - - match self { - RangeCheckFlag::F0 => E::ONE, - RangeCheckFlag::F1 => alpha + value.into(), - RangeCheckFlag::F2 => (alpha + value.into()).square(), - RangeCheckFlag::F3 => ((alpha + value.into()).square()).square(), - } - } -} - // HELPER FUNCTIONS // ================================================================================================ -/// Returns the number of rows needed to perform the specified number of lookups for an 8-bit -/// value. Note that even if the number of lookups is 0, at least one row is required. This is -/// because for an 8-bit table, rows must contain contiguous values. -/// -/// The number of rows is determined as follows: -/// - First we compute the number of rows for 4 lookups per row. -/// - Then we compute the number of rows for 2 lookups per row. -/// - Then, we compute the number of rows for a single lookup per row. -/// -/// The return value is the sum of these three values. -fn lookups_to_rows(num_lookups: usize) -> usize { - if num_lookups == 0 { - 1 - } else { - let (num_rows4, num_lookups) = div_rem(num_lookups, 4); - let (num_rows2, num_rows1) = div_rem(num_lookups, 2); - num_rows4 + num_rows2 + num_rows1 - } -} - /// Calculates the number of bridge rows that are need to be added to the trace between two values /// to be range checked. pub fn get_num_bridge_rows(delta: u16) -> usize { @@ -299,7 +235,6 @@ fn write_rows( num_lookups: usize, value: u16, prev_value: u16, - row_flags: &mut [RangeCheckFlag], ) { let mut gap = value - prev_value; let mut prev_val = prev_value; @@ -308,62 +243,17 @@ fn write_rows( if gap > stride { gap -= stride; prev_val += stride; - write_value(trace, step, 0, prev_val as u64, row_flags); + write_trace_row(trace, step, 0, prev_val as u64); } else { stride /= 3; } } - write_value(trace, step, num_lookups, value as u64, row_flags); -} - -/// Populates the trace with the rows needed to support the specified number of lookups against -/// the specified value. -fn write_value( - trace: &mut [Vec], - step: &mut usize, - num_lookups: usize, - value: u64, - row_flags: &mut [RangeCheckFlag], -) { - // if the number of lookups is 0, only one trace row is required - if num_lookups == 0 { - row_flags[*step] = RangeCheckFlag::F0; - write_trace_row(trace, step, ZERO, ZERO, value); - return; - } - - // write rows which can support 4 lookups per row - let (num_rows, num_lookups) = div_rem(num_lookups, 4); - for _ in 0..num_rows { - row_flags[*step] = RangeCheckFlag::F3; - write_trace_row(trace, step, ONE, ONE, value); - } - - // write rows which can support 2 lookups per row - let (num_rows, num_lookups) = div_rem(num_lookups, 2); - for _ in 0..num_rows { - row_flags[*step] = RangeCheckFlag::F2; - write_trace_row(trace, step, ZERO, ONE, value); - } - - // write rows which can support only one lookup per row - for _ in 0..num_lookups { - row_flags[*step] = RangeCheckFlag::F1; - write_trace_row(trace, step, ONE, ZERO, value); - } + write_trace_row(trace, step, num_lookups, value as u64); } /// Populates a single row at the specified step in the trace table. 
-fn write_trace_row(trace: &mut [Vec], step: &mut usize, s0: Felt, s1: Felt, value: u64) { - trace[0][*step] = s0; - trace[1][*step] = s1; - trace[2][*step] = Felt::new(value); +fn write_trace_row(trace: &mut [Vec], step: &mut usize, num_lookups: usize, value: u64) { + trace[0][*step] = Felt::new(num_lookups as u64); + trace[1][*step] = Felt::new(value); *step += 1; } - -/// Returns quotient and remainder of dividing the provided value by the divisor. -fn div_rem(value: usize, divisor: usize) -> (usize, usize) { - let q = value / divisor; - let r = value % divisor; - (q, r) -} diff --git a/processor/src/range/tests.rs b/processor/src/range/tests.rs index 7ec9b3d3ff..1cdce79756 100644 --- a/processor/src/range/tests.rs +++ b/processor/src/range/tests.rs @@ -1,4 +1,4 @@ -use super::{BTreeMap, Felt, RangeChecker, Vec, ONE, ZERO}; +use super::{BTreeMap, Felt, RangeChecker, Vec, ZERO}; use crate::{utils::get_trace_len, RangeCheckTrace}; use rand_utils::rand_array; use vm_core::{utils::ToElements, StarkField}; @@ -10,7 +10,8 @@ fn range_checks() { let values = [0, 1, 2, 2, 2, 2, 3, 3, 3, 4, 4, 100, 355, 620].to_elements(); for &value in values.iter() { - checker.add_value(value.as_int() as u16) + // add the value to the range checker's trace + checker.add_value(value.as_int() as u16); } let RangeCheckTrace { @@ -21,7 +22,7 @@ fn range_checks() { // skip the padded rows let mut i = 0; - while trace[0][i] == ZERO && trace[1][i] == ZERO && trace[2][i] == ZERO { + while trace[0][i] == ZERO && trace[1][i] == ZERO { i += 1; } @@ -29,8 +30,7 @@ fn range_checks() { validate_row(&trace, &mut i, 0, 1); validate_row(&trace, &mut i, 1, 1); validate_row(&trace, &mut i, 2, 4); - validate_row(&trace, &mut i, 3, 2); - validate_row(&trace, &mut i, 3, 1); + validate_row(&trace, &mut i, 3, 3); validate_row(&trace, &mut i, 4, 2); validate_bridge_rows(&trace, &mut i, 4, 100); @@ -67,22 +67,13 @@ fn range_checks_rand() { // ================================================================================================ fn validate_row(trace: &[Vec], row_idx: &mut usize, value: u64, num_lookups: u64) { - let (s0, s1) = match num_lookups { - 0 => (ZERO, ZERO), - 1 => (ONE, ZERO), - 2 => (ZERO, ONE), - 4 => (ONE, ONE), - _ => panic!("invalid lookup value"), - }; - - assert_eq!(s0, trace[0][*row_idx]); - assert_eq!(s1, trace[1][*row_idx]); - assert_eq!(Felt::new(value), trace[2][*row_idx]); + assert_eq!(trace[0][*row_idx], Felt::from(num_lookups)); + assert_eq!(trace[1][*row_idx], Felt::from(value)); *row_idx += 1; } fn validate_trace(trace: &[Vec], lookups: &[Felt]) { - assert_eq!(3, trace.len()); + assert_eq!(2, trace.len()); // trace length must be a power of two let trace_len = get_trace_len(trace); @@ -93,8 +84,8 @@ fn validate_trace(trace: &[Vec], lookups: &[Felt]) { let mut lookups_16bit = BTreeMap::new(); // process the first row - assert_eq!(ZERO, trace[2][i]); - let count = get_lookup_count(trace, i); + assert_eq!(trace[1][i], ZERO); + let count = trace[0][i].as_int(); lookups_16bit.insert(0u16, count); i += 1; @@ -102,7 +93,7 @@ fn validate_trace(trace: &[Vec], lookups: &[Felt]) { let mut prev_value = 0u16; while i < trace_len { // make sure the value is a 16-bit value - let value = trace[2][i].as_int(); + let value = trace[1][i].as_int(); assert!(value <= 65535, "not a 16-bit value"); let value = value as u16; @@ -111,15 +102,18 @@ fn validate_trace(trace: &[Vec], lookups: &[Felt]) { assert!(valid_delta(delta)); // keep track of lookup count for each value - let count = get_lookup_count(trace, i); - 
lookups_16bit.entry(value).and_modify(|value| *value += count).or_insert(count); + let multiplicity = trace[0][i].as_int(); + lookups_16bit + .entry(value) + .and_modify(|count| *count += multiplicity) + .or_insert(multiplicity); i += 1; prev_value = value; } // validate the last row (must be 65535) - let last_value = trace[2][i - 1].as_int(); + let last_value = trace[1][i - 1].as_int(); assert_eq!(65535, last_value); // remove all the looked up values from the lookup table @@ -161,20 +155,6 @@ fn validate_bridge_rows( } } -fn get_lookup_count(trace: &[Vec], step: usize) -> usize { - if trace[0][step] == ZERO && trace[1][step] == ZERO { - 0 - } else if trace[0][step] == ONE && trace[1][step] == ZERO { - 1 - } else if trace[0][step] == ZERO && trace[1][step] == ONE { - 2 - } else if trace[0][step] == ONE && trace[1][step] == ONE { - 4 - } else { - panic!("not a valid count"); - } -} - /// Checks if the delta between two values is 0 or a power of 3 and at most 3^7 fn valid_delta(delta: u16) -> bool { delta == 0 || (59049 % delta == 0 && delta <= 2187) diff --git a/processor/src/trace/tests/range.rs b/processor/src/trace/tests/range.rs index 54035cbb66..653fdce0fc 100644 --- a/processor/src/trace/tests/range.rs +++ b/processor/src/trace/tests/range.rs @@ -1,50 +1,9 @@ use super::{build_trace_from_ops, Felt, FieldElement, Trace, NUM_RAND_ROWS, ONE, ZERO}; use miden_air::trace::{ - chiplets::hasher::HASH_CYCLE_LEN, - range::{B_RANGE_COL_IDX, Q_COL_IDX}, - AUX_TRACE_RAND_ELEMENTS, + chiplets::hasher::HASH_CYCLE_LEN, range::B_RANGE_COL_IDX, AUX_TRACE_RAND_ELEMENTS, }; use rand_utils::rand_array; -use vm_core::Operation; - -#[test] -#[allow(clippy::needless_range_loop)] -fn q_trace() { - let stack = [1, 255]; - let operations = vec![ - Operation::U32add, - Operation::MStoreW, - Operation::Drop, - Operation::Drop, - Operation::Drop, - Operation::Drop, - ]; - let mut trace = build_trace_from_ops(operations, &stack); - - let rand_elements = rand_array::(); - let alpha = rand_elements[0]; - let aux_columns = trace.build_aux_segment(&[], &rand_elements).unwrap(); - let q = aux_columns.get_column(Q_COL_IDX); - - assert_eq!(trace.length(), q.len()); - - // --- Check the stack processor's range check lookups. --------------------------------------- - - // Before any range checks are executed, the value in b_range should be one. - assert_eq!(Felt::ONE, q[0]); - - // The first range check lookup from the stack will happen when the add operation is executed, - // at cycle 1. (The trace begins by executing `span`). It must be divided out of `b_range`. - // The range-checked values are 0, 256, 0, 0. - let expected = (alpha) * (Felt::new(256) + alpha) * alpha.square(); - assert_eq!(expected, q[1]); - - // --- Check the last value of the q column is one. ------------------------------------------ - - for row in 2..(q.len() - NUM_RAND_ROWS) { - assert_eq!(Felt::ONE, q[row]); - } -} +use vm_core::{ExtensionOf, Operation}; /// This test checks that range check lookups from stack operations are balanced by the range checks /// processed in the Range Checker. @@ -70,37 +29,34 @@ fn b_range_trace_stack() { assert_eq!(Felt::ONE, b_range[1]); // The first range check lookup from the stack will happen when the add operation is executed, - // at cycle 1. (The trace begins by executing `span`). It must be divided out of `b_range`. - // The range-checked values are 0, 256, 0, 0. - let lookup_product = (alpha) * (Felt::new(256) + alpha) * alpha.square(); - let mut expected = lookup_product.inv(); + // at cycle 1. 
(The trace begins by executing `span`). It must be subtracted out of `b_range`. + // The range-checked values are 0, 256, 0, 0, so the values to subtract are 3/(alpha - 0) and + // 1/(alpha - 256). + let lookups = alpha.inv().mul_base(Felt::new(3)) + (alpha - Felt::new(256)).inv(); + let mut expected = b_range[1] - lookups; assert_eq!(expected, b_range[2]); // --- Check the range checker's lookups. ----------------------------------------------------- - // 45 rows are needed for 0, 0, 243, 252, 255, 256, ... 38 additional bridge rows of - // powers of 3 ..., 65535. (0 is range-checked in 2 rows for a total of 3 lookups. 256 is - // range-checked in one row. 65535 is the max, and the rest are "bridge" values.) An extra row - // is added to pad the u16::MAX value. - let len_16bit = 45 + 1; + // 44 rows are needed for 0, 243, 252, 255, 256, ... 38 additional bridge rows of powers of + // 3 ..., 65535. (0 and 256 are range-checked. 65535 is the max, and the rest are "bridge" + // values.) An extra row is added to pad the u16::MAX value. + let len_16bit = 44 + 1; // The start of the values in the range checker table. let values_start = trace.length() - len_16bit - NUM_RAND_ROWS; // After the padded rows, the first value will be unchanged. assert_eq!(expected, b_range[values_start]); - // We include 2 lookups of 0, so the next value should be multiplied by alpha squared. - expected *= alpha.square(); + // We include 3 lookups of 0. + expected += alpha.inv().mul_base(Felt::new(3)); assert_eq!(expected, b_range[values_start + 1]); - // Then we include our third lookup of 0, so the next value should be multiplied by alpha. - expected *= alpha; - assert_eq!(expected, b_range[values_start + 2]); // Then we have 3 bridge rows between 0 and 255 where the value does not change + assert_eq!(expected, b_range[values_start + 2]); assert_eq!(expected, b_range[values_start + 3]); assert_eq!(expected, b_range[values_start + 4]); - assert_eq!(expected, b_range[values_start + 5]); // Then we include 1 lookup of 256, so it should be multiplied by alpha + 256. - expected *= alpha + Felt::new(256); - assert_eq!(expected, b_range[values_start + 6]); + expected += (alpha - Felt::new(256)).inv(); + assert_eq!(expected, b_range[values_start + 5]); // --- Check the last value of the b_range column is one. ------------------------------------------ @@ -137,16 +93,15 @@ fn b_range_trace_mem() { // The memory section of the chiplets trace starts after the span hash. let memory_start = HASH_CYCLE_LEN; - // 41 rows are needed for 0, 0, 3, 4, ... 36 bridge additional bridge rows of powers of - // 3 ..., 65535. (0 is range-checked in 2 rows for a total of 3 lookups. Four is range - // checked in one row for a total of one lookup. 65535 is the max, and the rest are "bridge" + // 40 rows are needed for 0, 3, 4, ... 36 bridge additional bridge rows of powers of + // 3 ..., 65535. (0 and 4 are both range-checked. 65535 is the max, and the rest are "bridge" // values.) An extra row is added to pad the u16::MAX value. - let len_16bit = 41 + 1; + let len_16bit = 40 + 1; let values_start = trace.length() - len_16bit - NUM_RAND_ROWS; // The value should start at ONE and be unchanged until the memory processor section begins. let mut expected = ONE; - for row in 0..=memory_start { + for row in 0..memory_start { assert_eq!(expected, b_range[row]); } @@ -160,31 +115,33 @@ fn b_range_trace_mem() { let (d0_load, d1_load) = (Felt::new(4), ZERO); // Include the lookups from the `MStoreW` operation at the next row. 
- expected *= ((d0_store + alpha) * (d1_store + alpha)).inv(); + expected -= (alpha - d0_store).inv() + (alpha - d1_store).inv(); assert_eq!(expected, b_range[memory_start + 1]); // Include the lookup from the `MLoadW` operation at the next row. - expected *= ((d0_load + alpha) * (d1_load + alpha)).inv(); + expected -= (alpha - d0_load).inv() + (alpha - d1_load).inv(); assert_eq!(expected, b_range[memory_start + 2]); + // The value should be unchanged until the range checker's lookups are included. + for row in memory_start + 2..=values_start { + assert_eq!(expected, b_range[row]); + } + // --- Check the range checker's lookups. ----------------------------------------------------- - // We include 2 lookups of ZERO in the next row. - expected *= alpha.square(); + // We include 3 lookups of ZERO in the next row. + expected += alpha.inv().mul_base(Felt::new(3)); assert_eq!(expected, b_range[values_start + 1]); - // We include 1 more lookup of ZERO in the next row. - expected *= d0_store + alpha; - assert_eq!(expected, b_range[values_start + 2]); // then we have one bridge row between 0 and 4 where the value does not change. - assert_eq!(expected, b_range[values_start + 3]); + assert_eq!(expected, b_range[values_start + 2]); // We include 1 lookup of 4 in the next row. - expected *= d0_load + alpha; - assert_eq!(expected, b_range[values_start + 4]); + expected += (alpha - d0_load).inv(); + assert_eq!(expected, b_range[values_start + 3]); // --- The value should now be ONE for the rest of the trace. --------------------------------- assert_eq!(expected, ONE); - for i in (values_start + 4)..(b_range.len() - NUM_RAND_ROWS) { + for i in (values_start + 3)..(b_range.len() - NUM_RAND_ROWS) { assert_eq!(ONE, b_range[i]); } }
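
For reference, below is a minimal, self-contained sketch of the LogUp balance that the new `b_range` running-sum column and its constraint enforce, and that the `b_range_trace_stack` / `b_range_trace_mem` tests above check row by row: every multiplicity-weighted term m/(alpha - v) added for a range-checker row cancels a 1/(alpha - x) term subtracted for a stack or memory request, so the bus starts and ends at ONE. The toy 31-bit prime field and helper names here are assumptions for illustration only; they are not Miden's `Felt` or field API.

// Toy prime field and LogUp bus balance check (illustrative only, not Miden code).
const P: u64 = 2_147_483_647; // 2^31 - 1, a small prime modulus for the sketch

fn add(a: u64, b: u64) -> u64 { (a + b) % P }
fn sub(a: u64, b: u64) -> u64 { (a + P - b % P) % P }
fn mul(a: u64, b: u64) -> u64 { (a as u128 * b as u128 % P as u128) as u64 }
fn pow(mut base: u64, mut exp: u64) -> u64 {
    // square-and-multiply exponentiation mod P
    let mut acc = 1;
    while exp > 0 {
        if exp & 1 == 1 { acc = mul(acc, base); }
        base = mul(base, base);
        exp >>= 1;
    }
    acc
}
fn inv(a: u64) -> u64 { pow(a, P - 2) } // multiplicative inverse via Fermat's little theorem

fn main() {
    let alpha = 123_456_789u64; // stand-in for the random challenge drawn by the verifier

    // Lookups requested by the stack/memory: 0 is range-checked three times, 256 once
    // (the same request pattern exercised by the b_range_trace_stack test).
    let requests = [0u64, 0, 0, 256];

    // Range-checker rows as (multiplicity, value); bridge rows carry multiplicity 0.
    let rows = [(3u64, 0u64), (0, 81), (1, 256), (0, 2187), (0, 65535)];

    // Build the running sum the same way b_range is built: add m / (alpha - v) for each
    // range-checker row, subtract 1 / (alpha - x) for each requested lookup.
    let mut b = 1u64; // the boundary value of b_range is ONE
    for (m, v) in rows {
        b = add(b, mul(m, inv(sub(alpha, v))));
    }
    for x in requests {
        b = sub(b, inv(sub(alpha, x)));
    }

    // All requests are matched by range-checker rows, so the bus returns to ONE.
    assert_eq!(b, 1, "LogUp terms must cancel exactly");
    println!("LogUp bus balanced: b = {b}");
}

This mirrors what `build_aux_col_b_range` computes (there the inverses come from a single batch inversion via `get_divisors`) and what the auxiliary constraint in `enforce_b_range` checks after clearing all denominators.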