From de0ca39deda16f4acec72d3a23965d07d14e1ad8 Mon Sep 17 00:00:00 2001 From: Palash Nigam Date: Sat, 12 Oct 2024 16:15:03 +0530 Subject: [PATCH 1/9] CONTRIBUTING.md: Add lld as a pre-requisite for setup Hi, I was going through this project and tried to run the tests locally, but they failed because my machine was missing lld, so I added it as an instruction under the "Install Rust" section. --- CONTRIBUTING.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index add16e0da..d44e0bbd1 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -65,6 +65,10 @@ To contribute to this project, you'll need to have Rust installed on your machin rustup component add rustfmt rustup component add clippy ``` + - You will also need to install [lld](https://lld.llvm.org/), which is required for running tests. For example, on Debian-based systems you can install it using the package manager: + ```bash + apt install lld + ``` If you run into any issues, please refer to the [official Rust documentation](https://www.rust-lang.org/learn/get-started) for troubleshooting and more detailed installation instructions. From 7e751412a3c3026f8ddf6cc635716b5ad8baf833 Mon Sep 17 00:00:00 2001 From: Mehul Mathur Date: Fri, 11 Oct 2024 20:24:14 +0000 Subject: [PATCH 2/9] chore: manual fix clippy::unnecessary_wraps --- crates/proof-of-sql/src/sql/parse/query_context.rs | 2 +- crates/proof-of-sql/src/sql/parse/query_context_builder.rs | 1 + crates/proof-of-sql/src/sql/proof_exprs/comparison_util.rs | 1 + crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs | 1 + crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs | 1 + 5 files changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/proof-of-sql/src/sql/parse/query_context.rs b/crates/proof-of-sql/src/sql/parse/query_context.rs index 0e3b4660e..a248af13f 100644 --- a/crates/proof-of-sql/src/sql/parse/query_context.rs +++ b/crates/proof-of-sql/src/sql/parse/query_context.rs @@ -117,7 +117,7 @@ impl QueryContext { } } - #[allow(clippy::missing_panics_doc)] + #[allow(clippy::missing_panics_doc, clippy::unnecessary_wraps)] pub fn push_aliased_result_expr(&mut self, expr: AliasedResultExpr) -> ConversionResult<()> { assert!(&self.has_visited_group_by, "Group by must be visited first"); self.res_aliased_exprs.push(expr); diff --git a/crates/proof-of-sql/src/sql/parse/query_context_builder.rs b/crates/proof-of-sql/src/sql/parse/query_context_builder.rs index 99ff7b094..e4c3a76d7 100644 --- a/crates/proof-of-sql/src/sql/parse/query_context_builder.rs +++ b/crates/proof-of-sql/src/sql/parse/query_context_builder.rs @@ -95,6 +95,7 @@ impl<'a> QueryContextBuilder<'a> { Ok(self) } + #[allow(clippy::unnecessary_wraps)] pub fn build(self) -> ConversionResult<QueryContext> { Ok(self.context) } diff --git a/crates/proof-of-sql/src/sql/proof_exprs/comparison_util.rs b/crates/proof-of-sql/src/sql/proof_exprs/comparison_util.rs index b95994d9d..ed7794122 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/comparison_util.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/comparison_util.rs @@ -14,6 +14,7 @@ use proof_of_sql_parser::intermediate_ast::BinaryOperator; #[cfg(feature = "rayon")] use rayon::iter::{IndexedParallelIterator, IntoParallelRefMutIterator, ParallelIterator}; +#[allow(clippy::unnecessary_wraps)] fn unchecked_subtract_impl<'a, S: Scalar>( alloc: &'a Bump, lhs: &[S], diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs index 28895df61..4429822d1 100644 ---
a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs @@ -225,6 +225,7 @@ impl ProverEvaluate for FilterExec { } } +#[allow(clippy::unnecessary_wraps)] fn verify_filter<C: Commitment>( builder: &mut VerificationBuilder<C>, alpha: C::Scalar, diff --git a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs index 069aa08d1..ce4379c44 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs @@ -319,6 +319,7 @@ impl ProverEvaluate for GroupByExec { } } +#[allow(clippy::unnecessary_wraps)] fn verify_group_by<C: Commitment>( builder: &mut VerificationBuilder<C>, alpha: C::Scalar, From 45d6c56cda548af8043c0cc4d0536192de8f5e17 Mon Sep 17 00:00:00 2001 From: Mehul Mathur Date: Fri, 11 Oct 2024 20:34:17 +0000 Subject: [PATCH 3/9] chore: manual fix clippy::should_panic_without_expect --- crates/proof-of-sql-parser/src/identifier.rs | 4 ++-- crates/proof-of-sql-parser/src/intermediate_ast_tests.rs | 2 +- .../src/base/database/owned_and_arrow_conversions_test.rs | 2 +- crates/proof-of-sql/src/base/slice_ops/mul_add_assign_test.rs | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/proof-of-sql-parser/src/identifier.rs b/crates/proof-of-sql-parser/src/identifier.rs index d5cae3125..9b60c328d 100644 --- a/crates/proof-of-sql-parser/src/identifier.rs +++ b/crates/proof-of-sql-parser/src/identifier.rs @@ -262,13 +262,13 @@ mod tests { } #[test] - #[should_panic] + #[should_panic(expected = "Long names are not allowed")] fn long_names_panic() { Identifier::new("t".repeat(65)); } #[test] - #[should_panic] + #[should_panic(expected = "Long Unicode names are not allowed")] fn long_unicode_names_panic() { Identifier::new("茶".repeat(22)); } diff --git a/crates/proof-of-sql-parser/src/intermediate_ast_tests.rs b/crates/proof-of-sql-parser/src/intermediate_ast_tests.rs index dbb6805cf..0a2f63762 100644 --- a/crates/proof-of-sql-parser/src/intermediate_ast_tests.rs +++ b/crates/proof-of-sql-parser/src/intermediate_ast_tests.rs @@ -780,7 +780,7 @@ fn we_can_parse_multiple_order_by() { // But due to some lalrpop restriction, we aren't. // This problem will be addressed in a future PR.
#[test] -#[should_panic] +#[should_panic(expected = "lalrpop restriction prevents parsing reserved keywords in order by")] fn we_cannot_parse_order_by_referencing_reserved_keywords_yet() { let ast = "select a as asc from tab order by a asc" .parse::<SelectStatement>() diff --git a/crates/proof-of-sql/src/base/database/owned_and_arrow_conversions_test.rs b/crates/proof-of-sql/src/base/database/owned_and_arrow_conversions_test.rs index f400a3ae2..974932ced 100644 --- a/crates/proof-of-sql/src/base/database/owned_and_arrow_conversions_test.rs +++ b/crates/proof-of-sql/src/base/database/owned_and_arrow_conversions_test.rs @@ -142,7 +142,7 @@ fn we_cannot_convert_a_record_batch_if_it_has_repeated_column_names() { } #[test] -#[should_panic] +#[should_panic(expected = "Converting an owned table with scalar column causes panic")] fn we_panic_when_converting_an_owned_table_with_a_scalar_column() { let owned_table = owned_table::<Curve25519Scalar>([scalar("a", [0; 0])]); let _ = RecordBatch::try_from(owned_table); diff --git a/crates/proof-of-sql/src/base/slice_ops/mul_add_assign_test.rs b/crates/proof-of-sql/src/base/slice_ops/mul_add_assign_test.rs index 8c164f0f9..156c92094 100644 --- a/crates/proof-of-sql/src/base/slice_ops/mul_add_assign_test.rs +++ b/crates/proof-of-sql/src/base/slice_ops/mul_add_assign_test.rs @@ -22,7 +22,7 @@ fn test_mul_add_assign_uneven() { /// test [`mul_add_assign`] with uneven panics when len(a) < len(b) #[test] -#[should_panic] +#[should_panic(expected = "Length of vector a must not be shorter than vector b")] fn test_mul_add_assign_uneven_panic() { let mut a = vec![1, 2, 3, 4]; let b = vec![2, 3, 4, 5, 6]; From 876906c1b6166f194f3a70e625db1edc6fa1d637 Mon Sep 17 00:00:00 2001 From: Mehul Mathur Date: Sun, 13 Oct 2024 11:10:09 +0000 Subject: [PATCH 4/9] chore: manual fix clippy::needless_pass_by_value --- .../column_commitment_metadata_map.rs | 30 ++++++++--------- .../owned_and_arrow_conversions_test.rs | 32 +++++++++---------- .../dory/dory_inner_product.rs | 2 +- .../dory/extended_dory_inner_product.rs | 2 +- .../proof_primitive/dory/scalar_product.rs | 2 +- .../src/sql/parse/enriched_expr.rs | 6 ++-- .../src/sql/parse/query_context_builder.rs | 4 +-- .../proof-of-sql/src/sql/parse/query_expr.rs | 4 +-- .../src/sql/parse/query_expr_tests.rs | 16 +++++----- .../src/sql/proof/query_proof_test.rs | 10 +++--- .../src/sql/proof/verifiable_query_result.rs | 8 ++--- .../src/sql/proof/verification_builder.rs | 4 +-- .../sql/proof/verification_builder_test.rs | 4 +-- .../src/sql/proof_exprs/and_expr.rs | 2 +- .../src/sql/proof_exprs/equals_expr.rs | 4 +-- .../src/sql/proof_exprs/multiply_expr.rs | 2 +- .../src/sql/proof_exprs/or_expr.rs | 2 +- .../src/sql/proof_exprs/sign_expr.rs | 4 +-- .../src/sql/proof_plans/filter_exec.rs | 6 ++-- .../src/sql/proof_plans/group_by_exec.rs | 6 ++-- crates/proof-of-sql/src/tests/sol_test.rs | 6 ++-- .../proof-of-sql/src/tests/sol_test_util.rs | 2 +- 22 files changed, 79 insertions(+), 79 deletions(-) diff --git a/crates/proof-of-sql/src/base/commitment/column_commitment_metadata_map.rs b/crates/proof-of-sql/src/base/commitment/column_commitment_metadata_map.rs index 5b43a98ed..572885b8b 100644 --- a/crates/proof-of-sql/src/base/commitment/column_commitment_metadata_map.rs +++ b/crates/proof-of-sql/src/base/commitment/column_commitment_metadata_map.rs @@ -146,7 +146,7 @@ mod tests { use itertools::Itertools; fn metadata_map_from_owned_table( - table: OwnedTable<Curve25519Scalar>, + table: &OwnedTable<Curve25519Scalar>, ) -> ColumnCommitmentMetadataMap { let (identifiers, columns): (Vec<&Identifier>, Vec<CommittableColumn>)
= table .inner_table() @@ -171,7 +171,7 @@ mod tests { scalar("scalar_column", [1000, 2000, -1000, 0]), ]); - let metadata_map = metadata_map_from_owned_table(table); + let metadata_map = metadata_map_from_owned_table(&table); assert_eq!(metadata_map.len(), 4); @@ -214,7 +214,7 @@ mod tests { varchar("varchar_column", ["Lorem", "ipsum"]), scalar("scalar_column", [1000, 2000]), ]); - let metadata_a = metadata_map_from_owned_table(table_a); + let metadata_a = metadata_map_from_owned_table(&table_a); let table_b = owned_table([ bigint("bigint_column", [-5, 0, 10]), @@ -222,7 +222,7 @@ mod tests { varchar("varchar_column", ["dolor", "sit", "amet"]), scalar("scalar_column", [-1000, 0, -2000]), ]); - let metadata_b = metadata_map_from_owned_table(table_b); + let metadata_b = metadata_map_from_owned_table(&table_b); let table_c = owned_table([ bigint("bigint_column", [1, 5, -5, 0, 10]), @@ -230,7 +230,7 @@ mod tests { varchar("varchar_column", ["Lorem", "ipsum", "dolor", "sit", "amet"]), scalar("scalar_column", [1000, 2000, -1000, 0, -2000]), ]); - let metadata_c = metadata_map_from_owned_table(table_c); + let metadata_c = metadata_map_from_owned_table(&table_c); assert_eq!(metadata_a.try_union(metadata_b).unwrap(), metadata_c); } @@ -242,7 +242,7 @@ mod tests { varchar("varchar_column", ["Lorem", "ipsum"]), scalar("scalar_column", [1000, 2000]), ]); - let metadata_a = metadata_map_from_owned_table(table_a); + let metadata_a = metadata_map_from_owned_table(&table_a); let table_b = owned_table([ bigint("bigint_column", [1, 5, -5, 0, 10]), @@ -250,7 +250,7 @@ mod tests { varchar("varchar_column", ["Lorem", "ipsum", "dolor", "sit", "amet"]), scalar("scalar_column", [1000, 2000, -1000, 0, -2000]), ]); - let metadata_b = metadata_map_from_owned_table(table_b); + let metadata_b = metadata_map_from_owned_table(&table_b); let b_difference_a = metadata_b.try_difference(metadata_a.clone()).unwrap(); @@ -297,13 +297,13 @@ mod tests { varchar("varchar_column", ["Lorem", "ipsum", "dolor", "sit", "amet"]), scalar("scalar_column", [1000, 2000, -1000, 0, -2000]), ]); - let metadata_a = metadata_map_from_owned_table(table_a); + let metadata_a = metadata_map_from_owned_table(&table_a); let table_b = owned_table([ bigint("bigint_column", [1, 5, -5, 0, 10]), varchar("varchar_column", ["Lorem", "ipsum", "dolor", "sit", "amet"]), ]); - let metadata_b = metadata_map_from_owned_table(table_b); + let metadata_b = metadata_map_from_owned_table(&table_b); assert!(matches!( metadata_a.clone().try_union(metadata_b.clone()), @@ -337,25 +337,25 @@ mod tests { let strings = ["Lorem", "ipsum", "dolor", "sit"]; let ab_ii_metadata = - metadata_map_from_owned_table(owned_table([bigint(id_a, ints), bigint(id_b, ints)])); + metadata_map_from_owned_table(&owned_table([bigint(id_a, ints), bigint(id_b, ints)])); - let ab_iv_metadata = metadata_map_from_owned_table(owned_table([ + let ab_iv_metadata = metadata_map_from_owned_table(&owned_table([ bigint(id_a, ints), varchar(id_b, strings), ])); - let ab_vi_metadata = metadata_map_from_owned_table(owned_table([ + let ab_vi_metadata = metadata_map_from_owned_table(&owned_table([ varchar(id_a, strings), bigint(id_b, ints), ])); let ad_ii_metadata = - metadata_map_from_owned_table(owned_table([bigint(id_a, ints), bigint(id_d, ints)])); + metadata_map_from_owned_table(&owned_table([bigint(id_a, ints), bigint(id_d, ints)])); let cb_ii_metadata = - metadata_map_from_owned_table(owned_table([bigint(id_c, ints), bigint(id_b, ints)])); + metadata_map_from_owned_table(&owned_table([bigint(id_c, 
ints), bigint(id_b, ints)])); - let cd_vv_metadata = metadata_map_from_owned_table(owned_table([ + let cd_vv_metadata = metadata_map_from_owned_table(&owned_table([ varchar(id_c, strings), varchar(id_d, strings), ])); diff --git a/crates/proof-of-sql/src/base/database/owned_and_arrow_conversions_test.rs b/crates/proof-of-sql/src/base/database/owned_and_arrow_conversions_test.rs index 974932ced..21902ddfb 100644 --- a/crates/proof-of-sql/src/base/database/owned_and_arrow_conversions_test.rs +++ b/crates/proof-of-sql/src/base/database/owned_and_arrow_conversions_test.rs @@ -15,30 +15,30 @@ use arrow::{ }; fn we_can_convert_between_owned_column_and_array_ref_impl( - owned_column: OwnedColumn, + owned_column: &OwnedColumn, array_ref: ArrayRef, ) { let ic_to_ar = ArrayRef::from(owned_column.clone()); let ar_to_ic = OwnedColumn::try_from(array_ref.clone()).unwrap(); assert!(ic_to_ar == array_ref); - assert_eq!(owned_column, ar_to_ic); + assert_eq!(*owned_column, ar_to_ic); } fn we_can_convert_between_boolean_owned_column_and_array_ref_impl(data: Vec) { we_can_convert_between_owned_column_and_array_ref_impl( - OwnedColumn::::Boolean(data.clone()), + &OwnedColumn::::Boolean(data.clone()), Arc::new(BooleanArray::from(data)), ); } fn we_can_convert_between_bigint_owned_column_and_array_ref_impl(data: Vec) { we_can_convert_between_owned_column_and_array_ref_impl( - OwnedColumn::::BigInt(data.clone()), + &OwnedColumn::::BigInt(data.clone()), Arc::new(Int64Array::from(data)), ); } fn we_can_convert_between_int128_owned_column_and_array_ref_impl(data: Vec) { we_can_convert_between_owned_column_and_array_ref_impl( - OwnedColumn::::Int128(data.clone()), + &OwnedColumn::::Int128(data.clone()), Arc::new( Decimal128Array::from(data) .with_precision_and_scale(38, 0) @@ -48,7 +48,7 @@ fn we_can_convert_between_int128_owned_column_and_array_ref_impl(data: Vec } fn we_can_convert_between_varchar_owned_column_and_array_ref_impl(data: Vec) { we_can_convert_between_owned_column_and_array_ref_impl( - OwnedColumn::::VarChar(data.clone()), + &OwnedColumn::::VarChar(data.clone()), Arc::new(StringArray::from(data)), ); } @@ -81,29 +81,29 @@ fn we_get_an_unsupported_type_error_when_trying_to_convert_from_a_float32_array_ } fn we_can_convert_between_owned_table_and_record_batch_impl( - owned_table: OwnedTable, - record_batch: RecordBatch, + owned_table: &OwnedTable, + record_batch: &RecordBatch, ) { let it_to_rb = RecordBatch::try_from(owned_table.clone()).unwrap(); let rb_to_it = OwnedTable::try_from(record_batch.clone()).unwrap(); - assert_eq!(it_to_rb, record_batch); - assert_eq!(rb_to_it, owned_table); + assert_eq!(it_to_rb, *record_batch); + assert_eq!(rb_to_it, *owned_table); } #[test] fn we_can_convert_between_owned_table_and_record_batch() { we_can_convert_between_owned_table_and_record_batch_impl( - OwnedTable::::try_new(IndexMap::default()).unwrap(), - RecordBatch::new_empty(Arc::new(Schema::empty())), + &OwnedTable::::try_new(IndexMap::default()).unwrap(), + &RecordBatch::new_empty(Arc::new(Schema::empty())), ); we_can_convert_between_owned_table_and_record_batch_impl( - owned_table([ + &owned_table([ bigint("int64", [0; 0]), int128("int128", [0; 0]), varchar("string", ["0"; 0]), boolean("boolean", [true; 0]), ]), - record_batch!( + &record_batch!( "int64" => [0_i64; 0], "int128" => [0_i128; 0], "string" => ["0"; 0], @@ -111,7 +111,7 @@ fn we_can_convert_between_owned_table_and_record_batch() { ), ); we_can_convert_between_owned_table_and_record_batch_impl( - owned_table([ + &owned_table([ bigint("int64", [0, 
1, 2, 3, 4, 5, 6, i64::MIN, i64::MAX]), int128("int128", [0, 1, 2, 3, 4, 5, 6, i128::MIN, i128::MAX]), varchar("string", ["0", "1", "2", "3", "4", "5", "6", "7", "8"]), @@ -120,7 +120,7 @@ [true, false, true, false, true, false, true, false, true], ), ]), - record_batch!( + &record_batch!( "int64" => [0_i64, 1, 2, 3, 4, 5, 6, i64::MIN, i64::MAX], "int128" => [0_i128, 1, 2, 3, 4, 5, 6, i128::MIN, i128::MAX], "string" => ["0", "1", "2", "3", "4", "5", "6", "7", "8"], diff --git a/crates/proof-of-sql/src/proof_primitive/dory/dory_inner_product.rs b/crates/proof-of-sql/src/proof_primitive/dory/dory_inner_product.rs index fd385ef00..ca2dca4e8 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/dory_inner_product.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/dory_inner_product.rs @@ -18,7 +18,7 @@ pub fn dory_inner_product_prove( for _ in 0..nu { dory_reduce_prove(messages, transcript, &mut state, setup); } - scalar_product_prove(messages, transcript, state); + scalar_product_prove(messages, transcript, &state); } /// This is the verifier side of the Dory-Innerproduct algorithm in section 3.3 of <https://eprint.iacr.org/2020/1274.pdf>. diff --git a/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_inner_product.rs b/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_inner_product.rs index 8c1895773..bd7e0e6ac 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_inner_product.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/extended_dory_inner_product.rs @@ -25,7 +25,7 @@ pub fn extended_dory_inner_product_prove( extended_dory_reduce_prove(messages, transcript, &mut state, setup); } let base_state = fold_scalars_0_prove(messages, transcript, state, setup); - scalar_product_prove(messages, transcript, base_state); + scalar_product_prove(messages, transcript, &base_state); } /// This is the verifier side of the extended Dory-Innerproduct algorithm in section 4.3 of https://eprint.iacr.org/2020/1274.pdf. diff --git a/crates/proof-of-sql/src/proof_primitive/dory/scalar_product.rs b/crates/proof-of-sql/src/proof_primitive/dory/scalar_product.rs index 583781120..a49a2a7d3 100644 --- a/crates/proof-of-sql/src/proof_primitive/dory/scalar_product.rs +++ b/crates/proof-of-sql/src/proof_primitive/dory/scalar_product.rs @@ -7,7 +7,7 @@ use crate::base::proof::Transcript; pub fn scalar_product_prove( messages: &mut DoryMessages, transcript: &mut impl Transcript, - state: ProverState, + state: &ProverState, ) { // See section 3.1 of https://eprint.iacr.org/2020/1274.pdf. // diff --git a/crates/proof-of-sql/src/sql/parse/enriched_expr.rs b/crates/proof-of-sql/src/sql/parse/enriched_expr.rs index 5078cb5a4..4e2002894 100644 --- a/crates/proof-of-sql/src/sql/parse/enriched_expr.rs +++ b/crates/proof-of-sql/src/sql/parse/enriched_expr.rs @@ -28,12 +28,12 @@ impl EnrichedExpr { /// and the `residue_expression` will contain the remaining expression. pub fn new( expression: AliasedResultExpr, - column_mapping: IndexMap<Identifier, ColumnRef>, + column_mapping: &IndexMap<Identifier, ColumnRef>, ) -> Self { // TODO: Using new_agg (ironically) disables aggregations in `QueryExpr` for now. // Re-enable aggregations when we add `GroupByExec` generalizations.
let res_dyn_proof_expr = - DynProofExprBuilder::new_agg(&column_mapping).build(&expression.expr); + DynProofExprBuilder::new_agg(column_mapping).build(&expression.expr); match res_dyn_proof_expr { Ok(dyn_proof_expr) => { let alias = expression.alias; @@ -50,7 +50,7 @@ impl EnrichedExpr { dyn_proof_expr: None, }, } - } + } /// Get the alias of the `EnrichedExpr`. /// diff --git a/crates/proof-of-sql/src/sql/parse/query_context_builder.rs b/crates/proof-of-sql/src/sql/parse/query_context_builder.rs index e4c3a76d7..7afa45a2f 100644 --- a/crates/proof-of-sql/src/sql/parse/query_context_builder.rs +++ b/crates/proof-of-sql/src/sql/parse/query_context_builder.rs @@ -32,7 +32,7 @@ impl<'a> QueryContextBuilder<'a> { #[allow(clippy::vec_box, clippy::missing_panics_doc)] pub fn visit_table_expr( mut self, - table_expr: Vec<Box<TableExpression>>, + table_expr: &[Box<TableExpression>], default_schema: Identifier, ) -> Self { assert_eq!(table_expr.len(), 1); @@ -45,7 +45,7 @@ impl<'a> QueryContextBuilder<'a> { } } self - } + } pub fn visit_where_expr( mut self, diff --git a/crates/proof-of-sql/src/sql/parse/query_expr.rs b/crates/proof-of-sql/src/sql/parse/query_expr.rs index 877e633a1..91fb64135 100644 --- a/crates/proof-of-sql/src/sql/parse/query_expr.rs +++ b/crates/proof-of-sql/src/sql/parse/query_expr.rs @@ -56,7 +56,7 @@ impl QueryExpr { where_expr, group_by, } => QueryContextBuilder::new(schema_accessor) - .visit_table_expr(from, default_schema) + .visit_table_expr(&from, default_schema) .visit_group_by_exprs(group_by)? .visit_result_exprs(result_exprs)? .visit_where_expr(where_expr)? @@ -130,7 +130,7 @@ impl QueryExpr { let column_mapping = context.get_column_mapping(); let enriched_exprs = result_aliased_exprs .iter() - .map(|aliased_expr| EnrichedExpr::new(aliased_expr.clone(), column_mapping.clone())) + .map(|aliased_expr| EnrichedExpr::new(aliased_expr.clone(), &column_mapping)) .collect::<Vec<_>>(); let select_exprs = enriched_exprs .iter() diff --git a/crates/proof-of-sql/src/sql/parse/query_expr_tests.rs b/crates/proof-of-sql/src/sql/parse/query_expr_tests.rs index c0dfca566..5a0e17504 100644 --- a/crates/proof-of-sql/src/sql/parse/query_expr_tests.rs +++ b/crates/proof-of-sql/src/sql/parse/query_expr_tests.rs @@ -1940,30 +1940,30 @@ fn query_expr_for_test_table(sql_text: &str) -> QueryExpr { } /// Serializes and deserializes [`QueryExpr`] with flexbuffers and asserts that it remains the same.
-fn assert_query_expr_serializes_to_and_from_flex_buffers(query_expr: QueryExpr) { - let serialized = flexbuffers::to_vec(&query_expr).unwrap(); +fn assert_query_expr_serializes_to_and_from_flex_buffers(query_expr: &QueryExpr) { + let serialized = flexbuffers::to_vec(query_expr).unwrap(); let deserialized: QueryExpr = flexbuffers::from_slice(serialized.as_slice()).unwrap(); - assert_eq!(deserialized, query_expr); + assert_eq!(deserialized, *query_expr); } #[test] fn basic_query_expr_can_serialize_to_and_from_flex_buffers() { let query_expr = query_expr_for_test_table("select * from table"); - assert_query_expr_serializes_to_and_from_flex_buffers(query_expr); + assert_query_expr_serializes_to_and_from_flex_buffers(&query_expr); } #[test] fn query_expr_with_selected_columns_can_serialize_to_and_from_flex_buffers() { let query_expr = query_expr_for_test_table("select bigint_column, varchar_column, int128_column from table"); - assert_query_expr_serializes_to_and_from_flex_buffers(query_expr); + assert_query_expr_serializes_to_and_from_flex_buffers(&query_expr); } #[test] fn query_expr_with_aggregation_can_serialize_to_and_from_flex_buffers() { let query_expr = query_expr_for_test_table("select count(*) from table group by bigint_column"); - assert_query_expr_serializes_to_and_from_flex_buffers(query_expr); + assert_query_expr_serializes_to_and_from_flex_buffers(&query_expr); } #[test] @@ -1971,7 +1971,7 @@ fn query_expr_with_filters_can_serialize_to_and_from_flex_buffers() { let query_expr = query_expr_for_test_table( "select * from table where bigint_column != 5 and varchar_column = 'example' or int128_column = 10", ); - assert_query_expr_serializes_to_and_from_flex_buffers(query_expr); + assert_query_expr_serializes_to_and_from_flex_buffers(&query_expr); } #[test] @@ -1979,7 +1979,7 @@ fn query_expr_with_order_and_limits_can_serialize_to_and_from_flex_buffers() { let query_expr = query_expr_for_test_table( "select * from table order by int128_column desc limit 1 offset 1", ); - assert_query_expr_serializes_to_and_from_flex_buffers(query_expr); + assert_query_expr_serializes_to_and_from_flex_buffers(&query_expr); } #[test] diff --git a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs index 684cde01e..a4fa8a65a 100644 --- a/crates/proof-of-sql/src/sql/proof/query_proof_test.rs +++ b/crates/proof-of-sql/src/sql/proof/query_proof_test.rs @@ -94,7 +94,7 @@ impl ProofPlan for TrivialTestProofPlan { ) -> Result<Vec<C::Scalar>, ProofError> { assert_eq!(builder.consume_intermediate_mle(), C::Scalar::ZERO); builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::ZeroSum, + &SumcheckSubpolynomialType::ZeroSum, C::Scalar::from(self.evaluation), ); Ok(vec![C::Scalar::ZERO]) @@ -267,7 +267,7 @@ impl ProofPlan for SquareTestProofPlan { let x_eval = builder.consume_anchored_mle(x_commit); let res_eval = builder.consume_intermediate_mle(); builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, res_eval - x_eval * x_eval, ); Ok(vec![res_eval]) @@ -464,13 +464,13 @@ impl ProofPlan for DoubleSquareTestProofPlan { // poly1 builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, z_eval - x_eval * x_eval, ); // poly2 builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, res_eval - z_eval * z_eval, ); Ok(vec![res_eval]) @@
-666,7 +666,7 @@ impl ProofPlan for ChallengeTestProofPlan { let x_eval = builder.consume_anchored_mle(x_commit); let res_eval = builder.consume_intermediate_mle(); builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, alpha * res_eval - alpha * x_eval * x_eval, ); Ok(vec![res_eval]) diff --git a/crates/proof-of-sql/src/sql/proof/verifiable_query_result.rs b/crates/proof-of-sql/src/sql/proof/verifiable_query_result.rs index 78282967d..736bb737f 100644 --- a/crates/proof-of-sql/src/sql/proof/verifiable_query_result.rs +++ b/crates/proof-of-sql/src/sql/proof/verifiable_query_result.rs @@ -7,7 +7,7 @@ use crate::base::{ proof::ProofError, scalar::Scalar, }; -use alloc::{vec, vec::Vec}; +use alloc::vec; use serde::{Deserialize, Serialize}; /// The result of an sql query along with a proof that the query is valid. The @@ -133,7 +133,7 @@ impl VerifiableQueryResult { let result_fields = expr.get_column_result_fields(); - return make_empty_query_result(result_fields); + return make_empty_query_result(&result_fields); } if self.provable_result.is_none() || self.proof.is_none() { @@ -150,7 +150,7 @@ impl VerifiableQueryResult { } } -fn make_empty_query_result<S: Scalar>(result_fields: Vec<ColumnField>) -> QueryResult<S> { +fn make_empty_query_result<S: Scalar>(result_fields: &[ColumnField]) -> QueryResult<S> { let table = OwnedTable::try_new( result_fields .iter() @@ -179,4 +179,4 @@ fn make_empty_query_result(result_fields: Vec) -> QueryR table, verification_hash: Default::default(), }) -} +} \ No newline at end of file diff --git a/crates/proof-of-sql/src/sql/proof/verification_builder.rs b/crates/proof-of-sql/src/sql/proof/verification_builder.rs index e60be0d16..495c764a8 100644 --- a/crates/proof-of-sql/src/sql/proof/verification_builder.rs +++ b/crates/proof-of-sql/src/sql/proof/verification_builder.rs @@ -103,7 +103,7 @@ impl<'a, C: Commitment> VerificationBuilder<'a, C> { /// Produce the evaluation of a subpolynomial used in sumcheck pub fn produce_sumcheck_subpolynomial_evaluation( &mut self, - subpolynomial_type: SumcheckSubpolynomialType, + subpolynomial_type: &SumcheckSubpolynomialType, eval: C::Scalar, ) { self.sumcheck_evaluation += self.subpolynomial_multipliers[self.produced_subpolynomials] @@ -114,7 +114,7 @@ SumcheckSubpolynomialType::ZeroSum => eval, }; self.produced_subpolynomials += 1; - } + } #[allow( clippy::missing_panics_doc, diff --git a/crates/proof-of-sql/src/sql/proof/verification_builder_test.rs b/crates/proof-of-sql/src/sql/proof/verification_builder_test.rs index 5d850d7ea..515d26972 100644 --- a/crates/proof-of-sql/src/sql/proof/verification_builder_test.rs +++ b/crates/proof-of-sql/src/sql/proof/verification_builder_test.rs @@ -46,11 +46,11 @@ fn we_build_up_a_sumcheck_polynomial_evaluation_from_subpolynomial_evaluations() Vec::new(), ); builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::ZeroSum, + &SumcheckSubpolynomialType::ZeroSum, Curve25519Scalar::from(2u64), ); builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::ZeroSum, + &SumcheckSubpolynomialType::ZeroSum, Curve25519Scalar::from(3u64), ); let expected_sumcheck_evaluation = subpolynomial_multipliers[0] * Curve25519Scalar::from(2u64) diff --git a/crates/proof-of-sql/src/sql/proof_exprs/and_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/and_expr.rs index d1166733d..48eff8997 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/and_expr.rs +++
b/crates/proof-of-sql/src/sql/proof_exprs/and_expr.rs @@ -99,7 +99,7 @@ impl ProofExpr for AndExpr { // subpolynomial: lhs_and_rhs - lhs * rhs builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, lhs_and_rhs - lhs * rhs, ); diff --git a/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs index 58cfa6155..5805898b1 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/equals_expr.rs @@ -156,13 +156,13 @@ pub fn verifier_evaluate_equals_zero( // subpolynomial: selection * lhs builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, selection_eval * lhs_eval, ); // subpolynomial: selection_not - lhs * lhs_pseudo_inv builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, selection_not_eval - lhs_eval * lhs_pseudo_inv_eval, ); diff --git a/crates/proof-of-sql/src/sql/proof_exprs/multiply_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/multiply_expr.rs index 9e3dad92e..7392c1ca6 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/multiply_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/multiply_expr.rs @@ -107,7 +107,7 @@ impl ProofExpr for MultiplyExpr { // subpolynomial: lhs_times_rhs - lhs * rhs builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, lhs_times_rhs - lhs * rhs, ); diff --git a/crates/proof-of-sql/src/sql/proof_exprs/or_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/or_expr.rs index e31c2a9c9..561722232 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/or_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/or_expr.rs @@ -141,7 +141,7 @@ pub fn verifier_evaluate_or( // subpolynomial: lhs_and_rhs - lhs * rhs builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, lhs_and_rhs - *lhs * *rhs, ); diff --git a/crates/proof-of-sql/src/sql/proof_exprs/sign_expr.rs b/crates/proof-of-sql/src/sql/proof_exprs/sign_expr.rs index 5dc668972..3fc36ea15 100644 --- a/crates/proof-of-sql/src/sql/proof_exprs/sign_expr.rs +++ b/crates/proof-of-sql/src/sql/proof_exprs/sign_expr.rs @@ -195,7 +195,7 @@ fn verify_bits_are_binary( ) { for bit_eval in bit_evals { builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, *bit_eval - *bit_eval * *bit_eval, ); } @@ -260,5 +260,5 @@ fn verify_bit_decomposition( eval -= C::Scalar::from(mult) * sign_eval * bit_eval; vary_index += 1; }); - builder.produce_sumcheck_subpolynomial_evaluation(SumcheckSubpolynomialType::Identity, eval); + builder.produce_sumcheck_subpolynomial_evaluation(&SumcheckSubpolynomialType::Identity, eval); } diff --git a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs index 4429822d1..4259d3d88 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs @@ -244,19 +244,19 @@ fn verify_filter( // sum c_star * s - d_star = 0 builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::ZeroSum, + &SumcheckSubpolynomialType::ZeroSum, c_star_eval * s_eval - d_star_eval, ); // c_fold * c_star - 1 = 0 
builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, c_fold_eval * c_star_eval - one_eval, ); // d_bar_fold * d_star - chi = 0 builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, d_bar_fold_eval * d_star_eval - chi_eval, ); diff --git a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs index ce4379c44..0a43da82f 100644 --- a/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs +++ b/crates/proof-of-sql/src/sql/proof_plans/group_by_exec.rs @@ -343,19 +343,19 @@ fn verify_group_by( // sum g_in_star * sel_in * sum_in_fold - g_out_star * sum_out_bar_fold = 0 builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::ZeroSum, + &SumcheckSubpolynomialType::ZeroSum, g_in_star_eval * sel_in_eval * sum_in_fold_eval - g_out_star_eval * sum_out_bar_fold_eval, ); // g_in_star * g_in_fold - 1 = 0 builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, g_in_star_eval * g_in_fold_eval - one_eval, ); // g_out_star * g_out_bar_fold - 1 = 0 builder.produce_sumcheck_subpolynomial_evaluation( - SumcheckSubpolynomialType::Identity, + &SumcheckSubpolynomialType::Identity, g_out_star_eval * g_out_bar_fold_eval - one_eval, ); diff --git a/crates/proof-of-sql/src/tests/sol_test.rs b/crates/proof-of-sql/src/tests/sol_test.rs index 1e00ffe2f..6ba7ea6bf 100644 --- a/crates/proof-of-sql/src/tests/sol_test.rs +++ b/crates/proof-of-sql/src/tests/sol_test.rs @@ -8,7 +8,7 @@ fn we_can_run_solidity_script_from_rust() { "./sol_src/tests/TestScript.t.sol", "rustTestWeCanThrowErrorDependingOnParameter", ) - .arg(U256::from(1234)) + .arg(&U256::from(1234)) .execute() .unwrap(); @@ -17,7 +17,7 @@ fn we_can_run_solidity_script_from_rust() { "./sol_src/tests/TestScript.t.sol", "rustTestWeCanThrowErrorDependingOnParameter", ) - .arg(U256::from(0)) + .arg(&U256::from(0)) .execute(), Err(ForgeScriptError::SolidityError { .. }) )); @@ -33,7 +33,7 @@ fn we_can_pass_custom_struct_into_solidity_from_rust() { "./sol_src/tests/TestScript.t.sol", "rustTestWeCanAcceptCustomStructAsEncodedBytes", ) - .arg(arg) + .arg(&arg) .execute() .unwrap(); } diff --git a/crates/proof-of-sql/src/tests/sol_test_util.rs b/crates/proof-of-sql/src/tests/sol_test_util.rs index 2734696f2..0a8a5f7e2 100644 --- a/crates/proof-of-sql/src/tests/sol_test_util.rs +++ b/crates/proof-of-sql/src/tests/sol_test_util.rs @@ -28,7 +28,7 @@ impl ForgeScript { Self { command } } /// Adds an argument to pass to the script. Only one argument can be passed per use. 
- pub fn arg(&mut self, arg: impl SolValue) -> &mut Self { + pub fn arg(&mut self, arg: &impl SolValue) -> &mut Self { self.command.arg(Bytes::from(arg.abi_encode()).to_string()); self } From aec76f71026e7e8cd9a9a12bdef9d482e6f62e04 Mon Sep 17 00:00:00 2001 From: Mehul Mathur Date: Sun, 13 Oct 2024 11:10:49 +0000 Subject: [PATCH 5/9] chore: added deny lints to toml file --- Cargo.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Cargo.toml b/Cargo.toml index cb9baf16b..6c23daf15 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -101,3 +101,6 @@ match_wildcard_for_single_variants = "deny" match_bool = "deny" manual_assert = "deny" trivially_copy_pass_by_ref = "deny" +unnecessary_wraps = "deny" +should_panic_without_expect = "deny" +needless_pass_by_value = "deny" From 74d914c0e58f3e5f1d8e4880a690903a990136dc Mon Sep 17 00:00:00 2001 From: Mehul Mathur Date: Sun, 13 Oct 2024 11:11:21 +0000 Subject: [PATCH 6/9] chore: run cargo fmt --- crates/proof-of-sql/src/sql/parse/enriched_expr.rs | 2 +- crates/proof-of-sql/src/sql/parse/query_context_builder.rs | 2 +- crates/proof-of-sql/src/sql/proof/verifiable_query_result.rs | 2 +- crates/proof-of-sql/src/sql/proof/verification_builder.rs | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/proof-of-sql/src/sql/parse/enriched_expr.rs b/crates/proof-of-sql/src/sql/parse/enriched_expr.rs index 4e2002894..4fa7cf0c1 100644 --- a/crates/proof-of-sql/src/sql/parse/enriched_expr.rs +++ b/crates/proof-of-sql/src/sql/parse/enriched_expr.rs @@ -50,7 +50,7 @@ impl EnrichedExpr { dyn_proof_expr: None, }, } - } + } /// Get the alias of the `EnrichedExpr`. /// diff --git a/crates/proof-of-sql/src/sql/parse/query_context_builder.rs b/crates/proof-of-sql/src/sql/parse/query_context_builder.rs index 7afa45a2f..c89ae59fa 100644 --- a/crates/proof-of-sql/src/sql/parse/query_context_builder.rs +++ b/crates/proof-of-sql/src/sql/parse/query_context_builder.rs @@ -45,7 +45,7 @@ impl<'a> QueryContextBuilder<'a> { } } self - } + } pub fn visit_where_expr( mut self, diff --git a/crates/proof-of-sql/src/sql/proof/verifiable_query_result.rs b/crates/proof-of-sql/src/sql/proof/verifiable_query_result.rs index 736bb737f..f65ef23b4 100644 --- a/crates/proof-of-sql/src/sql/proof/verifiable_query_result.rs +++ b/crates/proof-of-sql/src/sql/proof/verifiable_query_result.rs @@ -179,4 +179,4 @@ fn make_empty_query_result(result_fields: &[ColumnField]) -> QueryRes table, verification_hash: Default::default(), }) -} \ No newline at end of file +} diff --git a/crates/proof-of-sql/src/sql/proof/verification_builder.rs b/crates/proof-of-sql/src/sql/proof/verification_builder.rs index 495c764a8..8a3791c69 100644 --- a/crates/proof-of-sql/src/sql/proof/verification_builder.rs +++ b/crates/proof-of-sql/src/sql/proof/verification_builder.rs @@ -114,7 +114,7 @@ impl<'a, C: Commitment> VerificationBuilder<'a, C> { SumcheckSubpolynomialType::ZeroSum => eval, }; self.produced_subpolynomials += 1; - } + } #[allow( clippy::missing_panics_doc, From 2e82d360b830b59f49cf0f9610e5b26e551d3300 Mon Sep 17 00:00:00 2001 From: Mehul Mathur Date: Sun, 13 Oct 2024 12:05:21 +0000 Subject: [PATCH 7/9] chore: fixed tests --- crates/proof-of-sql-parser/src/identifier.rs | 4 ++-- crates/proof-of-sql-parser/src/intermediate_ast_tests.rs | 3 ++- .../src/base/database/owned_and_arrow_conversions_test.rs | 2 +- crates/proof-of-sql/src/base/slice_ops/mul_add_assign.rs | 2 +- crates/proof-of-sql/src/base/slice_ops/mul_add_assign_test.rs | 4 +++- 5 files changed, 9 insertions(+), 6 deletions(-) 
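Patch 7 below replaces several of the `expected` strings introduced in patch 3 with the messages the code actually panics with. This works because `#[should_panic(expected = "...")]` performs a substring match against the panic message rather than an exact comparison, so a test stays green as long as the expected text is contained in the real message. A minimal sketch of that behavior (hypothetical test, not part of this series):

```rust
// Passes: the expected string only needs to be a substring of the
// actual panic message, so the extra "Identifier too long:" prefix
// does not matter.
#[test]
#[should_panic(expected = "CapacityError: insufficient capacity")]
fn panics_with_expected_substring() {
    panic!("Identifier too long: CapacityError: insufficient capacity");
}
```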
diff --git a/crates/proof-of-sql-parser/src/identifier.rs b/crates/proof-of-sql-parser/src/identifier.rs index 9b60c328d..b11df6862 100644 --- a/crates/proof-of-sql-parser/src/identifier.rs +++ b/crates/proof-of-sql-parser/src/identifier.rs @@ -262,13 +262,13 @@ mod tests { } #[test] - #[should_panic(expected = "Long names are not allowed")] + #[should_panic(expected = "Identifier too long: CapacityError: insufficient capacity")] fn long_names_panic() { Identifier::new("t".repeat(65)); } #[test] - #[should_panic(expected = "Long Unicode names are not allowed")] + #[should_panic(expected = "Identifier too long: CapacityError: insufficient capacity")] fn long_unicode_names_panic() { Identifier::new("茶".repeat(22)); } diff --git a/crates/proof-of-sql-parser/src/intermediate_ast_tests.rs b/crates/proof-of-sql-parser/src/intermediate_ast_tests.rs index 0a2f63762..c3d1e570b 100644 --- a/crates/proof-of-sql-parser/src/intermediate_ast_tests.rs +++ b/crates/proof-of-sql-parser/src/intermediate_ast_tests.rs @@ -779,8 +779,9 @@ fn we_can_parse_multiple_order_by() { // TODO: we should be able to pass this test. // But due to some lalrpop restriction, we aren't. // This problem will be addressed in a future PR. +#[allow(clippy::should_panic_without_expect)] #[test] -#[should_panic(expected = "lalrpop restriction prevents parsing reserved keywords in order by")] +#[should_panic] fn we_cannot_parse_order_by_referencing_reserved_keywords_yet() { let ast = "select a as asc from tab order by a asc" .parse::<SelectStatement>() diff --git a/crates/proof-of-sql/src/base/database/owned_and_arrow_conversions_test.rs b/crates/proof-of-sql/src/base/database/owned_and_arrow_conversions_test.rs index 21902ddfb..970df4bad 100644 --- a/crates/proof-of-sql/src/base/database/owned_and_arrow_conversions_test.rs +++ b/crates/proof-of-sql/src/base/database/owned_and_arrow_conversions_test.rs @@ -142,7 +142,7 @@ fn we_cannot_convert_a_record_batch_if_it_has_repeated_column_names() { } #[test] -#[should_panic(expected = "Converting an owned table with scalar column causes panic")] +#[should_panic(expected = "not implemented: Cannot convert Scalar type to arrow type")] fn we_panic_when_converting_an_owned_table_with_a_scalar_column() { let owned_table = owned_table::<Curve25519Scalar>([scalar("a", [0; 0])]); let _ = RecordBatch::try_from(owned_table); diff --git a/crates/proof-of-sql/src/base/slice_ops/mul_add_assign.rs b/crates/proof-of-sql/src/base/slice_ops/mul_add_assign.rs index c9c019288..b1f6afd48 100644 --- a/crates/proof-of-sql/src/base/slice_ops/mul_add_assign.rs +++ b/crates/proof-of-sql/src/base/slice_ops/mul_add_assign.rs @@ -12,7 +12,7 @@ where T: Send + Sync + Mul<Output = T> + AddAssign + Copy, S: Into<T> + Sync + Copy, { - assert!(result.len() >= to_mul_add.len()); + assert!(result.len() >= to_mul_add.len(), "The length of result must be greater than or equal to the length of the vector of values to be multiplied and added"); if_rayon!( result.par_iter_mut().with_min_len(super::MIN_RAYON_LEN), result.iter_mut() diff --git a/crates/proof-of-sql/src/base/slice_ops/mul_add_assign_test.rs b/crates/proof-of-sql/src/base/slice_ops/mul_add_assign_test.rs index 156c92094..e766f34ad 100644 --- a/crates/proof-of-sql/src/base/slice_ops/mul_add_assign_test.rs +++ b/crates/proof-of-sql/src/base/slice_ops/mul_add_assign_test.rs @@ -22,7 +22,7 @@ fn test_mul_add_assign_uneven() { /// test [`mul_add_assign`] with uneven panics when len(a) < len(b) #[test] -#[should_panic(expected = "Length of vector a must not be shorter than vector b")] +#[should_panic( + expected = "The length of result must be greater than or equal to the length of the vector of values to be multiplied and added" +)] fn test_mul_add_assign_uneven_panic() { let mut a = vec![1, 2, 3, 4]; let b = vec![2, 3, 4, 5, 6]; From 44e6aea26d15cde8b0841e70ceb7f83e1bc99c94 Mon Sep 17 00:00:00 2001 From: Jay White Date: Wed, 16 Oct 2024 23:50:33 -0400 Subject: [PATCH 8/9] fix: implement `TryFrom<BigInt> for MontScalar` properly --- .../src/base/scalar/mont_scalar.rs | 49 +++++++++---------- .../src/base/scalar/mont_scalar_test.rs | 41 +++++++++++++++- 2 files changed, 63 insertions(+), 27 deletions(-) diff --git a/crates/proof-of-sql/src/base/scalar/mont_scalar.rs b/crates/proof-of-sql/src/base/scalar/mont_scalar.rs index 6c646b9db..84a7a0b94 100644 --- a/crates/proof-of-sql/src/base/scalar/mont_scalar.rs +++ b/crates/proof-of-sql/src/base/scalar/mont_scalar.rs @@ -1,6 +1,9 @@ use super::{Scalar, ScalarConversionError}; -use crate::base::math::decimal::MAX_SUPPORTED_PRECISION; -use alloc::{format, string::String, vec::Vec}; +use alloc::{ + format, + string::{String, ToString}, + vec::Vec, +}; use ark_ff::{BigInteger, Field, Fp, Fp256, MontBackend, MontConfig, PrimeField}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use bytemuck::TransparentWrapper; @@ -217,35 +220,29 @@ impl<T: MontConfig<4>> MontScalar<T> { } } -impl<T: MontConfig<4>> TryFrom<BigInt> for MontScalar<T> { +impl<T> TryFrom<BigInt> for MontScalar<T> +where + T: MontConfig<4>, + MontScalar<T>: Scalar, +{ type Error = ScalarConversionError; fn try_from(value: BigInt) -> Result<Self, Self::Error> { - // Obtain the absolute value to ignore the sign when counting digits - let value_abs = value.abs(); - - // Extract digits and check the number of digits directly - let (_, digits) = value_abs.to_u64_digits(); - - // Check if the number of digits exceeds the maximum precision allowed - if digits.len() > MAX_SUPPORTED_PRECISION.into() { - return Err(ScalarConversionError::Overflow{ error: format!( "Attempted to parse a number with {} digits, which exceeds the max supported precision of {}", digits.len(), MAX_SUPPORTED_PRECISION )}); + if value.abs() > BigInt::from(<Self as Scalar>::MAX_SIGNED) { + return Err(ScalarConversionError::Overflow { + error: "BigInt too large for Scalar".to_string(), + }); } - // Continue with the previous logic - assert!(digits.len() <= 4); // This should not happen if the precision check is correct - let mut data = [0u64; 4]; - data[..digits.len()].copy_from_slice(&digits); - let result = Self::from_bigint(data); - match value.sign() { - // Updated to use value.sign() for clarity - num_bigint::Sign::Minus => Ok(-result), - _ => Ok(result), - } + let (sign, digits) = value.to_u64_digits(); + assert!(digits.len() <= 4); // This should not happen if the above check is correct + let mut limbs = [0u64; 4]; + limbs[..digits.len()].copy_from_slice(&digits); + let result = Self::from(limbs); + Ok(match sign { + num_bigint::Sign::Minus => -result, + num_bigint::Sign::Plus | num_bigint::Sign::NoSign => result, + }) } } impl<T: MontConfig<4>> From<[u64; 4]> for MontScalar<T> { diff --git a/crates/proof-of-sql/src/base/scalar/mont_scalar_test.rs b/crates/proof-of-sql/src/base/scalar/mont_scalar_test.rs index 3a26fd8c0..8301bad5d 100644 --- a/crates/proof-of-sql/src/base/scalar/mont_scalar_test.rs +++ b/crates/proof-of-sql/src/base/scalar/mont_scalar_test.rs @@ -1,6 +1,6 @@ use crate::base::{ map::IndexSet, - scalar::{Curve25519Scalar, Scalar, ScalarConversionError}, + scalar::{test_scalar::TestScalar, Curve25519Scalar, Scalar, ScalarConversionError}, }; use alloc::{format, string::ToString, vec::Vec}; use
byte_slice_cast::AsByteSlice; @@ -471,3 +471,42 @@ fn the_string_hash_implementation_uses_the_full_range_of_bits() { } } } + +#[test] +fn test_bigint_to_scalar_overflow() { + assert_eq!( + TestScalar::try_from( + "3618502788666131106986593281521497120428558179689953803000975469142727125494" + .parse::<BigInt>() + .unwrap() + ) + .unwrap(), + TestScalar::MAX_SIGNED + ); + assert_eq!( + TestScalar::try_from( + "-3618502788666131106986593281521497120428558179689953803000975469142727125494" + .parse::<BigInt>() + .unwrap() + ) + .unwrap(), + -TestScalar::MAX_SIGNED + ); + + assert!(matches!( + TestScalar::try_from( + "3618502788666131106986593281521497120428558179689953803000975469142727125495" + .parse::<BigInt>() + .unwrap() + ), + Err(ScalarConversionError::Overflow { .. }) + )); + assert!(matches!( + TestScalar::try_from( + "-3618502788666131106986593281521497120428558179689953803000975469142727125495" + .parse::<BigInt>() + .unwrap() + ), + Err(ScalarConversionError::Overflow { .. }) + )); +} From 2c2289171d0eab3950b2a440b402630b06832018 Mon Sep 17 00:00:00 2001 From: Jay White Date: Wed, 16 Oct 2024 23:49:40 -0400 Subject: [PATCH 9/9] refactor: remove dependency on `MAX_SUPPORTED_PRECISION` --- .../src/base/database/column_operation.rs | 50 ++++++++++++++++--- 1 file changed, 43 insertions(+), 7 deletions(-) diff --git a/crates/proof-of-sql/src/base/database/column_operation.rs b/crates/proof-of-sql/src/base/database/column_operation.rs index be6a2fdde..9a18e8e07 100644 --- a/crates/proof-of-sql/src/base/database/column_operation.rs +++ b/crates/proof-of-sql/src/base/database/column_operation.rs @@ -2,7 +2,7 @@ use super::{ColumnOperationError, ColumnOperationResult}; use crate::base::{ database::ColumnType, - math::decimal::{scale_scalar, DecimalError, Precision, MAX_SUPPORTED_PRECISION}, + math::decimal::{scale_scalar, DecimalError, Precision}, scalar::Scalar, }; use alloc::{format, string::ToString, vec::Vec}; @@ -535,7 +535,13 @@ where // If scale difference is above max decimal precision values // are equal if they are both zero and unequal otherwise let upscale = max_scale - lhs_scale; - if upscale > MAX_SUPPORTED_PRECISION as i8 { + if i8::try_from( + right_column_type + .precision_value() + .expect("Decimal types have scale"), + ) + .is_ok_and(|precision| upscale > precision) + { lhs.iter() .zip(rhs.iter()) .map(|(l, r)| -> bool { l.is_zero() && *r == S::ZERO }) @@ -550,7 +556,13 @@ } } else if rhs_scale < max_scale { let upscale = max_scale - rhs_scale; - if upscale > MAX_SUPPORTED_PRECISION as i8 { + if i8::try_from( + left_column_type + .precision_value() + .expect("Numeric types have scale"), + ) + .is_ok_and(|precision| upscale > precision) + { lhs.iter() .zip(rhs.iter()) .map(|(l, r)| -> bool { l.is_zero() && *r == S::ZERO }) @@ -596,7 +608,13 @@ where // always have larger absolute value than the other one as long as it is nonzero // Hence a (extremely upscaled) <= b if and only if a < 0 or (a == 0 and b >= 0) let upscale = max_scale - lhs_scale; - if upscale > MAX_SUPPORTED_PRECISION as i8 { + if i8::try_from( + right_column_type + .precision_value() + .expect("Decimal types have scale"), + ) + .is_ok_and(|precision| upscale > precision) + { lhs.iter() .zip(rhs.iter()) .map(|(l, r)| -> bool { @@ -616,7 +634,13 @@ } } else if rhs_scale < max_scale { let upscale = max_scale - rhs_scale; - if upscale > MAX_SUPPORTED_PRECISION as i8 { + if i8::try_from( + left_column_type + .precision_value() + .expect("Numeric types have scale"), + ) + .is_ok_and(|precision| upscale > precision) + { //
Similarly with extreme scaling we have // a <= (extremely upscaled) b if and only if a < 0 or (a == 0 and b >= 0) lhs.iter() @@ -669,7 +693,13 @@ where // always have larger absolute value than the other one as long as it is nonzero // Hence a (extremely upscaled) >= b if and only if a > 0 or (a == 0 and b <= 0) let upscale = max_scale - lhs_scale; - if upscale > MAX_SUPPORTED_PRECISION as i8 { + if i8::try_from( + right_column_type + .precision_value() + .expect("Decimal types have scale"), + ) + .is_ok_and(|precision| upscale > precision) + { lhs.iter() .zip(rhs.iter()) .map(|(l, r)| -> bool { @@ -689,7 +719,13 @@ where } } else if rhs_scale < max_scale { let upscale = max_scale - rhs_scale; - if upscale > MAX_SUPPORTED_PRECISION as i8 { + if i8::try_from( + left_column_type + .precision_value() + .expect("Numeric types have scale"), + ) + .is_ok_and(|precision| upscale > precision) + { // Similarly with extreme scaling we have // a >= (extremely upscaled) b if and only if b < 0 or (a >= 0 and b == 0) lhs.iter()
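Patch 9 above replaces the global `MAX_SUPPORTED_PRECISION` bound with each column's own precision, converting it to `i8` with `i8::try_from(...).is_ok_and(...)` so that a precision that does not fit in an `i8` simply makes the "extreme upscale" check evaluate to false. A minimal standalone sketch of that pattern (hypothetical helper and values, not code from this series):

```rust
// If `precision` does not fit in i8, try_from fails, is_ok_and returns
// false, and the upscale is not treated as extreme.
fn upscale_exceeds_precision(upscale: i8, precision: u8) -> bool {
    i8::try_from(precision).is_ok_and(|p| upscale > p)
}

fn main() {
    assert!(upscale_exceeds_precision(76, 75)); // upscale beyond precision
    assert!(!upscale_exceeds_precision(10, 75)); // normal upscale
    assert!(!upscale_exceeds_precision(127, 200)); // 200 > i8::MAX, so the check is false
}
```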