Skip to content

Commit

Permalink
chore: resolved `if_not_else` and `explicit_deref_methods` lints (
Browse files Browse the repository at this point in the history
#262)

# Rationale for this change

We run `cargo clippy` in our CI to enforce code quality.
To raise our standards, we should enable the
`clippy::pedantic` lint group.

# What changes are included in this PR?

Resolved the following lint warnings:

`large_types_passed_by_value`
`map_unwrap_or`
`if_not_else`
`explicit_deref_methods`
`items_after_statements`

# Are these changes tested?
Yes.

---------

Co-authored-by: Jay White <[email protected]>
  • Loading branch information
mehulmathur16 and JayWhite2357 authored Oct 11, 2024
1 parent 40f1f77 commit d79cbbf
Show file tree
Hide file tree
Showing 7 changed files with 65 additions and 70 deletions.
8 changes: 6 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -90,10 +90,14 @@ manual_let_else = "deny"
struct_field_names = "deny"
unicode_not_nfc = "deny"
manual_string_new = "deny"
large_types_passed_by_value = "deny"
large_types_passed_by_value = "deny"
map_unwrap_or = "deny"
if_not_else = "deny"
explicit_deref_methods = "deny"
items_after_statements = "deny"
bool_to_int_with_if = "deny"
ptr_as_ptr = "deny"
match_wildcard_for_single_variants = "deny"
match_bool = "deny"
manual_assert = "deny"
trivially_copy_pass_by_ref = "deny"
trivially_copy_pass_by_ref = "deny"
27 changes: 10 additions & 17 deletions crates/proof-of-sql/benches/bench_append_rows.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@ use proof_of_sql::{
};
use proof_of_sql_parser::posql_time::{PoSQLTimeUnit, PoSQLTimeZone};
use rand::Rng;
use std::ops::Deref;

/// Bench dory performance when appending rows to a table. This includes the computation of
/// commitments. Chose the number of columns to randomly generate across supported `PoSQL`
Expand Down Expand Up @@ -99,34 +98,28 @@ pub fn generate_random_owned_table<S: Scalar>(
let identifier = format!("column_{}", rng.gen::<u32>());

match column_type {
"bigint" => columns.push(bigint(identifier.deref(), vec![rng.gen::<i64>(); num_rows])),
"bigint" => columns.push(bigint(&*identifier, vec![rng.gen::<i64>(); num_rows])),
"boolean" => columns.push(boolean(
identifier.deref(),
&*identifier,
generate_random_boolean_vector(num_rows),
)),
"int128" => columns.push(int128(
identifier.deref(),
vec![rng.gen::<i128>(); num_rows],
)),
"int128" => columns.push(int128(&*identifier, vec![rng.gen::<i128>(); num_rows])),
"scalar" => columns.push(scalar(
identifier.deref(),
&*identifier,
vec![generate_random_u64_array(); num_rows],
)),
"varchar" => columns.push(varchar(identifier.deref(), gen_rnd_str(num_rows))),
"varchar" => columns.push(varchar(&*identifier, gen_rnd_str(num_rows))),
"decimal75" => columns.push(decimal75(
identifier.deref(),
&*identifier,
12,
2,
vec![generate_random_u64_array(); num_rows],
)),
"tinyint" => columns.push(tinyint(identifier.deref(), vec![rng.gen::<i8>(); num_rows])),
"smallint" => columns.push(smallint(
identifier.deref(),
vec![rng.gen::<i16>(); num_rows],
)),
"int" => columns.push(int(identifier.deref(), vec![rng.gen::<i32>(); num_rows])),
"tinyint" => columns.push(tinyint(&*identifier, vec![rng.gen::<i8>(); num_rows])),
"smallint" => columns.push(smallint(&*identifier, vec![rng.gen::<i16>(); num_rows])),
"int" => columns.push(int(&*identifier, vec![rng.gen::<i32>(); num_rows])),
"timestamptz" => columns.push(timestamptz(
identifier.deref(),
&*identifier,
PoSQLTimeUnit::Second,
PoSQLTimeZone::Utc,
vec![rng.gen::<i64>(); num_rows],
Expand Down
1 change: 1 addition & 0 deletions crates/proof-of-sql/benches/jaeger_benches.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ use std::env;

const SIZE: usize = 1_000_000;

#[allow(clippy::items_after_statements)]
fn main() {
init_backend();
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
Expand Down
6 changes: 3 additions & 3 deletions crates/proof-of-sql/src/base/math/permutation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -63,13 +63,13 @@ impl Permutation {
where
T: Clone,
{
if slice.len() != self.size() {
if slice.len() == self.size() {
Ok(self.permutation.iter().map(|&i| slice[i].clone()).collect())
} else {
Err(PermutationError::PermutationSizeMismatch {
permutation_size: self.size(),
slice_length: slice.len(),
})
} else {
Ok(self.permutation.iter().map(|&i| slice[i].clone()).collect())
}
}
}
Expand Down
18 changes: 9 additions & 9 deletions crates/proof-of-sql/src/base/slice_ops/batch_inverse_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,10 +47,10 @@ fn we_can_pseudo_invert_arrays_of_length_bigger_than_1_with_zeros_and_non_zeros(
slice_ops::batch_inversion(&mut res[..]);

for (input_val, res_val) in input.iter().zip(res) {
if *input_val != Curve25519Scalar::zero() {
assert!(input_val.inv().unwrap() == res_val);
} else {
if *input_val == Curve25519Scalar::zero() {
assert!(Curve25519Scalar::zero() == res_val);
} else {
assert!(input_val.inv().unwrap() == res_val);
}
}
}
Expand Down Expand Up @@ -78,10 +78,10 @@ fn we_can_pseudo_invert_arrays_with_nonzero_count_bigger_than_min_chunking_size_
slice_ops::batch_inversion(&mut res[..]);

for (input_val, res_val) in input.iter().zip(res) {
if *input_val != Curve25519Scalar::zero() {
assert!(input_val.inv().unwrap() == res_val);
} else {
if *input_val == Curve25519Scalar::zero() {
assert!(Curve25519Scalar::zero() == res_val);
} else {
assert!(input_val.inv().unwrap() == res_val);
}
}
}
Expand Down Expand Up @@ -109,10 +109,10 @@ fn we_can_pseudo_invert_arrays_with_nonzero_count_smaller_than_min_chunking_size
slice_ops::batch_inversion(&mut res[..]);

for (input_val, res_val) in input.iter().zip(res) {
if *input_val != Curve25519Scalar::zero() {
assert!(input_val.inv().unwrap() == res_val);
} else {
if *input_val == Curve25519Scalar::zero() {
assert!(Curve25519Scalar::zero() == res_val);
} else {
assert!(input_val.inv().unwrap() == res_val);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -76,14 +76,14 @@ fn get_aggregate_and_remainder_expressions(
Expression::Column(_) | Expression::Literal(_) | Expression::Wildcard => expr,
Expression::Aggregation { op, expr } => {
let key = (op, (*expr));
if !aggregation_expr_map.contains_key(&key) {
if aggregation_expr_map.contains_key(&key) {
Expression::Column(*aggregation_expr_map.get(&key).unwrap())
} else {
let new_col_id = format!("__col_agg_{}", aggregation_expr_map.len())
.parse()
.unwrap();
aggregation_expr_map.insert(key, new_col_id);
Expression::Column(new_col_id)
} else {
Expression::Column(*aggregation_expr_map.get(&key).unwrap())
}
}
Expression::Binary { op, left, right } => {
Expand Down Expand Up @@ -232,31 +232,28 @@ impl<S: Scalar> PostprocessingStep<S> for GroupByPostprocessing {
let selection_in = vec![true; owned_table.num_rows()];
let (sum_identifiers, sum_columns): (Vec<_>, Vec<_>) = evaluated_columns
.get(&AggregationOperator::Sum)
.map(|tuple| {
.map_or((vec![], vec![]), |tuple| {
tuple
.iter()
.map(|(id, c)| (*id, Column::<S>::from_owned_column(c, &alloc)))
.unzip()
})
.unwrap_or((vec![], vec![]));
});
let (max_identifiers, max_columns): (Vec<_>, Vec<_>) = evaluated_columns
.get(&AggregationOperator::Max)
.map(|tuple| {
.map_or((vec![], vec![]), |tuple| {
tuple
.iter()
.map(|(id, c)| (*id, Column::<S>::from_owned_column(c, &alloc)))
.unzip()
})
.unwrap_or((vec![], vec![]));
});
let (min_identifiers, min_columns): (Vec<_>, Vec<_>) = evaluated_columns
.get(&AggregationOperator::Min)
.map(|tuple| {
.map_or((vec![], vec![]), |tuple| {
tuple
.iter()
.map(|(id, c)| (*id, Column::<S>::from_owned_column(c, &alloc)))
.unzip()
})
.unwrap_or((vec![], vec![]));
});
let aggregation_results = aggregate_columns(
&alloc,
&group_by_ins,
Expand Down
54 changes: 27 additions & 27 deletions crates/proof-of-sql/src/sql/proof_exprs/dyn_proof_expr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -74,13 +74,13 @@ impl<C: Commitment> DynProofExpr<C> {
pub fn try_new_equals(lhs: DynProofExpr<C>, rhs: DynProofExpr<C>) -> ConversionResult<Self> {
let lhs_datatype = lhs.data_type();
let rhs_datatype = rhs.data_type();
if !type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Equal) {
if type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Equal) {
Ok(Self::Equals(EqualsExpr::new(Box::new(lhs), Box::new(rhs))))
} else {
Err(ConversionError::DataTypeMismatch {
left_type: lhs_datatype.to_string(),
right_type: rhs_datatype.to_string(),
})
} else {
Ok(Self::Equals(EqualsExpr::new(Box::new(lhs), Box::new(rhs))))
}
}
/// Create a new inequality expression
Expand All @@ -91,74 +91,74 @@ impl<C: Commitment> DynProofExpr<C> {
) -> ConversionResult<Self> {
let lhs_datatype = lhs.data_type();
let rhs_datatype = rhs.data_type();
if !type_check_binary_operation(
if type_check_binary_operation(
&lhs_datatype,
&rhs_datatype,
BinaryOperator::LessThanOrEqual,
) {
Err(ConversionError::DataTypeMismatch {
left_type: lhs_datatype.to_string(),
right_type: rhs_datatype.to_string(),
})
} else {
Ok(Self::Inequality(InequalityExpr::new(
Box::new(lhs),
Box::new(rhs),
is_lte,
)))
} else {
Err(ConversionError::DataTypeMismatch {
left_type: lhs_datatype.to_string(),
right_type: rhs_datatype.to_string(),
})
}
}

/// Create a new add expression
pub fn try_new_add(lhs: DynProofExpr<C>, rhs: DynProofExpr<C>) -> ConversionResult<Self> {
let lhs_datatype = lhs.data_type();
let rhs_datatype = rhs.data_type();
if !type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Add) {
Err(ConversionError::DataTypeMismatch {
left_type: lhs_datatype.to_string(),
right_type: rhs_datatype.to_string(),
})
} else {
if type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Add) {
Ok(Self::AddSubtract(AddSubtractExpr::new(
Box::new(lhs),
Box::new(rhs),
false,
)))
} else {
Err(ConversionError::DataTypeMismatch {
left_type: lhs_datatype.to_string(),
right_type: rhs_datatype.to_string(),
})
}
}

/// Create a new subtract expression
pub fn try_new_subtract(lhs: DynProofExpr<C>, rhs: DynProofExpr<C>) -> ConversionResult<Self> {
let lhs_datatype = lhs.data_type();
let rhs_datatype = rhs.data_type();
if !type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Subtract) {
Err(ConversionError::DataTypeMismatch {
left_type: lhs_datatype.to_string(),
right_type: rhs_datatype.to_string(),
})
} else {
if type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Subtract) {
Ok(Self::AddSubtract(AddSubtractExpr::new(
Box::new(lhs),
Box::new(rhs),
true,
)))
} else {
Err(ConversionError::DataTypeMismatch {
left_type: lhs_datatype.to_string(),
right_type: rhs_datatype.to_string(),
})
}
}

/// Create a new multiply expression
pub fn try_new_multiply(lhs: DynProofExpr<C>, rhs: DynProofExpr<C>) -> ConversionResult<Self> {
let lhs_datatype = lhs.data_type();
let rhs_datatype = rhs.data_type();
if !type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Multiply) {
Err(ConversionError::DataTypeMismatch {
left_type: lhs_datatype.to_string(),
right_type: rhs_datatype.to_string(),
})
} else {
if type_check_binary_operation(&lhs_datatype, &rhs_datatype, BinaryOperator::Multiply) {
Ok(Self::Multiply(MultiplyExpr::new(
Box::new(lhs),
Box::new(rhs),
)))
} else {
Err(ConversionError::DataTypeMismatch {
left_type: lhs_datatype.to_string(),
right_type: rhs_datatype.to_string(),
})
}
}

Expand Down

0 comments on commit d79cbbf

Please sign in to comment.