Skip to content

Commit

Permalink
chore: enforce existing clippy fixed lints check (#223)
Browse files Browse the repository at this point in the history
# Rationale for this change

This change enforces the already-fixed Clippy lint checks in the proof-of-sql library by elevating them from warnings to denials.

# What changes are included in this PR?

Set the previously fixed lints to `deny` in the workspace lint configuration, and fixed the new warnings that had accumulated on the updated main branch.

# Are these changes tested?

Yes.
  • Loading branch information
mehulmathur16 authored Oct 5, 2024
1 parent b70c212 commit 227f611
Show file tree
Hide file tree
Showing 72 changed files with 208 additions and 176 deletions.
14 changes: 13 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -64,4 +64,16 @@ zerocopy = { version = "0.7.34" }
missing_docs = "warn"

[workspace.lints.clippy]
missing_panics_doc = "warn"
missing_panics_doc = "deny"
doc_markdown = "deny"
match_same_arms = "deny"
similar_names = "deny"
many_single_char_names = "deny"
explicit_iter_loop = "deny"
implicit_clone = "deny"
uninlined_format_args = "deny"
semicolon_if_nothing_returned = "deny"
unnested_or_patterns = "deny"
unreadable_literal = "deny"
must_use_candidate = "deny"
range_plus_one = "deny"
5 changes: 2 additions & 3 deletions crates/proof-of-sql-parser/src/identifier.rs
Original file line number Diff line number Diff line change
Expand Up @@ -195,11 +195,10 @@ mod tests {
"to_timestamp",
];

for keyword in keywords.iter() {
for keyword in &keywords {
assert!(
Identifier::from_str(keyword).is_err(),
"Should not parse keyword as identifier: {}",
keyword
"Should not parse keyword as identifier: {keyword}"
);
}
}
Expand Down
2 changes: 1 addition & 1 deletion crates/proof-of-sql-parser/src/intermediate_ast_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1108,7 +1108,7 @@ fn we_cannot_parse_queries_with_long_identifiers() {
}

////////////////////////////////
/// Tests for the GroupByClause
/// Tests for the `GroupByClause`
////////////////////////////////
#[test]
fn we_can_parse_a_simple_group_by_clause() {
Expand Down
11 changes: 7 additions & 4 deletions crates/proof-of-sql-parser/src/intermediate_decimal.rs
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,7 @@ mod tests {
};
assert_eq!(
i128::try_from(valid_decimal),
Ok(170141183460469231731687303715884105727i128)
Ok(170_141_183_460_469_231_731_687_303_715_884_105_727_i128)
);

let valid_decimal = IntermediateDecimal {
Expand All @@ -228,7 +228,7 @@ mod tests {
};
assert_eq!(
i128::try_from(valid_decimal_negative),
Ok(-170141183460469231731687303715884105728i128)
Ok(-170_141_183_460_469_231_731_687_303_715_884_105_728_i128)
);

let non_integer = IntermediateDecimal {
Expand All @@ -242,7 +242,10 @@ mod tests {
let valid_decimal = IntermediateDecimal {
value: BigDecimal::from_str("9223372036854775807").unwrap(),
};
assert_eq!(i64::try_from(valid_decimal), Ok(9223372036854775807i64));
assert_eq!(
i64::try_from(valid_decimal),
Ok(9_223_372_036_854_775_807_i64)
);

let valid_decimal = IntermediateDecimal {
value: BigDecimal::from_str("123.000").unwrap(),
Expand All @@ -259,7 +262,7 @@ mod tests {
};
assert_eq!(
i64::try_from(valid_decimal_negative),
Ok(-9223372036854775808i64)
Ok(-9_223_372_036_854_775_808_i64)
);

let non_integer = IntermediateDecimal {
Expand Down
10 changes: 4 additions & 6 deletions crates/proof-of-sql-parser/src/posql_time/timestamp.rs
Original file line number Diff line number Diff line change
Expand Up @@ -147,15 +147,15 @@ mod tests {

#[test]
fn test_unix_epoch_time_timezone() {
let unix_time = 1231006505; // Unix time as string
let unix_time = 1_231_006_505; // Unix time as string
let expected_timezone = PoSQLTimeZone::Utc; // Unix time should always be UTC
let result = PoSQLTimestamp::to_timestamp(unix_time).unwrap();
assert_eq!(result.timezone, expected_timezone);
}

#[test]
fn test_unix_epoch_timestamp_parsing() {
let unix_time = 1231006505; // Example Unix timestamp (seconds since epoch)
let unix_time = 1_231_006_505; // Example Unix timestamp (seconds since epoch)
let expected_datetime = Utc.timestamp_opt(unix_time, 0).unwrap();
let expected_unit = PoSQLTimeUnit::Second; // Assuming basic second precision for Unix timestamp
let input = unix_time; // Simulate input as string since Unix times are often transmitted as strings
Expand Down Expand Up @@ -235,8 +235,7 @@ mod tests {
for input in inputs {
assert!(
DateTime::parse_from_rfc3339(input).is_ok(),
"Should parse correctly: {}",
input
"Should parse correctly: {input}"
);
}
}
Expand Down Expand Up @@ -286,8 +285,7 @@ mod tests {
for input in incorrect_formats {
assert!(
DateTime::parse_from_rfc3339(input).is_err(),
"Should reject incorrect format: {}",
input
"Should reject incorrect format: {input}"
);
}
}
Expand Down
6 changes: 3 additions & 3 deletions crates/proof-of-sql-parser/src/posql_time/timezone.rs
Original file line number Diff line number Diff line change
Expand Up @@ -83,19 +83,19 @@ mod timezone_parsing_tests {
#[test]
fn test_display_fixed_offset_positive() {
let timezone = timezone::PoSQLTimeZone::FixedOffset(4500); // +01:15
assert_eq!(format!("{}", timezone), "+01:15");
assert_eq!(format!("{timezone}"), "+01:15");
}

#[test]
fn test_display_fixed_offset_negative() {
let timezone = timezone::PoSQLTimeZone::FixedOffset(-3780); // -01:03
assert_eq!(format!("{}", timezone), "-01:03");
assert_eq!(format!("{timezone}"), "-01:03");
}

#[test]
fn test_display_utc() {
let timezone = timezone::PoSQLTimeZone::Utc;
assert_eq!(format!("{}", timezone), "+00:00");
assert_eq!(format!("{timezone}"), "+00:00");
}
}

Expand Down
6 changes: 3 additions & 3 deletions crates/proof-of-sql-parser/src/posql_time/unit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ mod time_unit_tests {
let invalid_precisions = [
"1", "2", "4", "5", "7", "8", "10", "zero", "three", "cat", "-1", "-2",
]; // Testing all your various invalid inputs
for &value in invalid_precisions.iter() {
for &value in &invalid_precisions {
let result = PoSQLTimeUnit::try_from(value);
assert!(matches!(
result,
Expand All @@ -88,7 +88,7 @@ mod time_unit_tests {
#[test]
fn test_rfc3339_timestamp_with_microseconds() {
let input = "2023-06-26T12:34:56.123456Z";
let expected = Utc.ymd(2023, 6, 26).and_hms_micro(12, 34, 56, 123456);
let expected = Utc.ymd(2023, 6, 26).and_hms_micro(12, 34, 56, 123_456);
let result = PoSQLTimestamp::try_from(input).unwrap();
assert_eq!(result.timeunit(), PoSQLTimeUnit::Microsecond);
assert_eq!(
Expand All @@ -99,7 +99,7 @@ mod time_unit_tests {
#[test]
fn test_rfc3339_timestamp_with_nanoseconds() {
let input = "2023-06-26T12:34:56.123456789Z";
let expected = Utc.ymd(2023, 6, 26).and_hms_nano(12, 34, 56, 123456789);
let expected = Utc.ymd(2023, 6, 26).and_hms_nano(12, 34, 56, 123_456_789);
let result = PoSQLTimestamp::try_from(input).unwrap();
assert_eq!(result.timeunit(), PoSQLTimeUnit::Nanosecond);
assert_eq!(
Expand Down
5 changes: 3 additions & 2 deletions crates/proof-of-sql/benches/bench_append_rows.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ use rand::Rng;
use std::ops::Deref;

/// Bench dory performance when appending rows to a table. This includes the computation of
/// commitments. Chose the number of columns to randomly generate across supported PoSQL
/// commitments. Choose the number of columns to randomly generate across supported `PoSQL`
/// data types, and choose the number of rows to append at a time.
///
/// ```text
Expand Down Expand Up @@ -72,7 +72,8 @@ fn bench_append_rows(c: &mut Criterion, cols: usize, rows: usize) {
});
}

/// Generates a random OwnedTable with a specified number of columns
/// Generates a random [`OwnedTable`] with a specified number of columns
#[must_use]
pub fn generate_random_owned_table<S: Scalar>(
num_columns: usize,
num_rows: usize,
Expand Down
6 changes: 3 additions & 3 deletions crates/proof-of-sql/benches/jaeger_benches.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
//! cargo bench -p proof-of-sql --bench jaeger_benches InnerProductProof
//! cargo bench -p proof-of-sql --bench jaeger_benches Dory --features="test"
//! ```
//! Then, navigate to http://localhost:16686 to view the traces.
//! Then, navigate to <http://localhost:16686> to view the traces.

#[cfg(feature = "test")]
use ark_std::test_rng;
Expand Down Expand Up @@ -45,7 +45,7 @@ fn main() {
"InnerProductProof" => {
// Run 3 times to ensure that warm-up of the GPU has occurred.
for _ in 0..3 {
for (title, query, columns) in QUERIES.iter() {
for (title, query, columns) in QUERIES {
jaeger_scaffold::<InnerProductProof>(title, query, columns, SIZE, &(), &());
}
}
Expand All @@ -60,7 +60,7 @@ fn main() {
let verifier_setup = DoryVerifierPublicSetup::new(&vs, 10);

for _ in 0..3 {
for (title, query, columns) in QUERIES.iter() {
for (title, query, columns) in QUERIES {
jaeger_scaffold::<DoryEvaluationProof>(
title,
query,
Expand Down
8 changes: 5 additions & 3 deletions crates/proof-of-sql/benches/scaffold/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ pub fn criterion_scaffold<CP: CommitmentEvaluationProof>(
prover_setup: &CP::ProverPublicSetup<'_>,
verifier_setup: &CP::VerifierPublicSetup<'_>,
) {
let mut group = c.benchmark_group(format!("{} - {}", title, query));
let mut group = c.benchmark_group(format!("{title} - {query}"));
group.sample_size(10);
group.plot_config(PlotConfiguration::default().summary_scale(AxisScale::Logarithmic));
init_backend();
Expand All @@ -102,10 +102,12 @@ pub fn criterion_scaffold<CP: CommitmentEvaluationProof>(
&mut rng,
);
group.bench_function("Generate Proof", |b| {
b.iter(|| VerifiableQueryResult::<CP>::new(query.proof_expr(), &accessor, prover_setup))
b.iter(|| {
VerifiableQueryResult::<CP>::new(query.proof_expr(), &accessor, prover_setup)
});
});
group.bench_function("Verify Proof", |b| {
b.iter(|| result.verify(query.proof_expr(), &accessor, verifier_setup))
b.iter(|| result.verify(query.proof_expr(), &accessor, verifier_setup));
});
}
}
4 changes: 2 additions & 2 deletions crates/proof-of-sql/examples/hello_world/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ use std::{
///
/// Will panic if flushing the output fails, which can happen due to issues with the underlying output stream.
fn start_timer(message: &str) -> Instant {
print!("{}...", message);
print!("{message}...");
stdout().flush().unwrap();
Instant::now()
}
Expand Down Expand Up @@ -70,7 +70,7 @@ fn main() {
println!("Query result: {:?}", result.table);
}
Err(e) => {
println!("Error: {:?}", e);
println!("Error: {e:?}");
}
}
}
2 changes: 1 addition & 1 deletion crates/proof-of-sql/examples/posql_db/README.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# posql_db
# `posql_db`

Example demonstrating an implementation of a simple csv-backed database with Proof of SQL capabilities.

Expand Down
4 changes: 2 additions & 2 deletions crates/proof-of-sql/examples/posql_db/commit_accessor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,12 @@ impl<C: Commitment + Serialize + for<'a> Deserialize<'a>> CommitAccessor<C> {
table_ref: &TableRef,
commit: &TableCommitment<C>,
) -> Result<(), Box<dyn Error>> {
let path = self.base_path.join(format!("{}.commit", table_ref));
let path = self.base_path.join(format!("{table_ref}.commit"));
fs::write(path, postcard::to_allocvec(commit)?)?;
Ok(())
}
pub fn load_commit(&mut self, table_ref: TableRef) -> Result<(), Box<dyn Error>> {
let path = self.base_path.join(format!("{}.commit", table_ref));
let path = self.base_path.join(format!("{table_ref}.commit"));
let commit = postcard::from_bytes(&fs::read(path)?)?;
self.inner.insert(table_ref, commit);
Ok(())
Expand Down
2 changes: 1 addition & 1 deletion crates/proof-of-sql/examples/posql_db/csv_accessor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ impl CsvDataAccessor {
Ok(())
}
fn get_table_path(&self, table_ref: &TableRef) -> PathBuf {
self.base_path.join(format!("{}.csv", table_ref))
self.base_path.join(format!("{table_ref}.csv"))
}
pub fn write_table(
&self,
Expand Down
2 changes: 1 addition & 1 deletion crates/proof-of-sql/examples/posql_db/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ enum Commands {
/// Will panic if the call to `stdout().flush()` fails, indicating that the
/// standard output stream could not be flushed
fn start_timer(message: &str) -> Instant {
print!("{}...", message);
print!("{message}...");
stdout().flush().unwrap();
Instant::now()
}
Expand Down
4 changes: 2 additions & 2 deletions crates/proof-of-sql/src/base/bit/bit_matrix_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ fn we_can_compute_the_bit_matrix_for_data_with_a_single_varying_bit() {
let matrix = compute_varying_bit_matrix(&alloc, &data, &dist);
assert_eq!(matrix.len(), 1);
let slice1 = vec![true, false];
assert_eq!(matrix[0], slice1)
assert_eq!(matrix[0], slice1);
}

#[test]
Expand All @@ -40,7 +40,7 @@ fn we_can_compute_the_bit_matrix_for_data_with_a_varying_sign_bit() {
let matrix = compute_varying_bit_matrix(&alloc, &data, &dist);
assert_eq!(matrix.len(), 1);
let slice1 = vec![false, true];
assert_eq!(matrix[0], slice1)
assert_eq!(matrix[0], slice1);
}

#[test]
Expand Down
8 changes: 2 additions & 6 deletions crates/proof-of-sql/src/base/commitment/column_bounds.rs
Original file line number Diff line number Diff line change
Expand Up @@ -625,15 +625,11 @@ mod tests {
for ((bound_a, name_a), (bound_b, name_b)) in bounds.iter().tuple_combinations() {
assert!(
bound_a.try_union(*bound_b).is_err(),
"Expected error when trying to union {} with {}",
name_a,
name_b
"Expected error when trying to union {name_a} with {name_b}"
);
assert!(
bound_b.try_union(*bound_a).is_err(),
"Expected error when trying to union {} with {}",
name_b,
name_a
"Expected error when trying to union {name_b} with {name_a}"
);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -326,6 +326,7 @@ mod tests {
));
}

#[allow(clippy::similar_names)]
#[test]
fn we_cannot_perform_arithmetic_on_mismatched_metadata_maps_with_same_column_counts() {
let id_a = "column_a";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,7 @@ mod tests {
.into(),
);

assert_eq!(res_committable_column, test_committable_column)
assert_eq!(res_committable_column, test_committable_column);
}

#[test]
Expand Down
3 changes: 2 additions & 1 deletion crates/proof-of-sql/src/base/commitment/naive_commitment.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ impl Neg for NaiveCommitment {

impl SubAssign for NaiveCommitment {
fn sub_assign(&mut self, rhs: Self) {
self.add_assign(rhs.neg())
self.add_assign(rhs.neg());
}
}

Expand Down Expand Up @@ -154,6 +154,7 @@ impl Commitment for NaiveCommitment {
}
}

#[allow(clippy::similar_names)]
#[test]
fn we_can_compute_commitments_from_commitable_columns() {
let column_a = [1i64, 10, -5, 0, 10];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,7 @@ fn we_can_subtract_naive_commitments_with_both_empty() {

// AddAssign Tests

#[allow(clippy::similar_names)]
#[test]
fn we_can_add_assign_naive_commitments() {
let column_a: Vec<TestScalar> = [1i64, 10, -5, 0, 10].iter().map(|bi| bi.into()).collect();
Expand All @@ -165,6 +166,7 @@ fn we_can_add_assign_naive_commitments() {
assert_eq!(commitment_b_mutable, commitment_sum);
}

#[allow(clippy::similar_names)]
#[test]
fn we_can_add_assign_naive_commitments_with_one_empty() {
let column_a: Vec<TestScalar> = [1i64, 10, -5, 0, 10].iter().map(|bi| bi.into()).collect();
Expand Down Expand Up @@ -220,6 +222,7 @@ fn we_can_sub_assign_naive_commitments() {
assert_eq!(commitment_a_mutable, commitment_difference);
}

#[allow(clippy::similar_names)]
#[test]
fn we_can_sub_assign_naive_commitments_with_one_empty() {
let column_a: Vec<TestScalar> = [1i64, 10, -5, 0, 10].iter().map(|bi| bi.into()).collect();
Expand Down
Loading

0 comments on commit 227f611

Please sign in to comment.