fix: separate element decoding error from verification error
- add `QueryError::MiscellaneousDecodingError`, `QueryError::InvalidIndexes`, and `QueryError::MiscellaneousEvaluationError` to account for decoding errors beyond overflows and invalid strings
- if decoding errors occur, they are now returned directly instead of a generic `VerificationError` (see the sketch below)
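
A rough standalone sketch of what this split gives callers (it mirrors the new variants but is not the crate's actual module; `describe` is a hypothetical helper, and the `#[error]` messages and `ProofError` variant are omitted):

// Pared-down mirror of the post-commit QueryError: decoding problems now
// surface as dedicated variants instead of being folded into a generic
// verification error.
#[derive(Debug)]
enum QueryError {
    Overflow,
    InvalidString,
    MiscellaneousDecodingError,
    InvalidIndexes,
    MiscellaneousEvaluationError,
}

// Hypothetical helper: a caller of `evaluate` / `to_owned_table` can now tell
// the failure modes apart instead of seeing a single opaque error.
fn describe(err: &QueryError) -> &'static str {
    match err {
        QueryError::Overflow
        | QueryError::InvalidString
        | QueryError::MiscellaneousDecodingError => "the serialized result could not be decoded",
        QueryError::InvalidIndexes => "the result indexes are out of range or not sorted",
        QueryError::MiscellaneousEvaluationError => "evaluation failed (e.g. trailing bytes after the result)",
    }
}

fn main() {
    println!("{}", describe(&QueryError::InvalidIndexes));
}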
iajoiner committed Jun 19, 2024
1 parent 623df7d commit 0bef7a5
Showing 5 changed files with 109 additions and 97 deletions.
36 changes: 13 additions & 23 deletions crates/proof-of-sql/src/sql/proof/provable_query_result.rs
@@ -83,11 +83,11 @@ impl ProvableQueryResult {
evaluation_point: &[S],
table_length: usize,
column_result_fields: &[ColumnField],
- ) -> Option<Vec<S>> {
+ ) -> Result<Vec<S>, QueryError> {
assert_eq!(self.num_columns as usize, column_result_fields.len());

if !self.indexes.valid(table_length) {
- return None;
+ return Err(QueryError::InvalidIndexes);
}

let evaluation_vec_len = self
@@ -119,18 +119,17 @@ impl ProvableQueryResult {
decode_and_convert::<i64, S>(&self.data[offset..])
}
}?;

val += evaluation_vec[index as usize] * x;
offset += sz;
}
res.push(val);
}

if offset != self.data.len() {
- return None;
+ return Err(QueryError::MiscellaneousEvaluationError);
}

- Some(res)
+ Ok(res)
}

/// Convert the intermediate query result into a final query result
@@ -150,56 +149,47 @@ impl ProvableQueryResult {
.iter()
.map(|field| match field.data_type() {
ColumnType::Boolean => {
- let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)
-     .ok_or(QueryError::Overflow)?;
+ let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)?;
offset += num_read;
Ok((field.name(), OwnedColumn::Boolean(col)))
}
ColumnType::SmallInt => {
- let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)
-     .ok_or(QueryError::Overflow)?;
+ let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)?;
offset += num_read;
Ok((field.name(), OwnedColumn::SmallInt(col)))
}
ColumnType::Int => {
- let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)
-     .ok_or(QueryError::Overflow)?;
+ let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)?;
offset += num_read;
Ok((field.name(), OwnedColumn::Int(col)))
}
ColumnType::BigInt => {
- let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)
-     .ok_or(QueryError::Overflow)?;
+ let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)?;
offset += num_read;
Ok((field.name(), OwnedColumn::BigInt(col)))
}
ColumnType::Int128 => {
- let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)
-     .ok_or(QueryError::Overflow)?;
+ let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)?;
offset += num_read;
Ok((field.name(), OwnedColumn::Int128(col)))
}
ColumnType::VarChar => {
- let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)
-     .ok_or(QueryError::InvalidString)?;
+ let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)?;
offset += num_read;
Ok((field.name(), OwnedColumn::VarChar(col)))
}
ColumnType::Scalar => {
- let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)
-     .ok_or(QueryError::Overflow)?;
+ let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)?;
offset += num_read;
Ok((field.name(), OwnedColumn::Scalar(col)))
}
ColumnType::Decimal75(precision, scale) => {
- let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)
-     .ok_or(QueryError::Overflow)?;
+ let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)?;
offset += num_read;
Ok((field.name(), OwnedColumn::Decimal75(precision, scale, col)))
}
ColumnType::TimestampTZ(tu, tz) => {
- let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)
-     .ok_or(QueryError::Overflow)?;
+ let (col, num_read) = decode_multiple_elements(&self.data[offset..], n)?;
offset += num_read;
Ok((field.name(), OwnedColumn::TimestampTZ(tu, tz, col)))
}
75 changes: 41 additions & 34 deletions crates/proof-of-sql/src/sql/proof/provable_query_result_test.rs
@@ -1,4 +1,4 @@
- use super::{ProvableQueryResult, ProvableResultColumn};
+ use super::{ProvableQueryResult, ProvableResultColumn, QueryError};
use crate::{
base::{
database::{ColumnField, ColumnType},
@@ -206,9 +206,10 @@ fn evaluation_fails_if_indexes_are_out_of_range() {
compute_evaluation_vector(&mut evaluation_vec, &evaluation_point);
let column_fields =
vec![ColumnField::new("a".parse().unwrap(), ColumnType::BigInt); cols.len()];
- assert!(res
-     .evaluate(&evaluation_point, 4, &column_fields[..])
-     .is_none());
+ assert!(matches!(
+     res.evaluate(&evaluation_point, 4, &column_fields[..]),
+     Err(QueryError::InvalidIndexes)
+ ));
}

#[test]
@@ -225,9 +226,10 @@ fn evaluation_fails_if_indexes_are_not_sorted() {
compute_evaluation_vector(&mut evaluation_vec, &evaluation_point);
let column_fields =
vec![ColumnField::new("a".parse().unwrap(), ColumnType::BigInt); cols.len()];
- assert!(res
-     .evaluate(&evaluation_point, 4, &column_fields[..])
-     .is_none());
+ assert!(matches!(
+     res.evaluate(&evaluation_point, 4, &column_fields[..]),
+     Err(QueryError::InvalidIndexes)
+ ));
}

#[test]
@@ -245,9 +247,10 @@ fn evaluation_fails_if_extra_data_is_included() {
compute_evaluation_vector(&mut evaluation_vec, &evaluation_point);
let column_fields =
vec![ColumnField::new("a".parse().unwrap(), ColumnType::BigInt); cols.len()];
- assert!(res
-     .evaluate(&evaluation_point, 4, &column_fields[..])
-     .is_none());
+ assert!(matches!(
+     res.evaluate(&evaluation_point, 4, &column_fields[..]),
+     Err(QueryError::MiscellaneousEvaluationError)
+ ));
}

#[test]
@@ -266,9 +269,30 @@ fn evaluation_fails_if_the_result_cant_be_decoded() {
compute_evaluation_vector(&mut evaluation_vec, &evaluation_point);
let column_fields =
vec![ColumnField::new("a".parse().unwrap(), ColumnType::BigInt); res.num_columns()];
- assert!(res
-     .evaluate(&evaluation_point, 4, &column_fields[..])
-     .is_none());
+ assert!(matches!(
+     res.evaluate(&evaluation_point, 4, &column_fields[..]),
+     Err(QueryError::Overflow)
+ ));
}

+ #[test]
+ fn evaluation_fails_if_integer_overflow_happens() {
+     let indexes = Indexes::Sparse(vec![0, 2]);
+     let values: [i64; 3] = [i32::MAX as i64 + 1_i64, 11, 12];
+     let cols: [Box<dyn ProvableResultColumn>; 1] = [Box::new(values)];
+     let res = ProvableQueryResult::new(&indexes, &cols);
+     let evaluation_point = [
+         Curve25519Scalar::from(10u64),
+         Curve25519Scalar::from(100u64),
+     ];
+     let mut evaluation_vec = [Curve25519Scalar::ZERO; 4];
+     compute_evaluation_vector(&mut evaluation_vec, &evaluation_point);
+     let column_fields =
+         vec![ColumnField::new("a".parse().unwrap(), ColumnType::Int); res.num_columns()];
+     assert!(matches!(
+         res.evaluate(&evaluation_point, 4, &column_fields[..]),
+         Err(QueryError::Overflow)
+     ));
+ }

#[test]
@@ -286,9 +310,10 @@ fn evaluation_fails_if_data_is_missing() {
compute_evaluation_vector(&mut evaluation_vec, &evaluation_point);
let column_fields =
vec![ColumnField::new("a".parse().unwrap(), ColumnType::BigInt); res.num_columns()];
- assert!(res
-     .evaluate(&evaluation_point, 4, &column_fields[..])
-     .is_none());
+ assert!(matches!(
+     res.evaluate(&evaluation_point, 4, &column_fields[..]),
+     Err(QueryError::Overflow)
+ ));
}

#[test]
@@ -406,7 +431,6 @@ fn we_can_convert_a_provable_result_to_a_final_result_with_mixed_data_types() {
.unwrap();
let column_fields: Vec<Field> = column_fields.iter().map(|v| v.into()).collect();
let schema = Arc::new(Schema::new(column_fields));
- println!("{:?}", res);
let expected_res = RecordBatch::try_new(
schema,
vec![
@@ -446,20 +470,3 @@ fn we_cannot_convert_a_provable_result_with_invalid_string_data() {
.to_owned_table::<Curve25519Scalar>(&column_fields)
.is_err());
}

- // TODO: we don't correctly detect overflow yet
- // #[test]
- // #[should_panic]
- // fn we_can_detect_overflow() {
- //     let indexes = [0];
- //     let values = [i64::MAX];
- //     let cols : [Box<dyn ProvableResultColumn>; 1] = [
- //         Box::new(values),
- //     ];
- //     let res = ProvableQueryResult::new(&
- //         &indexes,
- //         &cols,
- //     );
- //     let column_fields = vec![ColumnField::new("a1".parse().unwrap(), ColumnType::BigInt)];
- //     let res = res.into_query_result(&column_fields).unwrap();
- // }
9 changes: 2 additions & 7 deletions crates/proof-of-sql/src/sql/proof/query_proof.rs
@@ -242,16 +242,11 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
let column_result_fields = expr.get_column_result_fields();

// compute the evaluation of the result MLEs
- let result_evaluations = match result.evaluate(
+ let result_evaluations = result.evaluate(
&subclaim.evaluation_point,
table_length,
&column_result_fields[..],
- ) {
-     Some(evaluations) => evaluations,
-     _ => Err(ProofError::VerificationError(
-         "failed to evaluate intermediate result MLEs",
-     ))?,
- };
+ )?;

// pass over the provable AST to fill in the verification builder
let sumcheck_evaluations = SumcheckMleEvaluations::new(
9 changes: 9 additions & 0 deletions crates/proof-of-sql/src/sql/proof/query_result.rs
@@ -17,6 +17,15 @@ pub enum QueryError {
/// This just means that the database was supposed to respond with a string that was not valid UTF-8.
#[error("String decode error")]
InvalidString,
+ /// Decoding errors other than overflow and invalid string.
+ #[error("Miscellaneous decoding error")]
+ MiscellaneousDecodingError,
+ /// Indexes are invalid.
+ #[error("Invalid indexes")]
+ InvalidIndexes,
+ /// Miscellaneous evaluation error.
+ #[error("Miscellaneous evaluation error")]
+ MiscellaneousEvaluationError,
/// The proof failed to verify.
#[error(transparent)]
ProofError(#[from] ProofError),
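
The `#[error(transparent)] ProofError(#[from] ProofError)` variant shown above is part of what lets the verifier in query_proof.rs simply write `result.evaluate(...)?`: a `ProofError` converts into a `QueryError` automatically, so decoding failures and proof failures can flow through one error type. A minimal standalone illustration of that thiserror pattern (the function names, arguments, and error text here are invented for the example; only the two error type names mirror the crate):

use thiserror::Error; // assumes thiserror as a dependency, as the crate itself uses

#[derive(Debug, Error)]
enum ProofError {
    #[error("Verification error: {0}")]
    VerificationError(&'static str),
}

#[derive(Debug, Error)]
enum QueryError {
    #[error("Invalid indexes")]
    InvalidIndexes,
    // #[from] generates `impl From<ProofError> for QueryError`, so `?` can
    // convert a ProofError into a QueryError without an explicit match.
    #[error(transparent)]
    ProofError(#[from] ProofError),
}

// Invented verifier-style function: both error families propagate with `?`.
fn verify_sketch(indexes_ok: bool, proof_ok: bool) -> Result<(), QueryError> {
    if !indexes_ok {
        return Err(QueryError::InvalidIndexes);
    }
    check_proof(proof_ok)?; // ProofError -> QueryError via the generated From impl
    Ok(())
}

fn check_proof(ok: bool) -> Result<(), ProofError> {
    if ok {
        Ok(())
    } else {
        Err(ProofError::VerificationError("sumcheck failed"))
    }
}

fn main() {
    assert!(verify_sketch(true, true).is_ok());
    assert!(matches!(
        verify_sketch(true, false),
        Err(QueryError::ProofError(_))
    ));
}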
