diff --git a/crates/proof-of-sql/src/base/commitment/vec_commitment_ext.rs b/crates/proof-of-sql/src/base/commitment/vec_commitment_ext.rs index e25e6952d..56c2dc72b 100644 --- a/crates/proof-of-sql/src/base/commitment/vec_commitment_ext.rs +++ b/crates/proof-of-sql/src/base/commitment/vec_commitment_ext.rs @@ -427,8 +427,6 @@ mod tests { fn we_can_sub_commitment_collections() { let column_a = [12i64, 34, 56, 78, 90]; let column_b = ["Lorem", "ipsum", "dolor", "sit", "amet"].map(String::from); - let front_emptied_column_a = [0i64, 0, 0, 78, 90]; - let front_emptied_column_b = ["", "", "", "sit", "amet"].map(String::from); let columns = vec![ OwnedColumn::::BigInt(column_a[..3].to_vec()), @@ -447,18 +445,17 @@ mod tests { let commitments = commitments_b.try_sub(commitments_a).unwrap(); let committable_columns = [ - CommittableColumn::BigInt(&front_emptied_column_a), + CommittableColumn::BigInt(&column_a[3..]), CommittableColumn::VarChar( - front_emptied_column_b + column_b[3..] .iter() .map(Into::::into) .map(Into::<[u64; 4]>::into) .collect(), ), ]; - let expected_commitments = - NaiveCommitment::compute_commitments(&committable_columns, 0, &()); + NaiveCommitment::compute_commitments(&committable_columns, 3, &()); assert_eq!(commitments, expected_commitments); } diff --git a/crates/proof-of-sql/src/base/database/arrow_array_to_column_conversion.rs b/crates/proof-of-sql/src/base/database/arrow_array_to_column_conversion.rs index ae5bc2556..93da3da23 100644 --- a/crates/proof-of-sql/src/base/database/arrow_array_to_column_conversion.rs +++ b/crates/proof-of-sql/src/base/database/arrow_array_to_column_conversion.rs @@ -57,17 +57,6 @@ pub enum ArrowArrayToColumnConversionError { /// This trait is used to provide utility functions to convert [`ArrayRef`]s into proof types (Column, Scalars, etc.) pub trait ArrayRefExt { - /// Convert an [`ArrayRef`] into a Proof of SQL Vec - /// - /// Note: this function must not be called from unsupported arrays or arrays with nulls. 
- /// It should only be used during testing. - #[cfg(any(test, feature = "test"))] - #[cfg(feature = "blitzar")] - #[cfg(test)] - fn to_test_scalars( - &self, - ) -> Result, ArrowArrayToColumnConversionError>; - /// Convert an [`ArrayRef`] into a Proof of SQL Column type /// /// Parameters: @@ -90,97 +79,6 @@ pub trait ArrayRefExt { } impl ArrayRefExt for ArrayRef { - #[cfg(any(test, feature = "test"))] - #[cfg(feature = "blitzar")] - #[cfg(test)] - fn to_test_scalars( - &self, - ) -> Result, ArrowArrayToColumnConversionError> - { - if self.null_count() != 0 { - return Err(ArrowArrayToColumnConversionError::ArrayContainsNulls); - } - - let result = match self.data_type() { - DataType::Boolean => self.as_any().downcast_ref::().map(|array| { - array - .iter() - .map(|v| { - v.ok_or(ArrowArrayToColumnConversionError::ArrayContainsNulls) - .map(Into::into) - }) - .collect() - }), - DataType::Int16 => self - .as_any() - .downcast_ref::() - .map(|array| array.values().iter().map(|v| Ok((*v).into())).collect()), - DataType::Int32 => self - .as_any() - .downcast_ref::() - .map(|array| array.values().iter().map(|v| Ok((*v).into())).collect()), - DataType::Int64 => self - .as_any() - .downcast_ref::() - .map(|array| array.values().iter().map(|v| Ok((*v).into())).collect()), - DataType::Decimal128(38, 0) => self - .as_any() - .downcast_ref::() - .map(|array| array.values().iter().map(|v| Ok((*v).into())).collect()), - DataType::Decimal256(_, _) => { - self.as_any() - .downcast_ref::() - .map(|array| { - array - .values() - .iter() - .map(|v| { - convert_i256_to_scalar(v).ok_or( - ArrowArrayToColumnConversionError::DecimalConversionFailed { - number: *v, - }, - ) - }) - .collect() - }) - } - DataType::Utf8 => self.as_any().downcast_ref::().map(|array| { - array - .iter() - .map(|v| { - v.ok_or(ArrowArrayToColumnConversionError::ArrayContainsNulls) - .map(Into::into) - }) - .collect() - }), - DataType::Timestamp(time_unit, _) => match time_unit { - ArrowTimeUnit::Second => self 
- .as_any() - .downcast_ref::() - .map(|array| array.values().iter().map(|v| Ok((*v).into())).collect()), - ArrowTimeUnit::Millisecond => self - .as_any() - .downcast_ref::() - .map(|array| array.values().iter().map(|v| Ok((*v).into())).collect()), - ArrowTimeUnit::Microsecond => self - .as_any() - .downcast_ref::() - .map(|array| array.values().iter().map(|v| Ok((*v).into())).collect()), - ArrowTimeUnit::Nanosecond => self - .as_any() - .downcast_ref::() - .map(|array| array.values().iter().map(|v| Ok((*v).into())).collect()), - }, - _ => None, - }; - - result.unwrap_or_else(|| { - Err(ArrowArrayToColumnConversionError::UnsupportedType { - datatype: self.data_type().clone(), - }) - }) - } - /// Converts the given `ArrowArray` into a [`Column`] data type based on its [`DataType`]. Returns an /// empty [`Column`] for any empty tange if it is in-bounds. /// @@ -1239,81 +1137,4 @@ mod tests { Column::TimestampTZ(PoSQLTimeUnit::Second, PoSQLTimeZone::Utc, &[]) ); } - - #[test] - fn we_can_convert_valid_boolean_array_refs_into_valid_vec_scalars() { - let data = vec![false, true]; - let array: ArrayRef = Arc::new(arrow::array::BooleanArray::from(data.clone())); - assert_eq!( - array.to_test_scalars(), - Ok(data - .iter() - .map(core::convert::Into::into) - .collect::>()) - ); - } - - #[test] - fn we_can_convert_valid_timestamp_array_refs_into_valid_vec_scalars() { - let data = vec![1_625_072_400, 1_625_076_000]; // Example Unix timestamps - let array: ArrayRef = Arc::new(TimestampSecondArray::with_timezone_opt( - data.clone().into(), - Some("Utc"), - )); - - assert_eq!( - array.to_test_scalars(), - Ok(data - .iter() - .map(|&v| TestScalar::from(v)) - .collect::>()) - ); - } - - #[test] - fn we_can_convert_valid_integer_array_refs_into_valid_vec_scalars() { - let data = vec![1, -3]; - - let array: ArrayRef = Arc::new(Int16Array::from(data.clone())); - assert_eq!( - array.to_test_scalars(), - Ok(data - .iter() - .map(core::convert::Into::into) - .collect::>()) - ); - - let 
data = vec![1, -3]; - let array: ArrayRef = Arc::new(Int32Array::from(data.clone())); - assert_eq!( - array.to_test_scalars(), - Ok(data - .iter() - .map(core::convert::Into::into) - .collect::>()) - ); - - let data = vec![1, -3]; - let array: ArrayRef = Arc::new(Int64Array::from(data.clone())); - assert_eq!( - array.to_test_scalars(), - Ok(data - .iter() - .map(core::convert::Into::into) - .collect::>()) - ); - } - - #[test] - fn we_can_convert_valid_string_array_refs_into_valid_vec_scalars() { - let data = vec!["ab", "-f34"]; - let array: ArrayRef = Arc::new(arrow::array::StringArray::from(data.clone())); - assert_eq!( - array.to_test_scalars(), - Ok(data - .iter() - .map(core::convert::Into::into) - .collect::>()) - ); - } } diff --git a/crates/proof-of-sql/src/base/database/column.rs b/crates/proof-of-sql/src/base/database/column.rs index 1116b864a..77b4e928e 100644 --- a/crates/proof-of-sql/src/base/database/column.rs +++ b/crates/proof-of-sql/src/base/database/column.rs @@ -41,7 +41,7 @@ pub enum Column<'a, S: Scalar> { /// i128 columns Int128(&'a [i128]), /// Decimal columns with a max width of 252 bits - /// - the backing store maps to the type [`crate::base::scalar::test_scalar::TestScalar`] + /// - the backing store maps to the type `S` Decimal75(Precision, i8, &'a [S]), /// Scalar columns Scalar(&'a [S]), @@ -263,7 +263,7 @@ pub enum ColumnType { /// Mapped to i64 #[serde(alias = "TIMESTAMP", alias = "timestamp")] TimestampTZ(PoSQLTimeUnit, PoSQLTimeZone), - /// Mapped to [`TestScalar`](crate::base::scalar::test_scalar::TestScalar) + /// Mapped to `S` #[serde(alias = "SCALAR", alias = "scalar")] Scalar, } diff --git a/scripts/run_ci_checks.sh b/scripts/run_ci_checks.sh index 434508366..6d1db5c35 100755 --- a/scripts/run_ci_checks.sh +++ b/scripts/run_ci_checks.sh @@ -60,4 +60,4 @@ if [ "$failed_tests" -gt 0 ]; then echo "Error: $failed_tests CI checks (excluding tests and udeps) have FAILED." 
else echo "All CI checks (excluding tests and udeps) have completed successfully." -fi +fi