diff --git a/native/spark-expr/src/math_funcs/ceil.rs b/native/spark-expr/src/math_funcs/ceil.rs
index 2d6b84ab32..64e138fded 100644
--- a/native/spark-expr/src/math_funcs/ceil.rs
+++ b/native/spark-expr/src/math_funcs/ceil.rs
@@ -81,3 +81,166 @@ fn decimal_ceil_f(scale: &i8) -> impl Fn(i128) -> i128 {
     let div = 10_i128.pow_wrapping(*scale as u32);
     move |x: i128| div_ceil(x, div)
 }
+
+#[cfg(test)]
+mod test {
+    use crate::spark_ceil;
+    use arrow::array::{Decimal128Array, Float32Array, Float64Array, Int64Array};
+    use arrow::datatypes::DataType;
+    use datafusion::common::cast::as_int64_array;
+    use datafusion::common::{Result, ScalarValue};
+    use datafusion::physical_plan::ColumnarValue;
+    use std::sync::Arc;
+
+    #[test]
+    fn test_ceil_f32_array() -> Result<()> {
+        let input = Float32Array::from(vec![
+            Some(125.2345),
+            Some(15.0001),
+            Some(0.1),
+            Some(-0.9),
+            Some(-1.1),
+            Some(123.0),
+            None,
+        ]);
+        let args = vec![ColumnarValue::Array(Arc::new(input))];
+        let ColumnarValue::Array(result) = spark_ceil(&args, &DataType::Float32)? else {
+            unreachable!()
+        };
+        let actual = as_int64_array(&result)?;
+        let expected = Int64Array::from(vec![
+            Some(126),
+            Some(16),
+            Some(1),
+            Some(0),
+            Some(-1),
+            Some(123),
+            None,
+        ]);
+        assert_eq!(actual, &expected);
+        Ok(())
+    }
+
+    #[test]
+    fn test_ceil_f64_array() -> Result<()> {
+        let input = Float64Array::from(vec![
+            Some(125.2345),
+            Some(15.0001),
+            Some(0.1),
+            Some(-0.9),
+            Some(-1.1),
+            Some(123.0),
+            None,
+        ]);
+        let args = vec![ColumnarValue::Array(Arc::new(input))];
+        let ColumnarValue::Array(result) = spark_ceil(&args, &DataType::Float64)? else {
+            unreachable!()
+        };
+        let actual = as_int64_array(&result)?;
+        let expected = Int64Array::from(vec![
+            Some(126),
+            Some(16),
+            Some(1),
+            Some(0),
+            Some(-1),
+            Some(123),
+            None,
+        ]);
+        assert_eq!(actual, &expected);
+        Ok(())
+    }
+
+    #[test]
+    fn test_ceil_i64_array() -> Result<()> {
+        let input = Int64Array::from(vec![Some(-1), Some(0), Some(1), None]);
+        let args = vec![ColumnarValue::Array(Arc::new(input))];
+        let ColumnarValue::Array(result) = spark_ceil(&args, &DataType::Int64)? else {
+            unreachable!()
+        };
+        let actual = as_int64_array(&result)?;
+        let expected = Int64Array::from(vec![Some(-1), Some(0), Some(1), None]);
+        assert_eq!(actual, &expected);
+        Ok(())
+    }
+
+    // https://github.com/apache/datafusion-comet/issues/1729
+    #[test]
+    #[ignore]
+    fn test_ceil_decimal128_array() -> Result<()> {
+        let array = Decimal128Array::from(vec![
+            Some(12345),  // 123.45
+            Some(12500),  // 125.00
+            Some(-12999), // -129.99
+            None,
+        ])
+        .with_precision_and_scale(5, 2)?;
+        let args = vec![ColumnarValue::Array(Arc::new(array))];
+        let ColumnarValue::Array(result) = spark_ceil(&args, &DataType::Decimal128(5, 2))? else {
+            unreachable!()
+        };
+        let expected = Decimal128Array::from(vec![
+            Some(12400),  // 124.00
+            Some(12500),  // 125.00
+            Some(-12900), // -129.00
+            None,
+        ])
+        .with_precision_and_scale(5, 2)?;
+        let actual = result.as_any().downcast_ref::<Decimal128Array>().unwrap();
+        assert_eq!(actual, &expected);
+        Ok(())
+    }
+
+    #[test]
+    fn test_ceil_f32_scalar() -> Result<()> {
+        let args = vec![ColumnarValue::Scalar(ScalarValue::Float32(Some(125.2345)))];
+        let ColumnarValue::Scalar(ScalarValue::Int64(Some(result))) =
+            spark_ceil(&args, &DataType::Float32)?
+        else {
+            unreachable!()
+        };
+        assert_eq!(result, 126);
+        Ok(())
+    }
+
+    #[test]
+    fn test_ceil_f64_scalar() -> Result<()> {
+        let args = vec![ColumnarValue::Scalar(ScalarValue::Float64(Some(-1.1)))];
+        let ColumnarValue::Scalar(ScalarValue::Int64(Some(result))) =
+            spark_ceil(&args, &DataType::Float64)?
+        else {
+            unreachable!()
+        };
+        assert_eq!(result, -1);
+        Ok(())
+    }
+
+    #[test]
+    fn test_ceil_i64_scalar() -> Result<()> {
+        let args = vec![ColumnarValue::Scalar(ScalarValue::Int64(Some(48)))];
+        let ColumnarValue::Scalar(ScalarValue::Int64(Some(result))) =
+            spark_ceil(&args, &DataType::Int64)?
+        else {
+            unreachable!()
+        };
+        assert_eq!(result, 48);
+        Ok(())
+    }
+
+    // https://github.com/apache/datafusion-comet/issues/1729
+    #[test]
+    #[ignore]
+    fn test_ceil_decimal128_scalar() -> Result<()> {
+        let args = vec![ColumnarValue::Scalar(ScalarValue::Decimal128(
+            Some(567),
+            3,
+            1,
+        ))]; // 56.7
+        let ColumnarValue::Scalar(ScalarValue::Decimal128(Some(result), 3, 1)) =
+            spark_ceil(&args, &DataType::Decimal128(3, 1))?
+        else {
+            unreachable!()
+        };
+        assert_eq!(result, 570); // 57.0
+        Ok(())
+    }
+}
diff --git a/native/spark-expr/src/math_funcs/floor.rs b/native/spark-expr/src/math_funcs/floor.rs
index 5547d78a27..db2015213f 100644
--- a/native/spark-expr/src/math_funcs/floor.rs
+++ b/native/spark-expr/src/math_funcs/floor.rs
@@ -81,3 +81,166 @@ fn decimal_floor_f(scale: &i8) -> impl Fn(i128) -> i128 {
     let div = 10_i128.pow_wrapping(*scale as u32);
     move |x: i128| div_floor(x, div)
 }
+
+#[cfg(test)]
+mod test {
+    use crate::spark_floor;
+    use arrow::array::{Decimal128Array, Float32Array, Float64Array, Int64Array};
+    use arrow::datatypes::DataType;
+    use datafusion::common::cast::as_int64_array;
+    use datafusion::common::{Result, ScalarValue};
+    use datafusion::physical_plan::ColumnarValue;
+    use std::sync::Arc;
+
+    #[test]
+    fn test_floor_f32_array() -> Result<()> {
+        let input = Float32Array::from(vec![
+            Some(125.9345),
+            Some(15.9999),
+            Some(0.9),
+            Some(-0.1),
+            Some(-1.999),
+            Some(123.0),
+            None,
+        ]);
+        let args = vec![ColumnarValue::Array(Arc::new(input))];
+        let ColumnarValue::Array(result) = spark_floor(&args, &DataType::Float32)? else {
+            unreachable!()
+        };
+        let actual = as_int64_array(&result)?;
+        let expected = Int64Array::from(vec![
+            Some(125),
+            Some(15),
+            Some(0),
+            Some(-1),
+            Some(-2),
+            Some(123),
+            None,
+        ]);
+        assert_eq!(actual, &expected);
+        Ok(())
+    }
+
+    #[test]
+    fn test_floor_f64_array() -> Result<()> {
+        let input = Float64Array::from(vec![
+            Some(125.9345),
+            Some(15.9999),
+            Some(0.9),
+            Some(-0.1),
+            Some(-1.999),
+            Some(123.0),
+            None,
+        ]);
+        let args = vec![ColumnarValue::Array(Arc::new(input))];
+        let ColumnarValue::Array(result) = spark_floor(&args, &DataType::Float64)? else {
+            unreachable!()
+        };
+        let actual = as_int64_array(&result)?;
+        let expected = Int64Array::from(vec![
+            Some(125),
+            Some(15),
+            Some(0),
+            Some(-1),
+            Some(-2),
+            Some(123),
+            None,
+        ]);
+        assert_eq!(actual, &expected);
+        Ok(())
+    }
+
+    #[test]
+    fn test_floor_i64_array() -> Result<()> {
+        let input = Int64Array::from(vec![Some(-1), Some(0), Some(1), None]);
+        let args = vec![ColumnarValue::Array(Arc::new(input))];
+        let ColumnarValue::Array(result) = spark_floor(&args, &DataType::Int64)? else {
+            unreachable!()
+        };
+        let actual = as_int64_array(&result)?;
+        let expected = Int64Array::from(vec![Some(-1), Some(0), Some(1), None]);
+        assert_eq!(actual, &expected);
+        Ok(())
+    }
+
+    // https://github.com/apache/datafusion-comet/issues/1729
+    #[test]
+    #[ignore]
+    fn test_floor_decimal128_array() -> Result<()> {
+        let array = Decimal128Array::from(vec![
+            Some(12345),  // 123.45
+            Some(12500),  // 125.00
+            Some(-12999), // -129.99
+            None,
+        ])
+        .with_precision_and_scale(5, 2)?;
+        let args = vec![ColumnarValue::Array(Arc::new(array))];
+        let ColumnarValue::Array(result) = spark_floor(&args, &DataType::Decimal128(5, 2))? else {
+            unreachable!()
+        };
+        let expected = Decimal128Array::from(vec![
+            Some(12300),  // 123.00
+            Some(12500),  // 125.00
+            Some(-13000), // -130.00
+            None,
+        ])
+        .with_precision_and_scale(5, 2)?;
+        let actual = result.as_any().downcast_ref::<Decimal128Array>().unwrap();
+        assert_eq!(actual, &expected);
+        Ok(())
+    }
+
+    #[test]
+    fn test_floor_f32_scalar() -> Result<()> {
+        let args = vec![ColumnarValue::Scalar(ScalarValue::Float32(Some(125.9345)))];
+        let ColumnarValue::Scalar(ScalarValue::Int64(Some(result))) =
+            spark_floor(&args, &DataType::Float32)?
+        else {
+            unreachable!()
+        };
+        assert_eq!(result, 125);
+        Ok(())
+    }
+
+    #[test]
+    fn test_floor_f64_scalar() -> Result<()> {
+        let args = vec![ColumnarValue::Scalar(ScalarValue::Float64(Some(-1.999)))];
+        let ColumnarValue::Scalar(ScalarValue::Int64(Some(result))) =
+            spark_floor(&args, &DataType::Float64)?
+        else {
+            unreachable!()
+        };
+        assert_eq!(result, -2);
+        Ok(())
+    }
+
+    #[test]
+    fn test_floor_i64_scalar() -> Result<()> {
+        let args = vec![ColumnarValue::Scalar(ScalarValue::Int64(Some(48)))];
+        let ColumnarValue::Scalar(ScalarValue::Int64(Some(result))) =
+            spark_floor(&args, &DataType::Int64)?
+        else {
+            unreachable!()
+        };
+        assert_eq!(result, 48);
+        Ok(())
+    }
+
+    // https://github.com/apache/datafusion-comet/issues/1729
+    #[test]
+    #[ignore]
+    fn test_floor_decimal128_scalar() -> Result<()> {
+        let args = vec![ColumnarValue::Scalar(ScalarValue::Decimal128(
+            Some(567),
+            3,
+            1,
+        ))]; // 56.7
+        let ColumnarValue::Scalar(ScalarValue::Decimal128(Some(result), 3, 1)) =
+            spark_floor(&args, &DataType::Decimal128(3, 1))?
+        else {
+            unreachable!()
+        };
+        assert_eq!(result, 560); // 56.0
+        Ok(())
+    }
+}