diff --git a/crates/deltalake-core/src/operations/mod.rs b/crates/deltalake-core/src/operations/mod.rs
index 5eef40698f..fb0f25d379 100644
--- a/crates/deltalake-core/src/operations/mod.rs
+++ b/crates/deltalake-core/src/operations/mod.rs
@@ -7,13 +7,13 @@
 //! with a [data stream][datafusion::physical_plan::SendableRecordBatchStream],
 //! if the operation returns data as well.
 
-use std::collections::{HashMap};
 use self::create::CreateBuilder;
 use self::filesystem_check::FileSystemCheckBuilder;
 use self::vacuum::VacuumBuilder;
 use crate::errors::{DeltaResult, DeltaTableError};
 use crate::table::builder::DeltaTableBuilder;
 use crate::DeltaTable;
+use std::collections::HashMap;
 
 #[cfg(all(feature = "arrow", feature = "parquet"))]
 pub mod convert_to_delta;
@@ -73,10 +73,12 @@ impl DeltaOps {
             Err(err) => Err(err),
         }
     }
 
-
     /// try from uri with storage options
-    pub async fn try_from_uri_with_storage_options(uri: impl AsRef<str>, storage_options: HashMap<String, String>) -> DeltaResult<Self> {
+    pub async fn try_from_uri_with_storage_options(
+        uri: impl AsRef<str>,
+        storage_options: HashMap<String, String>,
+    ) -> DeltaResult<Self> {
         let mut table = DeltaTableBuilder::from_uri(uri)
             .with_storage_options(storage_options)
             .build()?;
diff --git a/crates/deltalake-core/src/operations/write.rs b/crates/deltalake-core/src/operations/write.rs
index fdc6f82faa..f38b44fc69 100644
--- a/crates/deltalake-core/src/operations/write.rs
+++ b/crates/deltalake-core/src/operations/write.rs
@@ -362,7 +362,9 @@ impl std::future::IntoFuture for WriteBuilder {
                         .or_else(|_| this.snapshot.arrow_schema())
                         .unwrap_or(schema.clone());
 
-                    if !can_cast_batch(schema.fields(), table_schema.fields()) && !this.overwrite_schema {
+                    if !can_cast_batch(schema.fields(), table_schema.fields())
+                        && !this.overwrite_schema
+                    {
                         return Err(DeltaTableError::Generic(
                             "Schema of data does not match table schema".to_string(),
                         ));
diff --git a/crates/deltalake-core/src/writer/utils.rs b/crates/deltalake-core/src/writer/utils.rs
index 49c3c6bfee..5aa1ca4347 100644
--- a/crates/deltalake-core/src/writer/utils.rs
+++ b/crates/deltalake-core/src/writer/utils.rs
@@ -8,10 +8,10 @@ use arrow::array::{
     as_boolean_array, as_generic_binary_array, as_primitive_array, as_string_array, Array,
 };
 use arrow::datatypes::{
-    DataType, Date32Type, Date64Type, Int16Type, Int32Type, Int64Type, Int8Type,
-    Schema as ArrowSchema, SchemaRef as ArrowSchemaRef, TimeUnit, TimestampMicrosecondType,
-    TimestampMillisecondType, TimestampNanosecondType, TimestampSecondType, UInt16Type, UInt32Type,
-    UInt64Type, UInt8Type,
+    DataType, Date32Type, Date64Type, Float32Type, Float64Type, Int16Type, Int32Type, Int64Type,
+    Int8Type, Schema as ArrowSchema, SchemaRef as ArrowSchemaRef, TimeUnit,
+    TimestampMicrosecondType, TimestampMillisecondType, TimestampNanosecondType,
+    TimestampSecondType, UInt16Type, UInt32Type, UInt64Type, UInt8Type,
 };
 use arrow::json::ReaderBuilder;
 use arrow::record_batch::*;
@@ -184,6 +184,8 @@ pub(crate) fn stringified_partition_value(
         DataType::UInt16 => as_primitive_array::<UInt16Type>(arr).value(0).to_string(),
         DataType::UInt32 => as_primitive_array::<UInt32Type>(arr).value(0).to_string(),
         DataType::UInt64 => as_primitive_array::<UInt64Type>(arr).value(0).to_string(),
+        DataType::Float32 => as_primitive_array::<Float32Type>(arr).value(0).to_string(),
+        DataType::Float64 => as_primitive_array::<Float64Type>(arr).value(0).to_string(),
         DataType::Utf8 => as_string_array(arr).value(0).to_string(),
         DataType::Boolean => as_boolean_array(arr).value(0).to_string(),
         DataType::Date32 => as_primitive_array::<Date32Type>(arr)
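
Not part of the diff above — a minimal sketch of how the touched pieces fit together, assuming the `deltalake_core` public paths and builder methods current at the time of this change (`DeltaOps::write`, `WriteBuilder::with_partition_columns`); the table URI and storage option are placeholders. It opens a table through the reformatted `try_from_uri_with_storage_options` constructor and writes a batch partitioned by a `Float64` column, the case the new `stringified_partition_value` arms in `writer/utils.rs` now cover.

```rust
use std::collections::HashMap;
use std::sync::Arc;

use arrow::array::{Float64Array, StringArray};
use arrow::datatypes::{DataType, Field, Schema};
use arrow::record_batch::RecordBatch;
use deltalake_core::operations::DeltaOps;
use deltalake_core::DeltaResult;

// Hypothetical helper for illustration; the URI and credentials are placeholders.
async fn write_float_partitioned(uri: &str) -> DeltaResult<()> {
    // Storage options are forwarded to the underlying object store builder.
    let mut storage_options = HashMap::new();
    storage_options.insert("AWS_REGION".to_string(), "us-east-1".to_string());

    // The constructor whose signature was reformatted in operations/mod.rs.
    let ops = DeltaOps::try_from_uri_with_storage_options(uri, storage_options).await?;

    // A batch whose partition column is Float64 — the type the new match arms stringify.
    let schema = Arc::new(Schema::new(vec![
        Field::new("value", DataType::Utf8, false),
        Field::new("score", DataType::Float64, false),
    ]));
    let batch = RecordBatch::try_new(
        schema,
        vec![
            Arc::new(StringArray::from(vec!["a", "b"])),
            Arc::new(Float64Array::from(vec![0.5, 0.5])),
        ],
    )?;

    let _table = ops
        .write(vec![batch])
        .with_partition_columns(["score"])
        .await?;

    Ok(())
}
```

Since partition values are rendered with `to_string()`, a `Float64` value such as `0.5` would end up in the partition path as `score=0.5`, the same way the existing integer and boolean arms behave.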