Clippy lints 1.75 (#5257)
tustvold authored Dec 29, 2023
1 parent 31b865e commit cbb2f34
Showing 10 changed files with 47 additions and 64 deletions.
10 changes: 6 additions & 4 deletions arrow-data/src/equal/run.rs
@@ -43,11 +43,13 @@ pub(super) fn run_equal(
return false;
}

let lhs_run_ends_array = lhs.child_data().get(0).unwrap();
let lhs_values_array = lhs.child_data().get(1).unwrap();
let lhs_child_data = lhs.child_data();
let lhs_run_ends_array = &lhs_child_data[0];
let lhs_values_array = &lhs_child_data[1];

let rhs_run_ends_array = rhs.child_data().get(0).unwrap();
let rhs_values_array = rhs.child_data().get(1).unwrap();
let rhs_child_data = rhs.child_data();
let rhs_run_ends_array = &rhs_child_data[0];
let rhs_values_array = &rhs_child_data[1];

if lhs_run_ends_array.len() != rhs_run_ends_array.len() {
return false;
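
The hunk above replaces the `.get(0).unwrap()` / `.get(1).unwrap()` calls with a single binding of the child-data slice followed by plain indexing, the replacement Clippy suggests for this pattern (its `get_unwrap` lint), which also avoids calling the accessor repeatedly. A minimal standalone sketch of the same shape; `Node` and `children()` are made-up stand-ins, not arrow-rs types:

    struct Node {
        children: Vec<u32>,
    }

    impl Node {
        // Accessor returning a slice, analogous in shape to `ArrayData::child_data`.
        fn children(&self) -> &[u32] {
            &self.children
        }
    }

    fn first_two(node: &Node) -> (u32, u32) {
        // Before: node.children().get(0).unwrap() and node.children().get(1).unwrap()
        // After: bind the slice once, then index. Indexing still panics when the
        // slice is too short, so behaviour on malformed input is unchanged.
        let children = node.children();
        (children[0], children[1])
    }
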
8 changes: 4 additions & 4 deletions arrow-data/src/equal/utils.rs
@@ -67,10 +67,10 @@ pub(super) fn base_equal(lhs: &ArrayData, rhs: &ArrayData) -> bool {
(DataType::Struct(l_fields), DataType::Struct(r_fields))
if l_fields.len() == 2 && r_fields.len() == 2 =>
{
let l_key_field = l_fields.get(0).unwrap();
let r_key_field = r_fields.get(0).unwrap();
let l_value_field = l_fields.get(1).unwrap();
let r_value_field = r_fields.get(1).unwrap();
let l_key_field = &l_fields[0];
let r_key_field = &r_fields[0];
let l_value_field = &l_fields[1];
let r_value_field = &r_fields[1];

// We don't enforce the equality of field names
let data_type_equal = l_key_field.data_type() == r_key_field.data_type()
2 changes: 1 addition & 1 deletion arrow-flight/src/utils.rs
@@ -53,7 +53,7 @@ pub fn flight_data_from_arrow_batch(

/// Convert a slice of wire protocol `FlightData`s into a vector of `RecordBatch`es
pub fn flight_data_to_batches(flight_data: &[FlightData]) -> Result<Vec<RecordBatch>, ArrowError> {
let schema = flight_data.get(0).ok_or_else(|| {
let schema = flight_data.first().ok_or_else(|| {
ArrowError::CastError("Need at least one FlightData for schema".to_string())
})?;
let message = root_as_message(&schema.data_header[..])
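
Here `.get(0)` becomes the equivalent `slice::first()`, which is what Clippy's `get_first` lint asks for. Both return an `Option`, so the surrounding `ok_or_else` error handling is untouched. A small sketch of the same shape, with a plain `String` standing in for `ArrowError`:

    fn schema_frame(frames: &[Vec<u8>]) -> Result<&Vec<u8>, String> {
        // `.first()` behaves exactly like `.get(0)` but states the intent directly.
        frames
            .first()
            .ok_or_else(|| "Need at least one frame for the schema".to_string())
    }
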
12 changes: 4 additions & 8 deletions arrow-integration-test/src/lib.rs
@@ -666,8 +666,7 @@ pub fn array_from_json(
DataType::List(child_field) => {
let null_buf = create_null_buf(&json_col);
let children = json_col.children.clone().unwrap();
let child_array =
array_from_json(child_field, children.get(0).unwrap().clone(), dictionaries)?;
let child_array = array_from_json(child_field, children[0].clone(), dictionaries)?;
let offsets: Vec<i32> = json_col
.offset
.unwrap()
@@ -687,8 +686,7 @@
DataType::LargeList(child_field) => {
let null_buf = create_null_buf(&json_col);
let children = json_col.children.clone().unwrap();
let child_array =
array_from_json(child_field, children.get(0).unwrap().clone(), dictionaries)?;
let child_array = array_from_json(child_field, children[0].clone(), dictionaries)?;
let offsets: Vec<i64> = json_col
.offset
.unwrap()
@@ -711,8 +709,7 @@
}
DataType::FixedSizeList(child_field, _) => {
let children = json_col.children.clone().unwrap();
let child_array =
array_from_json(child_field, children.get(0).unwrap().clone(), dictionaries)?;
let child_array = array_from_json(child_field, children[0].clone(), dictionaries)?;
let null_buf = create_null_buf(&json_col);
let list_data = ArrayData::builder(field.data_type().clone())
.len(json_col.count)
@@ -813,8 +810,7 @@
DataType::Map(child_field, _) => {
let null_buf = create_null_buf(&json_col);
let children = json_col.children.clone().unwrap();
let child_array =
array_from_json(child_field, children.get(0).unwrap().clone(), dictionaries)?;
let child_array = array_from_json(child_field, children[0].clone(), dictionaries)?;
let offsets: Vec<i32> = json_col
.offset
.unwrap()
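
Each of the four branches touched above changes `children.get(0).unwrap().clone()` into `children[0].clone()`. The two forms are interchangeable, including the panic when `children` is empty; a toy check:

    fn main() {
        let children = vec![String::from("a"), String::from("b")];
        // Equivalent ways to clone the first element; both panic on an empty Vec.
        let before = children.get(0).unwrap().clone();
        let after = children[0].clone();
        assert_eq!(before, after);
    }
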
4 changes: 2 additions & 2 deletions arrow-integration-testing/src/lib.rs
@@ -78,10 +78,10 @@ pub fn canonicalize_schema(schema: &Schema) -> Schema {
.map(|field| match field.data_type() {
DataType::Map(child_field, sorted) => match child_field.data_type() {
DataType::Struct(fields) if fields.len() == 2 => {
let first_field = fields.get(0).unwrap();
let first_field = &fields[0];
let key_field =
Arc::new(Field::new("key", first_field.data_type().clone(), false));
let second_field = fields.get(1).unwrap();
let second_field = &fields[1];
let value_field = Arc::new(Field::new(
"value",
second_field.data_type().clone(),
22 changes: 8 additions & 14 deletions arrow-ord/src/sort.rs
@@ -790,7 +790,6 @@ mod tests {
use half::f16;
use rand::rngs::StdRng;
use rand::{Rng, RngCore, SeedableRng};
use std::convert::TryFrom;
use std::sync::Arc;

fn create_decimal128_array(data: Vec<Option<i128>>) -> Decimal128Array {
@@ -972,19 +971,14 @@

assert_eq!(sorted_dict, dict);

let sorted_strings = StringArray::try_from(
(0..sorted.len())
.map(|i| {
if sorted.is_valid(i) {
Some(sorted_dict.value(sorted_keys.value(i).as_usize()))
} else {
None
}
})
.collect::<Vec<Option<&str>>>(),
)
.expect("Unable to create string array from dictionary");
let expected = StringArray::try_from(expected_data).expect("Unable to create string array");
let sorted_strings = StringArray::from_iter((0..sorted.len()).map(|i| {
if sorted.is_valid(i) {
Some(sorted_dict.value(sorted_keys.value(i).as_usize()))
} else {
None
}
}));
let expected = StringArray::from(expected_data);

assert_eq!(sorted_strings, expected)
}
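
In this test the `StringArray`s are now built with the infallible `FromIterator`/`From` constructors the new code relies on, rather than `try_from(...).expect(...)`, and the `use std::convert::TryFrom;` import goes away (it is redundant under the Rust 2021 prelude in any case). The general idea, sketched with std types only:

    fn main() {
        // Collecting straight from an iterator avoids materializing an intermediate
        // Vec just to feed a constructor.
        let joined: String = ["a", "b", "c"].iter().copied().collect();
        assert_eq!(joined, "abc");

        // When an infallible conversion exists, prefer it over TryFrom + expect.
        let wide = u64::from(42_u32); // no Result to unwrap
        let also_wide = u64::try_from(42_u32).expect("cannot fail"); // needless fallibility
        assert_eq!(wide, also_wide);
    }
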
17 changes: 9 additions & 8 deletions parquet/src/arrow/arrow_writer/levels.rs
@@ -1058,7 +1058,7 @@ mod tests {

assert_eq!(levels.len(), 1);

let list_level = levels.get(0).unwrap();
let list_level = &levels[0];

let expected_level = ArrayLevels {
def_levels: Some(vec![0, 3, 3, 3]),
@@ -1150,7 +1150,7 @@
assert_eq!(levels.len(), 5);

// test "a" levels
let list_level = levels.get(0).unwrap();
let list_level = &levels[0];

let expected_level = ArrayLevels {
def_levels: None,
@@ -1295,7 +1295,7 @@
let map = batch.column(0).as_map();

// test key levels
let list_level = levels.get(0).unwrap();
let list_level = &levels[0];

let expected_level = ArrayLevels {
def_levels: Some(vec![1; 7]),
@@ -1477,9 +1477,10 @@
let field_a2 = Arc::new(Field::new("integers", a2.data_type().clone(), true));

let nulls = Buffer::from([0b00110111]);
let struct_a =
Arc::new(StructArray::try_from((vec![(field_a1, a1), (field_a2, a2)], nulls)).unwrap())
as ArrayRef;
let struct_a = Arc::new(StructArray::from((
vec![(field_a1, a1), (field_a2, a2)],
nulls,
))) as ArrayRef;

let offsets = Buffer::from_iter([0_i32, 0, 2, 2, 3, 5, 5]);
let nulls = Buffer::from([0b00111100]);
@@ -1566,7 +1567,7 @@

assert_eq!(levels.len(), 1);

let list_level = levels.get(0).unwrap();
let list_level = &levels[0];

let expected_level = ArrayLevels {
def_levels: Some(vec![0, 0, 3, 3]),
@@ -1756,7 +1757,7 @@

assert_eq!(levels.len(), 1);

let list_level = levels.get(0).unwrap();
let list_level = &levels[0];

let expected_level = ArrayLevels {
def_levels: Some(vec![1, 0, 1]),
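
Besides more `&levels[0]` indexing, this file builds the struct array with `StructArray::from(...)` instead of `StructArray::try_from(...).unwrap()`; when an infallible conversion is available, the `unwrap` adds nothing. A toy illustration with a made-up type:

    struct Pair(u8, u8);

    impl From<(u8, u8)> for Pair {
        fn from(t: (u8, u8)) -> Self {
            Pair(t.0, t.1)
        }
    }

    fn main() {
        // Infallible conversion: no Result, no unwrap.
        let p = Pair::from((1, 2));

        // The fallible route also compiles here (via std's blanket TryFrom impl,
        // with Error = Infallible), but its unwrap can never do anything useful.
        let q = Pair::try_from((1, 2)).unwrap();

        assert_eq!((p.0, p.1), (q.0, q.1));
    }
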
18 changes: 6 additions & 12 deletions parquet/src/column/writer/mod.rs
@@ -2584,14 +2584,8 @@ mod tests {
}

// page location
assert_eq!(
0,
offset_index.page_locations.get(0).unwrap().first_row_index
);
assert_eq!(
4,
offset_index.page_locations.get(1).unwrap().first_row_index
);
assert_eq!(0, offset_index.page_locations[0].first_row_index);
assert_eq!(4, offset_index.page_locations[1].first_row_index);
}

/// Verify min/max value truncation in the column index works as expected
@@ -2632,8 +2626,8 @@
assert_eq!(stats.null_count(), 0);
assert_eq!(stats.distinct_count(), None);
if let Statistics::FixedLenByteArray(stats) = stats {
let column_index_min_value = column_index.min_values.get(0).unwrap();
let column_index_max_value = column_index.max_values.get(0).unwrap();
let column_index_min_value = &column_index.min_values[0];
let column_index_max_value = &column_index.max_values[0];

// Column index stats are truncated, while the column chunk's aren't.
assert_ne!(stats.min_bytes(), column_index_min_value.as_slice());
@@ -2699,8 +2693,8 @@
assert_eq!(stats.null_count(), 0);
assert_eq!(stats.distinct_count(), None);
if let Statistics::FixedLenByteArray(_stats) = stats {
let column_index_min_value = column_index.min_values.get(0).unwrap();
let column_index_max_value = column_index.max_values.get(0).unwrap();
let column_index_min_value = &column_index.min_values[0];
let column_index_max_value = &column_index.max_values[0];

assert_eq!(column_index_min_value.len(), 1);
assert_eq!(column_index_max_value.len(), 1);
15 changes: 6 additions & 9 deletions parquet/src/file/serialized_reader.rs
@@ -1116,16 +1116,13 @@ mod tests {

assert_eq!(metadata.len(), 3);

assert_eq!(metadata.get(0).unwrap().key, "parquet.proto.descriptor");
assert_eq!(metadata[0].key, "parquet.proto.descriptor");

assert_eq!(metadata.get(1).unwrap().key, "writer.model.name");
assert_eq!(metadata.get(1).unwrap().value, Some("protobuf".to_owned()));
assert_eq!(metadata[1].key, "writer.model.name");
assert_eq!(metadata[1].value, Some("protobuf".to_owned()));

assert_eq!(metadata.get(2).unwrap().key, "parquet.proto.class");
assert_eq!(
metadata.get(2).unwrap().value,
Some("foo.baz.Foobaz$Event".to_owned())
);
assert_eq!(metadata[2].key, "parquet.proto.class");
assert_eq!(metadata[2].value, Some("foo.baz.Foobaz$Event".to_owned()));
}

#[test]
@@ -1141,7 +1138,7 @@
assert_eq!(col0_metadata.bloom_filter_offset().unwrap(), 192);

// test page encoding stats
let page_encoding_stats = col0_metadata.page_encoding_stats().unwrap().get(0).unwrap();
let page_encoding_stats = &col0_metadata.page_encoding_stats().unwrap()[0];

assert_eq!(page_encoding_stats.page_type, basic::PageType::DATA_PAGE);
assert_eq!(page_encoding_stats.encoding, Encoding::PLAIN);
3 changes: 1 addition & 2 deletions parquet/src/file/writer.rs
@@ -1024,8 +1024,7 @@ mod tests {
// ARROW-11803: Test that the converted and logical types have been populated
let fields = reader.metadata().file_metadata().schema().get_fields();
assert_eq!(fields.len(), 1);
let read_field = fields.get(0).unwrap();
assert_eq!(read_field, &field);
assert_eq!(fields[0], field);
}

#[test]
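
The final change folds the temporary `read_field` binding into the assertion: indexing yields the element itself rather than an `Option<&_>`, so both the `unwrap` and the explicit `&field` borrow disappear. A tiny illustration:

    fn main() {
        let fields = vec![String::from("year")];
        let field = String::from("year");

        // Before: let read_field = fields.get(0).unwrap(); assert_eq!(read_field, &field);
        // After: index directly; assert_eq! borrows both sides itself.
        assert_eq!(fields[0], field);
    }
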
