Fix new clippy lints (#4734)
* Fix new clippy lints

* More clippy

* Even more clippy

* Clippy
tustvold authored Aug 24, 2023
1 parent d9381c6 commit dfb1ea2
Showing 41 changed files with 271 additions and 515 deletions.
5 changes: 1 addition & 4 deletions arrow-array/src/array/dictionary_array.rs
@@ -800,10 +800,7 @@ pub struct TypedDictionaryArray<'a, K: ArrowDictionaryKeyType, V> {
 // Manually implement `Clone` to avoid `V: Clone` type constraint
 impl<'a, K: ArrowDictionaryKeyType, V> Clone for TypedDictionaryArray<'a, K, V> {
     fn clone(&self) -> Self {
-        Self {
-            dictionary: self.dictionary,
-            values: self.values,
-        }
+        *self
     }
 }
 
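The rewritten Clone impl above (and the matching one in run_array.rs below) is the pattern recent clippy releases flag on Copy types — presumably the incorrect_clone_impl_on_copy_type lint from around Rust 1.72: when every field is Copy, clone can simply return *self. A minimal standalone sketch of that pattern, with illustrative names that are not from this repository:

    use std::marker::PhantomData;

    // A manual `Clone` is kept (rather than `#[derive(Clone)]`) to avoid forcing a
    // `V: Clone` bound, mirroring the arrow types above; since every field is `Copy`,
    // the body can just be `*self`.
    struct Typed<'a, V> {
        values: &'a [i32],
        phantom: PhantomData<V>,
    }

    impl<'a, V> Copy for Typed<'a, V> {}

    impl<'a, V> Clone for Typed<'a, V> {
        fn clone(&self) -> Self {
            *self
        }
    }

    fn main() {
        let t: Typed<()> = Typed { values: &[1, 2, 3], phantom: PhantomData };
        let u = t.clone();
        assert_eq!(t.values, u.values);
    }
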
9 changes: 4 additions & 5 deletions arrow-array/src/array/primitive_array.rs
@@ -1580,7 +1580,7 @@ mod tests {
         assert_eq!(3, arr.len());
         assert_eq!(0, arr.offset());
         assert_eq!(0, arr.null_count());
-        let formatted = vec!["00:00:00.001", "10:30:00.005", "23:59:59.210"];
+        let formatted = ["00:00:00.001", "10:30:00.005", "23:59:59.210"];
         for (i, formatted) in formatted.iter().enumerate().take(3) {
             // check that we can't create dates or datetimes from time instances
             assert_eq!(None, arr.value_as_datetime(i));
@@ -1604,7 +1604,7 @@
         assert_eq!(3, arr.len());
         assert_eq!(0, arr.offset());
         assert_eq!(0, arr.null_count());
-        let formatted = vec!["00:00:00.001", "10:30:00.005", "23:59:59.210"];
+        let formatted = ["00:00:00.001", "10:30:00.005", "23:59:59.210"];
         for (i, item) in formatted.iter().enumerate().take(3) {
             // check that we can't create dates or datetimes from time instances
             assert_eq!(None, arr.value_as_datetime(i));
@@ -2219,7 +2219,7 @@ mod tests {
 
     #[test]
     fn test_decimal_from_iter_values() {
-        let array = Decimal128Array::from_iter_values(vec![-100, 0, 101].into_iter());
+        let array = Decimal128Array::from_iter_values(vec![-100, 0, 101]);
         assert_eq!(array.len(), 3);
         assert_eq!(array.data_type(), &DataType::Decimal128(38, 10));
         assert_eq!(-100_i128, array.value(0));
@@ -2419,8 +2419,7 @@ mod tests {
         expected = "Trying to access an element at index 4 from a PrimitiveArray of length 3"
     )]
     fn test_fixed_size_binary_array_get_value_index_out_of_bound() {
-        let array = Decimal128Array::from_iter_values(vec![-100, 0, 101].into_iter());
-
+        let array = Decimal128Array::from(vec![-100, 0, 101]);
         array.value(4);
     }
 
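Most of the remaining test changes in this commit follow one recipe: a temporary vec![..] that is only iterated or indexed is replaced by a plain fixed-size array, which is what clippy's useless_vec lint suggests since no heap allocation is needed. A small illustrative sketch, not taken from the arrow sources:

    fn main() {
        // Before: `let formatted = vec![...]` allocated a Vec that was only iterated.
        // After: a stack-allocated array supports `iter()`, indexing and `len()` just as well.
        let formatted = ["00:00:00.001", "10:30:00.005", "23:59:59.210"];
        for (i, s) in formatted.iter().enumerate() {
            println!("row {i}: {s}");
        }
    }
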
7 changes: 2 additions & 5 deletions arrow-array/src/array/run_array.rs
@@ -537,10 +537,7 @@ pub struct TypedRunArray<'a, R: RunEndIndexType, V> {
 // Manually implement `Clone` to avoid `V: Clone` type constraint
 impl<'a, R: RunEndIndexType, V> Clone for TypedRunArray<'a, R, V> {
     fn clone(&self) -> Self {
-        Self {
-            run_array: self.run_array,
-            values: self.values,
-        }
+        *self
     }
 }
 
@@ -1093,7 +1090,7 @@ mod tests {
         let values = Int32Array::from(vec![Some(0), None, Some(1), None]);
         let array = RunArray::try_new(&run, &values).unwrap();
 
-        let expected = vec![
+        let expected = [
             true, true, true, false, false, false, true, true, true, false, false, false,
         ];
 
4 changes: 2 additions & 2 deletions arrow-array/src/array/string_array.rs
@@ -324,14 +324,14 @@ mod tests {
 
     #[test]
     fn test_string_array_from_iter_values() {
-        let data = vec!["hello", "hello2"];
+        let data = ["hello", "hello2"];
         let array1 = StringArray::from_iter_values(data.iter());
 
         assert_eq!(array1.value(0), "hello");
         assert_eq!(array1.value(1), "hello2");
 
         // Also works with String types.
-        let data2: Vec<String> = vec!["goodbye".into(), "goodbye2".into()];
+        let data2 = ["goodbye".to_string(), "goodbye2".to_string()];
         let array2 = StringArray::from_iter_values(data2.iter());
 
         assert_eq!(array2.value(0), "goodbye");
2 changes: 1 addition & 1 deletion arrow-array/src/record_batch.rs
@@ -757,7 +757,7 @@ mod tests {
         ))))
         .add_child_data(a2_child.into_data())
         .len(2)
-        .add_buffer(Buffer::from(vec![0i32, 3, 4].to_byte_slice()))
+        .add_buffer(Buffer::from([0i32, 3, 4].to_byte_slice()))
         .build()
         .unwrap();
         let a2: ArrayRef = Arc::new(ListArray::from(a2));
4 changes: 2 additions & 2 deletions arrow-array/src/run_iterator.rs
@@ -237,7 +237,7 @@ mod tests {
             Some(72),
         ];
         let mut builder = PrimitiveRunBuilder::<Int32Type, Int32Type>::new();
-        builder.extend(input_vec.clone().into_iter());
+        builder.extend(input_vec.iter().copied());
         let ree_array = builder.finish();
         let ree_array = ree_array.downcast::<Int32Array>().unwrap();
 
@@ -261,7 +261,7 @@
             Some(72),
         ];
         let mut builder = PrimitiveRunBuilder::<Int32Type, Int32Type>::new();
-        builder.extend(input_vec.into_iter());
+        builder.extend(input_vec);
         let ree_array = builder.finish();
         let ree_array = ree_array.downcast::<Int32Array>().unwrap();
 
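The extend changes above (and the matching ones in arrow-ipc/src/reader.rs below) drop redundant conversions: Extend::extend already accepts any IntoIterator, so a Vec can be passed by value directly, and .iter().copied() borrows the data when it is still needed afterwards instead of cloning the whole Vec. A minimal sketch of both variants with a plain Vec (the lint involved is presumably clippy's useless_conversion; that attribution is an assumption):

    fn main() {
        let input = vec![Some(1), None, Some(3)];

        // Borrow and copy the elements when `input` must stay usable afterwards;
        // this replaces the flagged `input.clone().into_iter()`.
        let mut a: Vec<Option<i32>> = Vec::new();
        a.extend(input.iter().copied());

        // When the source is no longer needed, pass it by value:
        // `extend(input)` and `extend(input.into_iter())` are equivalent.
        let mut b: Vec<Option<i32>> = Vec::new();
        b.extend(input);

        assert_eq!(a, b);
    }
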
2 changes: 1 addition & 1 deletion arrow-array/src/trusted_len.rs
@@ -63,7 +63,7 @@ mod tests {
 
     #[test]
     fn trusted_len_unzip_good() {
-        let vec = vec![Some(1u32), None];
+        let vec = [Some(1u32), None];
         let (null, buffer) = unsafe { trusted_len_unzip(vec.iter()) };
         assert_eq!(null.as_slice(), &[0b00000001]);
         assert_eq!(buffer.as_slice(), &[1u8, 0, 0, 0, 0, 0, 0, 0]);
2 changes: 1 addition & 1 deletion arrow-buffer/src/native.rs
@@ -222,7 +222,7 @@ pub trait ToByteSlice {
 impl<T: ArrowNativeType> ToByteSlice for [T] {
     #[inline]
     fn to_byte_slice(&self) -> &[u8] {
-        let raw_ptr = self.as_ptr() as *const T as *const u8;
+        let raw_ptr = self.as_ptr() as *const u8;
         unsafe { std::slice::from_raw_parts(raw_ptr, std::mem::size_of_val(self)) }
     }
 }
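In to_byte_slice above, as_ptr() on a slice already returns *const T, so the intermediate as *const T cast was a no-op that newer clippy warns about. A standalone sketch of the same byte-view idea with only the one necessary cast (the real trait additionally restricts T to ArrowNativeType, i.e. plain primitive types; this version is illustrative only):

    /// Reinterpret a slice of plain primitives as raw bytes.
    fn to_byte_slice<T: Copy>(values: &[T]) -> &[u8] {
        // `as_ptr()` is already `*const T`; a single cast to `*const u8` is enough.
        let raw_ptr = values.as_ptr() as *const u8;
        // SAFETY: the length equals the slice's total size in bytes and the returned
        // slice borrows `values`, so it cannot outlive the data it points into.
        unsafe { std::slice::from_raw_parts(raw_ptr, std::mem::size_of_val(values)) }
    }

    fn main() {
        let ints = [0i32, 3, 4];
        assert_eq!(to_byte_slice(&ints).len(), 12);
    }
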
2 changes: 1 addition & 1 deletion arrow-buffer/src/util/bit_chunk_iterator.rs
@@ -157,7 +157,7 @@ impl<'a> UnalignedBitChunk<'a> {
         self.prefix
             .into_iter()
             .chain(self.chunks.iter().cloned())
-            .chain(self.suffix.into_iter())
+            .chain(self.suffix)
     }
 
     /// Counts the number of ones
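Likewise, .chain(self.suffix) works because Iterator::chain takes any IntoIterator and Option<T> is one (yielding zero or one item), so the explicit .into_iter() on the suffix was redundant. A tiny illustrative example, not from the arrow sources:

    fn main() {
        let prefix: Option<u64> = Some(1);
        let chunks = [2u64, 3];
        let suffix: Option<u64> = None;

        // `chain` accepts any `IntoIterator`: Options and arrays can be chained directly.
        let all: Vec<u64> = prefix.into_iter().chain(chunks).chain(suffix).collect();
        assert_eq!(all, vec![1, 2, 3]);
    }
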
47 changes: 11 additions & 36 deletions arrow-cast/src/cast.rs
@@ -3428,50 +3428,24 @@ mod tests {
 
     macro_rules! generate_cast_test_case {
         ($INPUT_ARRAY: expr, $OUTPUT_TYPE_ARRAY: ident, $OUTPUT_TYPE: expr, $OUTPUT_VALUES: expr) => {
+            let output = $OUTPUT_TYPE_ARRAY::from($OUTPUT_VALUES)
+                .with_data_type($OUTPUT_TYPE.clone());
 
             // assert cast type
             let input_array_type = $INPUT_ARRAY.data_type();
             assert!(can_cast_types(input_array_type, $OUTPUT_TYPE));
-            let casted_array = cast($INPUT_ARRAY, $OUTPUT_TYPE).unwrap();
-            let result_array = casted_array
-                .as_any()
-                .downcast_ref::<$OUTPUT_TYPE_ARRAY>()
-                .unwrap();
-            assert_eq!($OUTPUT_TYPE, result_array.data_type());
-            assert_eq!(result_array.len(), $OUTPUT_VALUES.len());
-            for (i, x) in $OUTPUT_VALUES.iter().enumerate() {
-                match x {
-                    Some(x) => {
-                        assert!(!result_array.is_null(i));
-                        assert_eq!(result_array.value(i), *x);
-                    }
-                    None => {
-                        assert!(result_array.is_null(i));
-                    }
-                }
-            }
+            let result = cast($INPUT_ARRAY, $OUTPUT_TYPE).unwrap();
+            assert_eq!($OUTPUT_TYPE, result.data_type());
+            assert_eq!(result.as_ref(), &output);
 
             let cast_option = CastOptions {
                 safe: false,
                 format_options: FormatOptions::default(),
            };
-            let casted_array_with_option =
+            let result =
                 cast_with_options($INPUT_ARRAY, $OUTPUT_TYPE, &cast_option).unwrap();
-            let result_array = casted_array_with_option
-                .as_any()
-                .downcast_ref::<$OUTPUT_TYPE_ARRAY>()
-                .unwrap();
-            assert_eq!($OUTPUT_TYPE, result_array.data_type());
-            assert_eq!(result_array.len(), $OUTPUT_VALUES.len());
-            for (i, x) in $OUTPUT_VALUES.iter().enumerate() {
-                match x {
-                    Some(x) => {
-                        assert_eq!(result_array.value(i), *x);
-                    }
-                    None => {
-                        assert!(result_array.is_null(i));
-                    }
-                }
-            }
+            assert_eq!($OUTPUT_TYPE, result.data_type());
+            assert_eq!(result.as_ref(), &output);
         };
     }
 
@@ -5997,7 +5971,7 @@ mod tests {
 
     #[test]
     fn test_str_to_str_casts() {
-        for data in vec![
+        for data in [
             vec![Some("foo"), Some("bar"), Some("ham")],
             vec![Some("foo"), None, Some("bar")],
         ] {
@@ -8934,6 +8908,7 @@ mod tests {
     };
 
     #[test]
+    #[allow(clippy::assertions_on_constants)]
    fn test_const_options() {
         assert!(CAST_OPTIONS.safe)
     }
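The macro rewrite above replaces the per-element downcast-and-loop comparison with a single whole-array assertion against an expected array built up front; arrow arrays compare equal only when data type, values and null mask all match, so the manual loop was redundant. A simplified sketch of the same testing style, assuming the cast / can_cast_types functions from arrow_cast already used in the diff and the as_primitive helper from arrow_array::cast (none of this code is from the commit itself):

    use arrow_array::{cast::AsArray, types::Int64Type, Int32Array, Int64Array};
    use arrow_cast::{can_cast_types, cast};
    use arrow_schema::DataType;

    fn main() {
        let input = Int32Array::from(vec![Some(1), None, Some(3)]);
        let expected = Int64Array::from(vec![Some(1), None, Some(3)]);

        assert!(can_cast_types(input.data_type(), &DataType::Int64));
        let result = cast(&input, &DataType::Int64).unwrap();

        assert_eq!(result.data_type(), &DataType::Int64);
        // One assertion checks length, values and the null mask at once.
        assert_eq!(result.as_primitive::<Int64Type>(), &expected);
    }
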
14 changes: 5 additions & 9 deletions arrow-cast/src/parse.rs
@@ -1003,15 +1003,11 @@ impl Interval {
     fn parse(value: &str, config: &IntervalParseConfig) -> Result<Self, ArrowError> {
         let components = parse_interval_components(value, config)?;
 
-        let result = components.into_iter().fold(
-            Ok(Self::default()),
-            |result, (amount, unit)| match result {
-                Ok(result) => result.add(amount, unit),
-                Err(e) => Err(e),
-            },
-        )?;
-
-        Ok(result)
+        components
+            .into_iter()
+            .try_fold(Self::default(), |result, (amount, unit)| {
+                result.add(amount, unit)
+            })
     }
 
     /// Interval addition following Postgres behavior. Fractional units will be spilled into smaller units.
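The Interval::parse change above is the manual_try_fold pattern: folding with a Result accumulator and matching on it by hand is exactly what Iterator::try_fold does, and try_fold also short-circuits on the first error instead of carrying the Err through the remaining iterations. A minimal standalone sketch of the same transformation (not arrow code):

    use std::num::ParseIntError;

    /// Sum a list of numeric strings, stopping at the first parse error.
    fn sum_parsed(items: &[&str]) -> Result<i64, ParseIntError> {
        items
            .iter()
            .try_fold(0i64, |acc, s| Ok(acc + s.parse::<i64>()?))
    }

    fn main() {
        assert_eq!(sum_parsed(&["1", "2", "3"]), Ok(6));
        assert!(sum_parsed(&["1", "x"]).is_err());
    }
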
2 changes: 1 addition & 1 deletion arrow-cast/src/pretty.rs
@@ -848,7 +848,7 @@ mod tests {
         let mut buf = String::new();
         write!(&mut buf, "{}", pretty_format_batches(&[batch]).unwrap()).unwrap();
 
-        let s = vec![
+        let s = [
             "+---+-----+",
             "| a | b |",
             "+---+-----+",
4 changes: 2 additions & 2 deletions arrow-flight/src/utils.rs
@@ -166,8 +166,8 @@ pub fn batches_to_flight_data(
         flight_data.push(encoded_batch.into());
     }
     let mut stream = vec![schema_flight_data];
-    stream.extend(dictionaries.into_iter());
-    stream.extend(flight_data.into_iter());
+    stream.extend(dictionaries);
+    stream.extend(flight_data);
     let flight_data: Vec<_> = stream.into_iter().collect();
     Ok(flight_data)
 }
8 changes: 4 additions & 4 deletions arrow-integration-testing/tests/ipc_reader.rs
@@ -27,7 +27,7 @@ use std::fs::File;
 fn read_0_1_4() {
     let testdata = arrow_test_data();
     let version = "0.14.1";
-    let paths = vec![
+    let paths = [
         "generated_interval",
         "generated_datetime",
         "generated_dictionary",
@@ -48,7 +48,7 @@ fn read_0_1_4() {
 fn read_0_1_7() {
     let testdata = arrow_test_data();
     let version = "0.17.1";
-    let paths = vec!["generated_union"];
+    let paths = ["generated_union"];
     paths.iter().for_each(|path| {
         verify_arrow_file(&testdata, version, path);
         verify_arrow_stream(&testdata, version, path);
@@ -76,7 +76,7 @@ fn read_1_0_0_bigendian_dictionary_should_panic() {
 #[test]
 fn read_1_0_0_bigendian() {
     let testdata = arrow_test_data();
-    let paths = vec![
+    let paths = [
         "generated_interval",
         "generated_datetime",
         "generated_map",
@@ -145,7 +145,7 @@ fn read_2_0_0_compression() {
     let version = "2.0.0-compression";
 
     // the test is repetitive, thus we can read all supported files at once
-    let paths = vec!["generated_lz4", "generated_zstd"];
+    let paths = ["generated_lz4", "generated_zstd"];
     paths.iter().for_each(|path| {
         verify_arrow_file(&testdata, version, path);
         verify_arrow_stream(&testdata, version, path);
28 changes: 15 additions & 13 deletions arrow-integration-testing/tests/ipc_writer.rs
@@ -27,7 +27,7 @@ use std::io::Seek;
 fn write_0_1_4() {
     let testdata = arrow_test_data();
     let version = "0.14.1";
-    let paths = vec![
+    let paths = [
         "generated_interval",
         "generated_datetime",
         "generated_dictionary",
@@ -48,7 +48,7 @@ fn write_0_1_4() {
 fn write_0_1_7() {
     let testdata = arrow_test_data();
     let version = "0.17.1";
-    let paths = vec!["generated_union"];
+    let paths = ["generated_union"];
     paths.iter().for_each(|path| {
         roundtrip_arrow_file(&testdata, version, path);
         roundtrip_arrow_stream(&testdata, version, path);
@@ -59,7 +59,7 @@ fn write_0_1_7() {
 fn write_1_0_0_littleendian() {
     let testdata = arrow_test_data();
     let version = "1.0.0-littleendian";
-    let paths = vec![
+    let paths = [
         "generated_datetime",
         "generated_custom_metadata",
         "generated_decimal",
@@ -94,10 +94,10 @@ fn write_1_0_0_littleendian() {
 fn write_2_0_0_compression() {
     let testdata = arrow_test_data();
     let version = "2.0.0-compression";
-    let paths = vec!["generated_lz4", "generated_zstd"];
+    let paths = ["generated_lz4", "generated_zstd"];
 
     // writer options for each compression type
-    let all_options = vec![
+    let all_options = [
         IpcWriteOptions::try_new(8, false, ipc::MetadataVersion::V5)
             .unwrap()
             .try_with_compression(Some(ipc::CompressionType::LZ4_FRAME))
@@ -187,11 +187,12 @@ fn roundtrip_arrow_file_with_options(
         let rewrite_reader = FileReader::try_new(&tempfile, None).unwrap();
 
         // Compare to original reader
-        reader.into_iter().zip(rewrite_reader.into_iter()).for_each(
-            |(batch1, batch2)| {
+        reader
+            .into_iter()
+            .zip(rewrite_reader)
+            .for_each(|(batch1, batch2)| {
                 assert_eq!(batch1.unwrap(), batch2.unwrap());
-            },
-        );
+            });
     }
 }
 
@@ -264,10 +265,11 @@ fn roundtrip_arrow_stream_with_options(
         let rewrite_reader = StreamReader::try_new(&tempfile, None).unwrap();
 
         // Compare to original reader
-        reader.into_iter().zip(rewrite_reader.into_iter()).for_each(
-            |(batch1, batch2)| {
+        reader
+            .into_iter()
+            .zip(rewrite_reader)
+            .for_each(|(batch1, batch2)| {
                 assert_eq!(batch1.unwrap(), batch2.unwrap());
-            },
-        );
+            });
     }
 }
2 changes: 1 addition & 1 deletion arrow-ipc/src/convert.rs
@@ -717,7 +717,7 @@ pub(crate) fn get_fb_field_type<'a>(
         RunEndEncoded(run_ends, values) => {
             let run_ends_field = build_field(fbb, run_ends);
             let values_field = build_field(fbb, values);
-            let children = vec![run_ends_field, values_field];
+            let children = [run_ends_field, values_field];
             FBFieldType {
                 type_type: crate::Type::RunEndEncoded,
                 type_: crate::RunEndEncodedBuilder::new(fbb)
4 changes: 2 additions & 2 deletions arrow-ipc/src/reader.rs
@@ -1156,7 +1156,7 @@ mod tests {
 
         let array10_input = vec![Some(1_i32), None, None];
         let mut array10_builder = PrimitiveRunBuilder::<Int16Type, Int32Type>::new();
-        array10_builder.extend(array10_input.into_iter());
+        array10_builder.extend(array10_input);
         let array10 = array10_builder.finish();
 
         let array11 = BooleanArray::from(vec![false, false, true]);
@@ -1411,7 +1411,7 @@ mod tests {
 
         let run_array_2_inupt = vec![Some(1_i32), None, None, Some(2), Some(2)];
         let mut run_array_2_builder = PrimitiveRunBuilder::<Int16Type, Int32Type>::new();
-        run_array_2_builder.extend(run_array_2_inupt.into_iter());
+        run_array_2_builder.extend(run_array_2_inupt);
         let run_array_2 = run_array_2_builder.finish();
 
         let schema = Arc::new(Schema::new(vec![