Posted to commits@arrow.apache.org by al...@apache.org on 2021/12/05 14:07:11 UTC

[arrow-rs] branch active_release updated: Cherry pick Fix warnings introduced by Rust/Clippy 1.57.0 to active_release (#1004)

This is an automated email from the ASF dual-hosted git repository.

alamb pushed a commit to branch active_release
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git


The following commit(s) were added to refs/heads/active_release by this push:
     new b4507f5  Cherry pick Fix warnings introduced by Rust/Clippy 1.57.0 to active_release (#1004)
b4507f5 is described below

commit b4507f562fb0eddfb79840871cd2733dc0e337cd
Author: Andrew Lamb <an...@nerdnetworks.org>
AuthorDate: Sun Dec 5 09:07:03 2021 -0500

    Cherry pick Fix warnings introduced by Rust/Clippy 1.57.0 to active_release (#1004)
    
    * Fix warnings introduced by Rust/Clippy 1.57.0 (#992)
    
    * Remove needless borrows identified by clippy
    
    https://rust-lang.github.io/rust-clippy/master/index.html#needless_borrow
    
    * Remove muts that are no longer needed
    
    * Derive Default instead of using an equivalent manual impl
    
    Identified by clippy.
    
    https://rust-lang.github.io/rust-clippy/master/index.html#derivable_impls
    
    * Remove redundant closures
    
    Identified by clippy.
    
    https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure
    
    * Allow dead code on a field Rust now identifies as never read
    
    * Add some extra drops to make it clear what we want to do
    
    Co-authored-by: Carol (Nichols || Goulding) <19...@users.noreply.github.com>
---
 arrow/src/array/equal/utils.rs                           | 12 ++++++------
 arrow/src/array/transform/boolean.rs                     |  2 +-
 arrow/src/compute/kernels/take.rs                        | 10 +---------
 arrow/src/compute/util.rs                                |  4 ++--
 arrow/src/datatypes/field.rs                             |  2 +-
 arrow/src/datatypes/schema.rs                            |  5 +----
 arrow/src/ffi.rs                                         |  4 ++--
 arrow/src/ipc/writer.rs                                  | 16 ++++++++--------
 .../src/flight_server_scenarios/auth_basic_proto.rs      |  1 +
 parquet/src/arrow/arrow_writer.rs                        | 16 ++++++++--------
 parquet/src/record/reader.rs                             | 14 +++++++-------
 parquet/src/util/bit_util.rs                             |  4 ++--
 parquet_derive/src/parquet_field.rs                      | 15 ++++++---------
 13 files changed, 46 insertions(+), 59 deletions(-)

diff --git a/arrow/src/array/equal/utils.rs b/arrow/src/array/equal/utils.rs
index 8eb988c..7ce8e14 100644
--- a/arrow/src/array/equal/utils.rs
+++ b/arrow/src/array/equal/utils.rs
@@ -121,14 +121,14 @@ pub(super) fn child_logical_null_buffer(
             let array_offset = parent_data.offset();
             let bitmap_len = bit_util::ceil(parent_len * len, 8);
             let mut buffer = MutableBuffer::from_len_zeroed(bitmap_len);
-            let mut null_slice = buffer.as_slice_mut();
+            let null_slice = buffer.as_slice_mut();
             (array_offset..parent_len + array_offset).for_each(|index| {
                 let start = index * len;
                 let end = start + len;
                 let mask = parent_bitmap.is_set(index);
                 (start..end).for_each(|child_index| {
                     if mask && self_null_bitmap.is_set(child_index) {
-                        bit_util::set_bit(&mut null_slice, child_index);
+                        bit_util::set_bit(null_slice, child_index);
                     }
                 });
             });
@@ -151,12 +151,12 @@ pub(super) fn child_logical_null_buffer(
             // slow path
             let array_offset = parent_data.offset();
             let mut buffer = MutableBuffer::new_null(parent_len);
-            let mut null_slice = buffer.as_slice_mut();
+            let null_slice = buffer.as_slice_mut();
             (0..parent_len).for_each(|index| {
                 if parent_bitmap.is_set(index + array_offset)
                     && self_null_bitmap.is_set(index + array_offset)
                 {
-                    bit_util::set_bit(&mut null_slice, index);
+                    bit_util::set_bit(null_slice, index);
                 }
             });
             Some(buffer.into())
@@ -182,7 +182,7 @@ fn logical_list_bitmap<OffsetSize: OffsetSizeTrait>(
     let offset_start = offsets.first().unwrap().to_usize().unwrap();
     let offset_len = offsets.get(parent_data.len()).unwrap().to_usize().unwrap();
     let mut buffer = MutableBuffer::new_null(offset_len - offset_start);
-    let mut null_slice = buffer.as_slice_mut();
+    let null_slice = buffer.as_slice_mut();
 
     offsets
         .windows(2)
@@ -194,7 +194,7 @@ fn logical_list_bitmap<OffsetSize: OffsetSizeTrait>(
             let mask = parent_bitmap.is_set(index);
             (start..end).for_each(|child_index| {
                 if mask && child_bitmap.is_set(child_index) {
-                    bit_util::set_bit(&mut null_slice, child_index - offset_start);
+                    bit_util::set_bit(null_slice, child_index - offset_start);
                 }
             });
         });
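
These utils.rs hunks correspond to the "Remove needless borrows" item above: null_slice is already a mutable slice reference, so wrapping it in another &mut only adds a borrow that deref coercion strips again. A minimal standalone sketch of the pattern clippy::needless_borrow flags, using hypothetical names rather than code from this patch:

    // Hypothetical example, not from this patch.
    fn set_first(bytes: &mut [u8]) {
        bytes[0] = 1;
    }

    fn main() {
        let mut buf = vec![0u8; 4];
        let slice = buf.as_mut_slice(); // slice is already a &mut [u8]
        // clippy::needless_borrow flags the extra borrow:
        // set_first(&mut slice);
        set_first(slice); // pass the existing reference through
        assert_eq!(buf[0], 1);
    }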
diff --git a/arrow/src/array/transform/boolean.rs b/arrow/src/array/transform/boolean.rs
index 1829149..456fb6e 100644
--- a/arrow/src/array/transform/boolean.rs
+++ b/arrow/src/array/transform/boolean.rs
@@ -29,7 +29,7 @@ pub(super) fn build_extend(array: &ArrayData) -> Extend {
             let buffer = &mut mutable.buffer1;
             resize_for_bits(buffer, mutable.len + len);
             set_bits(
-                &mut buffer.as_slice_mut(),
+                buffer.as_slice_mut(),
                 values,
                 mutable.len,
                 array.offset() + start,
diff --git a/arrow/src/compute/kernels/take.rs b/arrow/src/compute/kernels/take.rs
index 8d9b4cb..9fe00ea 100644
--- a/arrow/src/compute/kernels/take.rs
+++ b/arrow/src/compute/kernels/take.rs
@@ -296,7 +296,7 @@ where
 }
 
 /// Options that define how `take` should behave
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
 pub struct TakeOptions {
     /// Perform bounds check before taking indices from values.
     /// If enabled, an `ArrowError` is returned if the indices are out of bounds.
@@ -304,14 +304,6 @@ pub struct TakeOptions {
     pub check_bounds: bool,
 }
 
-impl Default for TakeOptions {
-    fn default() -> Self {
-        Self {
-            check_bounds: false,
-        }
-    }
-}
-
 #[inline(always)]
 fn maybe_usize<I: ArrowNativeType>(index: I) -> Result<usize> {
     index
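
The take.rs hunk is the "Derive Default instead of using an equivalent manual impl" item: the hand-written impl only spelled out the type's default values, which clippy::derivable_impls reports. A minimal sketch with a hypothetical struct, not code from this patch:

    // Hypothetical struct, not from this patch.
    // clippy::derivable_impls flags a manual impl that only spells out defaults:
    //
    //     impl Default for Options {
    //         fn default() -> Self {
    //             Self { check_bounds: false }
    //         }
    //     }
    //
    // Deriving Default is equivalent, since bool::default() is false.
    #[derive(Debug, Default)]
    struct Options {
        check_bounds: bool,
    }

    fn main() {
        assert!(!Options::default().check_bounds);
    }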
diff --git a/arrow/src/compute/util.rs b/arrow/src/compute/util.rs
index e4808e2..6778be3 100644
--- a/arrow/src/compute/util.rs
+++ b/arrow/src/compute/util.rs
@@ -301,7 +301,7 @@ pub(super) mod tests {
                 values.append(&mut array);
             } else {
                 list_null_count += 1;
-                bit_util::unset_bit(&mut list_bitmap.as_slice_mut(), idx);
+                bit_util::unset_bit(list_bitmap.as_slice_mut(), idx);
             }
             offset.push(values.len() as i64);
         }
@@ -386,7 +386,7 @@ pub(super) mod tests {
                 values.extend(items.into_iter());
             } else {
                 list_null_count += 1;
-                bit_util::unset_bit(&mut list_bitmap.as_slice_mut(), idx);
+                bit_util::unset_bit(list_bitmap.as_slice_mut(), idx);
                 values.extend(vec![None; length as usize].into_iter());
             }
         }
diff --git a/arrow/src/datatypes/field.rs b/arrow/src/datatypes/field.rs
index 4ed0661..22e23fa 100644
--- a/arrow/src/datatypes/field.rs
+++ b/arrow/src/datatypes/field.rs
@@ -286,7 +286,7 @@ impl Field {
                     DataType::Struct(mut fields) => match map.get("children") {
                         Some(Value::Array(values)) => {
                             let struct_fields: Result<Vec<Field>> =
-                                values.iter().map(|v| Field::from(v)).collect();
+                                values.iter().map(Field::from).collect();
                             fields.append(&mut struct_fields?);
                             DataType::Struct(fields)
                         }
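
This field.rs hunk (and the schema.rs and parquet_derive ones below) covers the "Remove redundant closures" item: a closure that only forwards its argument can be replaced by the function itself, as clippy::redundant_closure suggests, when the argument type matches the function's parameter exactly. A minimal sketch using a hypothetical inherent from constructor, not code from this patch:

    // Hypothetical types, not from this patch.
    struct Field(String);

    impl Field {
        fn from(name: &str) -> Field {
            Field(name.to_string())
        }
    }

    fn main() {
        let names = vec!["a", "b"];
        // clippy::redundant_closure flags the closure form:
        // let fields: Vec<Field> = names.into_iter().map(|n| Field::from(n)).collect();
        let fields: Vec<Field> = names.into_iter().map(Field::from).collect();
        println!("first field: {}", fields[0].0);
    }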
diff --git a/arrow/src/datatypes/schema.rs b/arrow/src/datatypes/schema.rs
index ecfbe06..561fa4d 100644
--- a/arrow/src/datatypes/schema.rs
+++ b/arrow/src/datatypes/schema.rs
@@ -227,10 +227,7 @@ impl Schema {
         match *json {
             Value::Object(ref schema) => {
                 let fields = if let Some(Value::Array(fields)) = schema.get("fields") {
-                    fields
-                        .iter()
-                        .map(|f| Field::from(f))
-                        .collect::<Result<_>>()?
+                    fields.iter().map(Field::from).collect::<Result<_>>()?
                 } else {
                     return Err(ArrowError::ParseError(
                         "Schema fields should be an array".to_string(),
diff --git a/arrow/src/ffi.rs b/arrow/src/ffi.rs
index a61f291..ef79479 100644
--- a/arrow/src/ffi.rs
+++ b/arrow/src/ffi.rs
@@ -132,9 +132,9 @@ unsafe extern "C" fn release_schema(schema: *mut FFI_ArrowSchema) {
     let schema = &mut *schema;
 
     // take ownership back to release it.
-    CString::from_raw(schema.format as *mut c_char);
+    drop(CString::from_raw(schema.format as *mut c_char));
     if !schema.name.is_null() {
-        CString::from_raw(schema.name as *mut c_char);
+        drop(CString::from_raw(schema.name as *mut c_char));
     }
     if !schema.private_data.is_null() {
         let private_data = Box::from_raw(schema.private_data as *mut SchemaPrivateData);
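
The ffi.rs hunk is the "Add some extra drops to make it clear what we want to do" item: the CString rebuilt from the raw pointer was previously freed as an unused temporary at the end of the statement, and the explicit drop spells out that freeing it is the intent. A minimal sketch of that ownership round trip with a hypothetical release function, not code from this patch:

    use std::ffi::CString;
    use std::os::raw::c_char;

    // Hypothetical release callback, not from this patch: take ownership back
    // from a pointer that was handed out with CString::into_raw and free it.
    unsafe fn release(ptr: *mut c_char) {
        // The explicit drop makes the intent clearer than a bare
        // CString::from_raw(ptr); statement that discards the value.
        drop(CString::from_raw(ptr));
    }

    fn main() {
        let raw = CString::new("format").unwrap().into_raw();
        unsafe { release(raw) };
    }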
diff --git a/arrow/src/ipc/writer.rs b/arrow/src/ipc/writer.rs
index 853fc0f..c354eb4 100644
--- a/arrow/src/ipc/writer.rs
+++ b/arrow/src/ipc/writer.rs
@@ -752,9 +752,9 @@ fn write_continuation<W: Write>(
 /// Write array data to a vector of bytes
 fn write_array_data(
     array_data: &ArrayData,
-    mut buffers: &mut Vec<ipc::Buffer>,
-    mut arrow_data: &mut Vec<u8>,
-    mut nodes: &mut Vec<ipc::FieldNode>,
+    buffers: &mut Vec<ipc::Buffer>,
+    arrow_data: &mut Vec<u8>,
+    nodes: &mut Vec<ipc::FieldNode>,
     offset: i64,
     num_rows: usize,
     null_count: usize,
@@ -775,11 +775,11 @@ fn write_array_data(
             Some(buffer) => buffer.clone(),
         };
 
-        offset = write_buffer(&null_buffer, &mut buffers, &mut arrow_data, offset);
+        offset = write_buffer(&null_buffer, buffers, arrow_data, offset);
     }
 
     array_data.buffers().iter().for_each(|buffer| {
-        offset = write_buffer(buffer, &mut buffers, &mut arrow_data, offset);
+        offset = write_buffer(buffer, buffers, arrow_data, offset);
     });
 
     if !matches!(array_data.data_type(), DataType::Dictionary(_, _)) {
@@ -788,9 +788,9 @@ fn write_array_data(
             // write the nested data (e.g list data)
             offset = write_array_data(
                 data_ref,
-                &mut buffers,
-                &mut arrow_data,
-                &mut nodes,
+                buffers,
+                arrow_data,
+                nodes,
                 offset,
                 data_ref.len(),
                 data_ref.null_count(),
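
The writer.rs changes combine two items from the commit message: the mut on the reference-typed parameters goes away because the bindings are never reassigned ("Remove muts that are no longer needed"), and the call sites pass the references through directly instead of re-borrowing them. A minimal sketch of the parameter pattern with a hypothetical function, not code from this patch:

    // Hypothetical function, not from this patch. The binding itself is never
    // reassigned, so it does not need to be mut; the &mut in the type already
    // allows mutation through the reference.
    fn push_twice(values: &mut Vec<u8>, byte: u8) {
        values.push(byte);
        values.push(byte);
    }

    fn main() {
        let mut data = Vec::new();
        push_twice(&mut data, 7);
        assert_eq!(data, vec![7, 7]);
    }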
diff --git a/integration-testing/src/flight_server_scenarios/auth_basic_proto.rs b/integration-testing/src/flight_server_scenarios/auth_basic_proto.rs
index ea7ad3c..0bf26cc 100644
--- a/integration-testing/src/flight_server_scenarios/auth_basic_proto.rs
+++ b/integration-testing/src/flight_server_scenarios/auth_basic_proto.rs
@@ -58,6 +58,7 @@ pub async fn scenario_setup(port: &str) -> Result {
 pub struct AuthBasicProtoScenarioImpl {
     username: Arc<str>,
     password: Arc<str>,
+    #[allow(dead_code)]
     peer_identity: Arc<Mutex<Option<String>>>,
 }
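
The added attribute is the "Allow dead code on a field Rust now identifies as never read" item: Rust 1.57 flags struct fields that are written but never read, and the annotation acknowledges that warning without removing the field. A minimal sketch with a hypothetical struct, not code from this patch:

    // Hypothetical struct, not from this patch.
    struct Scenario {
        name: String,
        // Kept around for later use; Rust 1.57 warns that it is never read,
        // so the warning is silenced explicitly instead of deleting the field.
        #[allow(dead_code)]
        token: Option<String>,
    }

    fn main() {
        let s = Scenario {
            name: "auth_basic_proto".to_string(),
            token: None,
        };
        println!("scenario: {}", s.name);
    }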
 
diff --git a/parquet/src/arrow/arrow_writer.rs b/parquet/src/arrow/arrow_writer.rs
index 8600eb0..643f5a2 100644
--- a/parquet/src/arrow/arrow_writer.rs
+++ b/parquet/src/arrow/arrow_writer.rs
@@ -143,9 +143,9 @@ fn get_col_writer(
 
 #[allow(clippy::borrowed_box)]
 fn write_leaves(
-    mut row_group_writer: &mut Box<dyn RowGroupWriter>,
+    row_group_writer: &mut Box<dyn RowGroupWriter>,
     array: &arrow_array::ArrayRef,
-    mut levels: &mut Vec<LevelInfo>,
+    levels: &mut Vec<LevelInfo>,
 ) -> Result<()> {
     match array.data_type() {
         ArrowDataType::Null
@@ -173,7 +173,7 @@ fn write_leaves(
         | ArrowDataType::LargeUtf8
         | ArrowDataType::Decimal(_, _)
         | ArrowDataType::FixedSizeBinary(_) => {
-            let mut col_writer = get_col_writer(&mut row_group_writer)?;
+            let mut col_writer = get_col_writer(row_group_writer)?;
             write_leaf(
                 &mut col_writer,
                 array,
@@ -186,7 +186,7 @@ fn write_leaves(
             // write the child list
             let data = array.data();
             let child_array = arrow_array::make_array(data.child_data()[0].clone());
-            write_leaves(&mut row_group_writer, &child_array, &mut levels)?;
+            write_leaves(row_group_writer, &child_array, levels)?;
             Ok(())
         }
         ArrowDataType::Struct(_) => {
@@ -195,7 +195,7 @@ fn write_leaves(
                 .downcast_ref::<arrow_array::StructArray>()
                 .expect("Unable to get struct array");
             for field in struct_array.columns() {
-                write_leaves(&mut row_group_writer, field, &mut levels)?;
+                write_leaves(row_group_writer, field, levels)?;
             }
             Ok(())
         }
@@ -204,15 +204,15 @@ fn write_leaves(
                 .as_any()
                 .downcast_ref::<arrow_array::MapArray>()
                 .expect("Unable to get map array");
-            write_leaves(&mut row_group_writer, &map_array.keys(), &mut levels)?;
-            write_leaves(&mut row_group_writer, &map_array.values(), &mut levels)?;
+            write_leaves(row_group_writer, &map_array.keys(), levels)?;
+            write_leaves(row_group_writer, &map_array.values(), levels)?;
             Ok(())
         }
         ArrowDataType::Dictionary(_, value_type) => {
             // cast dictionary to a primitive
             let array = arrow::compute::cast(array, value_type)?;
 
-            let mut col_writer = get_col_writer(&mut row_group_writer)?;
+            let mut col_writer = get_col_writer(row_group_writer)?;
             write_leaf(
                 &mut col_writer,
                 &array,
diff --git a/parquet/src/record/reader.rs b/parquet/src/record/reader.rs
index c45e097..475da44 100644
--- a/parquet/src/record/reader.rs
+++ b/parquet/src/record/reader.rs
@@ -106,7 +106,7 @@ impl TreeBuilder {
     fn reader_tree(
         &self,
         field: TypePtr,
-        mut path: &mut Vec<String>,
+        path: &mut Vec<String>,
         mut curr_def_level: i16,
         mut curr_rep_level: i16,
         paths: &HashMap<ColumnPath, usize>,
@@ -160,7 +160,7 @@ impl TreeBuilder {
                         // Support for backward compatible lists
                         let reader = self.reader_tree(
                             repeated_field,
-                            &mut path,
+                            path,
                             curr_def_level,
                             curr_rep_level,
                             paths,
@@ -180,7 +180,7 @@ impl TreeBuilder {
 
                         let reader = self.reader_tree(
                             child_field,
-                            &mut path,
+                            path,
                             curr_def_level + 1,
                             curr_rep_level + 1,
                             paths,
@@ -235,7 +235,7 @@ impl TreeBuilder {
                     );
                     let key_reader = self.reader_tree(
                         key_type.clone(),
-                        &mut path,
+                        path,
                         curr_def_level + 1,
                         curr_rep_level + 1,
                         paths,
@@ -245,7 +245,7 @@ impl TreeBuilder {
                     let value_type = &key_value_type.get_fields()[1];
                     let value_reader = self.reader_tree(
                         value_type.clone(),
-                        &mut path,
+                        path,
                         curr_def_level + 1,
                         curr_rep_level + 1,
                         paths,
@@ -278,7 +278,7 @@ impl TreeBuilder {
 
                     let reader = self.reader_tree(
                         Arc::new(required_field),
-                        &mut path,
+                        path,
                         curr_def_level,
                         curr_rep_level,
                         paths,
@@ -298,7 +298,7 @@ impl TreeBuilder {
                     for child in field.get_fields() {
                         let reader = self.reader_tree(
                             child.clone(),
-                            &mut path,
+                            path,
                             curr_def_level,
                             curr_rep_level,
                             paths,
diff --git a/parquet/src/util/bit_util.rs b/parquet/src/util/bit_util.rs
index fa8b9dd..162cfd8 100644
--- a/parquet/src/util/bit_util.rs
+++ b/parquet/src/util/bit_util.rs
@@ -383,8 +383,8 @@ impl BitWriter {
             // TODO: should we return `Result` for this func?
             return false;
         }
-        let mut ptr = result.unwrap();
-        memcpy_value(&val, num_bytes, &mut ptr);
+        let ptr = result.unwrap();
+        memcpy_value(&val, num_bytes, ptr);
         true
     }
 
diff --git a/parquet_derive/src/parquet_field.rs b/parquet_derive/src/parquet_field.rs
index 36730c7..8658f59 100644
--- a/parquet_derive/src/parquet_field.rs
+++ b/parquet_derive/src/parquet_field.rs
@@ -769,7 +769,7 @@ mod test {
         };
 
         let fields = extract_fields(snippet);
-        let processed: Vec<_> = fields.iter().map(|field| Field::from(field)).collect();
+        let processed: Vec<_> = fields.iter().map(Field::from).collect();
 
         let column_writers: Vec<_> = processed
             .iter()
@@ -800,7 +800,7 @@ mod test {
         };
 
         let fields = extract_fields(snippet);
-        let processed: Vec<_> = fields.iter().map(|field| Field::from(field)).collect();
+        let processed: Vec<_> = fields.iter().map(Field::from).collect();
         assert_eq!(processed.len(), 3);
 
         assert_eq!(
@@ -840,8 +840,7 @@ mod test {
         };
 
         let fields = extract_fields(snippet);
-        let converted_fields: Vec<_> =
-            fields.iter().map(|field| Type::from(field)).collect();
+        let converted_fields: Vec<_> = fields.iter().map(Type::from).collect();
         let inner_types: Vec<_> = converted_fields
             .iter()
             .map(|field| field.inner_type())
@@ -878,8 +877,7 @@ mod test {
         };
 
         let fields = extract_fields(snippet);
-        let converted_fields: Vec<_> =
-            fields.iter().map(|field| Type::from(field)).collect();
+        let converted_fields: Vec<_> = fields.iter().map(Type::from).collect();
         let physical_types: Vec<_> = converted_fields
             .iter()
             .map(|ty| ty.physical_type())
@@ -911,8 +909,7 @@ mod test {
         };
 
         let fields = extract_fields(snippet);
-        let converted_fields: Vec<_> =
-            fields.iter().map(|field| Type::from(field)).collect();
+        let converted_fields: Vec<_> = fields.iter().map(Type::from).collect();
 
         assert_eq!(
             converted_fields,
@@ -938,7 +935,7 @@ mod test {
         };
 
         let fields = extract_fields(snippet);
-        let types: Vec<_> = fields.iter().map(|field| Type::from(field)).collect();
+        let types: Vec<_> = fields.iter().map(Type::from).collect();
 
         assert_eq!(
             types,