You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@arrow.apache.org by ne...@apache.org on 2020/04/29 07:49:40 UTC

[arrow] branch master updated: ARROW-8597 [Rust] Lints and readability improvements for arrow crate

This is an automated email from the ASF dual-hosted git repository.

nevime pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow.git


The following commit(s) were added to refs/heads/master by this push:
     new 58bd7ee  ARROW-8597 [Rust] Lints and readability improvements for arrow crate
58bd7ee is described below

commit 58bd7ee3dc7ccf0eaa14ec7236c1f177d0f226ff
Author: Drazen Urch <dr...@urch.eu>
AuthorDate: Wed Apr 29 09:49:08 2020 +0200

    ARROW-8597 [Rust] Lints and readability improvements for arrow crate
    
    + Pedantic fixes to `unsafe`
    + Changes to function arguments to pass in references or values as appropriate
    + Refactor pointer arithmetic to use `usize` instead of `isize` casting
    + Ignore generated code clippy warnings
    + Refactor loops to use iterators where appropriate
    + Remove unnecessary `return` statements
    + Remove unnecessary `clone` statements
    + Remove unnecessary closures
    + A bunch of similar small scale refactorings
    
    Tests below are currently failing on master locally as well with `ARROW_TEST_DATA not defined: NotPresent`, assuming that is ok for now:
    
    ```
    ipc::reader::tests::read_generated_files
    ipc::reader::tests::read_generated_streams
    ipc::writer::tests::read_and_rewrite_generated_files
    ipc::writer::tests::read_and_rewrite_generated_streams
    ```
    
    Closes #7042 from durch/lints-and-readability-improvements
    
    Lead-authored-by: Drazen Urch <dr...@urch.eu>
    Co-authored-by: Neville Dipale <ne...@gmail.com>
    Signed-off-by: Neville Dipale <ne...@gmail.com>
---
 rust/arrow/src/array/array.rs                |  70 +++++-----
 rust/arrow/src/array/builder.rs              |   6 +-
 rust/arrow/src/array/equal.rs                |  31 ++---
 rust/arrow/src/bitmap.rs                     |   2 +-
 rust/arrow/src/buffer.rs                     |  35 +++--
 rust/arrow/src/compute/kernels/aggregate.rs  |   8 +-
 rust/arrow/src/compute/kernels/arithmetic.rs |   6 +-
 rust/arrow/src/compute/kernels/cast.rs       |  31 ++---
 rust/arrow/src/compute/kernels/comparison.rs |  12 +-
 rust/arrow/src/compute/kernels/take.rs       |   6 +-
 rust/arrow/src/compute/util.rs               |   4 +-
 rust/arrow/src/csv/reader.rs                 |  46 +++----
 rust/arrow/src/csv/writer.rs                 |  12 +-
 rust/arrow/src/datatypes.rs                  |  19 ++-
 rust/arrow/src/error.rs                      |  26 ++--
 rust/arrow/src/flight/mod.rs                 |  24 ++--
 rust/arrow/src/ipc/convert.rs                |  14 +-
 rust/arrow/src/ipc/reader.rs                 | 130 +++++++++---------
 rust/arrow/src/ipc/writer.rs                 |  36 +++--
 rust/arrow/src/json/reader.rs                | 190 ++++++++++++---------------
 rust/arrow/src/lib.rs                        |   5 +
 rust/arrow/src/memory.rs                     |  26 ++--
 rust/arrow/src/record_batch.rs               |   9 +-
 rust/arrow/src/tensor.rs                     |   6 +-
 rust/arrow/src/util/bit_util.rs              |   4 +-
 rust/arrow/src/util/integration_util.rs      |  10 +-
 rust/arrow/src/util/test_util.rs             |   2 +-
 27 files changed, 362 insertions(+), 408 deletions(-)

diff --git a/rust/arrow/src/array/array.rs b/rust/arrow/src/array/array.rs
index 71f4783..a68be1f 100644
--- a/rust/arrow/src/array/array.rs
+++ b/rust/arrow/src/array/array.rs
@@ -444,25 +444,22 @@ impl<T: ArrowNumericType> PrimitiveArray<T> {
 
     /// Returns a raw pointer to the values of this array.
     pub fn raw_values(&self) -> *const T::Native {
-        unsafe {
-            mem::transmute(self.raw_values.get().offset(self.data.offset() as isize))
-        }
+        unsafe { self.raw_values.get().add(self.data.offset()) }
     }
 
     /// Returns the primitive value at index `i`.
     ///
     /// Note this doesn't do any bound checking, for performance reason.
     pub fn value(&self, i: usize) -> T::Native {
-        unsafe { *(self.raw_values().offset(i as isize)) }
+        unsafe { *(self.raw_values().add(i)) }
     }
 
     /// Returns a slice for the given offset and length
     ///
     /// Note this doesn't do any bound checking, for performance reason.
     pub fn value_slice(&self, offset: usize, len: usize) -> &[T::Native] {
-        let raw = unsafe {
-            std::slice::from_raw_parts(self.raw_values().offset(offset as isize), len)
-        };
+        let raw =
+            unsafe { std::slice::from_raw_parts(self.raw_values().add(offset), len) };
         &raw[..]
     }
 
@@ -711,9 +708,9 @@ macro_rules! def_numeric_from_vec {
                             bit_util::set_bit(null_slice, i);
                             // unwrap() in the following should be safe here since we've
                             // made sure enough space is allocated for the values.
-                            val_buf.write(&n.to_byte_slice()).unwrap();
+                            val_buf.write_all(&n.to_byte_slice()).unwrap();
                         } else {
-                            val_buf.write(&null).unwrap();
+                            val_buf.write_all(&null).unwrap();
                         }
                     }
                 }
@@ -818,9 +815,9 @@ impl<T: ArrowTimestampType> PrimitiveArray<T> {
                     bit_util::set_bit(null_slice, i);
                     // unwrap() in the following should be safe here since we've
                     // made sure enough space is allocated for the values.
-                    val_buf.write(&n.to_byte_slice()).unwrap();
+                    val_buf.write_all(&n.to_byte_slice()).unwrap();
                 } else {
-                    val_buf.write(&null).unwrap();
+                    val_buf.write_all(&null).unwrap();
                 }
             }
         }
@@ -986,7 +983,7 @@ impl ListArray {
 
     #[inline]
     fn value_offset_at(&self, i: usize) -> i32 {
-        unsafe { *self.value_offsets.get().offset(i as isize) }
+        unsafe { *self.value_offsets.get().add(i) }
     }
 }
 
@@ -1014,7 +1011,7 @@ impl From<ArrayDataRef> for ListArray {
             assert_eq!(*value_offsets.offset(0), 0, "offsets do not start at zero");
         }
         Self {
-            data: data.clone(),
+            data,
             values,
             value_offsets: RawPtrBox::new(value_offsets),
         }
@@ -1043,24 +1040,24 @@ where
 {
     for i in 0..std::cmp::min(10, array.len()) {
         if array.is_null(i) {
-            write!(f, "  null,\n")?;
+            writeln!(f, "  null,")?;
         } else {
             write!(f, "  ")?;
             print_item(&array, i, f)?;
-            write!(f, ",\n")?;
+            writeln!(f, ",")?;
         }
     }
     if array.len() > 10 {
         if array.len() > 20 {
-            write!(f, "  ...{} elements...,\n", array.len() - 20)?;
+            writeln!(f, "  ...{} elements...,", array.len() - 20)?;
         }
         for i in array.len() - 10..array.len() {
             if array.is_null(i) {
-                write!(f, "  null,\n")?;
+                writeln!(f, "  null,")?;
             } else {
                 write!(f, "  ")?;
                 print_item(&array, i, f)?;
-                write!(f, ",\n")?;
+                writeln!(f, ",")?;
             }
         }
     }
@@ -1154,7 +1151,7 @@ impl From<ArrayDataRef> for FixedSizeListArray {
             }
         };
         Self {
-            data: data.clone(),
+            data,
             values,
             length,
         }
@@ -1249,7 +1246,7 @@ impl BinaryArray {
 
     #[inline]
     fn value_offset_at(&self, i: usize) -> i32 {
-        unsafe { *self.value_offsets.get().offset(i as isize) }
+        unsafe { *self.value_offsets.get().add(i) }
     }
 }
 
@@ -1298,7 +1295,7 @@ impl StringArray {
 
     #[inline]
     fn value_offset_at(&self, i: usize) -> i32 {
-        unsafe { *self.value_offsets.get().offset(i as isize) }
+        unsafe { *self.value_offsets.get().add(i) }
     }
 }
 
@@ -1360,7 +1357,7 @@ impl From<ArrayDataRef> for BinaryArray {
         );
         let value_data = data.buffers()[1].raw_data();
         Self {
-            data: data.clone(),
+            data,
             value_offsets: RawPtrBox::new(raw_value_offsets as *const i32),
             value_data: RawPtrBox::new(value_data),
         }
@@ -1381,7 +1378,7 @@ impl From<ArrayDataRef> for StringArray {
         );
         let value_data = data.buffers()[1].raw_data();
         Self {
-            data: data.clone(),
+            data,
             value_offsets: RawPtrBox::new(raw_value_offsets as *const i32),
             value_data: RawPtrBox::new(value_data),
         }
@@ -1401,7 +1398,7 @@ impl From<ArrayDataRef> for FixedSizeBinaryArray {
             _ => panic!("Expected data type to be FixedSizeBinary"),
         };
         Self {
-            data: data.clone(),
+            data,
             value_data: RawPtrBox::new(value_data),
             length,
         }
@@ -1739,15 +1736,15 @@ impl fmt::Debug for StructArray {
         write!(f, "StructArray\n[\n")?;
         for (child_index, name) in self.column_names().iter().enumerate() {
             let column = self.column(child_index);
-            write!(
+            writeln!(
                 f,
-                "-- child {}: \"{}\" ({:?})\n",
+                "-- child {}: \"{}\" ({:?})",
                 child_index,
                 name,
                 column.data_type()
             )?;
             fmt::Debug::fmt(column, f)?;
-            write!(f, "\n")?;
+            writeln!(f)?;
         }
         write!(f, "]")
     }
@@ -1841,7 +1838,7 @@ where
             Some(None)
         } else {
             self.i += 1;
-            unsafe { Some(Some((&*self.ptr.offset(i as isize)).clone())) }
+            unsafe { Some(Some((&*self.ptr.add(i)).clone())) }
         }
     }
 
@@ -1859,7 +1856,7 @@ where
             Some(None)
         } else {
             self.i += n + 1;
-            unsafe { Some(Some((&*self.ptr.offset((i + n) as isize)).clone())) }
+            unsafe { Some(Some((&*self.ptr.add(i + n)).clone())) }
         }
     }
 }
@@ -1869,7 +1866,7 @@ impl<'a, K: ArrowPrimitiveType> DictionaryArray<K> {
     pub fn keys(&self) -> NullableIter<'_, K::Native> {
         NullableIter::<'_, K::Native> {
             data: &self.data,
-            ptr: unsafe { self.raw_values.get().offset(self.data.offset() as isize) },
+            ptr: unsafe { self.raw_values.get().add(self.data.offset()) },
             i: 0,
             len: self.data.len(),
         }
@@ -1924,14 +1921,15 @@ impl<T: ArrowPrimitiveType> From<ArrayDataRef> for DictionaryArray<T> {
         let raw_values = data.buffers()[0].raw_data();
         let dtype: &DataType = data.data_type();
         let values = make_array(data.child_data()[0].clone());
-        match dtype {
-            DataType::Dictionary(_, _) => Self {
+        if let DataType::Dictionary(_, _) = dtype {
+            Self {
                 data,
                 raw_values: RawPtrBox::new(raw_values as *const T::Native),
                 values,
                 is_ordered: false,
-            },
-            _ => panic!("DictionaryArray must have Dictionary data type."),
+            }
+        } else {
+            panic!("DictionaryArray must have Dictionary data type.")
         }
     }
 }
@@ -2007,9 +2005,9 @@ impl<T: ArrowPrimitiveType> fmt::Debug for DictionaryArray<T> {
         } else {
             ""
         };
-        write!(
+        writeln!(
             f,
-            "DictionaryArray {{keys: {:?}{} values: {:?}}}\n",
+            "DictionaryArray {{keys: {:?}{} values: {:?}}}",
             keys, elipsis, self.values
         )
     }
diff --git a/rust/arrow/src/array/builder.rs b/rust/arrow/src/array/builder.rs
index b4f0777..32a0147 100644
--- a/rust/arrow/src/array/builder.rs
+++ b/rust/arrow/src/array/builder.rs
@@ -1099,7 +1099,7 @@ impl StructBuilder {
                 let schema = Schema::new(fields.clone());
                 Box::new(Self::from_schema(schema, capacity))
             }
-            t @ _ => panic!("Data type {:?} is not currently supported", t),
+            t => panic!("Data type {:?} is not currently supported", t),
         }
     }
 
@@ -1185,8 +1185,8 @@ where
         values_builder: PrimitiveBuilder<V>,
     ) -> Self {
         Self {
-            keys_builder: keys_builder,
-            values_builder: values_builder,
+            keys_builder,
+            values_builder,
             map: HashMap::new(),
         }
     }
diff --git a/rust/arrow/src/array/equal.rs b/rust/arrow/src/array/equal.rs
index 81f62bf..75bd3d0 100644
--- a/rust/arrow/src/array/equal.rs
+++ b/rust/arrow/src/array/equal.rs
@@ -113,12 +113,11 @@ impl ArrayEqual for BooleanArray {
 
         // TODO: we can do this more efficiently if all values are not-null
         for i in 0..self.len() {
-            if self.is_valid(i) {
-                if bit_util::get_bit(values, i + self.offset())
+            if self.is_valid(i)
+                && bit_util::get_bit(values, i + self.offset())
                     != bit_util::get_bit(other_values, i + other.offset())
-                {
-                    return false;
-                }
+            {
+                return false;
             }
         }
 
@@ -730,7 +729,7 @@ fn value_offset_equal<T: Array + ListArrayOps>(this: &T, other: &T) -> bool {
     }
 
     // The expensive case
-    for i in 0..this.len() + 1 {
+    for i in 0..=this.len() {
         if this.value_offset_at(i) - this.value_offset_at(0)
             != other.value_offset_at(i) - other.value_offset_at(0)
         {
@@ -761,12 +760,10 @@ impl<T: ArrowPrimitiveType> JsonEqual for PrimitiveArray<T> {
             return false;
         }
 
-        let result = (0..self.len()).all(|i| match json[i] {
+        (0..self.len()).all(|i| match json[i] {
             Value::Null => self.is_null(i),
             v => self.is_valid(i) && Some(v) == self.value(i).into_json_value().as_ref(),
-        });
-
-        result
+        })
     }
 }
 
@@ -794,13 +791,11 @@ impl JsonEqual for ListArray {
             return false;
         }
 
-        let result = (0..self.len()).all(|i| match json[i] {
+        (0..self.len()).all(|i| match json[i] {
             Value::Array(v) => self.is_valid(i) && self.value(i).equals_json_values(v),
             Value::Null => self.is_null(i) || self.value_length(i) == 0,
             _ => false,
-        });
-
-        result
+        })
     }
 }
 
@@ -858,13 +853,11 @@ impl JsonEqual for FixedSizeListArray {
             return false;
         }
 
-        let result = (0..self.len()).all(|i| match json[i] {
+        (0..self.len()).all(|i| match json[i] {
             Value::Array(v) => self.is_valid(i) && self.value(i).equals_json_values(v),
             Value::Null => self.is_null(i) || self.value_length() == 0,
             _ => false,
-        });
-
-        result
+        })
     }
 }
 
@@ -916,7 +909,7 @@ impl JsonEqual for StructArray {
             }
         }
 
-        return true;
+        true
     }
 }
 
diff --git a/rust/arrow/src/bitmap.rs b/rust/arrow/src/bitmap.rs
index 48f9a09..83ac542 100644
--- a/rust/arrow/src/bitmap.rs
+++ b/rust/arrow/src/bitmap.rs
@@ -96,7 +96,7 @@ impl PartialEq for Bitmap {
         if self_len != other_len {
             return false;
         }
-        &self.bits.data()[..self_len] == &other.bits.data()[..self_len]
+        self.bits.data()[..self_len] == other.bits.data()[..self_len]
     }
 }
 
diff --git a/rust/arrow/src/buffer.rs b/rust/arrow/src/buffer.rs
index 189b8ed..e98c372 100644
--- a/rust/arrow/src/buffer.rs
+++ b/rust/arrow/src/buffer.rs
@@ -78,7 +78,7 @@ impl PartialEq for BufferData {
 impl Drop for BufferData {
     fn drop(&mut self) {
         if !self.ptr.is_null() && self.owned {
-            memory::free_aligned(self.ptr as *mut u8, self.capacity);
+            unsafe { memory::free_aligned(self.ptr as *mut u8, self.capacity) };
         }
     }
 }
@@ -215,7 +215,7 @@ impl Buffer {
     /// Note that this should be used cautiously, and the returned pointer should not be
     /// stored anywhere, to avoid dangling pointers.
     pub fn raw_data(&self) -> *const u8 {
-        unsafe { self.data.ptr.offset(self.offset as isize) }
+        unsafe { self.data.ptr.add(self.offset) }
     }
 
     /// View buffer as typed slice.
@@ -232,7 +232,7 @@ impl Buffer {
         assert_eq!(self.len() % mem::size_of::<T>(), 0);
         assert!(memory::is_ptr_aligned::<T>(self.raw_data() as *const T));
         from_raw_parts(
-            mem::transmute::<*const u8, *const T>(self.raw_data()),
+            self.raw_data() as *const T,
             self.len() / mem::size_of::<T>(),
         )
     }
@@ -277,21 +277,16 @@ where
     let mut result = MutableBuffer::new(left.len()).with_bitset(left.len(), false);
     let lanes = u8x64::lanes();
     for i in (0..left.len()).step_by(lanes) {
-        let left_data =
-            unsafe { from_raw_parts(left.raw_data().offset(i as isize), lanes) };
-        let right_data =
-            unsafe { from_raw_parts(right.raw_data().offset(i as isize), lanes) };
+        let left_data = unsafe { from_raw_parts(left.raw_data().add(i), lanes) };
+        let right_data = unsafe { from_raw_parts(right.raw_data().add(i), lanes) };
         let result_slice: &mut [u8] = unsafe {
-            from_raw_parts_mut(
-                (result.data_mut().as_mut_ptr() as *mut u8).offset(i as isize),
-                lanes,
-            )
+            from_raw_parts_mut((result.data_mut().as_mut_ptr() as *mut u8).add(i), lanes)
         };
         unsafe {
             bit_util::bitwise_bin_op_simd(&left_data, &right_data, result_slice, &op)
         };
     }
-    return result.freeze();
+    result.freeze()
 }
 
 impl<'a, 'b> BitAnd<&'b Buffer> for &'a Buffer {
@@ -374,11 +369,11 @@ impl Not for &Buffer {
             let lanes = u8x64::lanes();
             for i in (0..self.len()).step_by(lanes) {
                 unsafe {
-                    let data = from_raw_parts(self.raw_data().offset(i as isize), lanes);
+                    let data = from_raw_parts(self.raw_data().add(i), lanes);
                     let data_simd = u8x64::from_slice_unaligned_unchecked(data);
                     let simd_result = !data_simd;
                     let result_slice: &mut [u8] = from_raw_parts_mut(
-                        (result.data_mut().as_mut_ptr() as *mut u8).offset(i as isize),
+                        (result.data_mut().as_mut_ptr() as *mut u8).add(i),
                         lanes,
                     );
                     simd_result.write_to_slice_unaligned_unchecked(result_slice);
@@ -449,7 +444,7 @@ impl MutableBuffer {
     pub fn set_null_bits(&mut self, start: usize, count: usize) {
         assert!(start + count <= self.capacity);
         unsafe {
-            std::ptr::write_bytes(self.data.offset(start as isize), 0, count);
+            std::ptr::write_bytes(self.data.add(start), 0, count);
         }
     }
 
@@ -461,7 +456,8 @@ impl MutableBuffer {
         if capacity > self.capacity {
             let new_capacity = bit_util::round_upto_multiple_of_64(capacity);
             let new_capacity = cmp::max(new_capacity, self.capacity * 2);
-            let new_data = memory::reallocate(self.data, self.capacity, new_capacity);
+            let new_data =
+                unsafe { memory::reallocate(self.data, self.capacity, new_capacity) };
             self.data = new_data as *mut u8;
             self.capacity = new_capacity;
         }
@@ -481,7 +477,8 @@ impl MutableBuffer {
         } else {
             let new_capacity = bit_util::round_upto_multiple_of_64(new_len);
             if new_capacity < self.capacity {
-                let new_data = memory::reallocate(self.data, self.capacity, new_capacity);
+                let new_data =
+                    unsafe { memory::reallocate(self.data, self.capacity, new_capacity) };
                 self.data = new_data as *mut u8;
                 self.capacity = new_capacity;
             }
@@ -571,7 +568,7 @@ impl MutableBuffer {
 impl Drop for MutableBuffer {
     fn drop(&mut self) {
         if !self.data.is_null() {
-            memory::free_aligned(self.data, self.capacity);
+            unsafe { memory::free_aligned(self.data, self.capacity) };
         }
     }
 }
@@ -595,7 +592,7 @@ impl Write for MutableBuffer {
             return Err(IoError::new(ErrorKind::Other, "Buffer not big enough"));
         }
         unsafe {
-            memory::memcpy(self.data.offset(self.len as isize), buf.as_ptr(), buf.len());
+            memory::memcpy(self.data.add(self.len), buf.as_ptr(), buf.len());
             self.len += buf.len();
             Ok(buf.len())
         }
diff --git a/rust/arrow/src/compute/kernels/aggregate.rs b/rust/arrow/src/compute/kernels/aggregate.rs
index 7a04b95..1af5ed6 100644
--- a/rust/arrow/src/compute/kernels/aggregate.rs
+++ b/rust/arrow/src/compute/kernels/aggregate.rs
@@ -80,17 +80,17 @@ where
         let mut n: T::Native = T::default_value();
         let data = array.data();
         let m = array.value_slice(0, data.len());
-        for i in 0..data.len() {
-            n = n + m[i];
+        for item in m.iter().take(data.len()) {
+            n = n + *item;
         }
         Some(n)
     } else {
         let mut n: T::Native = T::default_value();
         let data = array.data();
         let m = array.value_slice(0, data.len());
-        for i in 0..data.len() {
+        for (i, item) in m.iter().enumerate() {
             if data.is_valid(i) {
-                n = n + m[i];
+                n = n + *item;
             }
         }
         Some(n)
diff --git a/rust/arrow/src/compute/kernels/arithmetic.rs b/rust/arrow/src/compute/kernels/arithmetic.rs
index 6c1b77e..85d6840 100644
--- a/rust/arrow/src/compute/kernels/arithmetic.rs
+++ b/rust/arrow/src/compute/kernels/arithmetic.rs
@@ -123,7 +123,7 @@ where
 
         let result_slice: &mut [T::Native] = unsafe {
             from_raw_parts_mut(
-                (result.data_mut().as_mut_ptr() as *mut T::Native).offset(i as isize),
+                (result.data_mut().as_mut_ptr() as *mut T::Native).add(i),
                 lanes,
             )
         };
@@ -186,14 +186,14 @@ where
 
         let result_slice: &mut [T::Native] = unsafe {
             from_raw_parts_mut(
-                (result.data_mut().as_mut_ptr() as *mut T::Native).offset(i as isize),
+                (result.data_mut().as_mut_ptr() as *mut T::Native).add(i),
                 lanes,
             )
         };
         T::write(simd_result, result_slice);
     }
 
-    let null_bit_buffer = bitmap.and_then(|b| Some(b.bits));
+    let null_bit_buffer = bitmap.map(|b| b.bits);
 
     let data = ArrayData::new(
         T::get_data_type(),
diff --git a/rust/arrow/src/compute/kernels/cast.rs b/rust/arrow/src/compute/kernels/cast.rs
index 7459337..beaf2ac 100644
--- a/rust/arrow/src/compute/kernels/cast.rs
+++ b/rust/arrow/src/compute/kernels/cast.rs
@@ -106,7 +106,7 @@ pub fn cast(array: &ArrayRef, to_type: &DataType) -> Result<ArrayRef> {
             // cast primitive to list's primitive
             let cast_array = cast(array, &to)?;
             // create offsets, where if array.len() = 2, we have [0,1,2]
-            let offsets: Vec<i32> = (0..array.len() as i32 + 1).collect();
+            let offsets: Vec<i32> = (0..=array.len() as i32).collect();
             let value_offsets = Buffer::from(offsets[..].to_byte_slice());
             let list_data = ArrayData::new(
                 *to.clone(),
@@ -163,10 +163,7 @@ pub fn cast(array: &ArrayRef, to_type: &DataType) -> Result<ArrayRef> {
                     if array.is_null(i) {
                         b.append(false)?;
                     } else {
-                        b.append_value(match from.value(i) {
-                            true => "1",
-                            false => "0",
-                        })?;
+                        b.append_value(if from.value(i) { "1" } else { "0" })?;
                     }
                 }
 
@@ -708,12 +705,10 @@ where
     for i in 0..from.len() {
         if from.is_null(i) {
             b.append_null()?;
+        } else if from.value(i) != T::default_value() {
+            b.append_value(true)?;
         } else {
-            if from.value(i) != T::default_value() {
-                b.append_value(true)?;
-            } else {
-                b.append_value(false)?;
-            }
+            b.append_value(false)?;
         }
     }
 
@@ -742,16 +737,14 @@ where
     for i in 0..from.len() {
         if from.is_null(i) {
             b.append_null()?;
+        } else if from.value(i) {
+            // a workaround to cast a primitive to T::Native, infallible
+            match num::cast::cast(1) {
+                Some(v) => b.append_value(v)?,
+                None => b.append_null()?,
+            };
         } else {
-            if from.value(i) {
-                // a workaround to cast a primitive to T::Native, infallible
-                match num::cast::cast(1) {
-                    Some(v) => b.append_value(v)?,
-                    None => b.append_null()?,
-                };
-            } else {
-                b.append_value(T::default_value())?;
-            }
+            b.append_value(T::default_value())?;
         }
     }
 
diff --git a/rust/arrow/src/compute/kernels/comparison.rs b/rust/arrow/src/compute/kernels/comparison.rs
index ac46724..390c276 100644
--- a/rust/arrow/src/compute/kernels/comparison.rs
+++ b/rust/arrow/src/compute/kernels/comparison.rs
@@ -265,7 +265,7 @@ where
     T: ArrowNumericType,
 {
     #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))]
-    return simd_compare_op(left, right, |a, b| T::eq(a, b));
+    return simd_compare_op(left, right, T::eq);
 
     #[cfg(any(
         not(any(target_arch = "x86", target_arch = "x86_64")),
@@ -280,7 +280,7 @@ where
     T: ArrowNumericType,
 {
     #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))]
-    return simd_compare_op(left, right, |a, b| T::ne(a, b));
+    return simd_compare_op(left, right, T::ne);
 
     #[cfg(any(
         not(any(target_arch = "x86", target_arch = "x86_64")),
@@ -296,7 +296,7 @@ where
     T: ArrowNumericType,
 {
     #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))]
-    return simd_compare_op(left, right, |a, b| T::lt(a, b));
+    return simd_compare_op(left, right, T::lt);
 
     #[cfg(any(
         not(any(target_arch = "x86", target_arch = "x86_64")),
@@ -315,7 +315,7 @@ where
     T: ArrowNumericType,
 {
     #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))]
-    return simd_compare_op(left, right, |a, b| T::le(a, b));
+    return simd_compare_op(left, right, T::le);
 
     #[cfg(any(
         not(any(target_arch = "x86", target_arch = "x86_64")),
@@ -331,7 +331,7 @@ where
     T: ArrowNumericType,
 {
     #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))]
-    return simd_compare_op(left, right, |a, b| T::gt(a, b));
+    return simd_compare_op(left, right, T::gt);
 
     #[cfg(any(
         not(any(target_arch = "x86", target_arch = "x86_64")),
@@ -350,7 +350,7 @@ where
     T: ArrowNumericType,
 {
     #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))]
-    return simd_compare_op(left, right, |a, b| T::ge(a, b));
+    return simd_compare_op(left, right, T::ge);
 
     #[cfg(any(
         not(any(target_arch = "x86", target_arch = "x86_64")),
diff --git a/rust/arrow/src/compute/kernels/take.rs b/rust/arrow/src/compute/kernels/take.rs
index cb294dd..fc6afb2 100644
--- a/rust/arrow/src/compute/kernels/take.rs
+++ b/rust/arrow/src/compute/kernels/take.rs
@@ -38,7 +38,7 @@ pub fn take(
     indices: &UInt32Array,
     options: Option<TakeOptions>,
 ) -> Result<ArrayRef> {
-    let options = options.unwrap_or(Default::default());
+    let options = options.unwrap_or_default();
     if options.check_bounds {
         let len = values.len();
         for i in 0..indices.len() {
@@ -121,7 +121,7 @@ pub fn take(
                 fields.clone().into_iter().zip(arrays).collect();
             Ok(Arc::new(StructArray::from(pairs)) as ArrayRef)
         }
-        t @ _ => unimplemented!("Take not supported for data type {:?}", t),
+        t => unimplemented!("Take not supported for data type {:?}", t),
     }
 }
 
@@ -210,7 +210,7 @@ fn take_list(values: &ArrayRef, indices: &UInt32Array) -> Result<ArrayRef> {
     let mut null_buf = MutableBuffer::new(num_bytes).with_bitset(num_bytes, false);
     {
         let null_slice = null_buf.data_mut();
-        &offsets[..]
+        offsets[..]
             .windows(2)
             .enumerate()
             .for_each(|(i, window): (usize, &[i32])| {
diff --git a/rust/arrow/src/compute/util.rs b/rust/arrow/src/compute/util.rs
index a6ddf06..03930b6 100644
--- a/rust/arrow/src/compute/util.rs
+++ b/rust/arrow/src/compute/util.rs
@@ -83,7 +83,7 @@ pub(super) fn take_value_indices_from_list(
             if start != end {
                 // type annotation needed to guide compiler a bit
                 let mut offsets: Vec<Option<u32>> =
-                    (start..end).map(|v| Some(v)).collect::<Vec<Option<u32>>>();
+                    (start..end).map(Some).collect::<Vec<Option<u32>>>();
                 values.append(&mut offsets);
             }
         } else {
@@ -113,7 +113,7 @@ where
     let mut validity = T::mask_init(true);
 
     // Validity based on `Bitmap`
-    if let &Some(b) = &bitmap {
+    if let Some(b) = bitmap {
         for j in i..min(array_len, simd_upper_bound) {
             if !b.is_set(j) {
                 validity = T::mask_set(validity, j - i, false);
diff --git a/rust/arrow/src/csv/reader.rs b/rust/arrow/src/csv/reader.rs
index efffcbb..6b99710 100644
--- a/rust/arrow/src/csv/reader.rs
+++ b/rust/arrow/src/csv/reader.rs
@@ -68,18 +68,18 @@ lazy_static! {
 fn infer_field_schema(string: &str) -> DataType {
     // when quoting is enabled in the reader, these quotes aren't escaped, we default to
     // Utf8 for them
-    if string.starts_with("\"") {
+    if string.starts_with('"') {
         return DataType::Utf8;
     }
     // match regex in a particular order
     if BOOLEAN_RE.is_match(string) {
-        return DataType::Boolean;
+        DataType::Boolean
     } else if DECIMAL_RE.is_match(string) {
-        return DataType::Float64;
+        DataType::Float64
     } else if INTEGER_RE.is_match(string) {
-        return DataType::Int64;
+        DataType::Int64
     } else {
-        return DataType::Utf8;
+        DataType::Utf8
     }
 }
 
@@ -106,7 +106,6 @@ fn infer_file_schema<R: Read + Seek>(
         let first_record_count = &csv_reader.headers()?.len();
         (0..*first_record_count)
             .map(|i| format!("column_{}", i + 1))
-            .into_iter()
             .collect()
     };
 
@@ -131,16 +130,12 @@ fn infer_file_schema<R: Read + Seek>(
         let record = result?;
 
         for i in 0..header_length {
-            let string: Option<&str> = record.get(i);
-            match string {
-                Some(s) => {
-                    if s == "" {
-                        nulls[i] = true;
-                    } else {
-                        column_types[i].insert(infer_field_schema(s));
-                    }
+            if let Some(string) = record.get(i) {
+                if string == "" {
+                    nulls[i] = true;
+                } else {
+                    column_types[i].insert(infer_field_schema(string));
                 }
-                _ => {}
             }
         }
     }
@@ -295,7 +290,8 @@ impl<R: Read> Reader<R> {
         let arrays: Result<Vec<ArrayRef>> = projection
             .iter()
             .map(|i| {
-                let field = self.schema.field(*i);
+                let i = *i;
+                let field = self.schema.field(i);
                 match field.data_type() {
                     &DataType::Boolean => {
                         self.build_primitive_array::<BooleanType>(rows, i)
@@ -322,8 +318,8 @@ impl<R: Read> Reader<R> {
                     }
                     &DataType::Utf8 => {
                         let mut builder = StringBuilder::new(rows.len());
-                        for row_index in 0..rows.len() {
-                            match rows[row_index].get(*i) {
+                        for row in rows.iter() {
+                            match row.get(i) {
                                 Some(s) => builder.append_value(s).unwrap(),
                                 _ => builder.append(false).unwrap(),
                             }
@@ -349,22 +345,20 @@ impl<R: Read> Reader<R> {
 
         let projected_schema = Arc::new(Schema::new(projected_fields));
 
-        arrays.and_then(|arr| {
-            RecordBatch::try_new(projected_schema, arr).map(|batch| Some(batch))
-        })
+        arrays.and_then(|arr| RecordBatch::try_new(projected_schema, arr).map(Some))
     }
 
     fn build_primitive_array<T: ArrowPrimitiveType>(
         &self,
         rows: &[StringRecord],
-        col_idx: &usize,
+        col_idx: usize,
     ) -> Result<ArrayRef> {
         let mut builder = PrimitiveBuilder::<T>::new(rows.len());
         let is_boolean_type =
-            *self.schema.field(*col_idx).data_type() == DataType::Boolean;
-        for row_index in 0..rows.len() {
-            match rows[row_index].get(*col_idx) {
-                Some(s) if s.len() > 0 => {
+            *self.schema.field(col_idx).data_type() == DataType::Boolean;
+        for (row_index, row) in rows.iter().enumerate() {
+            match row.get(col_idx) {
+                Some(s) if !s.is_empty() => {
                     let t = if is_boolean_type {
                         s.to_lowercase().parse::<T::Native>()
                     } else {
diff --git a/rust/arrow/src/csv/writer.rs b/rust/arrow/src/csv/writer.rs
index b24efaf..6a3bb95 100644
--- a/rust/arrow/src/csv/writer.rs
+++ b/rust/arrow/src/csv/writer.rs
@@ -254,7 +254,7 @@ impl<W: Write> Writer<W> {
         if self.beginning {
             if self.has_headers {
                 let mut headers: Vec<String> = Vec::with_capacity(num_columns);
-                &batch
+                batch
                     .schema()
                     .fields()
                     .iter()
@@ -366,11 +366,15 @@ impl WriterBuilder {
             writer,
             delimiter,
             has_headers: self.has_headers,
-            date_format: self.date_format.unwrap_or(DEFAULT_DATE_FORMAT.to_string()),
-            time_format: self.time_format.unwrap_or(DEFAULT_TIME_FORMAT.to_string()),
+            date_format: self
+                .date_format
+                .unwrap_or_else(|| DEFAULT_DATE_FORMAT.to_string()),
+            time_format: self
+                .time_format
+                .unwrap_or_else(|| DEFAULT_TIME_FORMAT.to_string()),
             timestamp_format: self
                 .timestamp_format
-                .unwrap_or(DEFAULT_TIMESTAMP_FORMAT.to_string()),
+                .unwrap_or_else(|| DEFAULT_TIMESTAMP_FORMAT.to_string()),
             beginning: false,
         }
     }
diff --git a/rust/arrow/src/datatypes.rs b/rust/arrow/src/datatypes.rs
index 21ecacb..ce6f2d2 100644
--- a/rust/arrow/src/datatypes.rs
+++ b/rust/arrow/src/datatypes.rs
@@ -326,14 +326,13 @@ impl ArrowNativeType for u64 {
 
 impl ArrowNativeType for f32 {
     fn into_json_value(self) -> Option<Value> {
-        Number::from_f64(f64::round(self as f64 * 1000.0) / 1000.0)
-            .map(|num| VNumber(num))
+        Number::from_f64(f64::round(self as f64 * 1000.0) / 1000.0).map(VNumber)
     }
 }
 
 impl ArrowNativeType for f64 {
     fn into_json_value(self) -> Option<Value> {
-        Number::from_f64(self).map(|num| VNumber(num))
+        Number::from_f64(self).map(VNumber)
     }
 }
 
@@ -758,9 +757,9 @@ impl DataType {
                     if let Some(Value::Number(size)) = map.get("byteWidth") {
                         Ok(DataType::FixedSizeBinary(size.as_i64().unwrap() as i32))
                     } else {
-                        Err(ArrowError::ParseError(format!(
-                            "Expecting a byteWidth for fixedsizebinary",
-                        )))
+                        Err(ArrowError::ParseError(
+                            "Expecting a byteWidth for fixedsizebinary".to_string(),
+                        ))
                     }
                 }
                 Some(s) if s == "floatingpoint" => match map.get("precision") {
@@ -888,9 +887,9 @@ impl DataType {
                             size.as_i64().unwrap() as i32,
                         ))
                     } else {
-                        Err(ArrowError::ParseError(format!(
-                            "Expecting a listSize for fixedsizelist",
-                        )))
+                        Err(ArrowError::ParseError(
+                            "Expecting a listSize for fixedsizelist".to_string(),
+                        ))
                     }
                 }
                 Some(s) if s == "struct" => {
@@ -1284,7 +1283,7 @@ impl Schema {
 
     /// Returns an immutable reference of a specific `Field` instance selected by name
     pub fn field_with_name(&self, name: &str) -> Result<&Field> {
-        return Ok(&self.fields[self.index_of(name)?]);
+        Ok(&self.fields[self.index_of(name)?])
     }
 
     /// Find the index of the column with the given name
diff --git a/rust/arrow/src/error.rs b/rust/arrow/src/error.rs
index 3662f0a..52ce86d 100644
--- a/rust/arrow/src/error.rs
+++ b/rust/arrow/src/error.rs
@@ -51,9 +51,7 @@ impl From<csv_crate::Error> for ArrowError {
                 err.to_string()
             )),
             csv_crate::ErrorKind::UnequalLengths {
-                pos: _,
-                expected_len,
-                len,
+                expected_len, len, ..
             } => ArrowError::CsvError(format!(
                 "Encountered unequal lengths between records on CSV file. Expected {} \
                  records, found {} records",
@@ -72,21 +70,21 @@ impl From<::std::string::FromUtf8Error> for ArrowError {
 
 impl Display for ArrowError {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        match self {
-            &ArrowError::MemoryError(ref desc) => write!(f, "Memory error: {}", desc),
-            &ArrowError::ParseError(ref desc) => write!(f, "Parser error: {}", desc),
-            &ArrowError::ComputeError(ref desc) => write!(f, "Compute error: {}", desc),
-            &ArrowError::DivideByZero => write!(f, "Divide by zero error"),
-            &ArrowError::CsvError(ref desc) => write!(f, "Csv error: {}", desc),
-            &ArrowError::JsonError(ref desc) => write!(f, "Json error: {}", desc),
-            &ArrowError::IoError(ref desc) => write!(f, "Io error: {}", desc),
-            &ArrowError::InvalidArgumentError(ref desc) => {
+        match *self {
+            ArrowError::MemoryError(ref desc) => write!(f, "Memory error: {}", desc),
+            ArrowError::ParseError(ref desc) => write!(f, "Parser error: {}", desc),
+            ArrowError::ComputeError(ref desc) => write!(f, "Compute error: {}", desc),
+            ArrowError::DivideByZero => write!(f, "Divide by zero error"),
+            ArrowError::CsvError(ref desc) => write!(f, "Csv error: {}", desc),
+            ArrowError::JsonError(ref desc) => write!(f, "Json error: {}", desc),
+            ArrowError::IoError(ref desc) => write!(f, "Io error: {}", desc),
+            ArrowError::InvalidArgumentError(ref desc) => {
                 write!(f, "Invalid argument error: {}", desc)
             }
-            &ArrowError::ParquetError(ref desc) => {
+            ArrowError::ParquetError(ref desc) => {
                 write!(f, "Parquet argument error: {}", desc)
             }
-            &ArrowError::DictionaryKeyOverflowError => {
+            ArrowError::DictionaryKeyOverflowError => {
                 write!(f, "Dictionary key bigger than the key type")
             }
         }
diff --git a/rust/arrow/src/flight/mod.rs b/rust/arrow/src/flight/mod.rs
index b3c6863..c3dabdc 100644
--- a/rust/arrow/src/flight/mod.rs
+++ b/rust/arrow/src/flight/mod.rs
@@ -68,9 +68,11 @@ impl From<&Schema> for FlightData {
 impl TryFrom<&FlightData> for Schema {
     type Error = ArrowError;
     fn try_from(data: &FlightData) -> Result<Self> {
-        convert::schema_from_bytes(&data.data_header[..]).ok_or(ArrowError::ParseError(
-            "Unable to convert flight data to Arrow schema".to_string(),
-        ))
+        convert::schema_from_bytes(&data.data_header[..]).ok_or_else(|| {
+            ArrowError::ParseError(
+                "Unable to convert flight data to Arrow schema".to_string(),
+            )
+        })
     }
 }
 
@@ -80,9 +82,11 @@ impl TryFrom<&FlightData> for Schema {
 impl TryFrom<&SchemaResult> for Schema {
     type Error = ArrowError;
     fn try_from(data: &SchemaResult) -> Result<Self> {
-        convert::schema_from_bytes(&data.schema[..]).ok_or(ArrowError::ParseError(
-            "Unable to convert schema result to Arrow schema".to_string(),
-        ))
+        convert::schema_from_bytes(&data.schema[..]).ok_or_else(|| {
+            ArrowError::ParseError(
+                "Unable to convert schema result to Arrow schema".to_string(),
+            )
+        })
     }
 }
 
@@ -94,11 +98,11 @@ pub fn flight_data_to_batch(
     // check that the data_header is a record batch message
     let message = crate::ipc::get_root_as_message(&data.data_header[..]);
     let dictionaries_by_field = Vec::new();
-    let batch_header = message
-        .header_as_record_batch()
-        .ok_or(ArrowError::ParseError(
+    let batch_header = message.header_as_record_batch().ok_or_else(|| {
+        ArrowError::ParseError(
             "Unable to convert flight data header to a record batch".to_string(),
-        ))?;
+        )
+    })?;
     reader::read_record_batch(
         &data.data_body,
         batch_header,
diff --git a/rust/arrow/src/ipc/convert.rs b/rust/arrow/src/ipc/convert.rs
index e29cc3e..953885f 100644
--- a/rust/arrow/src/ipc/convert.rs
+++ b/rust/arrow/src/ipc/convert.rs
@@ -154,8 +154,10 @@ pub fn fb_to_schema(fb: ipc::Schema) -> Schema {
             let kv = md_fields.get(i);
             let k_str = kv.key();
             let v_str = kv.value();
-            if k_str.is_some() && v_str.is_some() {
-                metadata.insert(k_str.unwrap().to_string(), v_str.unwrap().to_string());
+            if let Some(k) = k_str {
+                if let Some(v) = v_str {
+                    metadata.insert(k.to_string(), v.to_string());
+                }
             }
         }
     }
@@ -239,7 +241,7 @@ pub(crate) fn get_data_type(field: ipc::Field, may_be_dictionary: bool) -> DataT
                     DataType::Time64(TimeUnit::Microsecond)
                 }
                 (64, ipc::TimeUnit::NANOSECOND) => DataType::Time64(TimeUnit::Nanosecond),
-                z @ _ => panic!(
+                z => panic!(
                     "Time type with bit width of {} and unit of {:?} not supported",
                     z.0, z.1
                 ),
@@ -311,7 +313,7 @@ pub(crate) fn get_data_type(field: ipc::Field, may_be_dictionary: bool) -> DataT
 
             DataType::Struct(fields)
         }
-        t @ _ => unimplemented!("Type {:?} not supported", t),
+        t => unimplemented!("Type {:?} not supported", t),
     }
 }
 
@@ -460,7 +462,7 @@ pub(crate) fn get_fb_field_type<'a: 'b, 'b>(
         }
         Timestamp(unit, tz) => {
             let children = fbb.create_vector(&empty_fields[..]);
-            let tz = tz.clone().unwrap_or(Arc::new(String::new()));
+            let tz = tz.clone().unwrap_or_else(|| Arc::new(String::new()));
             let tz_str = fbb.create_string(tz.as_str());
             let mut builder = ipc::TimestampBuilder::new(fbb);
             let time_unit = match unit {
@@ -579,7 +581,7 @@ pub(crate) fn get_fb_field_type<'a: 'b, 'b>(
                 Some(children),
             )
         }
-        t @ _ => unimplemented!("Type {:?} not supported", t),
+        t => unimplemented!("Type {:?} not supported", t),
     }
 }
 
diff --git a/rust/arrow/src/ipc/reader.rs b/rust/arrow/src/ipc/reader.rs
index 37fb0b8..a60c5ef 100644
--- a/rust/arrow/src/ipc/reader.rs
+++ b/rust/arrow/src/ipc/reader.rs
@@ -36,7 +36,7 @@ use DataType::*;
 const CONTINUATION_MARKER: u32 = 0xffff_ffff;
 
 /// Read a buffer based on offset and length
-fn read_buffer(buf: &ipc::Buffer, a_data: &Vec<u8>) -> Buffer {
+fn read_buffer(buf: &ipc::Buffer, a_data: &[u8]) -> Buffer {
     let start_offset = buf.offset() as usize;
     let end_offset = start_offset + buf.length() as usize;
     let buf_data = &a_data[start_offset..end_offset];
@@ -55,9 +55,9 @@ fn read_buffer(buf: &ipc::Buffer, a_data: &Vec<u8>) -> Buffer {
 fn create_array(
     nodes: &[ipc::FieldNode],
     data_type: &DataType,
-    data: &Vec<u8>,
+    data: &[u8],
     buffers: &[ipc::Buffer],
-    dictionaries: &Vec<Option<ArrayRef>>,
+    dictionaries: &[Option<ArrayRef>],
     mut node_index: usize,
     mut buffer_index: usize,
 ) -> (ArrayRef, usize, usize) {
@@ -72,8 +72,8 @@ fn create_array(
                     .map(|buf| read_buffer(buf, data))
                     .collect(),
             );
-            node_index = node_index + 1;
-            buffer_index = buffer_index + 3;
+            node_index += 1;
+            buffer_index += 3;
             array
         }
         FixedSizeBinary(_) => {
@@ -85,8 +85,8 @@ fn create_array(
                     .map(|buf| read_buffer(buf, data))
                     .collect(),
             );
-            node_index = node_index + 1;
-            buffer_index = buffer_index + 2;
+            node_index += 1;
+            buffer_index += 2;
             array
         }
         List(ref list_data_type) => {
@@ -95,8 +95,8 @@ fn create_array(
                 .iter()
                 .map(|buf| read_buffer(buf, data))
                 .collect();
-            node_index = node_index + 1;
-            buffer_index = buffer_index + 2;
+            node_index += 1;
+            buffer_index += 2;
             let triple = create_array(
                 nodes,
                 list_data_type,
@@ -113,12 +113,12 @@ fn create_array(
         }
         FixedSizeList(ref list_data_type, _) => {
             let list_node = &nodes[node_index];
-            let list_buffers: Vec<Buffer> = buffers[buffer_index..buffer_index + 1]
+            let list_buffers: Vec<Buffer> = buffers[buffer_index..=buffer_index]
                 .iter()
                 .map(|buf| read_buffer(buf, data))
                 .collect();
-            node_index = node_index + 1;
-            buffer_index = buffer_index + 1;
+            node_index += 1;
+            buffer_index += 1;
             let triple = create_array(
                 nodes,
                 list_data_type,
@@ -136,8 +136,8 @@ fn create_array(
         Struct(struct_fields) => {
             let struct_node = &nodes[node_index];
             let null_buffer: Buffer = read_buffer(&buffers[buffer_index], data);
-            node_index = node_index + 1;
-            buffer_index = buffer_index + 1;
+            node_index += 1;
+            buffer_index += 1;
 
             // read the arrays for each field
             let mut struct_arrays = vec![];
@@ -178,8 +178,8 @@ fn create_array(
                 .map(|buf| read_buffer(buf, data))
                 .collect();
             let value_array = dictionaries[node_index].clone().unwrap();
-            node_index = node_index + 1;
-            buffer_index = buffer_index + 2;
+            node_index += 1;
+            buffer_index += 2;
 
             create_dictionary_array(
                 index_node,
@@ -197,8 +197,8 @@ fn create_array(
                     .map(|buf| read_buffer(buf, data))
                     .collect(),
             );
-            node_index = node_index + 1;
-            buffer_index = buffer_index + 2;
+            node_index += 1;
+            buffer_index += 2;
             array
         }
     };
@@ -327,7 +327,7 @@ fn create_primitive_array(
             }
             builder.build()
         }
-        t @ _ => panic!("Data type {:?} either unsupported or not primitive", t),
+        t => panic!("Data type {:?} either unsupported or not primitive", t),
     };
 
     make_array(array_data)
@@ -341,7 +341,7 @@ fn create_list_array(
     buffers: &[Buffer],
     child_array: ArrayRef,
 ) -> ArrayRef {
-    if let &DataType::List(_) = data_type {
+    if let DataType::List(_) = *data_type {
         let null_count = field_node.null_count() as usize;
         let mut builder = ArrayData::builder(data_type.clone())
             .len(field_node.length() as usize)
@@ -354,7 +354,7 @@ fn create_list_array(
                 .null_bit_buffer(buffers[0].clone())
         }
         make_array(builder.build())
-    } else if let &DataType::FixedSizeList(_, _) = data_type {
+    } else if let DataType::FixedSizeList(_, _) = *data_type {
         let null_count = field_node.null_count() as usize;
         let mut builder = ArrayData::builder(data_type.clone())
             .len(field_node.length() as usize)
@@ -380,7 +380,7 @@ fn create_dictionary_array(
     buffers: &[Buffer],
     value_array: ArrayRef,
 ) -> ArrayRef {
-    if let &DataType::Dictionary(_, _) = data_type {
+    if let DataType::Dictionary(_, _) = *data_type {
         let null_count = field_node.null_count() as usize;
         let mut builder = ArrayData::builder(data_type.clone())
             .len(field_node.length() as usize)
@@ -400,17 +400,17 @@ fn create_dictionary_array(
 
 /// Creates a record batch from binary data using the `ipc::RecordBatch` indexes and the `Schema`
 pub(crate) fn read_record_batch(
-    buf: &Vec<u8>,
+    buf: &[u8],
     batch: ipc::RecordBatch,
     schema: Arc<Schema>,
-    dictionaries: &Vec<Option<ArrayRef>>,
+    dictionaries: &[Option<ArrayRef>],
 ) -> Result<Option<RecordBatch>> {
-    let buffers = batch.buffers().ok_or(ArrowError::IoError(
-        "Unable to get buffers from IPC RecordBatch".to_string(),
-    ))?;
-    let field_nodes = batch.nodes().ok_or(ArrowError::IoError(
-        "Unable to get field nodes from IPC RecordBatch".to_string(),
-    ))?;
+    let buffers = batch.buffers().ok_or_else(|| {
+        ArrowError::IoError("Unable to get buffers from IPC RecordBatch".to_string())
+    })?;
+    let field_nodes = batch.nodes().ok_or_else(|| {
+        ArrowError::IoError("Unable to get field nodes from IPC RecordBatch".to_string())
+    })?;
     // keep track of buffer and node index, the functions that create arrays mutate these
     let mut buffer_index = 0;
     let mut node_index = 0;
@@ -432,7 +432,7 @@ pub(crate) fn read_record_batch(
         arrays.push(triple.0);
     }
 
-    RecordBatch::try_new(schema.clone(), arrays).map(|batch| Some(batch))
+    RecordBatch::try_new(schema, arrays).map(|batch| Some(batch))
 }
 
 // Linear search for the first dictionary field with a dictionary id.
@@ -509,9 +509,11 @@ impl<R: Read + Seek> FileReader<R> {
         reader.read_exact(&mut footer_data)?;
         let footer = ipc::get_root_as_footer(&footer_data[..]);
 
-        let blocks = footer.recordBatches().ok_or(ArrowError::IoError(
-            "Unable to get record batches from IPC Footer".to_string(),
-        ))?;
+        let blocks = footer.recordBatches().ok_or_else(|| {
+            ArrowError::IoError(
+                "Unable to get record batches from IPC Footer".to_string(),
+            )
+        })?;
 
         let total_blocks = blocks.len();
 
@@ -584,8 +586,7 @@ impl<R: Read + Seek> FileReader<R> {
                     // We don't currently record the isOrdered field. This could be general
                     // attributes of arrays.
                     let fields = ipc_schema.fields().unwrap();
-                    for i in 0..fields.len() {
-                        let field: ipc::Field = fields.get(i);
+                    for (i, field) in fields.iter().enumerate() {
                         if let Some(dictionary) = field.dictionary() {
                             if dictionary.id() == id {
                                 // Add (possibly multiple) array refs to the dictionaries array.
@@ -624,7 +625,7 @@ impl<R: Read + Seek> FileReader<R> {
         // get current block
         if self.current_block < self.total_blocks {
             let block = self.blocks[self.current_block];
-            self.current_block = self.current_block + 1;
+            self.current_block += 1;
 
             // read length from end of offset
             let meta_len = block.metaDataLength() - 4;
@@ -637,16 +638,15 @@ impl<R: Read + Seek> FileReader<R> {
             let message = ipc::get_root_as_message(&block_data[..]);
 
             match message.header_type() {
-                ipc::MessageHeader::Schema => {
-                    return Err(ArrowError::IoError(
-                        "Not expecting a schema when messages are read".to_string(),
-                    ));
-                }
+                ipc::MessageHeader::Schema => Err(ArrowError::IoError(
+                    "Not expecting a schema when messages are read".to_string(),
+                )),
                 ipc::MessageHeader::RecordBatch => {
-                    let batch =
-                        message.header_as_record_batch().ok_or(ArrowError::IoError(
+                    let batch = message.header_as_record_batch().ok_or_else(|| {
+                        ArrowError::IoError(
                             "Unable to read IPC message as record batch".to_string(),
-                        ))?;
+                        )
+                    })?;
                     // read the block that makes up the record batch into a buffer
                     let mut buf = vec![0; block.bodyLength() as usize];
                     self.reader.seek(SeekFrom::Start(
@@ -661,12 +661,10 @@ impl<R: Read + Seek> FileReader<R> {
                         &self.dictionaries_by_field,
                     )
                 }
-                _ => {
-                    return Err(ArrowError::IoError(
-                        "Reading types other than record batches not yet supported"
-                            .to_string(),
-                    ));
-                }
+                _ => Err(ArrowError::IoError(
+                    "Reading types other than record batches not yet supported"
+                        .to_string(),
+                )),
             }
         } else {
             Ok(None)
@@ -746,9 +744,9 @@ impl<R: Read> StreamReader<R> {
         let vecs = &meta_buffer.to_vec();
         let message = ipc::get_root_as_message(vecs);
         // message header is a Schema, so read it
-        let ipc_schema: ipc::Schema = message.header_as_schema().ok_or(
-            ArrowError::IoError("Unable to read IPC message as schema".to_string()),
-        )?;
+        let ipc_schema: ipc::Schema = message.header_as_schema().ok_or_else(|| {
+            ArrowError::IoError("Unable to read IPC message as schema".to_string())
+        })?;
         let schema = ipc::convert::fb_to_schema(ipc_schema);
 
         // Create an array of optional dictionary value arrays, one per field.
@@ -801,28 +799,24 @@ impl<R: Read> StreamReader<R> {
         let message = ipc::get_root_as_message(vecs);
 
         match message.header_type() {
-            ipc::MessageHeader::Schema => {
-                return Err(ArrowError::IoError(
-                    "Not expecting a schema when messages are read".to_string(),
-                ));
-            }
+            ipc::MessageHeader::Schema => Err(ArrowError::IoError(
+                "Not expecting a schema when messages are read".to_string(),
+            )),
             ipc::MessageHeader::RecordBatch => {
-                let batch =
-                    message.header_as_record_batch().ok_or(ArrowError::IoError(
+                let batch = message.header_as_record_batch().ok_or_else(|| {
+                    ArrowError::IoError(
                         "Unable to read IPC message as record batch".to_string(),
-                    ))?;
+                    )
+                })?;
                 // read the block that makes up the record batch into a buffer
                 let mut buf = vec![0; message.bodyLength() as usize];
                 self.reader.read_exact(&mut buf)?;
 
                 read_record_batch(&buf, batch, self.schema(), &self.dictionaries_by_field)
             }
-            _ => {
-                return Err(ArrowError::IoError(
-                    "Reading types other than record batches not yet supported"
-                        .to_string(),
-                ));
-            }
+            _ => Err(ArrowError::IoError(
+                "Reading types other than record batches not yet supported".to_string(),
+            )),
         }
     }
 
diff --git a/rust/arrow/src/ipc/writer.rs b/rust/arrow/src/ipc/writer.rs
index c872c82..b83eb0a 100644
--- a/rust/arrow/src/ipc/writer.rs
+++ b/rust/arrow/src/ipc/writer.rs
@@ -51,9 +51,9 @@ impl<W: Write> FileWriter<W> {
     pub fn try_new(writer: W, schema: &Schema) -> Result<Self> {
         let mut writer = BufWriter::new(writer);
         // write magic to header
-        writer.write(&super::ARROW_MAGIC[..])?;
-        // create an 8-byte boudnary after the header
-        writer.write(&[0, 0])?;
+        writer.write_all(&super::ARROW_MAGIC[..])?;
+        // create an 8-byte boundary after the header
+        writer.write_all(&[0, 0])?;
         // write the schema, set the written bytes to the schema + header
         let written = write_schema(&mut writer, schema)? + 8;
         Ok(Self {
@@ -122,13 +122,10 @@ impl<W: Write> FileWriter<W> {
             let fb_field_list = fbb.create_vector(&fields);
             let fb_metadata_list = fbb.create_vector(&custom_metadata);
 
-            let root = {
-                let mut builder = ipc::SchemaBuilder::new(&mut fbb);
-                builder.add_fields(fb_field_list);
-                builder.add_custom_metadata(fb_metadata_list);
-                builder.finish()
-            };
-            root
+            let mut builder = ipc::SchemaBuilder::new(&mut fbb);
+            builder.add_fields(fb_field_list);
+            builder.add_custom_metadata(fb_metadata_list);
+            builder.finish()
         };
         let root = {
             let mut footer_builder = ipc::FooterBuilder::new(&mut fbb);
@@ -140,7 +137,7 @@ impl<W: Write> FileWriter<W> {
         };
         fbb.finish(root, None);
         write_padded_data(&mut self.writer, fbb.finished_data(), WriteDataType::Footer)?;
-        self.writer.write(&super::ARROW_MAGIC)?;
+        self.writer.write_all(&super::ARROW_MAGIC)?;
         self.writer.flush()?;
         self.finished = true;
 
@@ -192,8 +189,8 @@ impl<W: Write> StreamWriter<W> {
 
     /// Write continuation bytes, and mark the stream as done
     pub fn finish(&mut self) -> Result<()> {
-        self.writer.write(&[0u8, 0, 0, 0])?;
-        self.writer.write(&[255u8, 255, 255, 255])?;
+        self.writer.write_all(&[0u8, 0, 0, 0])?;
+        self.writer.write_all(&[255u8, 255, 255, 255])?;
         self.finished = true;
 
         Ok(())
@@ -255,15 +252,15 @@ fn write_padded_data<R: Write>(
     let total_len = len + pad_len;
     // write data length
     if data_type == WriteDataType::Header {
-        writer.write(&total_len.to_le_bytes()[..])?;
+        writer.write_all(&total_len.to_le_bytes()[..])?;
     }
     // write flatbuffer data
-    writer.write(data)?;
+    writer.write_all(data)?;
     if pad_len > 0 {
-        writer.write(&vec![0u8; pad_len as usize][..])?;
+        writer.write_all(&vec![0u8; pad_len as usize][..])?;
     }
     if data_type == WriteDataType::Footer {
-        writer.write(&total_len.to_le_bytes()[..])?;
+        writer.write_all(&total_len.to_le_bytes()[..])?;
     }
     writer.flush()?;
     Ok(total_len as usize)
@@ -326,7 +323,7 @@ fn write_record_batch<R: Write>(
     // write the length of data if writing to stream
     if is_stream {
         let total_len: u32 = meta_data.len() as u32;
-        writer.write(&total_len.to_le_bytes()[..])?;
+        writer.write_all(&total_len.to_le_bytes()[..])?;
     }
     let meta_written = write_padded_data(writer, &meta_data[..], WriteDataType::Body)?;
     let arrow_data_written =
@@ -352,8 +349,7 @@ fn write_array_data(
             // create a buffer and fill it with valid bits
             let buffer = MutableBuffer::new(num_rows);
             let buffer = buffer.with_bitset(num_rows, true);
-            let buffer = buffer.freeze();
-            buffer
+            buffer.freeze()
         }
         Some(buffer) => buffer.clone(),
     };
diff --git a/rust/arrow/src/json/reader.rs b/rust/arrow/src/json/reader.rs
index ef84b42..3d8ba9a 100644
--- a/rust/arrow/src/json/reader.rs
+++ b/rust/arrow/src/json/reader.rs
@@ -116,7 +116,7 @@ fn coerce_data_type(dt: Vec<&DataType>) -> Result<DataType> {
                             ])?)))
                         }
                     }
-                    (t1 @ _, t2 @ _) => Err(ArrowError::JsonError(format!(
+                    (t1, t2) => Err(ArrowError::JsonError(format!(
                         "Cannot coerce data types for {:?} and {:?}",
                         t1, t2
                     ))),
@@ -163,7 +163,7 @@ fn infer_json_schema(file: File, max_read_records: Option<usize>) -> Result<Arc<
 
     let mut line = String::new();
     for _ in 0..max_read_records.unwrap_or(std::usize::MAX) {
-        &reader.read_line(&mut line)?;
+        reader.read_line(&mut line)?;
         if line.is_empty() {
             break;
         }
@@ -293,7 +293,7 @@ fn infer_json_schema(file: File, max_read_records: Option<usize>) -> Result<Arc<
                     Err(e) => return Err(e),
                 }
             }
-            t @ _ => {
+            t => {
                 return Err(ArrowError::JsonError(format!(
                     "Expected JSON record to be an object, found {:?}",
                     t
@@ -305,7 +305,7 @@ fn infer_json_schema(file: File, max_read_records: Option<usize>) -> Result<Arc<
     let schema = generate_schema(values)?;
 
     // return the reader seek back to the start
-    &reader.into_inner().seek(SeekFrom::Start(0))?;
+    reader.into_inner().seek(SeekFrom::Start(0))?;
 
     Ok(schema)
 }
@@ -379,7 +379,7 @@ impl<R: Read> Reader<R> {
         }
 
         let rows = &rows[..];
-        let projection = self.projection.clone().unwrap_or(vec![]);
+        let projection = self.projection.clone().unwrap_or_else(|| vec![]);
         let arrays: Result<Vec<ArrayRef>> = self
             .schema
             .clone()
@@ -416,16 +416,15 @@ impl<R: Read> Reader<R> {
                     DataType::UInt8 => self.build_primitive_array::<UInt8Type>(rows, field.name()),
                     DataType::Utf8 => {
                         let mut builder = StringBuilder::new(rows.len());
-                        for row_index in 0..rows.len() {
-                            match rows[row_index].get(field.name()) {
-                                Some(value) => {
-                                    match value.as_str() {
-                                        Some(v) => builder.append_value(v)?,
-                                        // TODO: value might exist as something else, coerce so we don't lose it
-                                        None => builder.append(false)?,
-                                    }
+                        for row in rows {
+                            if let Some(value) = row.get(field.name()) {
+                                if let Some(str_v) = value.as_str() {
+                                    builder.append_value(str_v)?
+                                } else {
+                                    builder.append(false)?
                                 }
-                                None => builder.append(false)?,
+                            } else {
+                                builder.append(false)?
                             }
                         }
                         Ok(Arc::new(builder.finish()) as ArrayRef)
@@ -445,9 +444,8 @@ impl<R: Read> Reader<R> {
                         DataType::Utf8 => {
                             let values_builder = StringBuilder::new(rows.len() * 5);
                             let mut builder = ListBuilder::new(values_builder);
-                            for row_index in 0..rows.len() {
-                                match rows[row_index].get(field.name()) {
-                                    Some(value) => {
+                            for row in rows {
+                                if let Some(value) = row.get(field.name()) {
                                         // value can be an array or a scalar
                                         let vals: Vec<Option<String>> = if let Value::String(v) = value {
                                             vec![Some(v.to_string())]
@@ -465,29 +463,26 @@ impl<R: Read> Reader<R> {
                                             }).collect()
                                         } else if let Value::Null = value {
                                             vec![None]
+                                        } else if !value.is_object() {
+                                            vec![Some(value.to_string())]
                                         } else {
-                                            if !value.is_object() {
-                                                vec![Some(value.to_string())]
-                                            } else {
-                                                return Err(ArrowError::JsonError("1Only scalars are currently supported in JSON arrays".to_string()))
-                                            }
+                                            return Err(ArrowError::JsonError("Only scalars are currently supported in JSON arrays".to_string()))
                                         };
-                                        for i in 0..vals.len() {
-                                            match &vals[i] {
-                                                Some(v) => builder.values().append_value(&v)?,
-                                                None => builder.values().append_null()?,
+                                        for val in vals {
+                                            if let Some(v) = val {
+                                                builder.values().append_value(&v)?
+                                            } else {
+                                                builder.values().append_null()?
                                             };
                                         }
-                                    }
-                                    None => {}
                                 }
                                 builder.append(true)?
                             }
                             Ok(Arc::new(builder.finish()) as ArrayRef)
                         }
-                        _ => return Err(ArrowError::JsonError("Data type is currently not supported in a list".to_string())),
+                        _ => Err(ArrowError::JsonError("Data type is currently not supported in a list".to_string())),
                     },
-                    _ => return Err(ArrowError::JsonError("struct types are not yet supported".to_string())),
+                    _ => Err(ArrowError::JsonError("struct types are not yet supported".to_string())),
                 }
             })
             .collect();
@@ -505,22 +500,20 @@ impl<R: Read> Reader<R> {
 
         let projected_schema = Arc::new(Schema::new(projected_fields));
 
-        arrays.and_then(|arr| {
-            RecordBatch::try_new(projected_schema, arr).map(|batch| Some(batch))
-        })
+        arrays.and_then(|arr| RecordBatch::try_new(projected_schema, arr).map(Some))
     }
 
     fn build_boolean_array(&self, rows: &[Value], col_name: &str) -> Result<ArrayRef> {
         let mut builder = BooleanBuilder::new(rows.len());
-        for row_index in 0..rows.len() {
-            match rows[row_index].get(col_name) {
-                Some(value) => match value.as_bool() {
-                    Some(v) => builder.append_value(v)?,
-                    None => builder.append_null()?,
-                },
-                None => {
+        for row in rows {
+            if let Some(value) = row.get(&col_name) {
+                if let Some(boolean) = value.as_bool() {
+                    builder.append_value(boolean)?
+                } else {
                     builder.append_null()?;
                 }
+            } else {
+                builder.append_null()?;
             }
         }
         Ok(Arc::new(builder.finish()))
@@ -533,30 +526,27 @@ impl<R: Read> Reader<R> {
     ) -> Result<ArrayRef> {
         let values_builder = BooleanBuilder::new(rows.len() * 5);
         let mut builder = ListBuilder::new(values_builder);
-        for row_index in 0..rows.len() {
-            match rows[row_index].get(col_name) {
-                Some(value) => {
-                    // value can be an array or a scalar
-                    let vals: Vec<Option<bool>> = if let Value::Bool(v) = value {
-                        vec![Some(*v)]
-                    } else if let Value::Array(n) = value {
-                        n.iter().map(|v: &Value| v.as_bool()).collect()
-                    } else if let Value::Null = value {
-                        vec![None]
-                    } else {
-                        return Err(ArrowError::JsonError(
-                            "2Only scalars are currently supported in JSON arrays"
-                                .to_string(),
-                        ));
+        for row in rows {
+            if let Some(value) = row.get(col_name) {
+                // value can be an array or a scalar
+                let vals: Vec<Option<bool>> = if let Value::Bool(v) = value {
+                    vec![Some(*v)]
+                } else if let Value::Array(n) = value {
+                    n.iter().map(|v: &Value| v.as_bool()).collect()
+                } else if let Value::Null = value {
+                    vec![None]
+                } else {
+                    return Err(ArrowError::JsonError(
+                        "Only scalars are currently supported in JSON arrays"
+                            .to_string(),
+                    ));
+                };
+                for val in vals {
+                    match val {
+                        Some(v) => builder.values().append_value(v)?,
+                        None => builder.values().append_null()?,
                     };
-                    for i in 0..vals.len() {
-                        match vals[i] {
-                            Some(v) => builder.values().append_value(v)?,
-                            None => builder.values().append_null()?,
-                        };
-                    }
                 }
-                None => {}
             }
             builder.append(true)?
         }
@@ -573,21 +563,18 @@ impl<R: Read> Reader<R> {
         T::Native: num::NumCast,
     {
         let mut builder = PrimitiveBuilder::<T>::new(rows.len());
-        for row_index in 0..rows.len() {
-            match rows[row_index].get(col_name) {
-                Some(value) => {
-                    // check that value is of expected datatype
-                    match value.as_f64() {
-                        Some(v) => match num::cast::cast(v) {
-                            Some(v) => builder.append_value(v)?,
-                            None => builder.append_null()?,
-                        },
+        for row in rows {
+            if let Some(value) = row.get(&col_name) {
+                // check that value is of expected datatype
+                match value.as_f64() {
+                    Some(v) => match num::cast::cast(v) {
+                        Some(v) => builder.append_value(v)?,
                         None => builder.append_null()?,
-                    }
-                }
-                None => {
-                    builder.append_null()?;
+                    },
+                    None => builder.append_null()?,
                 }
+            } else {
+                builder.append_null()?;
             }
         }
         Ok(Arc::new(builder.finish()))
@@ -603,33 +590,30 @@ impl<R: Read> Reader<R> {
     {
         let values_builder: PrimitiveBuilder<T> = PrimitiveBuilder::new(rows.len());
         let mut builder = ListBuilder::new(values_builder);
-        for row_index in 0..rows.len() {
-            match rows[row_index].get(col_name) {
-                Some(value) => {
-                    // value can be an array or a scalar
-                    let vals: Vec<Option<f64>> = if let Value::Number(value) = value {
-                        vec![value.as_f64()]
-                    } else if let Value::Array(n) = value {
-                        n.iter().map(|v: &Value| v.as_f64()).collect()
-                    } else if let Value::Null = value {
-                        vec![None]
-                    } else {
-                        return Err(ArrowError::JsonError(
-                            "3Only scalars are currently supported in JSON arrays"
-                                .to_string(),
-                        ));
-                    };
-                    for i in 0..vals.len() {
-                        match vals[i] {
-                            Some(v) => match num::cast::cast(v) {
-                                Some(v) => builder.values().append_value(v)?,
-                                None => builder.values().append_null()?,
-                            },
+        for row in rows {
+            if let Some(value) = row.get(&col_name) {
+                // value can be an array or a scalar
+                let vals: Vec<Option<f64>> = if let Value::Number(value) = value {
+                    vec![value.as_f64()]
+                } else if let Value::Array(n) = value {
+                    n.iter().map(|v: &Value| v.as_f64()).collect()
+                } else if let Value::Null = value {
+                    vec![None]
+                } else {
+                    return Err(ArrowError::JsonError(
+                        "Only scalars are currently supported in JSON arrays"
+                            .to_string(),
+                    ));
+                };
+                for val in vals {
+                    match val {
+                        Some(v) => match num::cast::cast(v) {
+                            Some(v) => builder.values().append_value(v)?,
                             None => builder.values().append_null()?,
-                        };
-                    }
+                        },
+                        None => builder.values().append_null()?,
+                    };
                 }
-                None => {}
             }
             builder.append(true)?
         }
@@ -726,11 +710,7 @@ impl ReaderBuilder {
         // check if schema should be inferred
         let schema = match self.schema {
             Some(schema) => schema,
-            None => {
-                let inferred = infer_json_schema(file.try_clone()?, self.max_records)?;
-
-                inferred
-            }
+            None => infer_json_schema(file.try_clone()?, self.max_records)?,
         };
         let buf_reader = BufReader::new(file);
         Ok(Reader::new(
diff --git a/rust/arrow/src/lib.rs b/rust/arrow/src/lib.rs
index 4383922..f7e00b4 100644
--- a/rust/arrow/src/lib.rs
+++ b/rust/arrow/src/lib.rs
@@ -35,6 +35,11 @@ pub mod datatypes;
 pub mod error;
 #[cfg(feature = "flight")]
 pub mod flight;
+#[allow(clippy::redundant_closure)]
+#[allow(clippy::needless_lifetimes)]
+#[allow(clippy::extra_unused_lifetimes)]
+#[allow(clippy::redundant_static_lifetimes)]
+#[allow(clippy::redundant_field_names)]
 pub mod ipc;
 pub mod json;
 pub mod memory;
diff --git a/rust/arrow/src/memory.rs b/rust/arrow/src/memory.rs
index 70ae2b6..ced5b3f 100644
--- a/rust/arrow/src/memory.rs
+++ b/rust/arrow/src/memory.rs
@@ -30,24 +30,20 @@ pub fn allocate_aligned(size: usize) -> *mut u8 {
     }
 }
 
-pub fn free_aligned(p: *mut u8, size: usize) {
-    unsafe {
-        std::alloc::dealloc(p, Layout::from_size_align_unchecked(size, ALIGNMENT));
-    }
+pub unsafe fn free_aligned(p: *mut u8, size: usize) {
+    std::alloc::dealloc(p, Layout::from_size_align_unchecked(size, ALIGNMENT));
 }
 
-pub fn reallocate(ptr: *mut u8, old_size: usize, new_size: usize) -> *mut u8 {
-    unsafe {
-        let new_ptr = std::alloc::realloc(
-            ptr,
-            Layout::from_size_align_unchecked(old_size, ALIGNMENT),
-            new_size,
-        );
-        if !new_ptr.is_null() && new_size > old_size {
-            new_ptr.add(old_size).write_bytes(0, new_size - old_size);
-        }
-        new_ptr
+pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, new_size: usize) -> *mut u8 {
+    let new_ptr = std::alloc::realloc(
+        ptr,
+        Layout::from_size_align_unchecked(old_size, ALIGNMENT),
+        new_size,
+    );
+    if !new_ptr.is_null() && new_size > old_size {
+        new_ptr.add(old_size).write_bytes(0, new_size - old_size);
     }
+    new_ptr
 }
 
 pub unsafe fn memcpy(dst: *mut u8, src: *const u8, len: usize) {
diff --git a/rust/arrow/src/record_batch.rs b/rust/arrow/src/record_batch.rs
index 9ae6505..b7e4451 100644
--- a/rust/arrow/src/record_batch.rs
+++ b/rust/arrow/src/record_batch.rs
@@ -90,17 +90,18 @@ impl RecordBatch {
         }
         // check that all columns have the same row count, and match the schema
         let len = columns[0].data().len();
-        for i in 0..columns.len() {
-            if columns[i].len() != len {
+
+        for (i, column) in columns.iter().enumerate() {
+            if column.len() != len {
                 return Err(ArrowError::InvalidArgumentError(
                     "all columns in a record batch must have the same length".to_string(),
                 ));
             }
-            if columns[i].data_type() != schema.field(i).data_type() {
+            if column.data_type() != schema.field(i).data_type() {
                 return Err(ArrowError::InvalidArgumentError(format!(
                     "column types must match schema types, expected {:?} but found {:?} at column index {}",
                     schema.field(i).data_type(),
-                    columns[i].data_type(),
+                    column.data_type(),
                     i)));
             }
         }
diff --git a/rust/arrow/src/tensor.rs b/rust/arrow/src/tensor.rs
index a37062c..4c63a8f 100644
--- a/rust/arrow/src/tensor.rs
+++ b/rust/arrow/src/tensor.rs
@@ -25,7 +25,7 @@ use crate::buffer::Buffer;
 use crate::datatypes::*;
 
 /// Computes the strides required assuming a row major memory layout
-fn compute_row_major_strides<T: ArrowPrimitiveType>(shape: &Vec<usize>) -> Vec<usize> {
+fn compute_row_major_strides<T: ArrowPrimitiveType>(shape: &[usize]) -> Vec<usize> {
     let mut remaining_bytes = mem::size_of::<T::Native>();
     for i in shape {
         remaining_bytes = remaining_bytes
@@ -42,7 +42,7 @@ fn compute_row_major_strides<T: ArrowPrimitiveType>(shape: &Vec<usize>) -> Vec<u
 }
 
 /// Computes the strides required assuming a column major memory layout
-fn compute_column_major_strides<T: ArrowPrimitiveType>(shape: &Vec<usize>) -> Vec<usize> {
+fn compute_column_major_strides<T: ArrowPrimitiveType>(shape: &[usize]) -> Vec<usize> {
     let mut remaining_bytes = mem::size_of::<T::Native>();
     let mut strides = Vec::<usize>::new();
     for i in shape {
@@ -189,7 +189,7 @@ impl<'a, T: ArrowPrimitiveType> Tensor<'a, T> {
     pub fn size(&self) -> usize {
         match self.shape {
             None => 0,
-            Some(ref s) => s.iter().fold(1, |a, b| a * b),
+            Some(ref s) => s.iter().product(),
         }
     }
 
diff --git a/rust/arrow/src/util/bit_util.rs b/rust/arrow/src/util/bit_util.rs
index b075aea..e576709 100644
--- a/rust/arrow/src/util/bit_util.rs
+++ b/rust/arrow/src/util/bit_util.rs
@@ -60,7 +60,7 @@ pub fn get_bit(data: &[u8], i: usize) -> bool {
 /// responsible to guarantee that `i` is within bounds.
 #[inline]
 pub unsafe fn get_bit_raw(data: *const u8, i: usize) -> bool {
-    (*data.offset((i >> 3) as isize) & BIT_MASK[i & 7]) != 0
+    (*data.add(i >> 3) & BIT_MASK[i & 7]) != 0
 }
 
 /// Sets bit at position `i` for `data`
@@ -75,7 +75,7 @@ pub fn set_bit(data: &mut [u8], i: usize) {
 /// responsible to guarantee that `i` is within bounds.
 #[inline]
 pub unsafe fn set_bit_raw(data: *mut u8, i: usize) {
-    *data.offset((i >> 3) as isize) |= BIT_MASK[i & 7]
+    *data.add(i >> 3) |= BIT_MASK[i & 7]
 }
 
 /// Sets bits in the non-inclusive range `start..end` for `data`
diff --git a/rust/arrow/src/util/integration_util.rs b/rust/arrow/src/util/integration_util.rs
index 0da21df..118cb60 100644
--- a/rust/arrow/src/util/integration_util.rs
+++ b/rust/arrow/src/util/integration_util.rs
@@ -309,9 +309,9 @@ impl ArrowJsonBatch {
                                 &json_array.iter().collect::<Vec<&Value>>()[..],
                             )
                         }
-                        t @ _ => panic!("Unsupported dictionary comparison for {:?}", t),
+                        t => panic!("Unsupported dictionary comparison for {:?}", t),
                     },
-                    t @ _ => panic!("Unsupported comparison for {:?}", t),
+                    t => panic!("Unsupported comparison for {:?}", t),
                 }
             })
     }
@@ -330,7 +330,7 @@ fn json_from_col(col: &ArrowJsonColumn, data_type: &DataType) -> Vec<Value> {
 }
 
 /// Merge VALIDITY and DATA vectors from a primitive data type into a `Value` vector with nulls
-fn merge_json_array(validity: &Vec<u8>, data: &Vec<Value>) -> Vec<Value> {
+fn merge_json_array(validity: &[u8], data: &[Value]) -> Vec<Value> {
     validity
         .iter()
         .zip(data)
@@ -343,7 +343,7 @@ fn merge_json_array(validity: &Vec<u8>, data: &Vec<Value>) -> Vec<Value> {
 }
 
 /// Convert an Arrow JSON column/array of a `DataType::Struct` into a vector of `Value`
-fn json_from_struct_col(col: &ArrowJsonColumn, fields: &Vec<Field>) -> Vec<Value> {
+fn json_from_struct_col(col: &ArrowJsonColumn, fields: &[Field]) -> Vec<Value> {
     let mut values = Vec::with_capacity(col.count);
 
     let children: Vec<Vec<Value>> = col
@@ -379,7 +379,7 @@ fn json_from_list_col(col: &ArrowJsonColumn, data_type: &DataType) -> Vec<Value>
         .unwrap()
         .iter()
         .map(|o| match o {
-            Value::String(s) => *&s.parse::<usize>().unwrap(),
+            Value::String(s) => s.parse::<usize>().unwrap(),
             Value::Number(n) => n.as_u64().unwrap() as usize,
             _ => panic!(
                 "Offsets should be numbers or strings that are convertible to numbers"
diff --git a/rust/arrow/src/util/test_util.rs b/rust/arrow/src/util/test_util.rs
index 9b6cff2..44f7074 100644
--- a/rust/arrow/src/util/test_util.rs
+++ b/rust/arrow/src/util/test_util.rs
@@ -25,7 +25,7 @@ pub fn random_bytes(n: usize) -> Vec<u8> {
     let mut result = vec![];
     let mut rng = thread_rng();
     for _ in 0..n {
-        result.push(rng.gen_range(0, 255) & 0xFF);
+        result.push(rng.gen_range(0, 255));
     }
     result
 }