Posted to commits@arrow.apache.org by vi...@apache.org on 2023/01/27 04:42:47 UTC

[arrow-rs] branch master updated: Fix clippy (#3612)

This is an automated email from the ASF dual-hosted git repository.

viirya pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-rs.git


The following commit(s) were added to refs/heads/master by this push:
     new 9728c676b Fix clippy (#3612)
9728c676b is described below

commit 9728c676b50b19c06643a23daba4aa4a1dc48055
Author: Liang-Chi Hsieh <vi...@gmail.com>
AuthorDate: Thu Jan 26 20:42:40 2023 -0800

    Fix clippy (#3612)
---
 arrow-arith/src/arithmetic.rs                      |  4 +-
 arrow-arith/src/temporal.rs                        |  6 +-
 arrow-array/src/array/binary_array.rs              |  2 +-
 arrow-array/src/array/boolean_array.rs             | 12 +--
 arrow-array/src/array/dictionary_array.rs          | 10 +--
 arrow-array/src/array/fixed_size_binary_array.rs   |  2 +-
 arrow-array/src/array/fixed_size_list_array.rs     |  3 +-
 arrow-array/src/array/list_array.rs                |  2 +-
 arrow-array/src/array/mod.rs                       |  6 +-
 arrow-array/src/array/null_array.rs                |  2 +-
 arrow-array/src/array/primitive_array.rs           | 42 +++++-----
 arrow-array/src/array/run_array.rs                 | 10 +--
 arrow-array/src/array/string_array.rs              |  6 +-
 arrow-array/src/array/struct_array.rs              |  4 +-
 arrow-array/src/array/union_array.rs               |  8 +-
 arrow-array/src/builder/boolean_builder.rs         |  2 +-
 .../src/builder/fixed_size_binary_builder.rs       |  3 +-
 arrow-array/src/builder/primitive_builder.rs       |  2 +-
 arrow-array/src/builder/struct_builder.rs          |  2 +-
 arrow-array/src/record_batch.rs                    |  5 +-
 arrow-array/src/timezone.rs                        |  8 +-
 arrow-array/src/types.rs                           |  4 +-
 arrow-buffer/src/alloc/mod.rs                      |  2 +-
 arrow-buffer/src/bigint.rs                         | 42 ++--------
 arrow-buffer/src/buffer/scalar.rs                  |  3 +-
 arrow-cast/src/cast.rs                             | 95 ++++++++--------------
 arrow-cast/src/display.rs                          |  5 +-
 arrow-cast/src/parse.rs                            | 13 ++-
 arrow-csv/src/lib.rs                               |  8 +-
 arrow-csv/src/reader/mod.rs                        | 48 +++++------
 arrow-csv/src/reader/records.rs                    |  5 +-
 arrow-csv/src/writer.rs                            |  6 +-
 arrow-data/src/data.rs                             | 37 +++------
 arrow-data/src/decimal.rs                          | 18 ++--
 arrow-data/src/transform/mod.rs                    |  2 +-
 arrow-flight/examples/flight_sql_server.rs         |  5 +-
 arrow-flight/src/error.rs                          |  2 +-
 arrow-flight/src/lib.rs                            | 23 +++---
 arrow-flight/src/sql/client.rs                     | 16 ++--
 arrow-flight/src/sql/mod.rs                        |  2 +-
 arrow-flight/src/sql/server.rs                     |  4 +-
 arrow-flight/src/utils.rs                          |  2 +-
 arrow-flight/tests/client.rs                       | 19 ++---
 arrow-integration-test/src/datatype.rs             | 10 +--
 arrow-integration-test/src/field.rs                | 11 ++-
 arrow-integration-test/src/lib.rs                  | 41 ++++------
 .../src/bin/arrow-json-integration-test.rs         |  6 +-
 .../flight_client_scenarios/auth_basic_proto.rs    |  8 +-
 .../flight_client_scenarios/integration_test.rs    |  4 +-
 .../src/flight_client_scenarios/middleware.rs      |  8 +-
 .../src/flight_server_scenarios.rs                 |  2 +-
 .../flight_server_scenarios/integration_test.rs    | 31 ++++---
 arrow-integration-testing/src/lib.rs               |  3 +-
 arrow-integration-testing/tests/ipc_reader.rs      | 15 ++--
 arrow-integration-testing/tests/ipc_writer.rs      | 12 +--
 arrow-ipc/src/compression.rs                       |  3 +-
 arrow-ipc/src/convert.rs                           | 17 ++--
 arrow-ipc/src/reader.rs                            | 28 +++----
 arrow-ipc/src/writer.rs                            |  8 +-
 arrow-json/src/raw/mod.rs                          |  4 +-
 arrow-json/src/raw/tape.rs                         |  2 +-
 arrow-json/src/reader.rs                           | 56 +++++--------
 arrow-json/src/writer.rs                           |  3 +-
 arrow-ord/src/comparison.rs                        |  2 +-
 arrow-ord/src/ord.rs                               | 12 +--
 arrow-ord/src/sort.rs                              | 13 ++-
 arrow-row/src/lib.rs                               |  3 +-
 arrow-schema/src/datatype.rs                       |  2 +-
 arrow-schema/src/error.rs                          | 22 ++---
 arrow-schema/src/field.rs                          |  2 +-
 arrow-schema/src/schema.rs                         | 10 +--
 arrow-select/src/concat.rs                         |  6 +-
 arrow-select/src/take.rs                           |  6 +-
 arrow-string/src/concat_elements.rs                |  3 +-
 arrow-string/src/length.rs                         |  6 +-
 arrow-string/src/like.rs                           | 32 +++-----
 arrow-string/src/regexp.rs                         | 17 ++--
 arrow-string/src/substring.rs                      |  3 +-
 arrow/benches/arithmetic_kernels.rs                | 28 +++----
 arrow/benches/csv_reader.rs                        |  2 +-
 arrow/benches/interleave_kernels.rs                |  7 +-
 arrow/benches/lexsort.rs                           | 13 +--
 arrow/benches/row_format.rs                        |  6 +-
 arrow/benches/string_dictionary_builder.rs         |  5 +-
 arrow/examples/builders.rs                         | 12 +--
 arrow/examples/collect.rs                          |  6 +-
 arrow/examples/tensor_builder.rs                   |  6 +-
 arrow/src/datatypes/ffi.rs                         | 27 +++---
 arrow/src/ffi.rs                                   | 30 +++----
 arrow/src/ffi_stream.rs                            |  3 +-
 arrow/src/util/data_gen.rs                         | 12 +--
 arrow/src/util/pretty.rs                           | 24 +++---
 arrow/src/util/test_util.rs                        |  4 +-
 object_store/src/aws/credential.rs                 | 14 ++--
 object_store/src/aws/mod.rs                        | 17 ++--
 object_store/src/azure/client.rs                   |  8 +-
 object_store/src/azure/credential.rs               |  4 +-
 object_store/src/azure/mod.rs                      |  2 +-
 object_store/src/client/backoff.rs                 |  2 +-
 object_store/src/client/retry.rs                   |  4 +-
 object_store/src/gcp/credential.rs                 |  3 +-
 object_store/src/gcp/mod.rs                        | 16 ++--
 object_store/src/lib.rs                            | 16 ++--
 object_store/src/local.rs                          |  9 +-
 object_store/src/memory.rs                         |  5 +-
 object_store/src/multipart.rs                      |  2 +-
 object_store/src/path/mod.rs                       | 44 +++-------
 parquet/benches/arrow_reader.rs                    |  4 +-
 parquet/examples/async_read_parquet.rs             |  2 +-
 parquet/examples/read_parquet.rs                   |  2 +-
 parquet/src/arrow/arrow_reader/mod.rs              | 29 +++----
 parquet/src/arrow/arrow_writer/mod.rs              | 17 ++--
 parquet/src/arrow/async_reader/metadata.rs         |  3 +-
 parquet/src/arrow/async_reader/mod.rs              | 22 +++--
 parquet/src/arrow/async_reader/store.rs            |  8 +-
 parquet/src/arrow/buffer/bit_util.rs               |  2 +-
 parquet/src/arrow/decoder/delta_byte_array.rs      |  3 +-
 parquet/src/arrow/schema/mod.rs                    |  2 +-
 parquet/src/basic.rs                               | 18 ++--
 parquet/src/bin/parquet-fromcsv.rs                 | 40 ++++-----
 parquet/src/bin/parquet-index.rs                   |  9 +-
 parquet/src/bin/parquet-read.rs                    |  2 +-
 parquet/src/bin/parquet-rowcount.rs                |  2 +-
 parquet/src/bin/parquet-schema.rs                  |  2 +-
 parquet/src/bin/parquet-show-bloom-filter.rs       |  2 +-
 parquet/src/bloom_filter/mod.rs                    | 12 ++-
 parquet/src/column/page.rs                         |  3 +-
 parquet/src/column/reader/decoder.rs               |  4 +-
 parquet/src/column/writer/mod.rs                   | 24 +++---
 parquet/src/encodings/levels.rs                    |  6 +-
 parquet/src/errors.rs                              | 14 ++--
 parquet/src/file/footer.rs                         |  2 +-
 parquet/src/file/metadata.rs                       |  2 +-
 parquet/src/file/properties.rs                     |  3 +-
 parquet/src/file/serialized_reader.rs              |  2 +-
 parquet/src/file/statistics.rs                     | 37 ++++-----
 parquet/src/file/writer.rs                         |  2 +-
 parquet/src/record/api.rs                          | 44 +++++-----
 parquet/src/record/reader.rs                       | 25 ++----
 parquet/src/record/triplet.rs                      |  3 +-
 parquet/src/schema/printer.rs                      | 46 +++++------
 parquet/src/schema/types.rs                        | 49 +++++------
 parquet/src/schema/visitor.rs                      |  2 +-
 parquet/src/util/bit_pack.rs                       |  8 +-
 parquet/src/util/bit_util.rs                       |  3 +-
 parquet/src/util/test_common/rand_gen.rs           |  2 +-
 parquet/tests/arrow_writer_layout.rs               | 14 ++--
 147 files changed, 707 insertions(+), 1013 deletions(-)
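
For context, the changes shown below are mechanical and follow two patterns
flagged by clippy: inlining format arguments directly into format strings
(uninlined_format_args) and replacing seek(SeekFrom::Current(0)) with
stream_position() (seek_from_current). A minimal, self-contained sketch of
both patterns follows; the names and values are illustrative only, not code
taken from this diff:

    // Illustrative sketch only; names are not from the commit.
    use std::io::{Cursor, Seek, SeekFrom};

    fn main() -> std::io::Result<()> {
        // clippy::uninlined_format_args: capture variables directly
        // in the format string instead of passing them as arguments.
        let e = "DivideByZero";
        assert_eq!(format!("{:?}", e), format!("{e:?}"));

        // The same lint also covers named width/fill parameters.
        let (value_str, padding) = ("42", 8);
        assert_eq!(
            format!("{:0<width$}", value_str, width = padding),
            format!("{value_str:0<padding$}"),
        );

        // clippy::seek_from_current: stream_position() replaces
        // seek(SeekFrom::Current(0)) for reading the current offset.
        let mut reader = Cursor::new(vec![0u8; 16]);
        let before = reader.seek(SeekFrom::Current(0))?;
        assert_eq!(before, reader.stream_position()?);
        Ok(())
    }

Both rewrites are behavior-preserving: the inlined format strings produce
identical output, and stream_position() is defined as the current seek offset.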

diff --git a/arrow-arith/src/arithmetic.rs b/arrow-arith/src/arithmetic.rs
index 8a4657d7e..48f0412bf 100644
--- a/arrow-arith/src/arithmetic.rs
+++ b/arrow-arith/src/arithmetic.rs
@@ -2063,7 +2063,7 @@ mod tests {
         let e = add(&a, &b).expect_err("should have failed due to different lengths");
         assert_eq!(
             "ComputeError(\"Cannot perform binary operation on arrays of different length\")",
-            format!("{:?}", e)
+            format!("{e:?}")
         );
     }
 
@@ -2238,7 +2238,7 @@ mod tests {
 
         let e = divide_scalar_dyn::<Int32Type>(&a, 0_i32)
             .expect_err("should have failed due to divide by zero");
-        assert_eq!("DivideByZero", format!("{:?}", e));
+        assert_eq!("DivideByZero", format!("{e:?}"));
     }
 
     #[test]
diff --git a/arrow-arith/src/temporal.rs b/arrow-arith/src/temporal.rs
index 5dcda8758..ac76358ef 100644
--- a/arrow-arith/src/temporal.rs
+++ b/arrow-arith/src/temporal.rs
@@ -447,7 +447,7 @@ where
                     let values = time_fraction_dyn(array.values(), name, op)?;
                     Ok(Arc::new(array.with_values(&values)))
                 }
-                dt => return_compute_error_with!(format!("{} does not support", name), dt),
+                dt => return_compute_error_with!(format!("{name} does not support"), dt),
             )
         }
         _ => {
@@ -456,7 +456,7 @@ where
                    time_fraction_internal(array, name, op)
                     .map(|a| Arc::new(a) as ArrayRef)
                 }
-                dt => return_compute_error_with!(format!("{} does not support", name), dt),
+                dt => return_compute_error_with!(format!("{name} does not support"), dt),
             )
         }
     }
@@ -486,7 +486,7 @@ where
             })
         }
         _ => return_compute_error_with!(
-            format!("{} does not support", name),
+            format!("{name} does not support"),
             array.data_type()
         ),
     }
diff --git a/arrow-array/src/array/binary_array.rs b/arrow-array/src/array/binary_array.rs
index cb863c563..50757dcbe 100644
--- a/arrow-array/src/array/binary_array.rs
+++ b/arrow-array/src/array/binary_array.rs
@@ -607,7 +607,7 @@ mod tests {
             .scan(0usize, |pos, i| {
                 if *pos < 10 {
                     *pos += 1;
-                    Some(Some(format!("value {}", i)))
+                    Some(Some(format!("value {i}")))
                 } else {
                     // actually returns up to 10 values
                     None
diff --git a/arrow-array/src/array/boolean_array.rs b/arrow-array/src/array/boolean_array.rs
index 920fdabc2..4c83dcf41 100644
--- a/arrow-array/src/array/boolean_array.rs
+++ b/arrow-array/src/array/boolean_array.rs
@@ -407,7 +407,7 @@ mod tests {
         let arr = BooleanArray::from(vec![true, false, false]);
         assert_eq!(
             "BooleanArray\n[\n  true,\n  false,\n  false,\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
@@ -420,7 +420,7 @@ mod tests {
         let arr = builder.finish();
         assert_eq!(
             "BooleanArray\n[\n  true,\n  null,\n  false,\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
@@ -435,7 +435,7 @@ mod tests {
         for i in 0..4 {
             assert!(!arr.is_null(i));
             assert!(arr.is_valid(i));
-            assert_eq!(i == 1 || i == 3, arr.value(i), "failed at {}", i)
+            assert_eq!(i == 1 || i == 3, arr.value(i), "failed at {i}")
         }
     }
 
@@ -454,7 +454,7 @@ mod tests {
             } else {
                 assert!(!arr.is_null(i));
                 assert!(arr.is_valid(i));
-                assert_eq!(i == 1 || i == 3, arr.value(i), "failed at {}", i)
+                assert_eq!(i == 1 || i == 3, arr.value(i), "failed at {i}")
             }
         }
     }
@@ -470,7 +470,7 @@ mod tests {
         for i in 0..3 {
             assert!(!arr.is_null(i));
             assert!(arr.is_valid(i));
-            assert_eq!(i == 1 || i == 3, arr.value(i), "failed at {}", i)
+            assert_eq!(i == 1 || i == 3, arr.value(i), "failed at {i}")
         }
     }
 
@@ -510,7 +510,7 @@ mod tests {
         assert_eq!(2, arr.offset());
         assert_eq!(0, arr.null_count());
         for i in 0..3 {
-            assert_eq!(i != 0, arr.value(i), "failed at {}", i);
+            assert_eq!(i != 0, arr.value(i), "failed at {i}");
         }
     }
 
diff --git a/arrow-array/src/array/dictionary_array.rs b/arrow-array/src/array/dictionary_array.rs
index fb2868c27..eb2f1b606 100644
--- a/arrow-array/src/array/dictionary_array.rs
+++ b/arrow-array/src/array/dictionary_array.rs
@@ -725,7 +725,7 @@ mod tests {
         let array = builder.finish();
         assert_eq!(
             "DictionaryArray {keys: PrimitiveArray<UInt8>\n[\n  0,\n  null,\n  1,\n] values: PrimitiveArray<UInt32>\n[\n  12345678,\n  22345678,\n]}\n",
-            format!("{:?}", array)
+            format!("{array:?}")
         );
 
         let mut builder =
@@ -736,7 +736,7 @@ mod tests {
         let array = builder.finish();
         assert_eq!(
             "DictionaryArray {keys: PrimitiveArray<UInt8>\n[\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n] values: PrimitiveArray<UInt32>\n[\n  1,\n]}\n",
-            format!("{:?}", array)
+            format!("{array:?}")
         );
     }
 
@@ -749,13 +749,13 @@ mod tests {
             .collect();
         assert_eq!(
             "DictionaryArray {keys: PrimitiveArray<Int8>\n[\n  0,\n  0,\n  null,\n  1,\n] values: StringArray\n[\n  \"a\",\n  \"c\",\n]}\n",
-            format!("{:?}", array)
+            format!("{array:?}")
         );
 
         let array: DictionaryArray<Int8Type> = test.into_iter().collect();
         assert_eq!(
             "DictionaryArray {keys: PrimitiveArray<Int8>\n[\n  0,\n  0,\n  1,\n  2,\n] values: StringArray\n[\n  \"a\",\n  \"b\",\n  \"c\",\n]}\n",
-            format!("{:?}", array)
+            format!("{array:?}")
         );
     }
 
@@ -900,7 +900,7 @@ mod tests {
 
         assert_eq!(
             "DictionaryArray {keys: PrimitiveArray<Int32>\n[\n  0,\n  2,\n  null,\n  1,\n] values: StringArray\n[\n  \"foo\",\n  \"bar\",\n  \"baz\",\n]}\n",
-            format!("{:?}", array)
+            format!("{array:?}")
         );
     }
 
diff --git a/arrow-array/src/array/fixed_size_binary_array.rs b/arrow-array/src/array/fixed_size_binary_array.rs
index 0d63fdded..9debea08d 100644
--- a/arrow-array/src/array/fixed_size_binary_array.rs
+++ b/arrow-array/src/array/fixed_size_binary_array.rs
@@ -629,7 +629,7 @@ mod tests {
         let arr = FixedSizeBinaryArray::from(array_data);
         assert_eq!(
             "FixedSizeBinaryArray<5>\n[\n  [104, 101, 108, 108, 111],\n  [116, 104, 101, 114, 101],\n  [97, 114, 114, 111, 119],\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
diff --git a/arrow-array/src/array/fixed_size_list_array.rs b/arrow-array/src/array/fixed_size_list_array.rs
index e9ceb556c..67a20d142 100644
--- a/arrow-array/src/array/fixed_size_list_array.rs
+++ b/arrow-array/src/array/fixed_size_list_array.rs
@@ -173,8 +173,7 @@ impl From<ArrayData> for FixedSizeListArray {
                     assert_eq!(
                         values.len() % *len as usize,
                         0,
-                        "FixedSizeListArray child array length should be a multiple of {}",
-                        len
+                        "FixedSizeListArray child array length should be a multiple of {len}"
                     );
                 }
 
diff --git a/arrow-array/src/array/list_array.rs b/arrow-array/src/array/list_array.rs
index 6c49fc7fc..b378549eb 100644
--- a/arrow-array/src/array/list_array.rs
+++ b/arrow-array/src/array/list_array.rs
@@ -289,7 +289,7 @@ impl<OffsetSize: OffsetSizeTrait> std::fmt::Debug for GenericListArray<OffsetSiz
     fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
         let prefix = OffsetSize::PREFIX;
 
-        write!(f, "{}ListArray\n[\n", prefix)?;
+        write!(f, "{prefix}ListArray\n[\n")?;
         print_long_array(self, f, |array, index, f| {
             std::fmt::Debug::fmt(&array.value(index), f)
         })?;
diff --git a/arrow-array/src/array/mod.rs b/arrow-array/src/array/mod.rs
index 69f6ba4d8..e953781e5 100644
--- a/arrow-array/src/array/mod.rs
+++ b/arrow-array/src/array/mod.rs
@@ -580,7 +580,7 @@ pub fn make_array(data: ArrayData) -> ArrayRef {
             DataType::UInt64 => {
                 Arc::new(DictionaryArray::<UInt64Type>::from(data)) as ArrayRef
             }
-            dt => panic!("Unexpected dictionary key type {:?}", dt),
+            dt => panic!("Unexpected dictionary key type {dt:?}"),
         },
         DataType::RunEndEncoded(ref run_ends_type, _) => {
             match run_ends_type.data_type() {
@@ -593,13 +593,13 @@ pub fn make_array(data: ArrayData) -> ArrayRef {
                 DataType::Int64 => {
                     Arc::new(RunArray::<Int64Type>::from(data)) as ArrayRef
                 }
-                dt => panic!("Unexpected data type for run_ends array {:?}", dt),
+                dt => panic!("Unexpected data type for run_ends array {dt:?}"),
             }
         }
         DataType::Null => Arc::new(NullArray::from(data)) as ArrayRef,
         DataType::Decimal128(_, _) => Arc::new(Decimal128Array::from(data)) as ArrayRef,
         DataType::Decimal256(_, _) => Arc::new(Decimal256Array::from(data)) as ArrayRef,
-        dt => panic!("Unexpected data type {:?}", dt),
+        dt => panic!("Unexpected data type {dt:?}"),
     }
 }
 
diff --git a/arrow-array/src/array/null_array.rs b/arrow-array/src/array/null_array.rs
index a5ba953c2..6b68aace7 100644
--- a/arrow-array/src/array/null_array.rs
+++ b/arrow-array/src/array/null_array.rs
@@ -144,6 +144,6 @@ mod tests {
     #[test]
     fn test_debug_null_array() {
         let array = NullArray::new(1024 * 1024);
-        assert_eq!(format!("{:?}", array), "NullArray(1048576)");
+        assert_eq!(format!("{array:?}"), "NullArray(1048576)");
     }
 }
diff --git a/arrow-array/src/array/primitive_array.rs b/arrow-array/src/array/primitive_array.rs
index 01eda724b..a757eb7dd 100644
--- a/arrow-array/src/array/primitive_array.rs
+++ b/arrow-array/src/array/primitive_array.rs
@@ -765,19 +765,19 @@ where
 impl<T: ArrowPrimitiveType> std::fmt::Debug for PrimitiveArray<T> {
     fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
         let data_type = self.data_type();
-        write!(f, "PrimitiveArray<{:?}>\n[\n", data_type)?;
+        write!(f, "PrimitiveArray<{data_type:?}>\n[\n")?;
         print_long_array(self, f, |array, index, f| match data_type {
             DataType::Date32 | DataType::Date64 => {
                 let v = self.value(index).to_isize().unwrap() as i64;
                 match as_date::<T>(v) {
-                    Some(date) => write!(f, "{:?}", date),
+                    Some(date) => write!(f, "{date:?}"),
                     None => write!(f, "null"),
                 }
             }
             DataType::Time32(_) | DataType::Time64(_) => {
                 let v = self.value(index).to_isize().unwrap() as i64;
                 match as_time::<T>(v) {
-                    Some(time) => write!(f, "{:?}", time),
+                    Some(time) => write!(f, "{time:?}"),
                     None => write!(f, "null"),
                 }
             }
@@ -796,8 +796,7 @@ impl<T: ArrowPrimitiveType> std::fmt::Debug for PrimitiveArray<T> {
                             Err(_) => match as_datetime::<T>(v) {
                                 Some(datetime) => write!(
                                     f,
-                                    "{:?} (Unknown Time Zone '{}')",
-                                    datetime, tz_string
+                                    "{datetime:?} (Unknown Time Zone '{tz_string}')"
                                 ),
                                 None => write!(f, "null"),
                             },
@@ -805,7 +804,7 @@ impl<T: ArrowPrimitiveType> std::fmt::Debug for PrimitiveArray<T> {
                     }
                     // for Timestamp without TimeZone
                     None => match as_datetime::<T>(v) {
-                        Some(datetime) => write!(f, "{:?}", datetime),
+                        Some(datetime) => write!(f, "{datetime:?}"),
                         None => write!(f, "null"),
                     },
                 }
@@ -1136,8 +1135,7 @@ impl<T: DecimalType + ArrowPrimitiveType> PrimitiveArray<T> {
         }
         if scale > 0 && scale as u8 > precision {
             return Err(ArrowError::InvalidArgumentError(format!(
-                "scale {} is greater than precision {}",
-                scale, precision
+                "scale {scale} is greater than precision {precision}"
             )));
         }
 
@@ -1546,7 +1544,7 @@ mod tests {
         let arr = Int32Array::from(vec![0, 1, 2, 3, 4]);
         assert_eq!(
             "PrimitiveArray<Int32>\n[\n  0,\n  1,\n  2,\n  3,\n  4,\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
@@ -1558,13 +1556,13 @@ mod tests {
                 "PrimitiveArray<Int16>\n[\n{}\n]",
                 values
                     .iter()
-                    .map(|v| { format!("  {},", v) })
+                    .map(|v| { format!("  {v},") })
                     .collect::<Vec<String>>()
                     .join("\n")
             );
             let array = Int16Array::from(values);
 
-            assert_eq!(array_expected, format!("{:?}", array));
+            assert_eq!(array_expected, format!("{array:?}"));
         })
     }
 
@@ -1577,7 +1575,7 @@ mod tests {
         let arr = builder.finish();
         assert_eq!(
             "PrimitiveArray<Int32>\n[\n  0,\n  1,\n  null,\n  3,\n  4,\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
@@ -1591,7 +1589,7 @@ mod tests {
             ]);
         assert_eq!(
             "PrimitiveArray<Timestamp(Millisecond, None)>\n[\n  2018-12-31T00:00:00,\n  2018-12-31T00:00:00,\n  1921-01-02T00:00:00,\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
@@ -1606,7 +1604,7 @@ mod tests {
             .with_timezone_utc();
         assert_eq!(
             "PrimitiveArray<Timestamp(Millisecond, Some(\"+00:00\"))>\n[\n  2018-12-31T00:00:00+00:00,\n  2018-12-31T00:00:00+00:00,\n  1921-01-02T00:00:00+00:00,\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
@@ -1637,11 +1635,11 @@ mod tests {
             ])
             .with_timezone("Asia/Taipei".to_string());
 
-        println!("{:?}", arr);
+        println!("{arr:?}");
 
         assert_eq!(
             "PrimitiveArray<Timestamp(Millisecond, Some(\"Asia/Taipei\"))>\n[\n  2018-12-31T00:00:00 (Unknown Time Zone 'Asia/Taipei'),\n  2018-12-31T00:00:00 (Unknown Time Zone 'Asia/Taipei'),\n  1921-01-02T00:00:00 (Unknown Time Zone 'Asia/Taipei'),\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
@@ -1656,7 +1654,7 @@ mod tests {
             .with_timezone("+08:00".to_string());
         assert_eq!(
             "PrimitiveArray<Timestamp(Millisecond, Some(\"+08:00\"))>\n[\n  2018-12-31T08:00:00+08:00,\n  2018-12-31T08:00:00+08:00,\n  1921-01-02T08:00:00+08:00,\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
@@ -1671,7 +1669,7 @@ mod tests {
             .with_timezone("xxx".to_string());
         assert_eq!(
             "PrimitiveArray<Timestamp(Millisecond, Some(\"xxx\"))>\n[\n  2018-12-31T00:00:00 (Unknown Time Zone 'xxx'),\n  2018-12-31T00:00:00 (Unknown Time Zone 'xxx'),\n  1921-01-02T00:00:00 (Unknown Time Zone 'xxx'),\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
@@ -1697,7 +1695,7 @@ mod tests {
         let arr: PrimitiveArray<Date32Type> = vec![12356, 13548, -365].into();
         assert_eq!(
             "PrimitiveArray<Date32>\n[\n  2003-10-31,\n  2007-02-04,\n  1969-01-01,\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
@@ -1706,7 +1704,7 @@ mod tests {
         let arr: PrimitiveArray<Time32SecondType> = vec![7201, 60054].into();
         assert_eq!(
             "PrimitiveArray<Time32(Second)>\n[\n  02:00:01,\n  16:40:54,\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
@@ -1716,7 +1714,7 @@ mod tests {
         let arr: PrimitiveArray<Time32SecondType> = vec![-7201, -60054].into();
         assert_eq!(
             "PrimitiveArray<Time32(Second)>\n[\n  null,\n  null,\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         )
     }
 
@@ -1727,7 +1725,7 @@ mod tests {
             vec![9065525203050843594].into();
         assert_eq!(
             "PrimitiveArray<Timestamp(Microsecond, None)>\n[\n  null,\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         )
     }
 
diff --git a/arrow-array/src/array/run_array.rs b/arrow-array/src/array/run_array.rs
index 0e39cd288..48c4896b6 100644
--- a/arrow-array/src/array/run_array.rs
+++ b/arrow-array/src/array/run_array.rs
@@ -320,7 +320,7 @@ mod tests {
         let array = builder.finish();
         assert_eq!(
             "RunArray {run_ends: PrimitiveArray<Int16>\n[\n  1,\n  2,\n  3,\n], values: PrimitiveArray<UInt32>\n[\n  12345678,\n  null,\n  22345678,\n]}\n",
-            format!("{:?}", array)
+            format!("{array:?}")
         );
 
         let mut builder = PrimitiveRunBuilder::<Int16Type, UInt32Type>::with_capacity(20);
@@ -334,7 +334,7 @@ mod tests {
 
         assert_eq!(
             "RunArray {run_ends: PrimitiveArray<Int16>\n[\n  20,\n], values: PrimitiveArray<UInt32>\n[\n  1,\n]}\n",
-            format!("{:?}", array)
+            format!("{array:?}")
         );
     }
 
@@ -347,7 +347,7 @@ mod tests {
             .collect();
         assert_eq!(
             "RunArray {run_ends: PrimitiveArray<Int16>\n[\n  2,\n  3,\n  4,\n], values: StringArray\n[\n  \"a\",\n  null,\n  \"c\",\n]}\n",
-            format!("{:?}", array)
+            format!("{array:?}")
         );
 
         assert_eq!(array.len(), 4);
@@ -356,7 +356,7 @@ mod tests {
         let array: RunArray<Int16Type> = test.into_iter().collect();
         assert_eq!(
             "RunArray {run_ends: PrimitiveArray<Int16>\n[\n  2,\n  3,\n  4,\n], values: StringArray\n[\n  \"a\",\n  \"b\",\n  \"c\",\n]}\n",
-            format!("{:?}", array)
+            format!("{array:?}")
         );
     }
 
@@ -428,7 +428,7 @@ mod tests {
 
         assert_eq!(
             "RunArray {run_ends: PrimitiveArray<Int32>\n[\n  1,\n  2,\n  3,\n  4,\n], values: StringArray\n[\n  \"foo\",\n  \"bar\",\n  null,\n  \"baz\",\n]}\n",
-            format!("{:?}", array)
+            format!("{array:?}")
         );
     }
 
diff --git a/arrow-array/src/array/string_array.rs b/arrow-array/src/array/string_array.rs
index 14db33882..cb401540d 100644
--- a/arrow-array/src/array/string_array.rs
+++ b/arrow-array/src/array/string_array.rs
@@ -363,7 +363,7 @@ mod tests {
         let arr: StringArray = vec!["hello", "arrow"].into();
         assert_eq!(
             "StringArray\n[\n  \"hello\",\n  \"arrow\",\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
@@ -372,7 +372,7 @@ mod tests {
         let arr: LargeStringArray = vec!["hello", "arrow"].into();
         assert_eq!(
             "LargeStringArray\n[\n  \"hello\",\n  \"arrow\",\n]",
-            format!("{:?}", arr)
+            format!("{arr:?}")
         );
     }
 
@@ -420,7 +420,7 @@ mod tests {
             .scan(0usize, |pos, i| {
                 if *pos < 10 {
                     *pos += 1;
-                    Some(Some(format!("value {}", i)))
+                    Some(Some(format!("value {i}")))
                 } else {
                     // actually returns up to 10 values
                     None
diff --git a/arrow-array/src/array/struct_array.rs b/arrow-array/src/array/struct_array.rs
index dc949c8e4..9149895f6 100644
--- a/arrow-array/src/array/struct_array.rs
+++ b/arrow-array/src/array/struct_array.rs
@@ -143,8 +143,8 @@ impl TryFrom<Vec<(&str, ArrayRef)>> for StructArray {
             if let Some(len) = len {
                 if len != child_datum_len {
                     return Err(ArrowError::InvalidArgumentError(
-                        format!("Array of field \"{}\" has length {}, but previous elements have length {}.
-                        All arrays in every entry in a struct array must have the same length.", field_name, child_datum_len, len)
+                        format!("Array of field \"{field_name}\" has length {child_datum_len}, but previous elements have length {len}.
+                        All arrays in every entry in a struct array must have the same length.")
                     ));
                 }
             } else {
diff --git a/arrow-array/src/array/union_array.rs b/arrow-array/src/array/union_array.rs
index 092f538bf..5870952d7 100644
--- a/arrow-array/src/array/union_array.rs
+++ b/arrow-array/src/array/union_array.rs
@@ -192,8 +192,7 @@ impl UnionArray {
         if !invalid_type_ids.is_empty() {
             return Err(ArrowError::InvalidArgumentError(format!(
                 "Type Ids must be positive and cannot be greater than the number of \
-                child arrays, found:\n{:?}",
-                invalid_type_ids
+                child arrays, found:\n{invalid_type_ids:?}"
             )));
         }
 
@@ -208,8 +207,7 @@ impl UnionArray {
             if !invalid_offsets.is_empty() {
                 return Err(ArrowError::InvalidArgumentError(format!(
                     "Offsets must be positive and within the length of the Array, \
-                    found:\n{:?}",
-                    invalid_offsets
+                    found:\n{invalid_offsets:?}"
                 )));
             }
         }
@@ -345,7 +343,7 @@ impl std::fmt::Debug for UnionArray {
         } else {
             "UnionArray(Sparse)\n["
         };
-        writeln!(f, "{}", header)?;
+        writeln!(f, "{header}")?;
 
         writeln!(f, "-- type id buffer:")?;
         writeln!(f, "{:?}", self.data().buffers()[0])?;
diff --git a/arrow-array/src/builder/boolean_builder.rs b/arrow-array/src/builder/boolean_builder.rs
index 06709e5f3..eeb39b802 100644
--- a/arrow-array/src/builder/boolean_builder.rs
+++ b/arrow-array/src/builder/boolean_builder.rs
@@ -247,7 +247,7 @@ mod tests {
         for i in 0..10 {
             assert!(!arr.is_null(i));
             assert!(arr.is_valid(i));
-            assert_eq!(i == 3 || i == 6 || i == 9, arr.value(i), "failed at {}", i)
+            assert_eq!(i == 3 || i == 6 || i == 9, arr.value(i), "failed at {i}")
         }
     }
 
diff --git a/arrow-array/src/builder/fixed_size_binary_builder.rs b/arrow-array/src/builder/fixed_size_binary_builder.rs
index 4c8225adf..695b553f0 100644
--- a/arrow-array/src/builder/fixed_size_binary_builder.rs
+++ b/arrow-array/src/builder/fixed_size_binary_builder.rs
@@ -58,8 +58,7 @@ impl FixedSizeBinaryBuilder {
     pub fn with_capacity(capacity: usize, byte_width: i32) -> Self {
         assert!(
             byte_width >= 0,
-            "value length ({}) of the array must >= 0",
-            byte_width
+            "value length ({byte_width}) of the array must >= 0"
         );
         Self {
             values_builder: UInt8BufferBuilder::new(capacity * byte_width as usize),
diff --git a/arrow-array/src/builder/primitive_builder.rs b/arrow-array/src/builder/primitive_builder.rs
index 2d88ea50f..71671fe7d 100644
--- a/arrow-array/src/builder/primitive_builder.rs
+++ b/arrow-array/src/builder/primitive_builder.rs
@@ -455,7 +455,7 @@ mod tests {
         for i in 0..10 {
             assert!(!arr.is_null(i));
             assert!(arr.is_valid(i));
-            assert_eq!(i == 3 || i == 6 || i == 9, arr.value(i), "failed at {}", i)
+            assert_eq!(i == 3 || i == 6 || i == 9, arr.value(i), "failed at {i}")
         }
     }
 
diff --git a/arrow-array/src/builder/struct_builder.rs b/arrow-array/src/builder/struct_builder.rs
index ecf9ca4ff..72aa53e18 100644
--- a/arrow-array/src/builder/struct_builder.rs
+++ b/arrow-array/src/builder/struct_builder.rs
@@ -174,7 +174,7 @@ pub fn make_builder(datatype: &DataType, capacity: usize) -> Box<dyn ArrayBuilde
         DataType::Struct(fields) => {
             Box::new(StructBuilder::from_fields(fields.clone(), capacity))
         }
-        t => panic!("Data type {:?} is not currently supported", t),
+        t => panic!("Data type {t:?} is not currently supported"),
     }
 }
 
diff --git a/arrow-array/src/record_batch.rs b/arrow-array/src/record_batch.rs
index 72b567f75..035efb4f0 100644
--- a/arrow-array/src/record_batch.rs
+++ b/arrow-array/src/record_batch.rs
@@ -192,10 +192,7 @@ impl RecordBatch {
 
         if let Some((i, (col_type, field_type))) = not_match {
             return Err(ArrowError::InvalidArgumentError(format!(
-                "column types must match schema types, expected {:?} but found {:?} at column index {}",
-                field_type,
-                col_type,
-                i)));
+                "column types must match schema types, expected {field_type:?} but found {col_type:?} at column index {i}")));
         }
 
         Ok(RecordBatch {
diff --git a/arrow-array/src/timezone.rs b/arrow-array/src/timezone.rs
index fd8c099c2..3af76c3da 100644
--- a/arrow-array/src/timezone.rs
+++ b/arrow-array/src/timezone.rs
@@ -39,8 +39,7 @@ fn parse_fixed_offset(tz: &str) -> Result<FixedOffset, ArrowError> {
     }
 
     Err(ArrowError::ParseError(format!(
-        "Invalid timezone \"{}\": Expected format [+-]XX:XX, [+-]XX, or [+-]XXXX",
-        tz
+        "Invalid timezone \"{tz}\": Expected format [+-]XX:XX, [+-]XX, or [+-]XXXX"
     )))
 }
 
@@ -88,7 +87,7 @@ mod private {
                 Ok(Self(TzInner::Offset(parse_fixed_offset(tz)?)))
             } else {
                 Ok(Self(TzInner::Timezone(tz.parse().map_err(|e| {
-                    ArrowError::ParseError(format!("Invalid timezone \"{}\": {}", tz, e))
+                    ArrowError::ParseError(format!("Invalid timezone \"{tz}\": {e}"))
                 })?)))
             }
         }
@@ -266,8 +265,7 @@ mod private {
                 Ok(Self(parse_fixed_offset(tz)?))
             } else {
                 Err(ArrowError::ParseError(format!(
-                    "Invalid timezone \"{}\": only offset based timezones supported without chrono-tz feature",
-                    tz
+                    "Invalid timezone \"{tz}\": only offset based timezones supported without chrono-tz feature"
                 )))
             }
         }
diff --git a/arrow-array/src/types.rs b/arrow-array/src/types.rs
index fc02c0e5a..641d4c2fc 100644
--- a/arrow-array/src/types.rs
+++ b/arrow-array/src/types.rs
@@ -688,11 +688,11 @@ fn format_decimal_str(value_str: &str, precision: usize, scale: i8) -> String {
         value_str.to_string()
     } else if scale < 0 {
         let padding = value_str.len() + scale.unsigned_abs() as usize;
-        format!("{:0<width$}", value_str, width = padding)
+        format!("{value_str:0<padding$}")
     } else if rest.len() > scale as usize {
         // Decimal separator is in the middle of the string
         let (whole, decimal) = value_str.split_at(value_str.len() - scale as usize);
-        format!("{}.{}", whole, decimal)
+        format!("{whole}.{decimal}")
     } else {
         // String has to be padded
         format!("{}0.{:0>width$}", sign, rest, width = scale as usize)
diff --git a/arrow-buffer/src/alloc/mod.rs b/arrow-buffer/src/alloc/mod.rs
index a7ce80600..1493d839f 100644
--- a/arrow-buffer/src/alloc/mod.rs
+++ b/arrow-buffer/src/alloc/mod.rs
@@ -144,7 +144,7 @@ impl Debug for Deallocation {
     fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
         match self {
             Deallocation::Arrow(capacity) => {
-                write!(f, "Deallocation::Arrow {{ capacity: {} }}", capacity)
+                write!(f, "Deallocation::Arrow {{ capacity: {capacity} }}")
             }
             Deallocation::Custom(_) => {
                 write!(f, "Deallocation::Custom {{ capacity: unknown }}")
diff --git a/arrow-buffer/src/bigint.rs b/arrow-buffer/src/bigint.rs
index c3a05ba06..0d404df16 100644
--- a/arrow-buffer/src/bigint.rs
+++ b/arrow-buffer/src/bigint.rs
@@ -29,7 +29,7 @@ pub struct i256 {
 
 impl std::fmt::Debug for i256 {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self)
+        write!(f, "{self}")
     }
 }
 
@@ -580,7 +580,7 @@ mod tests {
         for v in vals {
             let (t, overflow) = i256::from_bigint_with_overflow(v.clone());
             assert!(!overflow);
-            assert_eq!(t.to_i128(), v.to_i128(), "{} vs {}", v, t);
+            assert_eq!(t.to_i128(), v.to_i128(), "{v} vs {t}");
         }
     }
 
@@ -590,7 +590,7 @@ mod tests {
         let br = BigInt::from_signed_bytes_le(&ir.to_le_bytes());
 
         // Comparison
-        assert_eq!(il.cmp(&ir), bl.cmp(&br), "{} cmp {}", bl, br);
+        assert_eq!(il.cmp(&ir), bl.cmp(&br), "{bl} cmp {br}");
 
         // Conversions
         assert_eq!(i256::from_le_bytes(il.to_le_bytes()), il);
@@ -599,8 +599,8 @@ mod tests {
         assert_eq!(i256::from_be_bytes(ir.to_be_bytes()), ir);
 
         // To i128
-        assert_eq!(il.to_i128(), bl.to_i128(), "{}", bl);
-        assert_eq!(ir.to_i128(), br.to_i128(), "{}", br);
+        assert_eq!(il.to_i128(), bl.to_i128(), "{bl}");
+        assert_eq!(ir.to_i128(), br.to_i128(), "{br}");
 
         // Absolute value
         let (abs, overflow) = i256::from_bigint_with_overflow(bl.abs());
@@ -655,24 +655,12 @@ mod tests {
         match overflow {
             true => assert!(
                 checked.is_none(),
-                "{} * {} = {} vs {} * {} = {}",
-                il,
-                ir,
-                actual,
-                bl,
-                br,
-                expected
+                "{il} * {ir} = {actual} vs {bl} * {br} = {expected}"
             ),
             false => assert_eq!(
                 checked.unwrap(),
                 actual,
-                "{} * {} = {} vs {} * {} = {}",
-                il,
-                ir,
-                actual,
-                bl,
-                br,
-                expected
+                "{il} * {ir} = {actual} vs {bl} * {br} = {expected}"
             ),
         }
 
@@ -687,24 +675,12 @@ mod tests {
             match overflow {
                 true => assert!(
                     checked.is_none(),
-                    "{} ^ {} = {} vs {} * {} = {}",
-                    il,
-                    exp,
-                    actual,
-                    bl,
-                    exp,
-                    expected
+                    "{il} ^ {exp} = {actual} vs {bl} * {exp} = {expected}"
                 ),
                 false => assert_eq!(
                     checked.unwrap(),
                     actual,
-                    "{} ^ {} = {} vs {} * {} = {}",
-                    il,
-                    exp,
-                    actual,
-                    bl,
-                    exp,
-                    expected
+                    "{il} ^ {exp} = {actual} vs {bl} * {exp} = {expected}"
                 ),
             }
         }
diff --git a/arrow-buffer/src/buffer/scalar.rs b/arrow-buffer/src/buffer/scalar.rs
index a9f2df3d9..124f3f6f5 100644
--- a/arrow-buffer/src/buffer/scalar.rs
+++ b/arrow-buffer/src/buffer/scalar.rs
@@ -58,8 +58,7 @@ impl<T: ArrowNativeType> ScalarBuffer<T> {
         let (prefix, offsets, suffix) = unsafe { bytes.align_to::<T>() };
         assert!(
             prefix.is_empty() && suffix.is_empty(),
-            "buffer is not aligned to {} byte boundary",
-            size
+            "buffer is not aligned to {size} byte boundary"
         );
 
         let ptr = offsets.as_ptr();
diff --git a/arrow-cast/src/cast.rs b/arrow-cast/src/cast.rs
index c60e66037..aec665aa3 100644
--- a/arrow-cast/src/cast.rs
+++ b/arrow-cast/src/cast.rs
@@ -654,8 +654,7 @@ pub fn cast_with_options(
             UInt32 => dictionary_cast::<UInt32Type>(array, to_type, cast_options),
             UInt64 => dictionary_cast::<UInt64Type>(array, to_type, cast_options),
             _ => Err(ArrowError::CastError(format!(
-                "Casting from dictionary type {:?} to {:?} not supported",
-                from_type, to_type,
+                "Casting from dictionary type {from_type:?} to {to_type:?} not supported",
             ))),
         },
         (_, Dictionary(index_type, value_type)) => match **index_type {
@@ -668,8 +667,7 @@ pub fn cast_with_options(
             UInt32 => cast_to_dictionary::<UInt32Type>(array, value_type, cast_options),
             UInt64 => cast_to_dictionary::<UInt64Type>(array, value_type, cast_options),
             _ => Err(ArrowError::CastError(format!(
-                "Casting from type {:?} to dictionary type {:?} not supported",
-                from_type, to_type,
+                "Casting from type {from_type:?} to dictionary type {to_type:?} not supported",
             ))),
         },
         (List(_), List(ref to)) => {
@@ -808,8 +806,7 @@ pub fn cast_with_options(
                 }
                 Null => Ok(new_null_array(to_type, array.len())),
                 _ => Err(ArrowError::CastError(format!(
-                    "Casting from {:?} to {:?} not supported",
-                    from_type, to_type
+                    "Casting from {from_type:?} to {to_type:?} not supported"
                 ))),
             }
         }
@@ -876,8 +873,7 @@ pub fn cast_with_options(
                 }
                 Null => Ok(new_null_array(to_type, array.len())),
                 _ => Err(ArrowError::CastError(format!(
-                    "Casting from {:?} to {:?} not supported",
-                    from_type, to_type
+                    "Casting from {from_type:?} to {to_type:?} not supported"
                 ))),
             }
         }
@@ -966,8 +962,7 @@ pub fn cast_with_options(
                 ),
                 Null => Ok(new_null_array(to_type, array.len())),
                 _ => Err(ArrowError::CastError(format!(
-                    "Casting from {:?} to {:?} not supported",
-                    from_type, to_type
+                    "Casting from {from_type:?} to {to_type:?} not supported"
                 ))),
             }
         }
@@ -1056,8 +1051,7 @@ pub fn cast_with_options(
                 ),
                 Null => Ok(new_null_array(to_type, array.len())),
                 _ => Err(ArrowError::CastError(format!(
-                    "Casting from {:?} to {:?} not supported",
-                    from_type, to_type
+                    "Casting from {from_type:?} to {to_type:?} not supported"
                 ))),
             }
         }
@@ -1082,8 +1076,7 @@ pub fn cast_with_options(
             Utf8 => cast_utf8_to_boolean::<i32>(array, cast_options),
             LargeUtf8 => cast_utf8_to_boolean::<i64>(array, cast_options),
             _ => Err(ArrowError::CastError(format!(
-                "Casting from {:?} to {:?} not supported",
-                from_type, to_type,
+                "Casting from {from_type:?} to {to_type:?} not supported",
             ))),
         },
         (Boolean, _) => match to_type {
@@ -1117,8 +1110,7 @@ pub fn cast_with_options(
                 ))
             }
             _ => Err(ArrowError::CastError(format!(
-                "Casting from {:?} to {:?} not supported",
-                from_type, to_type,
+                "Casting from {from_type:?} to {to_type:?} not supported",
             ))),
         },
         (Utf8, _) => match to_type {
@@ -1156,8 +1148,7 @@ pub fn cast_with_options(
                 cast_string_to_timestamp_ns::<i32>(&**array, cast_options)
             }
             _ => Err(ArrowError::CastError(format!(
-                "Casting from {:?} to {:?} not supported",
-                from_type, to_type,
+                "Casting from {from_type:?} to {to_type:?} not supported",
             ))),
         },
         (_, Utf8) => match from_type {
@@ -1214,8 +1205,7 @@ pub fn cast_with_options(
                 ))
             }
             _ => Err(ArrowError::CastError(format!(
-                "Casting from {:?} to {:?} not supported",
-                from_type, to_type,
+                "Casting from {from_type:?} to {to_type:?} not supported",
             ))),
         },
         (_, LargeUtf8) => match from_type {
@@ -1271,8 +1261,7 @@ pub fn cast_with_options(
                 ))
             }
             _ => Err(ArrowError::CastError(format!(
-                "Casting from {:?} to {:?} not supported",
-                from_type, to_type,
+                "Casting from {from_type:?} to {to_type:?} not supported",
             ))),
         },
         (LargeUtf8, _) => match to_type {
@@ -1312,8 +1301,7 @@ pub fn cast_with_options(
                 cast_string_to_timestamp_ns::<i64>(&**array, cast_options)
             }
             _ => Err(ArrowError::CastError(format!(
-                "Casting from {:?} to {:?} not supported",
-                from_type, to_type,
+                "Casting from {from_type:?} to {to_type:?} not supported",
             ))),
         },
         (Binary, _) => match to_type {
@@ -1321,15 +1309,13 @@ pub fn cast_with_options(
                 cast_byte_container::<BinaryType, LargeBinaryType, [u8]>(&**array)
             }
             _ => Err(ArrowError::CastError(format!(
-                "Casting from {:?} to {:?} not supported",
-                from_type, to_type,
+                "Casting from {from_type:?} to {to_type:?} not supported",
             ))),
         },
         (LargeBinary, _) => match to_type {
             Binary => cast_byte_container::<LargeBinaryType, BinaryType, [u8]>(&**array),
             _ => Err(ArrowError::CastError(format!(
-                "Casting from {:?} to {:?} not supported",
-                from_type, to_type,
+                "Casting from {from_type:?} to {to_type:?} not supported",
             ))),
         },
         // start numeric casts
@@ -2041,8 +2027,7 @@ pub fn cast_with_options(
             cast_reinterpret_arrays::<Int64Type, IntervalDayTimeType>(array)
         }
         (_, _) => Err(ArrowError::CastError(format!(
-            "Casting from {:?} to {:?} not supported",
-            from_type, to_type,
+            "Casting from {from_type:?} to {to_type:?} not supported",
         ))),
     }
 }
@@ -2872,8 +2857,7 @@ where
                 invalid_value => match cast_options.safe {
                     true => Ok(None),
                     false => Err(ArrowError::CastError(format!(
-                        "Cannot cast value '{}' to value of Boolean type",
-                        invalid_value,
+                        "Cannot cast value '{invalid_value}' to value of Boolean type",
                     ))),
                 },
             },
@@ -2897,8 +2881,7 @@ where
     let parts: Vec<&str> = value_str.split('.').collect();
     if parts.len() > 2 {
         return Err(ArrowError::InvalidArgumentError(format!(
-            "Invalid decimal format: {:?}",
-            value_str
+            "Invalid decimal format: {value_str:?}"
         )));
     }
 
@@ -2909,8 +2892,7 @@ where
     let number_decimals = if decimals.len() > scale {
         let decimal_number = i256::from_string(decimals).ok_or_else(|| {
             ArrowError::InvalidArgumentError(format!(
-                "Cannot parse decimal format: {}",
-                value_str
+                "Cannot parse decimal format: {value_str}"
             ))
         })?;
 
@@ -2934,8 +2916,7 @@ where
             i256::from_string(integers)
                 .ok_or_else(|| {
                     ArrowError::InvalidArgumentError(format!(
-                        "Cannot parse decimal format: {}",
-                        value_str
+                        "Cannot parse decimal format: {value_str}"
                     ))
                 })
                 .map(|v| {
@@ -2949,8 +2930,8 @@ where
     } else {
         let padding = if scale > decimals.len() { scale } else { 0 };
 
-        let decimals = format!("{:0<width$}", decimals, width = padding);
-        format!("{}{}", integers, decimals)
+        let decimals = format!("{decimals:0<padding$}");
+        format!("{integers}{decimals}")
     };
 
     let value = i256::from_string(number_decimals.as_str()).ok_or_else(|| {
@@ -3032,8 +3013,7 @@ where
 {
     if scale < 0 {
         return Err(ArrowError::InvalidArgumentError(format!(
-            "Cannot cast string to decimal with negative scale {}",
-            scale
+            "Cannot cast string to decimal with negative scale {scale}"
         )));
     }
 
@@ -3199,8 +3179,7 @@ fn dictionary_cast<K: ArrowDictionaryKeyType>(
                 UInt64 => Arc::new(DictionaryArray::<UInt64Type>::from(data)),
                 _ => {
                     return Err(ArrowError::CastError(format!(
-                        "Unsupported type {:?} for dictionary index",
-                        to_index_type
+                        "Unsupported type {to_index_type:?} for dictionary index"
                     )));
                 }
             };
@@ -3323,8 +3302,7 @@ fn cast_to_dictionary<K: ArrowDictionaryKeyType>(
             pack_byte_to_dictionary::<K, GenericBinaryType<i64>>(array, cast_options)
         }
         _ => Err(ArrowError::CastError(format!(
-            "Unsupported output type for dictionary packing: {:?}",
-            dict_value_type
+            "Unsupported output type for dictionary packing: {dict_value_type:?}"
         ))),
     }
 }
@@ -4797,8 +4775,7 @@ mod tests {
                     e.to_string().contains(
                         "Cast error: Cannot cast string 'seven' to value of Int32 type",
                     ),
-                    "Error: {}",
-                    e
+                    "Error: {e}"
                 )
             }
         }
@@ -6689,13 +6666,11 @@ mod tests {
         let cast_type = Dictionary(Box::new(Int8), Box::new(Utf8));
         let res = cast(&array, &cast_type);
         assert!(res.is_err());
-        let actual_error = format!("{:?}", res);
+        let actual_error = format!("{res:?}");
         let expected_error = "Could not convert 72 dictionary indexes from Int32 to Int8";
         assert!(
             actual_error.contains(expected_error),
-            "did not find expected error '{}' in actual error '{}'",
-            actual_error,
-            expected_error
+            "did not find expected error '{actual_error}' in actual error '{expected_error}'"
         );
     }
 
@@ -6711,7 +6686,7 @@ mod tests {
         // dictionary indexed by int32, but not a dictionary indexed
         // with int8)
         for i in 0..200 {
-            let val = format!("val{}", i);
+            let val = format!("val{i}");
             builder.append(&val).unwrap();
         }
         let array: ArrayRef = Arc::new(builder.finish());
@@ -6719,13 +6694,11 @@ mod tests {
         let cast_type = Dictionary(Box::new(Int8), Box::new(Utf8));
         let res = cast(&array, &cast_type);
         assert!(res.is_err());
-        let actual_error = format!("{:?}", res);
+        let actual_error = format!("{res:?}");
         let expected_error = "Could not convert 72 dictionary indexes from Int32 to Int8";
         assert!(
             actual_error.contains(expected_error),
-            "did not find expected error '{}' in actual error '{}'",
-            actual_error,
-            expected_error
+            "did not find expected error '{actual_error}' in actual error '{expected_error}'"
         );
     }
 
@@ -7265,9 +7238,7 @@ mod tests {
         let expected_error = "Cast error: Cannot cast to Decimal128(38, 30)";
         assert!(
             err.contains(expected_error),
-            "did not find expected error '{}' in actual error '{}'",
-            expected_error,
-            err
+            "did not find expected error '{expected_error}' in actual error '{err}'"
         );
     }
 
@@ -7292,9 +7263,7 @@ mod tests {
         let expected_error = "Cast error: Cannot cast to Decimal256(76, 50)";
         assert!(
             err.contains(expected_error),
-            "did not find expected error '{}' in actual error '{}'",
-            expected_error,
-            err
+            "did not find expected error '{expected_error}' in actual error '{err}'"
         );
     }
 
diff --git a/arrow-cast/src/display.rs b/arrow-cast/src/display.rs
index e603260b0..16fbfb0bb 100644
--- a/arrow-cast/src/display.rs
+++ b/arrow-cast/src/display.rs
@@ -543,8 +543,7 @@ fn union_to_string(
     let type_id = list.type_id(row);
     let field_idx = type_ids.iter().position(|t| t == &type_id).ok_or_else(|| {
         ArrowError::InvalidArgumentError(format!(
-            "Repl error: could not get field name for type id: {} in union array.",
-            type_id,
+            "Repl error: could not get field name for type id: {type_id} in union array.",
         ))
     })?;
     let name = fields.get(field_idx).unwrap().name();
@@ -557,7 +556,7 @@ fn union_to_string(
         },
     )?;
 
-    Ok(format!("{{{}={}}}", name, value))
+    Ok(format!("{{{name}={value}}}"))
 }
 /// Converts the value of the dictionary array at `row` to a String
 fn dict_array_value_to_string<K: ArrowPrimitiveType>(
diff --git a/arrow-cast/src/parse.rs b/arrow-cast/src/parse.rs
index 8cf6b4ea7..459b94f37 100644
--- a/arrow-cast/src/parse.rs
+++ b/arrow-cast/src/parse.rs
@@ -136,8 +136,7 @@ pub fn string_to_timestamp_nanos(s: &str) -> Result<i64, ArrowError> {
     // match. Ths any of the specific error messages is likely to be
     // be more confusing than helpful
     Err(ArrowError::CastError(format!(
-        "Error parsing '{}' as timestamp",
-        s
+        "Error parsing '{s}' as timestamp"
     )))
 }
 
@@ -241,7 +240,7 @@ pub fn string_to_time_nanoseconds(s: &str) -> Result<i64, ArrowError> {
             nt.num_seconds_from_midnight() as i64 * 1_000_000_000 + nt.nanosecond() as i64
         })
         // Return generic error if failed to parse as unknown which format user intended for the string
-        .ok_or_else(|| ArrowError::CastError(format!("Error parsing '{}' as time", s)))
+        .ok_or_else(|| ArrowError::CastError(format!("Error parsing '{s}' as time")))
 }
 
 /// Specialized parsing implementations
@@ -550,7 +549,7 @@ mod tests {
     fn parse_timestamp(s: &str) -> Result<i64, ArrowError> {
         let result = string_to_timestamp_nanos(s);
         if let Err(e) = &result {
-            eprintln!("Error parsing timestamp '{}': {:?}", s, e);
+            eprintln!("Error parsing timestamp '{s}': {e:?}");
         }
         result
     }
@@ -558,13 +557,11 @@ mod tests {
     fn expect_timestamp_parse_error(s: &str, expected_err: &str) {
         match string_to_timestamp_nanos(s) {
             Ok(v) => panic!(
-                "Expected error '{}' while parsing '{}', but parsed {} instead",
-                expected_err, s, v
+                "Expected error '{expected_err}' while parsing '{s}', but parsed {v} instead"
             ),
             Err(e) => {
                 assert!(e.to_string().contains(expected_err),
-                        "Can not find expected error '{}' while parsing '{}'. Actual error '{}'",
-                        expected_err, s, e);
+                        "Can not find expected error '{expected_err}' while parsing '{s}'. Actual error '{e}'");
             }
         }
     }
diff --git a/arrow-csv/src/lib.rs b/arrow-csv/src/lib.rs
index a45cf082d..e6dc69935 100644
--- a/arrow-csv/src/lib.rs
+++ b/arrow-csv/src/lib.rs
@@ -31,15 +31,13 @@ fn map_csv_error(error: csv::Error) -> ArrowError {
     match error.kind() {
         csv::ErrorKind::Io(error) => ArrowError::CsvError(error.to_string()),
         csv::ErrorKind::Utf8 { pos: _, err } => ArrowError::CsvError(format!(
-            "Encountered UTF-8 error while reading CSV file: {}",
-            err
+            "Encountered UTF-8 error while reading CSV file: {err}"
         )),
         csv::ErrorKind::UnequalLengths {
             expected_len, len, ..
         } => ArrowError::CsvError(format!(
-            "Encountered unequal lengths between records on CSV file. Expected {} \
-                 records, found {} records",
-            len, expected_len
+            "Encountered unequal lengths between records on CSV file. Expected {len} \
+                 records, found {expected_len} records"
         )),
         _ => ArrowError::CsvError("Error reading CSV file".to_string()),
     }
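The two-line message above also shows string-literal continuation: a trailing `\`
elides the newline and the next line's leading whitespace, so the message is
stored on a single line. A sketch:

    fn main() {
        let msg = "Encountered unequal lengths between records on CSV file. \
                   Expected 3 records, found 2 records";
        assert!(!msg.contains('\n'));
        assert!(msg.contains("file. Expected 3"));
    }
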
diff --git a/arrow-csv/src/reader/mod.rs b/arrow-csv/src/reader/mod.rs
index 0c7bfa897..82b033f80 100644
--- a/arrow-csv/src/reader/mod.rs
+++ b/arrow-csv/src/reader/mod.rs
@@ -148,7 +148,7 @@ fn infer_file_schema_with_csv_options<R: Read + Seek>(
     mut reader: R,
     roptions: ReaderOptions,
 ) -> Result<(Schema, usize), ArrowError> {
-    let saved_offset = reader.seek(SeekFrom::Current(0))?;
+    let saved_offset = reader.stream_position()?;
 
     let (schema, records_count) =
         infer_reader_schema_with_csv_options(&mut reader, roptions)?;
@@ -626,14 +626,12 @@ fn parse(
                                 .collect::<DictionaryArray<UInt64Type>>(),
                         ) as ArrayRef),
                         _ => Err(ArrowError::ParseError(format!(
-                            "Unsupported dictionary key type {:?}",
-                            key_type
+                            "Unsupported dictionary key type {key_type:?}"
                         ))),
                     }
                 }
                 other => Err(ArrowError::ParseError(format!(
-                    "Unsupported data type {:?}",
-                    other
+                    "Unsupported data type {other:?}"
                 ))),
             }
         })
@@ -765,14 +763,12 @@ fn parse_decimal_with_parameter(
         match validate_decimal_precision(result, precision) {
             Ok(_) => Ok(result),
             Err(e) => Err(ArrowError::ParseError(format!(
-                "parse decimal overflow: {}",
-                e
+                "parse decimal overflow: {e}"
             ))),
         }
     } else {
         Err(ArrowError::ParseError(format!(
-            "can't parse the string value {} to decimal",
-            s
+            "can't parse the string value {s} to decimal"
         )))
     }
 }
@@ -816,8 +812,7 @@ fn parse_decimal(s: &str) -> Result<i128, ArrowError> {
         }
     } else {
         Err(ArrowError::ParseError(format!(
-            "can't parse the string value {} to decimal",
-            s
+            "can't parse the string value {s} to decimal"
         )))
     }
 }
@@ -1542,7 +1537,7 @@ mod tests {
             Some(e) => match e {
                 Err(e) => assert_eq!(
                     "ParseError(\"Error while parsing value 4.x4 for column 1 at line 4\")",
-                    format!("{:?}", e)
+                    format!("{e:?}")
                 ),
                 Ok(_) => panic!("should have failed"),
             },
@@ -1690,10 +1685,7 @@ mod tests {
         for s in can_not_parse_tests {
             let result = parse_decimal_with_parameter(s, 20, 3);
             assert_eq!(
-                format!(
-                    "Parser error: can't parse the string value {} to decimal",
-                    s
-                ),
+                format!("Parser error: can't parse the string value {s} to decimal"),
                 result.unwrap_err().to_string()
             );
         }
@@ -1705,9 +1697,7 @@ mod tests {
 
             assert!(
                 actual.contains(expected),
-                "actual: '{}', expected: '{}'",
-                actual,
-                expected
+                "actual: '{actual}', expected: '{expected}'"
             );
         }
     }
@@ -1960,10 +1950,10 @@ mod tests {
         let mut csv_text = Vec::new();
         let mut csv_writer = std::io::Cursor::new(&mut csv_text);
         for index in 0..10 {
-            let text1 = format!("id{:}", index);
-            let text2 = format!("value{:}", index);
+            let text1 = format!("id{index:}");
+            let text2 = format!("value{index:}");
             csv_writer
-                .write_fmt(format_args!("~{}~,~{}~\r\n", text1, text2))
+                .write_fmt(format_args!("~{text1}~,~{text2}~\r\n"))
                 .unwrap();
         }
         let mut csv_reader = std::io::Cursor::new(&csv_text);
@@ -1993,10 +1983,10 @@ mod tests {
         let mut csv_text = Vec::new();
         let mut csv_writer = std::io::Cursor::new(&mut csv_text);
         for index in 0..10 {
-            let text1 = format!("id{:}", index);
-            let text2 = format!("value\\\"{:}", index);
+            let text1 = format!("id{index:}");
+            let text2 = format!("value\\\"{index:}");
             csv_writer
-                .write_fmt(format_args!("\"{}\",\"{}\"\r\n", text1, text2))
+                .write_fmt(format_args!("\"{text1}\",\"{text2}\"\r\n"))
                 .unwrap();
         }
         let mut csv_reader = std::io::Cursor::new(&csv_text);
@@ -2026,10 +2016,10 @@ mod tests {
         let mut csv_text = Vec::new();
         let mut csv_writer = std::io::Cursor::new(&mut csv_text);
         for index in 0..10 {
-            let text1 = format!("id{:}", index);
-            let text2 = format!("value{:}", index);
+            let text1 = format!("id{index:}");
+            let text2 = format!("value{index:}");
             csv_writer
-                .write_fmt(format_args!("\"{}\",\"{}\"\n", text1, text2))
+                .write_fmt(format_args!("\"{text1}\",\"{text2}\"\n"))
                 .unwrap();
         }
         let mut csv_reader = std::io::Cursor::new(&csv_text);
@@ -2068,7 +2058,7 @@ mod tests {
                 .next()
                 .unwrap()
                 .unwrap();
-            assert_eq!(b.num_rows(), expected, "{}", idx);
+            assert_eq!(b.num_rows(), expected, "{idx}");
         }
     }
 
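Besides the format-capture rewrites, this file carries two smaller cleanups worth
noting. `reader.stream_position()` is the idiomatic spelling of
`reader.seek(SeekFrom::Current(0))`; the two are equivalent. And the test hunks
keep clippy's literal rewrite of `{:}` as `{index:}`, where the `:` merely opens
an empty format spec. A self-contained sketch of both, with made-up data:

    use std::io::{Cursor, Seek, SeekFrom};

    fn main() -> std::io::Result<()> {
        let mut reader = Cursor::new(b"id0,value0\r\n".to_vec());
        reader.seek(SeekFrom::Start(4))?;
        // Two equivalent ways to read the current offset without moving it.
        let via_seek = reader.seek(SeekFrom::Current(0))?;
        let via_position = reader.stream_position()?;
        assert_eq!(via_seek, via_position);

        // `{index:}` is valid capture syntax: the empty spec after `:`
        // makes it behave exactly like `{index}`.
        let index = 7;
        assert_eq!(format!("id{index:}"), "id7");
        Ok(())
    }
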
diff --git a/arrow-csv/src/reader/records.rs b/arrow-csv/src/reader/records.rs
index 501da4088..76adb719e 100644
--- a/arrow-csv/src/reader/records.rs
+++ b/arrow-csv/src/reader/records.rs
@@ -152,8 +152,7 @@ impl<R: BufRead> RecordReader<R> {
             let read = self.fill_buf(to_skip.min(1024))?;
             if read == 0 {
                 return Err(ArrowError::CsvError(format!(
-                    "Failed to skip {} rows only found {}",
-                    to_skip, skipped
+                    "Failed to skip {to_skip} rows only found {skipped}"
                 )));
             }
 
@@ -175,7 +174,7 @@ impl<R: BufRead> RecordReader<R> {
 
         // Need to truncate data to the actual amount of data read
         let data = std::str::from_utf8(&self.data[..last_offset]).map_err(|e| {
-            ArrowError::CsvError(format!("Encountered invalid UTF-8 data: {}", e))
+            ArrowError::CsvError(format!("Encountered invalid UTF-8 data: {e}"))
         })?;
 
         Ok(StringRecords {
diff --git a/arrow-csv/src/writer.rs b/arrow-csv/src/writer.rs
index 3ab28c2df..bc11eef2f 100644
--- a/arrow-csv/src/writer.rs
+++ b/arrow-csv/src/writer.rs
@@ -90,8 +90,7 @@ where
 
 fn invalid_cast_error(dt: &str, col_index: usize, row_index: usize) -> ArrowError {
     ArrowError::CastError(format!(
-        "Cannot cast to {} at col index: {} row index: {}",
-        dt, col_index, row_index
+        "Cannot cast to {dt} at col index: {col_index} row index: {row_index}"
     ))
 }
 
@@ -264,8 +263,7 @@ impl<W: Write> Writer<W> {
                     // List and Struct arrays not supported by the writer, any
                     // other type needs to be implemented
                     return Err(ArrowError::CsvError(format!(
-                        "CSV Writer does not support {:?} data type",
-                        t
+                        "CSV Writer does not support {t:?} data type"
                     )));
                 }
             };
diff --git a/arrow-data/src/data.rs b/arrow-data/src/data.rs
index 07bbc6642..709262e83 100644
--- a/arrow-data/src/data.rs
+++ b/arrow-data/src/data.rs
@@ -854,8 +854,7 @@ impl ArrayData {
                 // At the moment, constructing a DictionaryArray will also check this
                 if !DataType::is_dictionary_key_type(key_type) {
                     return Err(ArrowError::InvalidArgumentError(format!(
-                        "Dictionary key type must be integer, but was {}",
-                        key_type
+                        "Dictionary key type must be integer, but was {key_type}"
                     )));
                 }
             }
@@ -1366,15 +1365,13 @@ impl ArrayData {
                 // check if the offset can be converted to usize
                 let r = x.to_usize().ok_or_else(|| {
                     ArrowError::InvalidArgumentError(format!(
-                        "Offset invariant failure: Could not convert offset {} to usize at position {}",
-                        x, i))}
+                        "Offset invariant failure: Could not convert offset {x} to usize at position {i}"))}
                     );
                 // check if the offset exceeds the limit
                 match r {
                     Ok(n) if n <= offset_limit => Ok((i, n)),
                     Ok(_) => Err(ArrowError::InvalidArgumentError(format!(
-                        "Offset invariant failure: offset at position {} out of bounds: {} > {}",
-                        i, x, offset_limit))
+                        "Offset invariant failure: offset at position {i} out of bounds: {x} > {offset_limit}"))
                     ),
                     Err(e) => Err(e),
                 }
@@ -1417,8 +1414,7 @@ impl ArrayData {
                         || !values_str.is_char_boundary(range.end)
                     {
                         return Err(ArrowError::InvalidArgumentError(format!(
-                            "incomplete utf-8 byte sequence from index {}",
-                            string_index
+                            "incomplete utf-8 byte sequence from index {string_index}"
                         )));
                     }
                     Ok(())
@@ -1431,8 +1427,7 @@ impl ArrayData {
                 |string_index, range| {
                     std::str::from_utf8(&values_buffer[range.clone()]).map_err(|e| {
                         ArrowError::InvalidArgumentError(format!(
-                            "Invalid UTF8 sequence at string index {} ({:?}): {}",
-                            string_index, range, e
+                            "Invalid UTF8 sequence at string index {string_index} ({range:?}): {e}"
                         ))
                     })?;
                     Ok(())
@@ -1478,15 +1473,13 @@ impl ArrayData {
             }
             let dict_index: i64 = dict_index.try_into().map_err(|_| {
                 ArrowError::InvalidArgumentError(format!(
-                    "Value at position {} out of bounds: {} (can not convert to i64)",
-                    i, dict_index
+                    "Value at position {i} out of bounds: {dict_index} (can not convert to i64)"
                 ))
             })?;
 
             if dict_index < 0 || dict_index > max_value {
                 return Err(ArrowError::InvalidArgumentError(format!(
-                    "Value at position {} out of bounds: {} (should be in [0, {}])",
-                    i, dict_index, max_value
+                    "Value at position {i} out of bounds: {dict_index} (should be in [0, {max_value}])"
                 )));
             }
             Ok(())
@@ -1503,23 +1496,17 @@ impl ArrayData {
         values.iter().enumerate().try_for_each(|(ix, &inp_value)| {
             let value: i64 = inp_value.try_into().map_err(|_| {
                 ArrowError::InvalidArgumentError(format!(
-                    "Value at position {} out of bounds: {} (can not convert to i64)",
-                    ix, inp_value
+                    "Value at position {ix} out of bounds: {inp_value} (can not convert to i64)"
                 ))
             })?;
             if value <= 0_i64 {
                 return Err(ArrowError::InvalidArgumentError(format!(
-                    "The values in run_ends array should be strictly positive. Found value {} at index {} that does not match the criteria.",
-                    value,
-                    ix
+                    "The values in run_ends array should be strictly positive. Found value {value} at index {ix} that does not match the criteria."
                 )));
             }
             if ix > 0 && value <= prev_value {
                 return Err(ArrowError::InvalidArgumentError(format!(
-                    "The values in run_ends array should be strictly increasing. Found value {} at index {} with previous value {} that does not match the criteria.",
-                    value,
-                    ix,
-                    prev_value
+                    "The values in run_ends array should be strictly increasing. Found value {value} at index {ix} with previous value {prev_value} that does not match the criteria."
                 )));
             }
 
@@ -1529,9 +1516,7 @@ impl ArrayData {
 
         if prev_value.as_usize() != array_len {
             return Err(ArrowError::InvalidArgumentError(format!(
-                "The length of array does not match the last value in the run_ends array. The last value of run_ends array is {} and length of array is {}.",
-                prev_value,
-                array_len
+                "The length of array does not match the last value in the run_ends array. The last value of run_ends array is {prev_value} and length of array is {array_len}."
             )));
         }
         Ok(())
diff --git a/arrow-data/src/decimal.rs b/arrow-data/src/decimal.rs
index 9367d4ec2..f74ab880d 100644
--- a/arrow-data/src/decimal.rs
+++ b/arrow-data/src/decimal.rs
@@ -735,8 +735,7 @@ pub const MIN_DECIMAL_FOR_EACH_PRECISION: [i128; 38] = [
 pub fn validate_decimal_precision(value: i128, precision: u8) -> Result<(), ArrowError> {
     if precision > DECIMAL128_MAX_PRECISION {
         return Err(ArrowError::InvalidArgumentError(format!(
-            "Max precision of a Decimal128 is {}, but got {}",
-            DECIMAL128_MAX_PRECISION, precision,
+            "Max precision of a Decimal128 is {DECIMAL128_MAX_PRECISION}, but got {precision}",
         )));
     }
 
@@ -745,13 +744,11 @@ pub fn validate_decimal_precision(value: i128, precision: u8) -> Result<(), Arro
 
     if value > max {
         Err(ArrowError::InvalidArgumentError(format!(
-            "{} is too large to store in a Decimal128 of precision {}. Max is {}",
-            value, precision, max
+            "{value} is too large to store in a Decimal128 of precision {precision}. Max is {max}"
         )))
     } else if value < min {
         Err(ArrowError::InvalidArgumentError(format!(
-            "{} is too small to store in a Decimal128 of precision {}. Min is {}",
-            value, precision, min
+            "{value} is too small to store in a Decimal128 of precision {precision}. Min is {min}"
         )))
     } else {
         Ok(())
@@ -767,8 +764,7 @@ pub fn validate_decimal256_precision(
 ) -> Result<(), ArrowError> {
     if precision > DECIMAL256_MAX_PRECISION {
         return Err(ArrowError::InvalidArgumentError(format!(
-            "Max precision of a Decimal256 is {}, but got {}",
-            DECIMAL256_MAX_PRECISION, precision,
+            "Max precision of a Decimal256 is {DECIMAL256_MAX_PRECISION}, but got {precision}",
         )));
     }
     let max = MAX_DECIMAL_BYTES_FOR_LARGER_EACH_PRECISION[usize::from(precision) - 1];
@@ -776,13 +772,11 @@ pub fn validate_decimal256_precision(
 
     if value > max {
         Err(ArrowError::InvalidArgumentError(format!(
-            "{:?} is too large to store in a Decimal256 of precision {}. Max is {:?}",
-            value, precision, max
+            "{value:?} is too large to store in a Decimal256 of precision {precision}. Max is {max:?}"
         )))
     } else if value < min {
         Err(ArrowError::InvalidArgumentError(format!(
-            "{:?} is too small to store in a Decimal256 of precision {}. Min is {:?}",
-            value, precision, min
+            "{value:?} is too small to store in a Decimal256 of precision {precision}. Min is {min:?}"
         )))
     } else {
         Ok(())
diff --git a/arrow-data/src/transform/mod.rs b/arrow-data/src/transform/mod.rs
index 2a24b1cc2..fef6d4be4 100644
--- a/arrow-data/src/transform/mod.rs
+++ b/arrow-data/src/transform/mod.rs
@@ -381,7 +381,7 @@ impl<'a> MutableArrayData<'a> {
                 array_capacity = *capacity;
                 new_buffers(data_type, *capacity)
             }
-            _ => panic!("Capacities: {:?} not yet supported", capacities),
+            _ => panic!("Capacities: {capacities:?} not yet supported"),
         };
 
         let child_data = match &data_type {
diff --git a/arrow-flight/examples/flight_sql_server.rs b/arrow-flight/examples/flight_sql_server.rs
index 5aff347e4..28aef4e92 100644
--- a/arrow-flight/examples/flight_sql_server.rs
+++ b/arrow-flight/examples/flight_sql_server.rs
@@ -93,8 +93,7 @@ impl FlightSqlService for FlightSqlServiceImpl {
             .map_err(|e| status!("authorization not parsable", e))?;
         if !authorization.starts_with(basic) {
             Err(Status::invalid_argument(format!(
-                "Auth type not implemented: {}",
-                authorization
+                "Auth type not implemented: {authorization}"
             )))?;
         }
         let base64 = &authorization[basic.len()..];
@@ -473,7 +472,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
 
     let svc = FlightServiceServer::new(FlightSqlServiceImpl {});
 
-    println!("Listening on {:?}", addr);
+    println!("Listening on {addr:?}");
 
     let cert = std::fs::read_to_string("arrow-flight/examples/data/server.pem")?;
     let key = std::fs::read_to_string("arrow-flight/examples/data/server.key")?;
diff --git a/arrow-flight/src/error.rs b/arrow-flight/src/error.rs
index 7a43e537a..5524dd1a4 100644
--- a/arrow-flight/src/error.rs
+++ b/arrow-flight/src/error.rs
@@ -50,7 +50,7 @@ impl FlightError {
 impl std::fmt::Display for FlightError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         // TODO better format / error
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }
 
diff --git a/arrow-flight/src/lib.rs b/arrow-flight/src/lib.rs
index a44b4b06e..7aebd92e2 100644
--- a/arrow-flight/src/lib.rs
+++ b/arrow-flight/src/lib.rs
@@ -174,7 +174,7 @@ impl fmt::Display for FlightData {
         write!(f, "FlightData {{")?;
         write!(f, " descriptor: ")?;
         match &self.flight_descriptor {
-            Some(d) => write!(f, "{}", d)?,
+            Some(d) => write!(f, "{d}")?,
             None => write!(f, "None")?,
         };
         write!(f, ", header: ")?;
@@ -200,7 +200,7 @@ impl fmt::Display for FlightDescriptor {
                 write!(f, "path: [")?;
                 let mut sep = "";
                 for element in &self.path {
-                    write!(f, "{}{}", sep, element)?;
+                    write!(f, "{sep}{element}")?;
                     sep = ", ";
                 }
                 write!(f, "]")?;
@@ -218,13 +218,13 @@ impl fmt::Display for FlightEndpoint {
         write!(f, "FlightEndpoint {{")?;
         write!(f, " ticket: ")?;
         match &self.ticket {
-            Some(value) => write!(f, "{}", value),
+            Some(value) => write!(f, "{value}"),
             None => write!(f, " none"),
         }?;
         write!(f, ", location: [")?;
         let mut sep = "";
         for location in &self.location {
-            write!(f, "{}{}", sep, location)?;
+            write!(f, "{sep}{location}")?;
             sep = ", ";
         }
         write!(f, "]")?;
@@ -237,16 +237,16 @@ impl fmt::Display for FlightInfo {
         let ipc_message = IpcMessage(self.schema.clone());
         let schema: Schema = ipc_message.try_into().map_err(|_err| fmt::Error)?;
         write!(f, "FlightInfo {{")?;
-        write!(f, " schema: {}", schema)?;
+        write!(f, " schema: {schema}")?;
         write!(f, ", descriptor:")?;
         match &self.flight_descriptor {
-            Some(d) => write!(f, " {}", d),
+            Some(d) => write!(f, " {d}"),
             None => write!(f, " None"),
         }?;
         write!(f, ", endpoint: [")?;
         let mut sep = "";
         for endpoint in &self.endpoint {
-            write!(f, "{}{}", sep, endpoint)?;
+            write!(f, "{sep}{endpoint}")?;
             sep = ", ";
         }
         write!(f, "], total_records: {}", self.total_records)?;
@@ -339,8 +339,7 @@ impl TryFrom<&FlightData> for Schema {
     fn try_from(data: &FlightData) -> ArrowResult<Self> {
         convert::try_schema_from_flatbuffer_bytes(&data.data_header[..]).map_err(|err| {
             ArrowError::ParseError(format!(
-                "Unable to convert flight data to Arrow schema: {}",
-                err
+                "Unable to convert flight data to Arrow schema: {err}"
             ))
         })
     }
@@ -489,7 +488,7 @@ mod tests {
     fn it_accepts_equal_output() {
         let input = TestVector(vec![91; 10], 10);
 
-        let actual = format!("{}", input);
+        let actual = format!("{input}");
         let expected = format!("{:?}", vec![91; 10]);
         assert_eq!(actual, expected);
     }
@@ -498,7 +497,7 @@ mod tests {
     fn it_accepts_short_output() {
         let input = TestVector(vec![91; 6], 10);
 
-        let actual = format!("{}", input);
+        let actual = format!("{input}");
         let expected = format!("{:?}", vec![91; 6]);
         assert_eq!(actual, expected);
     }
@@ -507,7 +506,7 @@ mod tests {
     fn it_accepts_long_output() {
         let input = TestVector(vec![91; 10], 9);
 
-        let actual = format!("{}", input);
+        let actual = format!("{input}");
         let expected = format!("{:?}", vec![91; 9]);
         assert_eq!(actual, expected);
     }
diff --git a/arrow-flight/src/sql/client.rs b/arrow-flight/src/sql/client.rs
index 5c5f84b3d..31ba1e274 100644
--- a/arrow-flight/src/sql/client.rs
+++ b/arrow-flight/src/sql/client.rs
@@ -90,7 +90,7 @@ impl FlightSqlServiceClient {
         host: &str,
         port: u16,
     ) -> Result<Self, ArrowError> {
-        let addr = format!("https://{}:{}", host, port);
+        let addr = format!("https://{host}:{port}");
 
         let endpoint = Endpoint::new(addr)
             .map_err(|_| ArrowError::IoError("Cannot create endpoint".to_string()))?
@@ -112,7 +112,7 @@ impl FlightSqlServiceClient {
             .map_err(|_| ArrowError::IoError("Cannot create endpoint".to_string()))?;
 
         let channel = endpoint.connect().await.map_err(|e| {
-            ArrowError::IoError(format!("Cannot connect to endpoint: {}", e))
+            ArrowError::IoError(format!("Cannot connect to endpoint: {e}"))
         })?;
         Ok(Self::new(channel))
     }
@@ -173,8 +173,8 @@ impl FlightSqlServiceClient {
             payload: Default::default(),
         };
         let mut req = tonic::Request::new(stream::iter(vec![cmd]));
-        let val = BASE64_STANDARD.encode(format!("{}:{}", username, password));
-        let val = format!("Basic {}", val)
+        let val = BASE64_STANDARD.encode(format!("{username}:{password}"));
+        let val = format!("Basic {val}")
             .parse()
             .map_err(|_| ArrowError::ParseError("Cannot parse header".to_string()))?;
         req.metadata_mut().insert("authorization", val);
@@ -182,7 +182,7 @@ impl FlightSqlServiceClient {
             .flight_client
             .handshake(req)
             .await
-            .map_err(|e| ArrowError::IoError(format!("Can't handshake {}", e)))?;
+            .map_err(|e| ArrowError::IoError(format!("Can't handshake {e}")))?;
         if let Some(auth) = resp.metadata().get("authorization") {
             let auth = auth.to_str().map_err(|_| {
                 ArrowError::ParseError("Can't read auth header".to_string())
@@ -331,7 +331,7 @@ impl FlightSqlServiceClient {
         };
         let mut req = tonic::Request::new(action);
         if let Some(token) = &self.token {
-            let val = format!("Bearer {}", token).parse().map_err(|_| {
+            let val = format!("Bearer {token}").parse().map_err(|_| {
                 ArrowError::IoError("Statement already closed.".to_string())
             })?;
             req.metadata_mut().insert("authorization", val);
@@ -481,7 +481,7 @@ fn decode_error_to_arrow_error(err: prost::DecodeError) -> ArrowError {
 }
 
 fn status_to_arrow_error(status: tonic::Status) -> ArrowError {
-    ArrowError::IoError(format!("{:?}", status))
+    ArrowError::IoError(format!("{status:?}"))
 }
 
 // A polymorphic structure to natively represent different types of data contained in `FlightData`
@@ -496,7 +496,7 @@ pub fn arrow_data_from_flight_data(
     arrow_schema_ref: &SchemaRef,
 ) -> Result<ArrowFlightData, ArrowError> {
     let ipc_message = root_as_message(&flight_data.data_header[..]).map_err(|err| {
-        ArrowError::ParseError(format!("Unable to get root as message: {:?}", err))
+        ArrowError::ParseError(format!("Unable to get root as message: {err:?}"))
     })?;
 
     match ipc_message.header_type() {
diff --git a/arrow-flight/src/sql/mod.rs b/arrow-flight/src/sql/mod.rs
index 88dc6cde9..9ea74c3f3 100644
--- a/arrow-flight/src/sql/mod.rs
+++ b/arrow-flight/src/sql/mod.rs
@@ -154,7 +154,7 @@ impl Any {
             return Ok(None);
         }
         let m = Message::decode(&*self.value).map_err(|err| {
-            ArrowError::ParseError(format!("Unable to decode Any value: {}", err))
+            ArrowError::ParseError(format!("Unable to decode Any value: {err}"))
         })?;
         Ok(Some(m))
     }
diff --git a/arrow-flight/src/sql/server.rs b/arrow-flight/src/sql/server.rs
index e764e0c51..d48181189 100644
--- a/arrow-flight/src/sql/server.rs
+++ b/arrow-flight/src/sql/server.rs
@@ -586,9 +586,9 @@ where
 }
 
 fn decode_error_to_status(err: prost::DecodeError) -> Status {
-    Status::invalid_argument(format!("{:?}", err))
+    Status::invalid_argument(format!("{err:?}"))
 }
 
 fn arrow_error_to_status(err: arrow_schema::ArrowError) -> Status {
-    Status::internal(format!("{:?}", err))
+    Status::internal(format!("{err:?}"))
 }
diff --git a/arrow-flight/src/utils.rs b/arrow-flight/src/utils.rs
index 266f8eb29..ccf1e7386 100644
--- a/arrow-flight/src/utils.rs
+++ b/arrow-flight/src/utils.rs
@@ -85,7 +85,7 @@ pub fn flight_data_to_arrow_batch(
 ) -> Result<RecordBatch, ArrowError> {
     // check that the data_header is a record batch message
     let message = arrow_ipc::root_as_message(&data.data_header[..]).map_err(|err| {
-        ArrowError::ParseError(format!("Unable to get root as message: {:?}", err))
+        ArrowError::ParseError(format!("Unable to get root as message: {err:?}"))
     })?;
 
     message
diff --git a/arrow-flight/tests/client.rs b/arrow-flight/tests/client.rs
index 032dad049..ab1cfa1fb 100644
--- a/arrow-flight/tests/client.rs
+++ b/arrow-flight/tests/client.rs
@@ -92,10 +92,7 @@ fn ensure_metadata(client: &FlightClient, test_server: &TestFlightServer) {
         assert_eq!(
             metadata.get(k).as_ref(),
             Some(&v),
-            "Missing / Mismatched metadata {:?} sent {:?} got {:?}",
-            k,
-            client_metadata,
-            metadata
+            "Missing / Mismatched metadata {k:?} sent {client_metadata:?} got {metadata:?}"
         );
     }
 }
@@ -797,29 +794,23 @@ fn expect_status(error: FlightError, expected: Status) {
     let status = if let FlightError::Tonic(status) = error {
         status
     } else {
-        panic!("Expected FlightError::Tonic, got: {:?}", error);
+        panic!("Expected FlightError::Tonic, got: {error:?}");
     };
 
     assert_eq!(
         status.code(),
         expected.code(),
-        "Got {:?} want {:?}",
-        status,
-        expected
+        "Got {status:?} want {expected:?}"
     );
     assert_eq!(
         status.message(),
         expected.message(),
-        "Got {:?} want {:?}",
-        status,
-        expected
+        "Got {status:?} want {expected:?}"
     );
     assert_eq!(
         status.details(),
         expected.details(),
-        "Got {:?} want {:?}",
-        status,
-        expected
+        "Got {status:?} want {expected:?}"
     );
 }
 
diff --git a/arrow-integration-test/src/datatype.rs b/arrow-integration-test/src/datatype.rs
index c2e326b4f..ece64e16e 100644
--- a/arrow-integration-test/src/datatype.rs
+++ b/arrow-integration-test/src/datatype.rs
@@ -152,7 +152,7 @@ pub fn data_type_from_json(json: &serde_json::Value) -> Result<DataType> {
             },
             Some(s) if s == "int" => match map.get("isSigned") {
                 Some(&Value::Bool(true)) => match map.get("bitWidth") {
-                    Some(&Value::Number(ref n)) => match n.as_u64() {
+                    Some(Value::Number(n)) => match n.as_u64() {
                         Some(8) => Ok(DataType::Int8),
                         Some(16) => Ok(DataType::Int16),
                         Some(32) => Ok(DataType::Int32),
@@ -166,7 +166,7 @@ pub fn data_type_from_json(json: &serde_json::Value) -> Result<DataType> {
                     )),
                 },
                 Some(&Value::Bool(false)) => match map.get("bitWidth") {
-                    Some(&Value::Number(ref n)) => match n.as_u64() {
+                    Some(Value::Number(n)) => match n.as_u64() {
                         Some(8) => Ok(DataType::UInt8),
                         Some(16) => Ok(DataType::UInt16),
                         Some(32) => Ok(DataType::UInt32),
@@ -226,8 +226,7 @@ pub fn data_type_from_json(json: &serde_json::Value) -> Result<DataType> {
                         UnionMode::Dense
                     } else {
                         return Err(ArrowError::ParseError(format!(
-                            "Unknown union mode {:?} for union",
-                            mode
+                            "Unknown union mode {mode:?} for union"
                         )));
                     };
                     if let Some(type_ids) = map.get("typeIds") {
@@ -256,8 +255,7 @@ pub fn data_type_from_json(json: &serde_json::Value) -> Result<DataType> {
                 }
             }
             Some(other) => Err(ArrowError::ParseError(format!(
-                "invalid or unsupported type name: {} in {:?}",
-                other, json
+                "invalid or unsupported type name: {other} in {json:?}"
             ))),
             None => Err(ArrowError::ParseError("type name missing".to_string())),
         },
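The `Some(&Value::Number(ref n))` to `Some(Value::Number(n))` hunks rely on
match ergonomics: matching an `Option<&Value>` against a non-reference pattern
binds `n` by reference automatically, so the explicit `&` and `ref` are
redundant. A minimal sketch against serde_json, with a hypothetical helper:

    use serde_json::{Map, Value};

    // Hypothetical helper mirroring the `bitWidth` lookup above.
    fn bit_width(map: &Map<String, Value>) -> Option<u64> {
        match map.get("bitWidth") {
            // Previously written as Some(&Value::Number(ref n)); match
            // ergonomics now infers both the `&` and the `ref`.
            Some(Value::Number(n)) => n.as_u64(),
            _ => None,
        }
    }

    fn main() {
        let map: Map<String, Value> =
            serde_json::from_str(r#"{"bitWidth": 32}"#).unwrap();
        assert_eq!(bit_width(&map), Some(32));
    }
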
diff --git a/arrow-integration-test/src/field.rs b/arrow-integration-test/src/field.rs
index dd0519157..abed0bd1d 100644
--- a/arrow-integration-test/src/field.rs
+++ b/arrow-integration-test/src/field.rs
@@ -26,7 +26,7 @@ pub fn field_from_json(json: &serde_json::Value) -> Result<Field> {
     match *json {
         Value::Object(ref map) => {
             let name = match map.get("name") {
-                Some(&Value::String(ref name)) => name.to_string(),
+                Some(Value::String(name)) => name.to_string(),
                 _ => {
                     return Err(ArrowError::ParseError(
                         "Field missing 'name' attribute".to_string(),
@@ -52,7 +52,7 @@ pub fn field_from_json(json: &serde_json::Value) -> Result<Field> {
 
             // Referenced example file: testing/data/arrow-ipc-stream/integration/1.0.0-littleendian/generated_custom_metadata.json.gz
             let metadata = match map.get("metadata") {
-                Some(&Value::Array(ref values)) => {
+                Some(Value::Array(values)) => {
                     let mut res: HashMap<String, String> = HashMap::default();
                     for value in values {
                         match value.as_object() {
@@ -91,15 +91,14 @@ pub fn field_from_json(json: &serde_json::Value) -> Result<Field> {
                 }
                 // We also support map format, because Schema's metadata supports this.
                 // See https://github.com/apache/arrow/pull/5907
-                Some(&Value::Object(ref values)) => {
+                Some(Value::Object(values)) => {
                     let mut res: HashMap<String, String> = HashMap::default();
                     for (k, v) in values {
                         if let Some(str_value) = v.as_str() {
                             res.insert(k.clone(), str_value.to_string().clone());
                         } else {
                             return Err(ArrowError::ParseError(format!(
-                                "Field 'metadata' contains non-string value for key {}",
-                                k
+                                "Field 'metadata' contains non-string value for key {k}"
                             )));
                         }
                     }
@@ -180,7 +179,7 @@ pub fn field_from_json(json: &serde_json::Value) -> Result<Field> {
                                 }
                                 t  => {
                                     return Err(ArrowError::ParseError(
-                                        format!("Map children should be a struct with 2 fields, found {:?}",  t)
+                                        format!("Map children should be a struct with 2 fields, found {t:?}")
                                     ))
                                 }
                             }
diff --git a/arrow-integration-test/src/lib.rs b/arrow-integration-test/src/lib.rs
index a0510edd9..87a7edc87 100644
--- a/arrow-integration-test/src/lib.rs
+++ b/arrow-integration-test/src/lib.rs
@@ -172,8 +172,8 @@ impl ArrowJson {
             match batch {
                 Some(Ok(batch)) => {
                     if json_batch != batch {
-                        println!("json: {:?}", json_batch);
-                        println!("batch: {:?}", batch);
+                        println!("json: {json_batch:?}");
+                        println!("batch: {batch:?}");
                         return Ok(false);
                     }
                 }
@@ -255,8 +255,7 @@ impl ArrowJsonField {
             }
             Err(e) => {
                 eprintln!(
-                    "Encountered error while converting JSON field to Arrow field: {:?}",
-                    e
+                    "Encountered error while converting JSON field to Arrow field: {e:?}"
                 );
                 false
             }
@@ -323,10 +322,7 @@ pub fn array_from_json(
             {
                 match is_valid {
                     1 => b.append_value(value.as_i64().ok_or_else(|| {
-                        ArrowError::JsonError(format!(
-                            "Unable to get {:?} as int64",
-                            value
-                        ))
+                        ArrowError::JsonError(format!("Unable to get {value:?} as int64"))
                     })? as i8),
                     _ => b.append_null(),
                 };
@@ -411,18 +407,16 @@ pub fn array_from_json(
                                             i64::from_le_bytes(bytes)
                                         }
                                         _ => panic!(
-                                            "Unable to parse {:?} as interval daytime",
-                                            value
+                                            "Unable to parse {value:?} as interval daytime"
                                         ),
                                     }
                                 }
                                 _ => panic!(
-                                    "Unable to parse {:?} as interval daytime",
-                                    value
+                                    "Unable to parse {value:?} as interval daytime"
                                 ),
                             }
                         }
-                        _ => panic!("Unable to parse {:?} as number", value),
+                        _ => panic!("Unable to parse {value:?} as number"),
                     }),
                     _ => b.append_null(),
                 };
@@ -502,7 +496,7 @@ pub fn array_from_json(
                                 value.as_u64().expect("Unable to read number as u64"),
                             )
                         } else {
-                            panic!("Unable to parse value {:?} as u64", value)
+                            panic!("Unable to parse value {value:?} as u64")
                         }
                     }
                     _ => b.append_null(),
@@ -542,11 +536,11 @@ pub fn array_from_json(
                                     months_days_ns
                                 }
                                 (_, _, _) => {
-                                    panic!("Unable to parse {:?} as MonthDayNano", v)
+                                    panic!("Unable to parse {v:?} as MonthDayNano")
                                 }
                             }
                         }
-                        _ => panic!("Unable to parse {:?} as MonthDayNano", value),
+                        _ => panic!("Unable to parse {value:?} as MonthDayNano"),
                     }),
                     _ => b.append_null(),
                 };
@@ -760,16 +754,14 @@ pub fn array_from_json(
         DataType::Dictionary(key_type, value_type) => {
             let dict_id = field.dict_id().ok_or_else(|| {
                 ArrowError::JsonError(format!(
-                    "Unable to find dict_id for field {:?}",
-                    field
+                    "Unable to find dict_id for field {field:?}"
                 ))
             })?;
             // find dictionary
             let dictionary = dictionaries
                 .ok_or_else(|| {
                     ArrowError::JsonError(format!(
-                        "Unable to find any dictionaries for field {:?}",
-                        field
+                        "Unable to find any dictionaries for field {field:?}"
                     ))
                 })?
                 .get(&dict_id);
@@ -783,8 +775,7 @@ pub fn array_from_json(
                     dictionaries,
                 ),
                 None => Err(ArrowError::JsonError(format!(
-                    "Unable to find dictionary for field {:?}",
-                    field
+                    "Unable to find dictionary for field {field:?}"
                 ))),
             }
         }
@@ -892,8 +883,7 @@ pub fn array_from_json(
             Ok(Arc::new(array))
         }
         t => Err(ArrowError::JsonError(format!(
-            "data type {:?} not supported",
-            t
+            "data type {t:?} not supported"
         ))),
     }
 }
@@ -963,8 +953,7 @@ pub fn dictionary_array_from_json(
             Ok(array)
         }
         _ => Err(ArrowError::JsonError(format!(
-            "Dictionary key type {:?} not supported",
-            dict_key
+            "Dictionary key type {dict_key:?} not supported"
         ))),
     }
 }
diff --git a/arrow-integration-testing/src/bin/arrow-json-integration-test.rs b/arrow-integration-testing/src/bin/arrow-json-integration-test.rs
index 5eb443b08..0702a8a68 100644
--- a/arrow-integration-testing/src/bin/arrow-json-integration-test.rs
+++ b/arrow-integration-testing/src/bin/arrow-json-integration-test.rs
@@ -62,7 +62,7 @@ fn main() -> Result<()> {
 
 fn json_to_arrow(json_name: &str, arrow_name: &str, verbose: bool) -> Result<()> {
     if verbose {
-        eprintln!("Converting {} to {}", json_name, arrow_name);
+        eprintln!("Converting {json_name} to {arrow_name}");
     }
 
     let json_file = read_json_file(json_name)?;
@@ -81,7 +81,7 @@ fn json_to_arrow(json_name: &str, arrow_name: &str, verbose: bool) -> Result<()>
 
 fn arrow_to_json(arrow_name: &str, json_name: &str, verbose: bool) -> Result<()> {
     if verbose {
-        eprintln!("Converting {} to {}", arrow_name, json_name);
+        eprintln!("Converting {arrow_name} to {json_name}");
     }
 
     let arrow_file = File::open(arrow_name)?;
@@ -155,7 +155,7 @@ fn canonicalize_schema(schema: &Schema) -> Schema {
 
 fn validate(arrow_name: &str, json_name: &str, verbose: bool) -> Result<()> {
     if verbose {
-        eprintln!("Validating {} and {}", arrow_name, json_name);
+        eprintln!("Validating {arrow_name} and {json_name}");
     }
 
     // open JSON file
diff --git a/arrow-integration-testing/src/flight_client_scenarios/auth_basic_proto.rs b/arrow-integration-testing/src/flight_client_scenarios/auth_basic_proto.rs
index 53c6c4412..9f66abf50 100644
--- a/arrow-integration-testing/src/flight_client_scenarios/auth_basic_proto.rs
+++ b/arrow-integration-testing/src/flight_client_scenarios/auth_basic_proto.rs
@@ -30,7 +30,7 @@ type Result<T = (), E = Error> = std::result::Result<T, E>;
 type Client = FlightServiceClient<tonic::transport::Channel>;
 
 pub async fn run_scenario(host: &str, port: u16) -> Result {
-    let url = format!("http://{}:{}", host, port);
+    let url = format!("http://{host}:{port}");
     let mut client = FlightServiceClient::connect(url).await?;
 
     let action = arrow_flight::Action::default();
@@ -41,15 +41,13 @@ pub async fn run_scenario(host: &str, port: u16) -> Result {
         Err(e) => {
             if e.code() != tonic::Code::Unauthenticated {
                 return Err(Box::new(Status::internal(format!(
-                    "Expected UNAUTHENTICATED but got {:?}",
-                    e
+                    "Expected UNAUTHENTICATED but got {e:?}"
                 ))));
             }
         }
         Ok(other) => {
             return Err(Box::new(Status::internal(format!(
-                "Expected UNAUTHENTICATED but got {:?}",
-                other
+                "Expected UNAUTHENTICATED but got {other:?}"
             ))));
         }
     }
diff --git a/arrow-integration-testing/src/flight_client_scenarios/integration_test.rs b/arrow-integration-testing/src/flight_client_scenarios/integration_test.rs
index 1f1b312f9..3c537c5f6 100644
--- a/arrow-integration-testing/src/flight_client_scenarios/integration_test.rs
+++ b/arrow-integration-testing/src/flight_client_scenarios/integration_test.rs
@@ -42,7 +42,7 @@ type Result<T = (), E = Error> = std::result::Result<T, E>;
 type Client = FlightServiceClient<tonic::transport::Channel>;
 
 pub async fn run_scenario(host: &str, port: u16, path: &str) -> Result {
-    let url = format!("http://{}:{}", host, port);
+    let url = format!("http://{host}:{port}");
 
     let client = FlightServiceClient::connect(url).await?;
 
@@ -235,7 +235,7 @@ async fn consume_flight_location(
             let expected_data = expected_batch.column(i).data();
             let actual_data = actual_batch.column(i).data();
 
-            assert_eq!(expected_data, actual_data, "Data for field {}", field_name);
+            assert_eq!(expected_data, actual_data, "Data for field {field_name}");
         }
     }
 
diff --git a/arrow-integration-testing/src/flight_client_scenarios/middleware.rs b/arrow-integration-testing/src/flight_client_scenarios/middleware.rs
index 72ef37d3f..773919ff7 100644
--- a/arrow-integration-testing/src/flight_client_scenarios/middleware.rs
+++ b/arrow-integration-testing/src/flight_client_scenarios/middleware.rs
@@ -26,7 +26,7 @@ type Error = Box<dyn std::error::Error + Send + Sync + 'static>;
 type Result<T = (), E = Error> = std::result::Result<T, E>;
 
 pub async fn run_scenario(host: &str, port: u16) -> Result {
-    let url = format!("http://{}:{}", host, port);
+    let url = format!("http://{host}:{port}");
     let conn = tonic::transport::Endpoint::new(url)?.connect().await?;
     let mut client = FlightServiceClient::with_interceptor(conn, middleware_interceptor);
 
@@ -48,8 +48,7 @@ pub async fn run_scenario(host: &str, port: u16) -> Result {
             if value != "expected value" {
                 let msg = format!(
                     "On failing call: Expected to receive header 'x-middleware: expected value', \
-                     but instead got: '{}'",
-                    value
+                     but instead got: '{value}'"
                 );
                 return Err(Box::new(Status::internal(msg)));
             }
@@ -67,8 +66,7 @@ pub async fn run_scenario(host: &str, port: u16) -> Result {
     if value != "expected value" {
         let msg = format!(
             "On success call: Expected to receive header 'x-middleware: expected value', \
-            but instead got: '{}'",
-            value
+            but instead got: '{value}'"
         );
         return Err(Box::new(Status::internal(msg)));
     }
diff --git a/arrow-integration-testing/src/flight_server_scenarios.rs b/arrow-integration-testing/src/flight_server_scenarios.rs
index 6976c1267..9034776c6 100644
--- a/arrow-integration-testing/src/flight_server_scenarios.rs
+++ b/arrow-integration-testing/src/flight_server_scenarios.rs
@@ -28,7 +28,7 @@ type Error = Box<dyn std::error::Error + Send + Sync + 'static>;
 type Result<T = (), E = Error> = std::result::Result<T, E>;
 
 pub async fn listen_on(port: u16) -> Result<SocketAddr> {
-    let addr: SocketAddr = format!("0.0.0.0:{}", port).parse()?;
+    let addr: SocketAddr = format!("0.0.0.0:{port}").parse()?;
 
     let listener = TcpListener::bind(addr).await?;
     let addr = listener.local_addr()?;
diff --git a/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs b/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs
index 7ad4c676f..51d08d943 100644
--- a/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs
+++ b/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs
@@ -48,7 +48,7 @@ pub async fn scenario_setup(port: u16) -> Result {
     let addr = super::listen_on(port).await?;
 
     let service = FlightServiceImpl {
-        server_location: format!("grpc+tcp://{}", addr),
+        server_location: format!("grpc+tcp://{addr}"),
         ..Default::default()
     };
     let svc = FlightServiceServer::new(service);
@@ -103,13 +103,13 @@ impl FlightService for FlightServiceImpl {
         let ticket = request.into_inner();
 
         let key = String::from_utf8(ticket.ticket.to_vec())
-            .map_err(|e| Status::invalid_argument(format!("Invalid ticket: {:?}", e)))?;
+            .map_err(|e| Status::invalid_argument(format!("Invalid ticket: {e:?}")))?;
 
         let uploaded_chunks = self.uploaded_chunks.lock().await;
 
-        let flight = uploaded_chunks.get(&key).ok_or_else(|| {
-            Status::not_found(format!("Could not find flight. {}", key))
-        })?;
+        let flight = uploaded_chunks
+            .get(&key)
+            .ok_or_else(|| Status::not_found(format!("Could not find flight. {key}")))?;
 
         let options = arrow::ipc::writer::IpcWriteOptions::default();
 
@@ -204,7 +204,7 @@ impl FlightService for FlightServiceImpl {
 
                 Ok(Response::new(info))
             }
-            other => Err(Status::unimplemented(format!("Request type: {}", other))),
+            other => Err(Status::unimplemented(format!("Request type: {other}"))),
         }
     }
 
@@ -231,7 +231,7 @@ impl FlightService for FlightServiceImpl {
         let key = descriptor.path[0].clone();
 
         let schema = Schema::try_from(&flight_data)
-            .map_err(|e| Status::invalid_argument(format!("Invalid schema: {:?}", e)))?;
+            .map_err(|e| Status::invalid_argument(format!("Invalid schema: {e:?}")))?;
         let schema_ref = Arc::new(schema.clone());
 
         let (response_tx, response_rx) = mpsc::channel(10);
@@ -287,7 +287,7 @@ async fn send_app_metadata(
         app_metadata: app_metadata.to_vec().into(),
     }))
     .await
-    .map_err(|e| Status::internal(format!("Could not send PutResult: {:?}", e)))
+    .map_err(|e| Status::internal(format!("Could not send PutResult: {e:?}")))
 }
 
 async fn record_batch_from_message(
@@ -309,9 +309,8 @@ async fn record_batch_from_message(
         &message.version(),
     );
 
-    arrow_batch_result.map_err(|e| {
-        Status::internal(format!("Could not convert to RecordBatch: {:?}", e))
-    })
+    arrow_batch_result
+        .map_err(|e| Status::internal(format!("Could not convert to RecordBatch: {e:?}")))
 }
 
 async fn dictionary_from_message(
@@ -331,9 +330,8 @@ async fn dictionary_from_message(
         dictionaries_by_id,
         &message.version(),
     );
-    dictionary_batch_result.map_err(|e| {
-        Status::internal(format!("Could not convert to Dictionary: {:?}", e))
-    })
+    dictionary_batch_result
+        .map_err(|e| Status::internal(format!("Could not convert to Dictionary: {e:?}")))
 }
 
 async fn save_uploaded_chunks(
@@ -351,7 +349,7 @@ async fn save_uploaded_chunks(
 
     while let Some(Ok(data)) = input_stream.next().await {
         let message = arrow::ipc::root_as_message(&data.data_header[..])
-            .map_err(|e| Status::internal(format!("Could not parse message: {:?}", e)))?;
+            .map_err(|e| Status::internal(format!("Could not parse message: {e:?}")))?;
 
         match message.header_type() {
             ipc::MessageHeader::Schema => {
@@ -384,8 +382,7 @@ async fn save_uploaded_chunks(
             t => {
                 return Err(Status::internal(format!(
                     "Reading types other than record batches not yet supported, \
-                                              unable to read {:?}",
-                    t
+                                              unable to read {t:?}"
                 )));
             }
         }
diff --git a/arrow-integration-testing/src/lib.rs b/arrow-integration-testing/src/lib.rs
index b0c8b85af..fe0cc68a4 100644
--- a/arrow-integration-testing/src/lib.rs
+++ b/arrow-integration-testing/src/lib.rs
@@ -90,8 +90,7 @@ pub fn read_gzip_json(version: &str, path: &str) -> ArrowJson {
 
     let testdata = arrow_test_data();
     let file = File::open(format!(
-        "{}/arrow-ipc-stream/integration/{}/{}.json.gz",
-        testdata, version, path
+        "{testdata}/arrow-ipc-stream/integration/{version}/{path}.json.gz"
     ))
     .unwrap();
     let mut gz = GzDecoder::new(&file);
diff --git a/arrow-integration-testing/tests/ipc_reader.rs b/arrow-integration-testing/tests/ipc_reader.rs
index 6d91eeccb..d6e81cd98 100644
--- a/arrow-integration-testing/tests/ipc_reader.rs
+++ b/arrow-integration-testing/tests/ipc_reader.rs
@@ -89,8 +89,7 @@ fn read_1_0_0_bigendian() {
     ];
     paths.iter().for_each(|path| {
         let file = File::open(format!(
-            "{}/arrow-ipc-stream/integration/1.0.0-bigendian/{}.arrow_file",
-            testdata, path
+            "{testdata}/arrow-ipc-stream/integration/1.0.0-bigendian/{path}.arrow_file"
         ))
         .unwrap();
 
@@ -161,10 +160,8 @@ fn read_2_0_0_compression() {
 /// Verification json file
 /// `arrow-ipc-stream/integration/<version>/<path>.json.gz`
 fn verify_arrow_file(testdata: &str, version: &str, path: &str) {
-    let filename = format!(
-        "{}/arrow-ipc-stream/integration/{}/{}.arrow_file",
-        testdata, version, path
-    );
+    let filename =
+        format!("{testdata}/arrow-ipc-stream/integration/{version}/{path}.arrow_file");
     println!("Verifying {filename}");
 
     // Compare contents to the expected output format in JSON
@@ -200,10 +197,8 @@ fn verify_arrow_file(testdata: &str, version: &str, path: &str) {
 /// Verification json file
 /// `arrow-ipc-stream/integration/<version>/<path>.json.gz`
 fn verify_arrow_stream(testdata: &str, version: &str, path: &str) {
-    let filename = format!(
-        "{}/arrow-ipc-stream/integration/{}/{}.stream",
-        testdata, version, path
-    );
+    let filename =
+        format!("{testdata}/arrow-ipc-stream/integration/{version}/{path}.stream");
     println!("Verifying {filename}");
 
     // Compare contents to the expected output format in JSON
diff --git a/arrow-integration-testing/tests/ipc_writer.rs b/arrow-integration-testing/tests/ipc_writer.rs
index a521737fa..40f356b1d 100644
--- a/arrow-integration-testing/tests/ipc_writer.rs
+++ b/arrow-integration-testing/tests/ipc_writer.rs
@@ -143,10 +143,8 @@ fn roundtrip_arrow_file_with_options(
     path: &str,
     options: IpcWriteOptions,
 ) {
-    let filename = format!(
-        "{}/arrow-ipc-stream/integration/{}/{}.arrow_file",
-        testdata, version, path
-    );
+    let filename =
+        format!("{testdata}/arrow-ipc-stream/integration/{version}/{path}.arrow_file");
     println!("Verifying {filename}");
 
     let mut tempfile = tempfile::tempfile().unwrap();
@@ -222,10 +220,8 @@ fn roundtrip_arrow_stream_with_options(
     path: &str,
     options: IpcWriteOptions,
 ) {
-    let filename = format!(
-        "{}/arrow-ipc-stream/integration/{}/{}.stream",
-        testdata, version, path
-    );
+    let filename =
+        format!("{testdata}/arrow-ipc-stream/integration/{version}/{path}.stream");
     println!("Verifying {filename}");
 
     let mut tempfile = tempfile::tempfile().unwrap();
diff --git a/arrow-ipc/src/compression.rs b/arrow-ipc/src/compression.rs
index f64d14441..e6e203bc0 100644
--- a/arrow-ipc/src/compression.rs
+++ b/arrow-ipc/src/compression.rs
@@ -37,8 +37,7 @@ impl TryFrom<CompressionType> for CompressionCodec {
             CompressionType::ZSTD => Ok(CompressionCodec::Zstd),
             CompressionType::LZ4_FRAME => Ok(CompressionCodec::Lz4Frame),
             other_type => Err(ArrowError::NotYetImplemented(format!(
-                "compression type {:?} not supported ",
-                other_type
+                "compression type {other_type:?} not supported "
             ))),
         }
     }
diff --git a/arrow-ipc/src/convert.rs b/arrow-ipc/src/convert.rs
index 305bb943c..c5681b0c8 100644
--- a/arrow-ipc/src/convert.rs
+++ b/arrow-ipc/src/convert.rs
@@ -185,8 +185,7 @@ pub fn try_schema_from_ipc_buffer(buffer: &[u8]) -> Result<Schema, ArrowError> {
         let msg =
             size_prefixed_root_as_message(&buffer[begin_offset..]).map_err(|err| {
                 ArrowError::ParseError(format!(
-                    "Unable to convert flight info to a message: {}",
-                    err
+                    "Unable to convert flight info to a message: {err}"
                 ))
             })?;
         let ipc_schema = msg.header_as_schema().ok_or_else(|| {
@@ -259,7 +258,7 @@ pub(crate) fn get_data_type(field: crate::Field, may_be_dictionary: bool) -> Dat
                 crate::Precision::HALF => DataType::Float16,
                 crate::Precision::SINGLE => DataType::Float32,
                 crate::Precision::DOUBLE => DataType::Float64,
-                z => panic!("FloatingPoint type with precision of {:?} not supported", z),
+                z => panic!("FloatingPoint type with precision of {z:?} not supported"),
             }
         }
         crate::Type::Date => {
@@ -267,7 +266,7 @@ pub(crate) fn get_data_type(field: crate::Field, may_be_dictionary: bool) -> Dat
             match date.unit() {
                 crate::DateUnit::DAY => DataType::Date32,
                 crate::DateUnit::MILLISECOND => DataType::Date64,
-                z => panic!("Date type with unit of {:?} not supported", z),
+                z => panic!("Date type with unit of {z:?} not supported"),
             }
         }
         crate::Type::Time => {
@@ -305,7 +304,7 @@ pub(crate) fn get_data_type(field: crate::Field, may_be_dictionary: bool) -> Dat
                 crate::TimeUnit::NANOSECOND => {
                     DataType::Timestamp(TimeUnit::Nanosecond, timezone)
                 }
-                z => panic!("Timestamp type with unit of {:?} not supported", z),
+                z => panic!("Timestamp type with unit of {z:?} not supported"),
             }
         }
         crate::Type::Interval => {
@@ -320,7 +319,7 @@ pub(crate) fn get_data_type(field: crate::Field, may_be_dictionary: bool) -> Dat
                 crate::IntervalUnit::MONTH_DAY_NANO => {
                     DataType::Interval(IntervalUnit::MonthDayNano)
                 }
-                z => panic!("Interval type with unit of {:?} unsupported", z),
+                z => panic!("Interval type with unit of {z:?} unsupported"),
             }
         }
         crate::Type::Duration => {
@@ -330,7 +329,7 @@ pub(crate) fn get_data_type(field: crate::Field, may_be_dictionary: bool) -> Dat
                 crate::TimeUnit::MILLISECOND => DataType::Duration(TimeUnit::Millisecond),
                 crate::TimeUnit::MICROSECOND => DataType::Duration(TimeUnit::Microsecond),
                 crate::TimeUnit::NANOSECOND => DataType::Duration(TimeUnit::Nanosecond),
-                z => panic!("Duration type with unit of {:?} unsupported", z),
+                z => panic!("Duration type with unit of {z:?} unsupported"),
             }
         }
         crate::Type::List => {
@@ -387,7 +386,7 @@ pub(crate) fn get_data_type(field: crate::Field, may_be_dictionary: bool) -> Dat
                     fsb.scale().try_into().unwrap(),
                 )
             } else {
-                panic!("Unexpected decimal bit width {}", bit_width)
+                panic!("Unexpected decimal bit width {bit_width}")
             }
         }
         crate::Type::Union => {
@@ -396,7 +395,7 @@ pub(crate) fn get_data_type(field: crate::Field, may_be_dictionary: bool) -> Dat
             let union_mode = match union.mode() {
                 crate::UnionMode::Dense => UnionMode::Dense,
                 crate::UnionMode::Sparse => UnionMode::Sparse,
-                mode => panic!("Unexpected union mode: {:?}", mode),
+                mode => panic!("Unexpected union mode: {mode:?}"),
             };
 
             let mut fields = vec![];
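
One limit of the inlined form explains why some call sites in this patch keep positional arguments (for example fsb.scale().try_into().unwrap() above, or field.name() in the JSON decoder later on): only bare identifiers can be captured in the braces. Field accesses, method calls, and other expressions still have to be passed positionally or bound to a local first. A sketch with hypothetical names:

    struct Field {
        name: String,
    }

    impl Field {
        fn name(&self) -> &str {
            &self.name
        }
    }

    fn main() {
        let field = Field { name: "f0".to_string() };

        // Does not compile: expressions cannot be captured inline.
        // let msg = format!("field {field.name()} not found");

        // Either keep the expression as a positional argument...
        let msg = format!("field {} not found", field.name());

        // ...or bind it to a plain identifier and capture that.
        let name = field.name();
        let msg2 = format!("field {name} not found");

        assert_eq!(msg, msg2);
    }
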
diff --git a/arrow-ipc/src/reader.rs b/arrow-ipc/src/reader.rs
index 231f72910..17f521e42 100644
--- a/arrow-ipc/src/reader.rs
+++ b/arrow-ipc/src/reader.rs
@@ -203,13 +203,12 @@ fn create_array(
             ];
 
             let dict_id = field.dict_id().ok_or_else(|| {
-                ArrowError::IoError(format!("Field {} does not have dict id", field))
+                ArrowError::IoError(format!("Field {field} does not have dict id"))
             })?;
 
             let value_array = dictionaries_by_id.get(&dict_id).ok_or_else(|| {
                 ArrowError::IoError(format!(
-                    "Cannot find a dictionary batch with dict id: {}",
-                    dict_id
+                    "Cannot find a dictionary batch with dict id: {dict_id}"
                 ))
             })?;
             node_index += 1;
@@ -283,8 +282,7 @@ fn create_array(
 
             if length != null_count {
                 return Err(ArrowError::IoError(format!(
-                    "Field {} of NullArray has unequal null_count {} and len {}",
-                    field, null_count, length
+                    "Field {field} of NullArray has unequal null_count {null_count} and len {length}"
                 )));
             }
 
@@ -797,7 +795,7 @@ impl<R: Read + Seek> FileReader<R> {
         reader.read_exact(&mut footer_data)?;
 
         let footer = crate::root_as_footer(&footer_data[..]).map_err(|err| {
-            ArrowError::IoError(format!("Unable to get root as footer: {:?}", err))
+            ArrowError::IoError(format!("Unable to get root as footer: {err:?}"))
         })?;
 
         let blocks = footer.recordBatches().ok_or_else(|| {
@@ -828,10 +826,7 @@ impl<R: Read + Seek> FileReader<R> {
                 reader.read_exact(&mut block_data)?;
 
                 let message = crate::root_as_message(&block_data[..]).map_err(|err| {
-                    ArrowError::IoError(format!(
-                        "Unable to get root as message: {:?}",
-                        err
-                    ))
+                    ArrowError::IoError(format!("Unable to get root as message: {err:?}"))
                 })?;
 
                 match message.header_type() {
@@ -856,8 +851,7 @@ impl<R: Read + Seek> FileReader<R> {
                     }
                     t => {
                         return Err(ArrowError::IoError(format!(
-                            "Expecting DictionaryBatch in dictionary blocks, found {:?}.",
-                            t
+                            "Expecting DictionaryBatch in dictionary blocks, found {t:?}."
                         )));
                     }
                 }
@@ -925,7 +919,7 @@ impl<R: Read + Seek> FileReader<R> {
         let mut block_data = vec![0; meta_len as usize];
         self.reader.read_exact(&mut block_data)?;
         let message = crate::root_as_message(&block_data[..]).map_err(|err| {
-            ArrowError::IoError(format!("Unable to get root as footer: {:?}", err))
+            ArrowError::IoError(format!("Unable to get root as footer: {err:?}"))
         })?;
 
         // some old test data's footer metadata is not set, so we account for that
@@ -968,7 +962,7 @@ impl<R: Read + Seek> FileReader<R> {
                 Ok(None)
             }
             t => Err(ArrowError::IoError(format!(
-                "Reading types other than record batches not yet supported, unable to read {:?}", t
+                "Reading types other than record batches not yet supported, unable to read {t:?}"
             ))),
         }
     }
@@ -1054,7 +1048,7 @@ impl<R: Read> StreamReader<R> {
         reader.read_exact(&mut meta_buffer)?;
 
         let message = crate::root_as_message(meta_buffer.as_slice()).map_err(|err| {
-            ArrowError::IoError(format!("Unable to get root as message: {:?}", err))
+            ArrowError::IoError(format!("Unable to get root as message: {err:?}"))
         })?;
         // message header is a Schema, so read it
         let ipc_schema: crate::Schema = message.header_as_schema().ok_or_else(|| {
@@ -1133,7 +1127,7 @@ impl<R: Read> StreamReader<R> {
 
         let vecs = &meta_buffer.to_vec();
         let message = crate::root_as_message(vecs).map_err(|err| {
-            ArrowError::IoError(format!("Unable to get root as message: {:?}", err))
+            ArrowError::IoError(format!("Unable to get root as message: {err:?}"))
         })?;
 
         match message.header_type() {
@@ -1173,7 +1167,7 @@ impl<R: Read> StreamReader<R> {
                 Ok(None)
             }
             t => Err(ArrowError::IoError(
-                format!("Reading types other than record batches not yet supported, unable to read {:?} ", t)
+                format!("Reading types other than record batches not yet supported, unable to read {t:?} ")
             )),
         }
     }
diff --git a/arrow-ipc/src/writer.rs b/arrow-ipc/src/writer.rs
index ec3cba64a..ea6eb360e 100644
--- a/arrow-ipc/src/writer.rs
+++ b/arrow-ipc/src/writer.rs
@@ -118,8 +118,7 @@ impl IpcWriteOptions {
                 }
             }
             z => Err(ArrowError::InvalidArgumentError(format!(
-                "Unsupported crate::MetadataVersion {:?}",
-                z
+                "Unsupported crate::MetadataVersion {z:?}"
             ))),
         }
     }
@@ -962,7 +961,7 @@ fn write_continuation<W: Write>(
             writer.write_all(&CONTINUATION_MARKER)?;
             writer.write_all(&total_len.to_le_bytes()[..])?;
         }
-        z => panic!("Unsupported crate::MetadataVersion {:?}", z),
+        z => panic!("Unsupported crate::MetadataVersion {z:?}"),
     };
 
     writer.flush()?;
@@ -1296,8 +1295,7 @@ fn write_buffer(
     .try_into()
     .map_err(|e| {
         ArrowError::InvalidArgumentError(format!(
-            "Could not convert compressed size to i64: {}",
-            e
+            "Could not convert compressed size to i64: {e}"
         ))
     })?;
 
diff --git a/arrow-json/src/raw/mod.rs b/arrow-json/src/raw/mod.rs
index 9ffa7d213..267c8bebc 100644
--- a/arrow-json/src/raw/mod.rs
+++ b/arrow-json/src/raw/mod.rs
@@ -281,9 +281,9 @@ fn make_decoder(
         DataType::LargeList(_) => Ok(Box::new(ListArrayDecoder::<i64>::new(data_type, is_nullable)?)),
         DataType::Struct(_) => Ok(Box::new(StructArrayDecoder::new(data_type, is_nullable)?)),
         DataType::Binary | DataType::LargeBinary | DataType::FixedSizeBinary(_) => {
-            Err(ArrowError::JsonError(format!("{} is not supported by JSON", data_type)))
+            Err(ArrowError::JsonError(format!("{data_type} is not supported by JSON")))
         }
-        d => Err(ArrowError::NotYetImplemented(format!("Support for {} in JSON reader", d)))
+        d => Err(ArrowError::NotYetImplemented(format!("Support for {d} in JSON reader")))
     }
 }
 
diff --git a/arrow-json/src/raw/tape.rs b/arrow-json/src/raw/tape.rs
index 6ca4e2d3f..b0c814c76 100644
--- a/arrow-json/src/raw/tape.rs
+++ b/arrow-json/src/raw/tape.rs
@@ -562,7 +562,7 @@ fn err(b: u8, ctx: &str) -> ArrowError {
 fn char_from_surrogate_pair(low: u16, high: u16) -> Result<char, ArrowError> {
     let n = (((high - 0xD800) as u32) << 10 | (low - 0xDC00) as u32) + 0x1_0000;
     char::from_u32(n).ok_or_else(|| {
-        ArrowError::JsonError(format!("Invalid UTF-16 surrogate pair {}", n))
+        ArrowError::JsonError(format!("Invalid UTF-16 surrogate pair {n}"))
     })
 }
 
diff --git a/arrow-json/src/reader.rs b/arrow-json/src/reader.rs
index c2647ebfc..1d4cfc740 100644
--- a/arrow-json/src/reader.rs
+++ b/arrow-json/src/reader.rs
@@ -46,7 +46,7 @@
 //! let batch = json.next().unwrap().unwrap();
 //! ```
 
-use std::io::{BufRead, BufReader, Read, Seek, SeekFrom};
+use std::io::{BufRead, BufReader, Read, Seek};
 use std::sync::Arc;
 
 use indexmap::map::IndexMap as HashMap;
@@ -104,8 +104,7 @@ impl InferredType {
             // incompatible types
             (s, o) => {
                 return Err(ArrowError::JsonError(format!(
-                    "Incompatible type found during schema inference: {:?} v.s. {:?}",
-                    s, o,
+                    "Incompatible type found during schema inference: {s:?} v.s. {o:?}",
                 )));
             }
         }
@@ -228,8 +227,7 @@ impl<'a, R: Read> Iterator for ValueIter<'a, R> {
                 }
                 Err(e) => {
                     return Some(Err(ArrowError::JsonError(format!(
-                        "Failed to read JSON record: {}",
-                        e
+                        "Failed to read JSON record: {e}"
                     ))));
                 }
                 _ => {
@@ -241,7 +239,7 @@ impl<'a, R: Read> Iterator for ValueIter<'a, R> {
 
                     self.record_count += 1;
                     return Some(serde_json::from_str(trimmed_s).map_err(|e| {
-                        ArrowError::JsonError(format!("Not valid JSON: {}", e))
+                        ArrowError::JsonError(format!("Not valid JSON: {e}"))
                     }));
                 }
             }
@@ -275,7 +273,7 @@ pub fn infer_json_schema_from_seekable<R: Read + Seek>(
 ) -> Result<Schema, ArrowError> {
     let schema = infer_json_schema(reader, max_read_records);
     // return the reader seek back to the start
-    reader.seek(SeekFrom::Start(0))?;
+    reader.rewind()?;
 
     schema
 }
@@ -336,8 +334,7 @@ fn set_object_scalar_field_type(
             Ok(())
         }
         t => Err(ArrowError::JsonError(format!(
-            "Expected scalar or scalar array JSON type, found: {:?}",
-            t,
+            "Expected scalar or scalar array JSON type, found: {t:?}",
         ))),
     }
 }
@@ -363,8 +360,7 @@ fn infer_scalar_array_type(array: &[Value]) -> Result<InferredType, ArrowError>
             }
             Value::Array(_) | Value::Object(_) => {
                 return Err(ArrowError::JsonError(format!(
-                    "Expected scalar value for scalar array, got: {:?}",
-                    v
+                    "Expected scalar value for scalar array, got: {v:?}"
                 )));
             }
         }
@@ -383,8 +379,7 @@ fn infer_nested_array_type(array: &[Value]) -> Result<InferredType, ArrowError>
             }
             x => {
                 return Err(ArrowError::JsonError(format!(
-                    "Got non array element in nested array: {:?}",
-                    x
+                    "Got non array element in nested array: {x:?}"
                 )));
             }
         }
@@ -403,8 +398,7 @@ fn infer_struct_array_type(array: &[Value]) -> Result<InferredType, ArrowError>
             }
             _ => {
                 return Err(ArrowError::JsonError(format!(
-                    "Expected struct value for struct array, got: {:?}",
-                    v
+                    "Expected struct value for struct array, got: {v:?}"
                 )));
             }
         }
@@ -474,8 +468,7 @@ fn collect_field_types_from_object(
                     }
                     t => {
                         return Err(ArrowError::JsonError(format!(
-                            "Expected array json type, found: {:?}",
-                            t,
+                            "Expected array json type, found: {t:?}",
                         )));
                     }
                 }
@@ -509,8 +502,7 @@ fn collect_field_types_from_object(
                     }
                     t => {
                         return Err(ArrowError::JsonError(format!(
-                            "Expected object json type, found: {:?}",
-                            t,
+                            "Expected object json type, found: {t:?}",
                         )));
                     }
                 }
@@ -547,8 +539,7 @@ where
             }
             value => {
                 return Err(ArrowError::JsonError(format!(
-                    "Expected JSON record to be an object, found {:?}",
-                    value
+                    "Expected JSON record to be an object, found {value:?}"
                 )));
             }
         };
@@ -698,8 +689,7 @@ impl Decoder {
                 Value::Object(_) => rows.push(v),
                 _ => {
                     return Err(ArrowError::JsonError(format!(
-                        "Row needs to be of type object, got: {:?}",
-                        v
+                        "Row needs to be of type object, got: {v:?}"
                     )));
                 }
             }
@@ -803,8 +793,7 @@ impl Decoder {
                 self.list_array_string_array_builder::<UInt64Type>(&dtype, col_name, rows)
             }
             ref e => Err(ArrowError::JsonError(format!(
-                "Data type is currently not supported for dictionaries in list : {:?}",
-                e
+                "Data type is currently not supported for dictionaries in list : {e:?}"
             ))),
         }
     }
@@ -832,8 +821,7 @@ impl Decoder {
             }
             e => {
                 return Err(ArrowError::JsonError(format!(
-                    "Nested list data builder type is not supported: {:?}",
-                    e
+                    "Nested list data builder type is not supported: {e:?}"
                 )))
             }
         };
@@ -905,8 +893,7 @@ impl Decoder {
                     }
                     e => {
                         return Err(ArrowError::JsonError(format!(
-                            "Nested list data builder type is not supported: {:?}",
-                            e
+                            "Nested list data builder type is not supported: {e:?}"
                         )))
                     }
                 }
@@ -1174,8 +1161,7 @@ impl Decoder {
             }
             datatype => {
                 return Err(ArrowError::JsonError(format!(
-                    "Nested list of {:?} not supported",
-                    datatype
+                    "Nested list of {datatype:?} not supported"
                 )));
             }
         };
@@ -1288,8 +1274,7 @@ impl Decoder {
                                 field.name(),
                             ),
                         t => Err(ArrowError::JsonError(format!(
-                            "TimeUnit {:?} not supported with Time64",
-                            t
+                            "TimeUnit {t:?} not supported with Time64"
                         ))),
                     },
                     DataType::Time32(unit) => match unit {
@@ -1304,8 +1289,7 @@ impl Decoder {
                                 field.name(),
                             ),
                         t => Err(ArrowError::JsonError(format!(
-                            "TimeUnit {:?} not supported with Time32",
-                            t
+                            "TimeUnit {t:?} not supported with Time32"
                         ))),
                     },
                     DataType::Utf8 => Ok(Arc::new(
@@ -2168,7 +2152,7 @@ mod tests {
         let mut file = File::open("test/data/mixed_arrays.json.gz").unwrap();
         let mut reader = BufReader::new(GzDecoder::new(&file));
         let schema = infer_json_schema(&mut reader, None).unwrap();
-        file.seek(SeekFrom::Start(0)).unwrap();
+        file.rewind().unwrap();
 
         let reader = BufReader::new(GzDecoder::new(&file));
         let options = DecoderOptions::new().with_batch_size(64);
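
The two seek changes in this file are a separate clippy fix: Seek::rewind() (stable since Rust 1.55) is equivalent to seek(SeekFrom::Start(0)) with the returned position discarded, and clippy suggests it — likely via the seek_to_start_instead_of_rewind lint. A minimal sketch:

    use std::io::{Cursor, Read, Seek, SeekFrom};

    fn main() -> std::io::Result<()> {
        let mut reader = Cursor::new(b"abc".to_vec());

        let mut buf = String::new();
        reader.read_to_string(&mut buf)?;

        // Before: seek back to the start, ignoring the returned offset.
        reader.seek(SeekFrom::Start(0))?;

        // After: rewind() says the same thing without the unused position.
        reader.rewind()?;

        Ok(())
    }
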
diff --git a/arrow-json/src/writer.rs b/arrow-json/src/writer.rs
index 9045bd3a7..9d241aed3 100644
--- a/arrow-json/src/writer.rs
+++ b/arrow-json/src/writer.rs
@@ -198,8 +198,7 @@ pub fn array_to_json_array(array: &ArrayRef) -> Result<Vec<Value>, ArrowError> {
             Ok(jsonmaps.into_iter().map(Value::Object).collect())
         }
         t => Err(ArrowError::JsonError(format!(
-            "data type {:?} not supported",
-            t
+            "data type {t:?} not supported"
         ))),
     }
 }
diff --git a/arrow-ord/src/comparison.rs b/arrow-ord/src/comparison.rs
index b8b510a2e..89fbccead 100644
--- a/arrow-ord/src/comparison.rs
+++ b/arrow-ord/src/comparison.rs
@@ -452,7 +452,7 @@ fn try_to_type_result<T>(
     ty: &str,
 ) -> Result<T, ArrowError> {
     value.ok_or_else(|| {
-        ArrowError::ComputeError(format!("Could not convert {} with {}", right, ty,))
+        ArrowError::ComputeError(format!("Could not convert {right} with {ty}",))
     })
 }
 
diff --git a/arrow-ord/src/ord.rs b/arrow-ord/src/ord.rs
index 00b6668ad..dc352c5b7 100644
--- a/arrow-ord/src/ord.rs
+++ b/arrow-ord/src/ord.rs
@@ -123,8 +123,7 @@ where
         Int64 => compare_dict_primitive::<Int64Type, VT>(left, right),
         t => {
             return Err(ArrowError::InvalidArgumentError(format!(
-                "Dictionaries do not support keys of type {:?}",
-                t
+                "Dictionaries do not support keys of type {t:?}"
             )));
         }
     })
@@ -255,15 +254,13 @@ pub fn build_compare(
                     Int64 => compare_dict_string::<Int64Type>(left, right),
                     lhs => {
                         return Err(ArrowError::InvalidArgumentError(format!(
-                            "Dictionaries do not support keys of type {:?}",
-                            lhs
+                            "Dictionaries do not support keys of type {lhs:?}"
                         )));
                     }
                 },
                 t => {
                     return Err(ArrowError::InvalidArgumentError(format!(
-                        "Dictionaries of value data type {:?} are not supported",
-                        t
+                        "Dictionaries of value data type {t:?} are not supported"
                     )));
                 }
             }
@@ -278,8 +275,7 @@ pub fn build_compare(
         }
         (lhs, _) => {
             return Err(ArrowError::InvalidArgumentError(format!(
-                "The data type type {:?} has no natural order",
-                lhs
+                "The data type type {lhs:?} has no natural order"
             )));
         }
     })
diff --git a/arrow-ord/src/sort.rs b/arrow-ord/src/sort.rs
index d13a7a03d..f36e91d64 100644
--- a/arrow-ord/src/sort.rs
+++ b/arrow-ord/src/sort.rs
@@ -282,8 +282,7 @@ pub fn sort_to_indices(
             }
             t => {
                 return Err(ArrowError::ComputeError(format!(
-                    "Sort not supported for list type {:?}",
-                    t
+                    "Sort not supported for list type {t:?}"
                 )));
             }
         },
@@ -310,8 +309,7 @@ pub fn sort_to_indices(
             }
             t => {
                 return Err(ArrowError::ComputeError(format!(
-                    "Sort not supported for list type {:?}",
-                    t
+                    "Sort not supported for list type {t:?}"
                 )));
             }
         },
@@ -347,11 +345,11 @@ pub fn sort_to_indices(
                         sort_string_dictionary::<_>(values, &value_indices_map, v, n, &options, limit)
                     },
                     t => return Err(ArrowError::ComputeError(format!(
-                        "Unsupported dictionary value type {}", t
+                        "Unsupported dictionary value type {t}"
                     ))),
                 },
                 t => return Err(ArrowError::ComputeError(format!(
-                    "Unsupported datatype {}", t
+                    "Unsupported datatype {t}"
                 ))),
             )
         }
@@ -361,8 +359,7 @@ pub fn sort_to_indices(
         DataType::LargeBinary => sort_binary::<i64>(values, v, n, &options, limit),
         t => {
             return Err(ArrowError::ComputeError(format!(
-                "Sort not supported for data type {:?}",
-                t
+                "Sort not supported for data type {t:?}"
             )));
         }
     })
diff --git a/arrow-row/src/lib.rs b/arrow-row/src/lib.rs
index eb9dc2984..1d54a008f 100644
--- a/arrow-row/src/lib.rs
+++ b/arrow-row/src/lib.rs
@@ -578,8 +578,7 @@ impl RowConverter {
     pub fn new(fields: Vec<SortField>) -> Result<Self, ArrowError> {
         if !Self::supports_fields(&fields) {
             return Err(ArrowError::NotYetImplemented(format!(
-                "Row format support not yet implemented for: {:?}",
-                fields
+                "Row format support not yet implemented for: {fields:?}"
             )));
         }
 
diff --git a/arrow-schema/src/datatype.rs b/arrow-schema/src/datatype.rs
index 1e5c1321c..78ad0258d 100644
--- a/arrow-schema/src/datatype.rs
+++ b/arrow-schema/src/datatype.rs
@@ -299,7 +299,7 @@ pub enum UnionMode {
 
 impl fmt::Display for DataType {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }
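
The DataType change above is the same lint applied to a common idiom: a Display impl that simply reuses the type's derived Debug output, now written {self:?} instead of passing self positionally. A generic sketch of the idiom with a made-up type:

    use std::fmt;

    #[derive(Debug)]
    struct UnitType;

    impl fmt::Display for UnitType {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            // Delegate Display to the derived Debug representation.
            write!(f, "{self:?}")
        }
    }

    fn main() {
        let t = UnitType;
        assert_eq!(t.to_string(), "UnitType");
        println!("{t}"); // prints: UnitType
    }
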
 
diff --git a/arrow-schema/src/error.rs b/arrow-schema/src/error.rs
index 6213af8bc..cd236c087 100644
--- a/arrow-schema/src/error.rs
+++ b/arrow-schema/src/error.rs
@@ -76,23 +76,23 @@ impl Display for ArrowError {
                 write!(f, "Not yet implemented: {}", &source)
             }
             ArrowError::ExternalError(source) => write!(f, "External error: {}", &source),
-            ArrowError::CastError(desc) => write!(f, "Cast error: {}", desc),
-            ArrowError::MemoryError(desc) => write!(f, "Memory error: {}", desc),
-            ArrowError::ParseError(desc) => write!(f, "Parser error: {}", desc),
-            ArrowError::SchemaError(desc) => write!(f, "Schema error: {}", desc),
-            ArrowError::ComputeError(desc) => write!(f, "Compute error: {}", desc),
+            ArrowError::CastError(desc) => write!(f, "Cast error: {desc}"),
+            ArrowError::MemoryError(desc) => write!(f, "Memory error: {desc}"),
+            ArrowError::ParseError(desc) => write!(f, "Parser error: {desc}"),
+            ArrowError::SchemaError(desc) => write!(f, "Schema error: {desc}"),
+            ArrowError::ComputeError(desc) => write!(f, "Compute error: {desc}"),
             ArrowError::DivideByZero => write!(f, "Divide by zero error"),
-            ArrowError::CsvError(desc) => write!(f, "Csv error: {}", desc),
-            ArrowError::JsonError(desc) => write!(f, "Json error: {}", desc),
-            ArrowError::IoError(desc) => write!(f, "Io error: {}", desc),
+            ArrowError::CsvError(desc) => write!(f, "Csv error: {desc}"),
+            ArrowError::JsonError(desc) => write!(f, "Json error: {desc}"),
+            ArrowError::IoError(desc) => write!(f, "Io error: {desc}"),
             ArrowError::InvalidArgumentError(desc) => {
-                write!(f, "Invalid argument error: {}", desc)
+                write!(f, "Invalid argument error: {desc}")
             }
             ArrowError::ParquetError(desc) => {
-                write!(f, "Parquet argument error: {}", desc)
+                write!(f, "Parquet argument error: {desc}")
             }
             ArrowError::CDataInterface(desc) => {
-                write!(f, "C Data interface error: {}", desc)
+                write!(f, "C Data interface error: {desc}")
             }
             ArrowError::DictionaryKeyOverflowError => {
                 write!(f, "Dictionary key bigger than the key type")
diff --git a/arrow-schema/src/field.rs b/arrow-schema/src/field.rs
index dc3ab3d62..8dcb8cea9 100644
--- a/arrow-schema/src/field.rs
+++ b/arrow-schema/src/field.rs
@@ -476,7 +476,7 @@ impl Field {
 // TODO: improve display with crate https://crates.io/crates/derive_more ?
 impl std::fmt::Display for Field {
     fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }
 
diff --git a/arrow-schema/src/schema.rs b/arrow-schema/src/schema.rs
index e45cedfb6..b7971027f 100644
--- a/arrow-schema/src/schema.rs
+++ b/arrow-schema/src/schema.rs
@@ -151,8 +151,7 @@ impl Schema {
                         if old_val != &value {
                             return Err(ArrowError::SchemaError(format!(
                                 "Fail to merge schema due to conflicting metadata. \
-                                         Key '{}' has different values '{}' and '{}'",
-                                key, old_val, value
+                                         Key '{key}' has different values '{old_val}' and '{value}'"
                             )));
                         }
                     }
@@ -212,8 +211,7 @@ impl Schema {
                 let valid_fields: Vec<String> =
                     self.fields.iter().map(|f| f.name().clone()).collect();
                 ArrowError::SchemaError(format!(
-                    "Unable to get field named \"{}\". Valid fields: {:?}",
-                    name, valid_fields
+                    "Unable to get field named \"{name}\". Valid fields: {valid_fields:?}"
                 ))
             })
     }
@@ -764,9 +762,7 @@ mod tests {
         let expected = "Fail to merge schema due to conflicting metadata. Key 'foo' has different values 'bar' and 'baz'";
         assert!(
             res.to_string().contains(expected),
-            "Could not find expected string '{}' in '{}'",
-            expected,
-            res
+            "Could not find expected string '{expected}' in '{res}'"
         );
     }
 }
diff --git a/arrow-select/src/concat.rs b/arrow-select/src/concat.rs
index cff8fd25b..be6b0a063 100644
--- a/arrow-select/src/concat.rs
+++ b/arrow-select/src/concat.rs
@@ -106,8 +106,7 @@ pub fn concat_batches<'a>(
         .find(|&(_, batch)| batch.schema() != *schema)
     {
         return Err(ArrowError::InvalidArgumentError(format!(
-            "batches[{}] schema is different with argument schema.",
-            i
+            "batches[{i}] schema is different with argument schema."
         )));
     }
     let field_num = schema.fields().len();
@@ -555,8 +554,7 @@ mod tests {
         assert_eq!(
             combined.values(),
             &(Arc::new(StringArray::from(vec!["a", "b", "c"])) as ArrayRef),
-            "Actual: {:#?}",
-            combined
+            "Actual: {combined:#?}"
         );
 
         assert_eq!(
diff --git a/arrow-select/src/take.rs b/arrow-select/src/take.rs
index 9fffa0b5f..d8989fa48 100644
--- a/arrow-select/src/take.rs
+++ b/arrow-select/src/take.rs
@@ -100,7 +100,7 @@ where
                 })?;
                 if ix >= len {
                     return Err(ArrowError::ComputeError(
-                        format!("Array index out of bounds, cannot get item at index {} from {} entries", ix, len))
+                        format!("Array index out of bounds, cannot get item at index {ix} from {len} entries"))
                     );
                 }
                 Ok(())
@@ -112,7 +112,7 @@ where
                 })?;
                 if ix >= len {
                     return Err(ArrowError::ComputeError(
-                        format!("Array index out of bounds, cannot get item at index {} from {} entries", ix, len))
+                        format!("Array index out of bounds, cannot get item at index {ix} from {len} entries"))
                     );
                 }
                 Ok(())
@@ -340,7 +340,7 @@ where
                 if indices_data.is_null(index) {
                     T::default()
                 } else {
-                    panic!("Out-of-bounds index {}", index)
+                    panic!("Out-of-bounds index {index}")
                 }
             }
         })
diff --git a/arrow-string/src/concat_elements.rs b/arrow-string/src/concat_elements.rs
index e9219fb2d..78fe3a47d 100644
--- a/arrow-string/src/concat_elements.rs
+++ b/arrow-string/src/concat_elements.rs
@@ -101,8 +101,7 @@ pub fn concat_elements_utf8_many<Offset: OffsetSizeTrait>(
     let size = arrays[0].len();
     if !arrays.iter().all(|array| array.len() == size) {
         return Err(ArrowError::ComputeError(format!(
-            "Arrays must have the same length of {}",
-            size,
+            "Arrays must have the same length of {size}",
         )));
     }
 
diff --git a/arrow-string/src/length.rs b/arrow-string/src/length.rs
index f7faa0a61..9651bef27 100644
--- a/arrow-string/src/length.rs
+++ b/arrow-string/src/length.rs
@@ -176,8 +176,7 @@ pub fn length(array: &dyn Array) -> Result<ArrayRef, ArrowError> {
         DataType::Binary => Ok(length_binary::<i32, Int32Type>(array)),
         DataType::LargeBinary => Ok(length_binary::<i64, Int64Type>(array)),
         other => Err(ArrowError::ComputeError(format!(
-            "length not supported for {:?}",
-            other
+            "length not supported for {other:?}"
         ))),
     }
 }
@@ -210,8 +209,7 @@ pub fn bit_length(array: &dyn Array) -> Result<ArrayRef, ArrowError> {
         DataType::Binary => Ok(bit_length_binary::<i32, Int32Type>(array)),
         DataType::LargeBinary => Ok(bit_length_binary::<i64, Int64Type>(array)),
         other => Err(ArrowError::ComputeError(format!(
-            "bit_length not supported for {:?}",
-            other
+            "bit_length not supported for {other:?}"
         ))),
     }
 }
diff --git a/arrow-string/src/like.rs b/arrow-string/src/like.rs
index c9cdb7bab..10a58b3c0 100644
--- a/arrow-string/src/like.rs
+++ b/arrow-string/src/like.rs
@@ -266,10 +266,9 @@ fn like<'a, S: ArrayAccessor<Item = &'a str>>(
     right: S,
 ) -> Result<BooleanArray, ArrowError> {
     regex_like(left, right, false, |re_pattern| {
-        Regex::new(&format!("^{}$", re_pattern)).map_err(|e| {
+        Regex::new(&format!("^{re_pattern}$")).map_err(|e| {
             ArrowError::ComputeError(format!(
-                "Unable to build regex from LIKE pattern: {}",
-                e
+                "Unable to build regex from LIKE pattern: {e}"
             ))
         })
     })
@@ -313,10 +312,9 @@ fn like_scalar_op<'a, F: Fn(bool) -> bool, L: ArrayAccessor<Item = &'a str>>(
         }))
     } else {
         let re_pattern = replace_like_wildcards(right)?;
-        let re = Regex::new(&format!("^{}$", re_pattern)).map_err(|e| {
+        let re = Regex::new(&format!("^{re_pattern}$")).map_err(|e| {
             ArrowError::ComputeError(format!(
-                "Unable to build regex from LIKE pattern: {}",
-                e
+                "Unable to build regex from LIKE pattern: {e}"
             ))
         })?;
 
@@ -397,10 +395,9 @@ fn nlike<'a, S: ArrayAccessor<Item = &'a str>>(
     right: S,
 ) -> Result<BooleanArray, ArrowError> {
     regex_like(left, right, true, |re_pattern| {
-        Regex::new(&format!("^{}$", re_pattern)).map_err(|e| {
+        Regex::new(&format!("^{re_pattern}$")).map_err(|e| {
             ArrowError::ComputeError(format!(
-                "Unable to build regex from LIKE pattern: {}",
-                e
+                "Unable to build regex from LIKE pattern: {e}"
             ))
         })
     })
@@ -445,10 +442,9 @@ fn ilike<'a, S: ArrayAccessor<Item = &'a str>>(
     right: S,
 ) -> Result<BooleanArray, ArrowError> {
     regex_like(left, right, false, |re_pattern| {
-        Regex::new(&format!("(?i)^{}$", re_pattern)).map_err(|e| {
+        Regex::new(&format!("(?i)^{re_pattern}$")).map_err(|e| {
             ArrowError::ComputeError(format!(
-                "Unable to build regex from ILIKE pattern: {}",
-                e
+                "Unable to build regex from ILIKE pattern: {e}"
             ))
         })
     })
@@ -491,11 +487,8 @@ fn ilike_scalar_op<O: OffsetSizeTrait, F: Fn(bool) -> bool>(
     }
 
     let re_pattern = replace_like_wildcards(right)?;
-    let re = Regex::new(&format!("(?i)^{}$", re_pattern)).map_err(|e| {
-        ArrowError::ComputeError(format!(
-            "Unable to build regex from ILIKE pattern: {}",
-            e
-        ))
+    let re = Regex::new(&format!("(?i)^{re_pattern}$")).map_err(|e| {
+        ArrowError::ComputeError(format!("Unable to build regex from ILIKE pattern: {e}"))
     })?;
 
     Ok(BooleanArray::from_unary(left, |item| op(re.is_match(item))))
@@ -537,10 +530,9 @@ fn nilike<'a, S: ArrayAccessor<Item = &'a str>>(
     right: S,
 ) -> Result<BooleanArray, ArrowError> {
     regex_like(left, right, true, |re_pattern| {
-        Regex::new(&format!("(?i)^{}$", re_pattern)).map_err(|e| {
+        Regex::new(&format!("(?i)^{re_pattern}$")).map_err(|e| {
             ArrowError::ComputeError(format!(
-                "Unable to build regex from ILIKE pattern: {}",
-                e
+                "Unable to build regex from ILIKE pattern: {e}"
             ))
         })
     })
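
For orientation on the like.rs changes: the rewritten strings feed regex::Regex, where the ^...$ anchors match the translated LIKE pattern against the whole input rather than a substring, and the inline (?i) flag makes the ILIKE variants case-insensitive. A standalone sketch against the regex crate — the pattern below is hand-written for illustration, not produced by replace_like_wildcards:

    use regex::Regex;

    fn main() {
        // A LIKE pattern such as `a%` would translate to the regex `a.*`;
        // the anchors make it match the whole string, not a substring.
        let re_pattern = "a.*";

        let like = Regex::new(&format!("^{re_pattern}$")).unwrap();
        assert!(like.is_match("abc"));
        assert!(!like.is_match("xabc"));

        // `(?i)` turns on case-insensitive matching for ILIKE semantics.
        let ilike = Regex::new(&format!("(?i)^{re_pattern}$")).unwrap();
        assert!(ilike.is_match("ABC"));
    }
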
diff --git a/arrow-string/src/regexp.rs b/arrow-string/src/regexp.rs
index ddb47969c..4072d8ba0 100644
--- a/arrow-string/src/regexp.rs
+++ b/arrow-string/src/regexp.rs
@@ -55,7 +55,7 @@ pub fn regexp_is_match_utf8<OffsetSize: OffsetSizeTrait>(
         Some(flags) => Box::new(regex_array.iter().zip(flags.iter()).map(
             |(pattern, flags)| {
                 pattern.map(|pattern| match flags {
-                    Some(flag) => format!("(?{}){}", flag, pattern),
+                    Some(flag) => format!("(?{flag}){pattern}"),
                     None => pattern.to_string(),
                 })
             },
@@ -84,8 +84,7 @@ pub fn regexp_is_match_utf8<OffsetSize: OffsetSizeTrait>(
                         None => {
                             let re = Regex::new(pattern.as_str()).map_err(|e| {
                                 ArrowError::ComputeError(format!(
-                                    "Regular expression did not compile: {:?}",
-                                    e
+                                    "Regular expression did not compile: {e:?}"
                                 ))
                             })?;
                             patterns.insert(pattern, re.clone());
@@ -127,17 +126,14 @@ pub fn regexp_is_match_utf8_scalar<OffsetSize: OffsetSizeTrait>(
     let mut result = BooleanBufferBuilder::new(array.len());
 
     let pattern = match flag {
-        Some(flag) => format!("(?{}){}", flag, regex),
+        Some(flag) => format!("(?{flag}){regex}"),
         None => regex.to_string(),
     };
     if pattern.is_empty() {
         result.append_n(array.len(), true);
     } else {
         let re = Regex::new(pattern.as_str()).map_err(|e| {
-            ArrowError::ComputeError(format!(
-                "Regular expression did not compile: {:?}",
-                e
-            ))
+            ArrowError::ComputeError(format!("Regular expression did not compile: {e:?}"))
         })?;
         for i in 0..array.len() {
             let value = array.value(i);
@@ -175,7 +171,7 @@ pub fn regexp_match<OffsetSize: OffsetSizeTrait>(
         Some(flags) => Box::new(regex_array.iter().zip(flags.iter()).map(
             |(pattern, flags)| {
                 pattern.map(|pattern| match flags {
-                    Some(value) => format!("(?{}){}", value, pattern),
+                    Some(value) => format!("(?{value}){pattern}"),
                     None => pattern.to_string(),
                 })
             },
@@ -204,8 +200,7 @@ pub fn regexp_match<OffsetSize: OffsetSizeTrait>(
                         None => {
                             let re = Regex::new(pattern.as_str()).map_err(|e| {
                                 ArrowError::ComputeError(format!(
-                                    "Regular expression did not compile: {:?}",
-                                    e
+                                    "Regular expression did not compile: {e:?}"
                                 ))
                             })?;
                             patterns.insert(pattern, re.clone());
diff --git a/arrow-string/src/substring.rs b/arrow-string/src/substring.rs
index ece367553..7d0430477 100644
--- a/arrow-string/src/substring.rs
+++ b/arrow-string/src/substring.rs
@@ -379,8 +379,7 @@ fn utf8_substring<OffsetSize: OffsetSizeTrait>(
                 Ok(offset)
             } else {
                 Err(ArrowError::ComputeError(format!(
-                    "The offset {} is at an invalid utf-8 boundary.",
-                    offset_usize
+                    "The offset {offset_usize} is at an invalid utf-8 boundary."
                 )))
             }
         }
diff --git a/arrow/benches/arithmetic_kernels.rs b/arrow/benches/arithmetic_kernels.rs
index 2aa2e7191..4ed197783 100644
--- a/arrow/benches/arithmetic_kernels.rs
+++ b/arrow/benches/arithmetic_kernels.rs
@@ -33,46 +33,46 @@ fn add_benchmark(c: &mut Criterion) {
         let arr_b = create_primitive_array::<Float32Type>(BATCH_SIZE, null_density);
         let scalar = seedable_rng().gen();
 
-        c.bench_function(&format!("add({})", null_density), |b| {
+        c.bench_function(&format!("add({null_density})"), |b| {
             b.iter(|| criterion::black_box(add(&arr_a, &arr_b).unwrap()))
         });
-        c.bench_function(&format!("add_checked({})", null_density), |b| {
+        c.bench_function(&format!("add_checked({null_density})"), |b| {
             b.iter(|| criterion::black_box(add_checked(&arr_a, &arr_b).unwrap()))
         });
-        c.bench_function(&format!("add_scalar({})", null_density), |b| {
+        c.bench_function(&format!("add_scalar({null_density})"), |b| {
             b.iter(|| criterion::black_box(add_scalar(&arr_a, scalar).unwrap()))
         });
-        c.bench_function(&format!("subtract({})", null_density), |b| {
+        c.bench_function(&format!("subtract({null_density})"), |b| {
             b.iter(|| criterion::black_box(subtract(&arr_a, &arr_b).unwrap()))
         });
-        c.bench_function(&format!("subtract_checked({})", null_density), |b| {
+        c.bench_function(&format!("subtract_checked({null_density})"), |b| {
             b.iter(|| criterion::black_box(subtract_checked(&arr_a, &arr_b).unwrap()))
         });
-        c.bench_function(&format!("subtract_scalar({})", null_density), |b| {
+        c.bench_function(&format!("subtract_scalar({null_density})"), |b| {
             b.iter(|| criterion::black_box(subtract_scalar(&arr_a, scalar).unwrap()))
         });
-        c.bench_function(&format!("multiply({})", null_density), |b| {
+        c.bench_function(&format!("multiply({null_density})"), |b| {
             b.iter(|| criterion::black_box(multiply(&arr_a, &arr_b).unwrap()))
         });
-        c.bench_function(&format!("multiply_checked({})", null_density), |b| {
+        c.bench_function(&format!("multiply_checked({null_density})"), |b| {
             b.iter(|| criterion::black_box(multiply_checked(&arr_a, &arr_b).unwrap()))
         });
-        c.bench_function(&format!("multiply_scalar({})", null_density), |b| {
+        c.bench_function(&format!("multiply_scalar({null_density})"), |b| {
             b.iter(|| criterion::black_box(multiply_scalar(&arr_a, scalar).unwrap()))
         });
-        c.bench_function(&format!("divide({})", null_density), |b| {
+        c.bench_function(&format!("divide({null_density})"), |b| {
             b.iter(|| criterion::black_box(divide(&arr_a, &arr_b).unwrap()))
         });
-        c.bench_function(&format!("divide_checked({})", null_density), |b| {
+        c.bench_function(&format!("divide_checked({null_density})"), |b| {
             b.iter(|| criterion::black_box(divide_checked(&arr_a, &arr_b).unwrap()))
         });
-        c.bench_function(&format!("divide_scalar({})", null_density), |b| {
+        c.bench_function(&format!("divide_scalar({null_density})"), |b| {
             b.iter(|| criterion::black_box(divide_scalar(&arr_a, scalar).unwrap()))
         });
-        c.bench_function(&format!("modulo({})", null_density), |b| {
+        c.bench_function(&format!("modulo({null_density})"), |b| {
             b.iter(|| criterion::black_box(modulus(&arr_a, &arr_b).unwrap()))
         });
-        c.bench_function(&format!("modulo_scalar({})", null_density), |b| {
+        c.bench_function(&format!("modulo_scalar({null_density})"), |b| {
             b.iter(|| criterion::black_box(modulus_scalar(&arr_a, scalar).unwrap()))
         });
     }
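
The benchmark edits show the same capture syntax used to build Criterion benchmark IDs; nothing changes at runtime, the names are just built more readably. A sketch of a hypothetical benchmark, assuming the criterion crate:

    use criterion::{criterion_group, criterion_main, Criterion};

    fn add_benchmark(c: &mut Criterion) {
        for null_density in [0.0, 0.5] {
            // The float is captured straight into the benchmark name.
            c.bench_function(&format!("add({null_density})"), |b| {
                b.iter(|| criterion::black_box(1 + 1))
            });
        }
    }

    criterion_group!(benches, add_benchmark);
    criterion_main!(benches);
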
diff --git a/arrow/benches/csv_reader.rs b/arrow/benches/csv_reader.rs
index 02c8ca2d2..66a956315 100644
--- a/arrow/benches/csv_reader.rs
+++ b/arrow/benches/csv_reader.rs
@@ -37,7 +37,7 @@ fn do_bench(c: &mut Criterion, name: &str, cols: Vec<ArrayRef>) {
     drop(csv);
 
     for batch_size in [128, 1024, 4096] {
-        c.bench_function(&format!("{} - {}", name, batch_size), |b| {
+        c.bench_function(&format!("{name} - {batch_size}"), |b| {
             b.iter(|| {
                 let cursor = Cursor::new(buf.as_slice());
                 let reader = csv::ReaderBuilder::new()
diff --git a/arrow/benches/interleave_kernels.rs b/arrow/benches/interleave_kernels.rs
index 0c3eec60c..2bb430e40 100644
--- a/arrow/benches/interleave_kernels.rs
+++ b/arrow/benches/interleave_kernels.rs
@@ -53,10 +53,9 @@ fn do_bench(
         })
         .collect();
 
-    c.bench_function(
-        &format!("interleave {} {} {:?}", prefix, len, slices),
-        |b| b.iter(|| criterion::black_box(interleave(&values, &indices).unwrap())),
-    );
+    c.bench_function(&format!("interleave {prefix} {len} {slices:?}"), |b| {
+        b.iter(|| criterion::black_box(interleave(&values, &indices).unwrap()))
+    });
 }
 
 fn add_benchmark(c: &mut Criterion) {
diff --git a/arrow/benches/lexsort.rs b/arrow/benches/lexsort.rs
index 5c161ec8d..30dab9a74 100644
--- a/arrow/benches/lexsort.rs
+++ b/arrow/benches/lexsort.rs
@@ -89,16 +89,11 @@ fn do_bench(c: &mut Criterion, columns: &[Column], len: usize) {
         })
         .collect();
 
-    c.bench_function(
-        &format!("lexsort_to_indices({:?}): {}", columns, len),
-        |b| {
-            b.iter(|| {
-                criterion::black_box(lexsort_to_indices(&sort_columns, None).unwrap())
-            })
-        },
-    );
+    c.bench_function(&format!("lexsort_to_indices({columns:?}): {len}"), |b| {
+        b.iter(|| criterion::black_box(lexsort_to_indices(&sort_columns, None).unwrap()))
+    });
 
-    c.bench_function(&format!("lexsort_rows({:?}): {}", columns, len), |b| {
+    c.bench_function(&format!("lexsort_rows({columns:?}): {len}"), |b| {
         b.iter(|| {
             criterion::black_box({
                 let fields = arrays
diff --git a/arrow/benches/row_format.rs b/arrow/benches/row_format.rs
index ac9f3106f..961cf07de 100644
--- a/arrow/benches/row_format.rs
+++ b/arrow/benches/row_format.rs
@@ -36,7 +36,7 @@ fn do_bench(c: &mut Criterion, name: &str, cols: Vec<ArrayRef>) {
         .map(|x| SortField::new(x.data_type().clone()))
         .collect();
 
-    c.bench_function(&format!("convert_columns {}", name), |b| {
+    c.bench_function(&format!("convert_columns {name}"), |b| {
         b.iter(|| {
             let mut converter = RowConverter::new(fields.clone()).unwrap();
             black_box(converter.convert_columns(&cols).unwrap())
@@ -46,11 +46,11 @@ fn do_bench(c: &mut Criterion, name: &str, cols: Vec<ArrayRef>) {
     let mut converter = RowConverter::new(fields).unwrap();
     let rows = converter.convert_columns(&cols).unwrap();
     // using a pre-prepared row converter should be faster than the first time
-    c.bench_function(&format!("convert_columns_prepared {}", name), |b| {
+    c.bench_function(&format!("convert_columns_prepared {name}"), |b| {
         b.iter(|| black_box(converter.convert_columns(&cols).unwrap()));
     });
 
-    c.bench_function(&format!("convert_rows {}", name), |b| {
+    c.bench_function(&format!("convert_rows {name}"), |b| {
         b.iter(|| black_box(converter.convert_rows(&rows).unwrap()));
     });
 }
diff --git a/arrow/benches/string_dictionary_builder.rs b/arrow/benches/string_dictionary_builder.rs
index 411df3d69..424400674 100644
--- a/arrow/benches/string_dictionary_builder.rs
+++ b/arrow/benches/string_dictionary_builder.rs
@@ -37,10 +37,7 @@ fn criterion_benchmark(c: &mut Criterion) {
 
     let mut do_bench = |dict_size: usize, total_size: usize, key_len: usize| {
         group.bench_function(
-            format!(
-                "(dict_size:{}, len:{}, key_len: {})",
-                dict_size, total_size, key_len
-            ),
+            format!("(dict_size:{dict_size}, len:{total_size}, key_len: {key_len})"),
             |b| {
                 let strings = build_strings(dict_size, total_size, key_len);
                 b.iter(|| {
diff --git a/arrow/examples/builders.rs b/arrow/examples/builders.rs
index bacd550bd..312de11b3 100644
--- a/arrow/examples/builders.rs
+++ b/arrow/examples/builders.rs
@@ -52,17 +52,17 @@ fn main() {
     // Build the `PrimitiveArray`
     let primitive_array = primitive_array_builder.finish();
     // Long arrays will have an ellipsis printed in the middle
-    println!("{:?}", primitive_array);
+    println!("{primitive_array:?}");
 
     // Arrays can also be built from `Vec<Option<T>>`. `None`
     // represents a null value in the array.
     let date_array: PrimitiveArray<Date64Type> =
         vec![Some(1550902545147), None, Some(1550902545147)].into();
-    println!("{:?}", date_array);
+    println!("{date_array:?}");
 
     let time_array: PrimitiveArray<Time64NanosecondType> =
         (0..100).collect::<Vec<i64>>().into();
-    println!("{:?}", time_array);
+    println!("{time_array:?}");
 
     // We can build arrays directly from the underlying buffers.
 
@@ -83,7 +83,7 @@ fn main() {
         .build()
         .unwrap();
     let binary_array = StringArray::from(array_data);
-    println!("{:?}", binary_array);
+    println!("{binary_array:?}");
 
     // ListArrays are similar to ByteArrays: they are arrays of other
     // arrays, where each child array is a slice of the underlying
@@ -109,7 +109,7 @@ fn main() {
         .unwrap();
     let list_array = ListArray::from(list_data);
 
-    println!("{:?}", list_array);
+    println!("{list_array:?}");
 
     // StructArrays are arrays of tuples, where each tuple element is
     // from a child array. (In other words, they're like zipping
@@ -128,5 +128,5 @@ fn main() {
             Arc::new(Int32Array::from(vec![42, 28, 19, 31])),
         ),
     ]);
-    println!("{:?}", struct_array);
+    println!("{struct_array:?}");
 }
diff --git a/arrow/examples/collect.rs b/arrow/examples/collect.rs
index d523a8036..5581186db 100644
--- a/arrow/examples/collect.rs
+++ b/arrow/examples/collect.rs
@@ -29,18 +29,18 @@ fn main() {
 
     // Create an Int8Array with 4 values
     let array: Int8Array = vec![1, 2, 3, 4].into_iter().collect();
-    println!("{:?}", array);
+    println!("{array:?}");
 
     // Arrays can also be built from `Vec<Option<T>>`. `None`
     // represents a null value in the array.
     let array: Int8Array = vec![Some(1_i8), Some(2), None, Some(3)]
         .into_iter()
         .collect();
-    println!("{:?}", array);
+    println!("{array:?}");
     assert!(array.is_null(2));
 
     let array: Float32Array = [Some(1.0_f32), Some(2.3), None].into_iter().collect();
-    println!("{:?}", array);
+    println!("{array:?}");
     assert_eq!(array.value(0), 1.0_f32);
     assert_eq!(array.value(1), 2.3_f32);
     assert!(array.is_null(2));
diff --git a/arrow/examples/tensor_builder.rs b/arrow/examples/tensor_builder.rs
index 1ef53920e..ca31679e2 100644
--- a/arrow/examples/tensor_builder.rs
+++ b/arrow/examples/tensor_builder.rs
@@ -39,7 +39,7 @@ fn main() -> Result<()> {
     // storage data
     let tensor = Int32Tensor::try_new(buf, Some(vec![2, 8]), None, None)?;
     println!("Int32 Tensor");
-    println!("{:?}", tensor);
+    println!("{tensor:?}");
 
     // Creating a tensor using float type buffer builder
     let mut builder = Float32BufferBuilder::new(4);
@@ -54,14 +54,14 @@ fn main() -> Result<()> {
     // storage data
     let tensor = Float32Tensor::try_new(buf, Some(vec![2, 2]), None, None)?;
     println!("\nFloat32 Tensor");
-    println!("{:?}", tensor);
+    println!("{tensor:?}");
 
     // In order to build a tensor from an array the function to_byte_slice add the
     // required padding to the elements in the array.
     let buf = Buffer::from(&[0, 1, 2, 3, 4, 5, 6, 7, 9, 10].to_byte_slice());
     let tensor = Int32Tensor::try_new(buf, Some(vec![2, 5]), None, None)?;
     println!("\nInt32 Tensor");
-    println!("{:?}", tensor);
+    println!("{tensor:?}");
 
     Ok(())
 }
diff --git a/arrow/src/datatypes/ffi.rs b/arrow/src/datatypes/ffi.rs
index 37fa85fcf..58cad3d08 100644
--- a/arrow/src/datatypes/ffi.rs
+++ b/arrow/src/datatypes/ffi.rs
@@ -133,8 +133,7 @@ impl TryFrom<&FFI_ArrowSchema> for DataType {
                             }
                             _ => {
                                 return Err(ArrowError::CDataInterface(format!(
-                                    "The decimal pattern \"d:{:?}\" is not supported in the Rust implementation",
-                                    extra
+                                    "The decimal pattern \"d:{extra:?}\" is not supported in the Rust implementation"
                                 )))
                             }
                         }
@@ -203,8 +202,7 @@ impl TryFrom<&FFI_ArrowSchema> for DataType {
                     }
                     _ => {
                         return Err(ArrowError::CDataInterface(format!(
-                            "The datatype \"{:?}\" is still not supported in Rust implementation",
-                            other
+                            "The datatype \"{other:?}\" is still not supported in Rust implementation"
                         )))
                     }
                 }
@@ -304,13 +302,11 @@ fn get_format_string(dtype: &DataType) -> Result<String> {
         DataType::LargeBinary => Ok("Z".to_string()),
         DataType::Utf8 => Ok("u".to_string()),
         DataType::LargeUtf8 => Ok("U".to_string()),
-        DataType::FixedSizeBinary(num_bytes) => Ok(format!("w:{}", num_bytes)),
-        DataType::FixedSizeList(_, num_elems) => Ok(format!("+w:{}", num_elems)),
-        DataType::Decimal128(precision, scale) => {
-            Ok(format!("d:{},{}", precision, scale))
-        }
+        DataType::FixedSizeBinary(num_bytes) => Ok(format!("w:{num_bytes}")),
+        DataType::FixedSizeList(_, num_elems) => Ok(format!("+w:{num_elems}")),
+        DataType::Decimal128(precision, scale) => Ok(format!("d:{precision},{scale}")),
         DataType::Decimal256(precision, scale) => {
-            Ok(format!("d:{},{},256", precision, scale))
+            Ok(format!("d:{precision},{scale},256"))
         }
         DataType::Date32 => Ok("tdD".to_string()),
         DataType::Date64 => Ok("tdm".to_string()),
@@ -322,10 +318,10 @@ fn get_format_string(dtype: &DataType) -> Result<String> {
         DataType::Timestamp(TimeUnit::Millisecond, None) => Ok("tsm:".to_string()),
         DataType::Timestamp(TimeUnit::Microsecond, None) => Ok("tsu:".to_string()),
         DataType::Timestamp(TimeUnit::Nanosecond, None) => Ok("tsn:".to_string()),
-        DataType::Timestamp(TimeUnit::Second, Some(tz)) => Ok(format!("tss:{}", tz)),
-        DataType::Timestamp(TimeUnit::Millisecond, Some(tz)) => Ok(format!("tsm:{}", tz)),
-        DataType::Timestamp(TimeUnit::Microsecond, Some(tz)) => Ok(format!("tsu:{}", tz)),
-        DataType::Timestamp(TimeUnit::Nanosecond, Some(tz)) => Ok(format!("tsn:{}", tz)),
+        DataType::Timestamp(TimeUnit::Second, Some(tz)) => Ok(format!("tss:{tz}")),
+        DataType::Timestamp(TimeUnit::Millisecond, Some(tz)) => Ok(format!("tsm:{tz}")),
+        DataType::Timestamp(TimeUnit::Microsecond, Some(tz)) => Ok(format!("tsu:{tz}")),
+        DataType::Timestamp(TimeUnit::Nanosecond, Some(tz)) => Ok(format!("tsn:{tz}")),
         DataType::Duration(TimeUnit::Second) => Ok("tDs".to_string()),
         DataType::Duration(TimeUnit::Millisecond) => Ok("tDm".to_string()),
         DataType::Duration(TimeUnit::Microsecond) => Ok("tDu".to_string()),
@@ -343,8 +339,7 @@ fn get_format_string(dtype: &DataType) -> Result<String> {
             }
         }
         other => Err(ArrowError::CDataInterface(format!(
-            "The datatype \"{:?}\" is still not supported in Rust implementation",
-            other
+            "The datatype \"{other:?}\" is still not supported in Rust implementation"
         ))),
     }
 }
diff --git a/arrow/src/ffi.rs b/arrow/src/ffi.rs
index 4111b858d..9fcca3c5d 100644
--- a/arrow/src/ffi.rs
+++ b/arrow/src/ffi.rs
@@ -345,8 +345,7 @@ fn bit_width(data_type: &DataType, i: usize) -> Result<usize> {
         (DataType::Timestamp(..), _) |
         (DataType::Duration(..), _) => {
             return Err(ArrowError::CDataInterface(format!(
-                "The datatype \"{:?}\" expects 2 buffers, but requested {}. Please verify that the C data interface is correctly implemented.",
-                data_type, i
+                "The datatype \"{data_type:?}\" expects 2 buffers, but requested {i}. Please verify that the C data interface is correctly implemented."
             )))
         }
         (DataType::FixedSizeBinary(num_bytes), 1) => size_of::<u8>() * (*num_bytes as usize) * 8,
@@ -356,8 +355,7 @@ fn bit_width(data_type: &DataType, i: usize) -> Result<usize> {
         },
         (DataType::FixedSizeBinary(_), _) | (DataType::FixedSizeList(_, _), _) => {
             return Err(ArrowError::CDataInterface(format!(
-                "The datatype \"{:?}\" expects 2 buffers, but requested {}. Please verify that the C data interface is correctly implemented.",
-                data_type, i
+                "The datatype \"{data_type:?}\" expects 2 buffers, but requested {i}. Please verify that the C data interface is correctly implemented."
             )))
         },
         // Variable-size list and map have one i32 buffer.
@@ -367,14 +365,12 @@ fn bit_width(data_type: &DataType, i: usize) -> Result<usize> {
         (DataType::Utf8, 2) | (DataType::Binary, 2) => size_of::<u8>() * 8,
         (DataType::List(_), _) | (DataType::Map(_, _), _) => {
             return Err(ArrowError::CDataInterface(format!(
-                "The datatype \"{:?}\" expects 2 buffers, but requested {}. Please verify that the C data interface is correctly implemented.",
-                data_type, i
+                "The datatype \"{data_type:?}\" expects 2 buffers, but requested {i}. Please verify that the C data interface is correctly implemented."
             )))
         }
         (DataType::Utf8, _) | (DataType::Binary, _) => {
             return Err(ArrowError::CDataInterface(format!(
-                "The datatype \"{:?}\" expects 3 buffers, but requested {}. Please verify that the C data interface is correctly implemented.",
-                data_type, i
+                "The datatype \"{data_type:?}\" expects 3 buffers, but requested {i}. Please verify that the C data interface is correctly implemented."
             )))
         }
         // Variable-sized binaries: have two buffers.
@@ -383,8 +379,7 @@ fn bit_width(data_type: &DataType, i: usize) -> Result<usize> {
         (DataType::LargeUtf8, 2) | (DataType::LargeBinary, 2) | (DataType::LargeList(_), 2)=> size_of::<u8>() * 8,
         (DataType::LargeUtf8, _) | (DataType::LargeBinary, _) | (DataType::LargeList(_), _)=> {
             return Err(ArrowError::CDataInterface(format!(
-                "The datatype \"{:?}\" expects 3 buffers, but requested {}. Please verify that the C data interface is correctly implemented.",
-                data_type, i
+                "The datatype \"{data_type:?}\" expects 3 buffers, but requested {i}. Please verify that the C data interface is correctly implemented."
             )))
         }
         // type ids. UnionArray doesn't have null bitmap so buffer index begins with 0.
@@ -393,28 +388,24 @@ fn bit_width(data_type: &DataType, i: usize) -> Result<usize> {
         (DataType::Union(_, _, UnionMode::Dense), 1) => size_of::<i32>() * 8,
         (DataType::Union(_, _, UnionMode::Sparse), _) => {
             return Err(ArrowError::CDataInterface(format!(
-                "The datatype \"{:?}\" expects 1 buffer, but requested {}. Please verify that the C data interface is correctly implemented.",
-                data_type, i
+                "The datatype \"{data_type:?}\" expects 1 buffer, but requested {i}. Please verify that the C data interface is correctly implemented."
             )))
         }
         (DataType::Union(_, _, UnionMode::Dense), _) => {
             return Err(ArrowError::CDataInterface(format!(
-                "The datatype \"{:?}\" expects 2 buffer, but requested {}. Please verify that the C data interface is correctly implemented.",
-                data_type, i
+                "The datatype \"{data_type:?}\" expects 2 buffer, but requested {i}. Please verify that the C data interface is correctly implemented."
             )))
         }
         (_, 0) => {
            // `bit_width` is not used to compute the length of the null buffer. Types that
            // don't have a null buffer, like UnionArray, should be handled above.
             return Err(ArrowError::CDataInterface(format!(
-                "The datatype \"{:?}\" doesn't expect buffer at index 0. Please verify that the C data interface is correctly implemented.",
-                data_type
+                "The datatype \"{data_type:?}\" doesn't expect buffer at index 0. Please verify that the C data interface is correctly implemented."
             )))
         }
         _ => {
             return Err(ArrowError::CDataInterface(format!(
-                "The datatype \"{:?}\" is still not supported in Rust implementation",
-                data_type
+                "The datatype \"{data_type:?}\" is still not supported in Rust implementation"
             )))
         }
     })
@@ -708,8 +699,7 @@ pub trait ArrowArrayRef {
                         Ok(MutableBuffer::new(0).into())
                     }
                     None => Err(ArrowError::CDataInterface(format!(
-                        "The external buffer at position {} is null.",
-                        index
+                        "The external buffer at position {index} is null."
                     ))),
                 }
             })
diff --git a/arrow/src/ffi_stream.rs b/arrow/src/ffi_stream.rs
index 3a85f2ef6..4313eaaaf 100644
--- a/arrow/src/ffi_stream.rs
+++ b/arrow/src/ffi_stream.rs
@@ -287,8 +287,7 @@ fn get_stream_schema(stream_ptr: *mut FFI_ArrowArrayStream) -> Result<SchemaRef>
         Ok(Arc::new(schema))
     } else {
         Err(ArrowError::CDataInterface(format!(
-            "Cannot get schema from input stream. Error code: {:?}",
-            ret_code
+            "Cannot get schema from input stream. Error code: {ret_code:?}"
         )))
     }
 }
diff --git a/arrow/src/util/data_gen.rs b/arrow/src/util/data_gen.rs
index 8db4b154e..5fc8e4d43 100644
--- a/arrow/src/util/data_gen.rs
+++ b/arrow/src/util/data_gen.rs
@@ -101,8 +101,7 @@ pub fn create_random_array(
             >(size, null_density)),
             _ => {
                 return Err(ArrowError::InvalidArgumentError(format!(
-                    "Unsupported unit {:?} for Time32",
-                    unit
+                    "Unsupported unit {unit:?} for Time32"
                 )))
             }
         },
@@ -115,8 +114,7 @@ pub fn create_random_array(
             >(size, null_density)),
             _ => {
                 return Err(ArrowError::InvalidArgumentError(format!(
-                    "Unsupported unit {:?} for Time64",
-                    unit
+                    "Unsupported unit {unit:?} for Time64"
                 )))
             }
         },
@@ -153,8 +151,7 @@ pub fn create_random_array(
         }
         other => {
             return Err(ArrowError::NotYetImplemented(format!(
-                "Generating random arrays not yet implemented for {:?}",
-                other
+                "Generating random arrays not yet implemented for {other:?}"
             )))
         }
     })
@@ -186,8 +183,7 @@ fn create_random_list_array(
         }
         _ => {
             return Err(ArrowError::InvalidArgumentError(format!(
-                "Cannot create list array for field {:?}",
-                field
+                "Cannot create list array for field {field:?}"
             )))
         }
     };
diff --git a/arrow/src/util/pretty.rs b/arrow/src/util/pretty.rs
index 53ae0fdde..9027a1cdc 100644
--- a/arrow/src/util/pretty.rs
+++ b/arrow/src/util/pretty.rs
@@ -167,7 +167,7 @@ mod tests {
 
         let actual: Vec<&str> = table.lines().collect();
 
-        assert_eq!(expected, actual, "Actual result:\n{}", table);
+        assert_eq!(expected, actual, "Actual result:\n{table}");
 
         Ok(())
     }
@@ -193,7 +193,7 @@ mod tests {
 
         let actual: Vec<&str> = table.lines().collect();
 
-        assert_eq!(expected, actual, "Actual result:\n{}", table);
+        assert_eq!(expected, actual, "Actual result:\n{table}");
 
         Ok(())
     }
@@ -231,7 +231,7 @@ mod tests {
 
         let actual: Vec<&str> = table.lines().collect();
 
-        assert_eq!(expected, actual, "Actual result:\n{:#?}", table);
+        assert_eq!(expected, actual, "Actual result:\n{table:#?}");
     }
 
     #[test]
@@ -264,7 +264,7 @@ mod tests {
 
         let actual: Vec<&str> = table.lines().collect();
 
-        assert_eq!(expected, actual, "Actual result:\n{}", table);
+        assert_eq!(expected, actual, "Actual result:\n{table}");
 
         Ok(())
     }
@@ -304,7 +304,7 @@ mod tests {
 
         let actual: Vec<&str> = table.lines().collect();
 
-        assert_eq!(expected, actual, "Actual result:\n{}", table);
+        assert_eq!(expected, actual, "Actual result:\n{table}");
 
         Ok(())
     }
@@ -337,7 +337,7 @@ mod tests {
 
         let actual: Vec<&str> = table.lines().collect();
 
-        assert_eq!(expected, actual, "Actual result:\n{}", table);
+        assert_eq!(expected, actual, "Actual result:\n{table}");
 
         Ok(())
     }
@@ -669,7 +669,7 @@ mod tests {
         ];
 
         let actual: Vec<&str> = table.lines().collect();
-        assert_eq!(expected, actual, "Actual result:\n{}", table);
+        assert_eq!(expected, actual, "Actual result:\n{table}");
 
         Ok(())
     }
@@ -702,7 +702,7 @@ mod tests {
         ];
 
         let actual: Vec<&str> = table.lines().collect();
-        assert_eq!(expected, actual, "Actual result:\n{}", table);
+        assert_eq!(expected, actual, "Actual result:\n{table}");
 
         Ok(())
     }
@@ -761,7 +761,7 @@ mod tests {
         ];
 
         let actual: Vec<&str> = table.lines().collect();
-        assert_eq!(expected, actual, "Actual result:\n{}", table);
+        assert_eq!(expected, actual, "Actual result:\n{table}");
 
         Ok(())
     }
@@ -983,7 +983,7 @@ mod tests {
         ];
 
         let actual: Vec<&str> = table.lines().collect();
-        assert_eq!(expected, actual, "Actual result:\n{}", table);
+        assert_eq!(expected, actual, "Actual result:\n{table}");
 
         Ok(())
     }
@@ -1024,7 +1024,7 @@ mod tests {
 
         let actual: Vec<&str> = table.lines().collect();
 
-        assert_eq!(expected, actual, "Actual result:\n{}", table);
+        assert_eq!(expected, actual, "Actual result:\n{table}");
 
         Ok(())
     }
@@ -1079,7 +1079,7 @@ mod tests {
 
         let actual: Vec<&str> = table.lines().collect();
 
-        assert_eq!(expected, actual, "Actual result:\n{}", table);
+        assert_eq!(expected, actual, "Actual result:\n{table}");
 
         Ok(())
     }
diff --git a/arrow/src/util/test_util.rs b/arrow/src/util/test_util.rs
index 83107aa79..fd051dea1 100644
--- a/arrow/src/util/test_util.rs
+++ b/arrow/src/util/test_util.rs
@@ -78,7 +78,7 @@ pub fn get_temp_file(file_name: &str, content: &[u8]) -> fs::File {
 pub fn arrow_test_data() -> String {
     match get_data_dir("ARROW_TEST_DATA", "../testing/data") {
         Ok(pb) => pb.display().to_string(),
-        Err(err) => panic!("failed to get arrow data dir: {}", err),
+        Err(err) => panic!("failed to get arrow data dir: {err}"),
     }
 }
 
@@ -100,7 +100,7 @@ pub fn arrow_test_data() -> String {
 pub fn parquet_test_data() -> String {
     match get_data_dir("PARQUET_TEST_DATA", "../parquet-testing/data") {
         Ok(pb) => pb.display().to_string(),
-        Err(err) => panic!("failed to get parquet data dir: {}", err),
+        Err(err) => panic!("failed to get parquet data dir: {err}"),
     }
 }
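
The same capture syntax works in `panic!` and the assert macros, as the two hunks above show. A toy stand-in for these helpers (the `data_dir` function below is hypothetical, using only the standard library):

    use std::env;

    // Hypothetical stand-in for get_data_dir; reads a single env var.
    fn data_dir() -> Result<String, String> {
        env::var("ARROW_TEST_DATA").map_err(|e| format!("ARROW_TEST_DATA: {e}"))
    }

    fn main() {
        match data_dir() {
            Ok(pb) => println!("{pb}"),
            // `err` is captured by the format string, exactly as in the diff.
            Err(err) => panic!("failed to get arrow data dir: {err}"),
        }
    }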
 
diff --git a/object_store/src/aws/credential.rs b/object_store/src/aws/credential.rs
index 3a6976d11..cba55845e 100644
--- a/object_store/src/aws/credential.rs
+++ b/object_store/src/aws/credential.rs
@@ -207,7 +207,7 @@ fn hex_encode(bytes: &[u8]) -> String {
     let mut out = String::with_capacity(bytes.len() * 2);
     for byte in bytes {
         // String writing is infallible
-        let _ = write!(out, "{:02x}", byte);
+        let _ = write!(out, "{byte:02x}");
     }
     out
 }
@@ -397,7 +397,7 @@ async fn instance_creds(
     const CREDENTIALS_PATH: &str = "latest/meta-data/iam/security-credentials";
     const AWS_EC2_METADATA_TOKEN_HEADER: &str = "X-aws-ec2-metadata-token";
 
-    let token_url = format!("{}/latest/api/token", endpoint);
+    let token_url = format!("{endpoint}/latest/api/token");
 
     let token_result = client
         .request(Method::PUT, token_url)
@@ -416,7 +416,7 @@ async fn instance_creds(
         Err(e) => return Err(e.into()),
     };
 
-    let role_url = format!("{}/{}/", endpoint, CREDENTIALS_PATH);
+    let role_url = format!("{endpoint}/{CREDENTIALS_PATH}/");
     let mut role_request = client.request(Method::GET, role_url);
 
     if let Some(token) = &token {
@@ -425,7 +425,7 @@ async fn instance_creds(
 
     let role = role_request.send_retry(retry_config).await?.text().await?;
 
-    let creds_url = format!("{}/{}/{}", endpoint, CREDENTIALS_PATH, role);
+    let creds_url = format!("{endpoint}/{CREDENTIALS_PATH}/{role}");
     let mut creds_request = client.request(Method::GET, creds_url);
     if let Some(token) = &token {
         creds_request = creds_request.header(AWS_EC2_METADATA_TOKEN_HEADER, token);
@@ -483,7 +483,7 @@ async fn web_identity(
     endpoint: &str,
 ) -> Result<TemporaryToken<Arc<AwsCredential>>, StdError> {
     let token = std::fs::read_to_string(token_path)
-        .map_err(|e| format!("Failed to read token file '{}': {}", token_path, e))?;
+        .map_err(|e| format!("Failed to read token file '{token_path}': {e}"))?;
 
     let bytes = client
         .request(Method::POST, endpoint)
@@ -501,7 +501,7 @@ async fn web_identity(
         .await?;
 
     let resp: AssumeRoleResponse = quick_xml::de::from_reader(bytes.reader())
-        .map_err(|e| format!("Invalid AssumeRoleWithWebIdentity response: {}", e))?;
+        .map_err(|e| format!("Invalid AssumeRoleWithWebIdentity response: {e}"))?;
 
     let creds = resp.assume_role_with_web_identity_result.credentials;
     let now = Utc::now();
@@ -677,7 +677,7 @@ mod tests {
 
         // Verify only allows IMDSv2
         let resp = client
-            .request(Method::GET, format!("{}/latest/meta-data/ami-id", endpoint))
+            .request(Method::GET, format!("{endpoint}/latest/meta-data/ami-id"))
             .send()
             .await
             .unwrap();
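
Format specifiers combine freely with captured identifiers, as `{byte:02x}` above shows. A self-contained sketch of the hex_encode pattern; writing into a `String` cannot fail, so the `write!` result is deliberately discarded:

    use std::fmt::Write;

    fn hex_encode(bytes: &[u8]) -> String {
        let mut out = String::with_capacity(bytes.len() * 2);
        for byte in bytes {
            // Two-digit, zero-padded lowercase hex per byte.
            let _ = write!(out, "{byte:02x}");
        }
        out
    }

    fn main() {
        assert_eq!(hex_encode(&[0x0f, 0xa0]), "0fa0");
    }
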
diff --git a/object_store/src/aws/mod.rs b/object_store/src/aws/mod.rs
index 20174692f..a1c9eae84 100644
--- a/object_store/src/aws/mod.rs
+++ b/object_store/src/aws/mod.rs
@@ -614,7 +614,7 @@ impl AmazonS3Builder {
             std::env::var("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI")
         {
             builder.metadata_endpoint =
-                Some(format!("{}{}", METADATA_ENDPOINT, metadata_relative_uri));
+                Some(format!("{METADATA_ENDPOINT}{metadata_relative_uri}"));
         }
 
         if let Ok(text) = std::env::var("AWS_ALLOW_HTTP") {
@@ -896,7 +896,7 @@ impl AmazonS3Builder {
                     let session_name = std::env::var("AWS_ROLE_SESSION_NAME")
                         .unwrap_or_else(|_| "WebIdentitySession".to_string());
 
-                    let endpoint = format!("https://sts.{}.amazonaws.com", region);
+                    let endpoint = format!("https://sts.{region}.amazonaws.com");
 
                     // Disallow non-HTTPs requests
                     let client = self
@@ -948,15 +948,15 @@ impl AmazonS3Builder {
         // `virtual_hosted_style_request`. i.e. if `virtual_hosted_style_request` is true then
         // `endpoint` should have bucket name included.
         if self.virtual_hosted_style_request {
-            endpoint = self.endpoint.unwrap_or_else(|| {
-                format!("https://{}.s3.{}.amazonaws.com", bucket, region)
-            });
+            endpoint = self
+                .endpoint
+                .unwrap_or_else(|| format!("https://{bucket}.s3.{region}.amazonaws.com"));
             bucket_endpoint = endpoint.clone();
         } else {
             endpoint = self
                 .endpoint
-                .unwrap_or_else(|| format!("https://s3.{}.amazonaws.com", region));
-            bucket_endpoint = format!("{}/{}", endpoint, bucket);
+                .unwrap_or_else(|| format!("https://s3.{region}.amazonaws.com"));
+            bucket_endpoint = format!("{endpoint}/{bucket}");
         }
 
         let config = S3Config {
@@ -1137,8 +1137,7 @@ mod tests {
         assert_eq!(builder.endpoint.unwrap(), aws_endpoint);
         assert_eq!(builder.token.unwrap(), aws_session_token);
 
-        let metadata_uri =
-            format!("{}{}", METADATA_ENDPOINT, container_creds_relative_uri);
+        let metadata_uri = format!("{METADATA_ENDPOINT}{container_creds_relative_uri}");
         assert_eq!(builder.metadata_endpoint.unwrap(), metadata_uri);
     }
 
diff --git a/object_store/src/azure/client.rs b/object_store/src/azure/client.rs
index e42950b90..39da7177f 100644
--- a/object_store/src/azure/client.rs
+++ b/object_store/src/azure/client.rs
@@ -179,12 +179,12 @@ impl AzureClient {
                 Ok(AzureCredential::AuthorizationToken(
                    // we do the conversion to a HeaderValue here, since it is fallible
                    // and we want to use it in an infallible function
-                    HeaderValue::from_str(&format!("Bearer {}", token)).map_err(
-                        |err| crate::Error::Generic {
+                    HeaderValue::from_str(&format!("Bearer {token}")).map_err(|err| {
+                        crate::Error::Generic {
                             store: "MicrosoftAzure",
                             source: Box::new(err),
-                        },
-                    )?,
+                        }
+                    })?,
                 ))
             }
             CredentialProvider::SASToken(sas) => {
diff --git a/object_store/src/azure/credential.rs b/object_store/src/azure/credential.rs
index 280d84300..67023d2f0 100644
--- a/object_store/src/azure/credential.rs
+++ b/object_store/src/azure/credential.rs
@@ -593,7 +593,7 @@ mod tests {
             Some("client_id".into()),
             None,
             None,
-            Some(format!("{}/metadata/identity/oauth2/token", endpoint)),
+            Some(format!("{endpoint}/metadata/identity/oauth2/token")),
             client.clone(),
         );
 
@@ -618,7 +618,7 @@ mod tests {
 
         // Test IMDS
         server.push_fn(move |req| {
-            assert_eq!(req.uri().path(), format!("/{}/oauth2/v2.0/token", tenant));
+            assert_eq!(req.uri().path(), format!("/{tenant}/oauth2/v2.0/token"));
             assert_eq!(req.method(), &Method::POST);
             let body = block_on(to_bytes(req.into_body())).unwrap();
             let body = String::from_utf8(body.to_vec()).unwrap();
diff --git a/object_store/src/azure/mod.rs b/object_store/src/azure/mod.rs
index 1eea27801..529690634 100644
--- a/object_store/src/azure/mod.rs
+++ b/object_store/src/azure/mod.rs
@@ -327,7 +327,7 @@ impl CloudMultiPartUploadImpl for AzureMultiPartUpload {
         buf: Vec<u8>,
         part_idx: usize,
     ) -> Result<UploadPart, io::Error> {
-        let content_id = format!("{:20}", part_idx);
+        let content_id = format!("{part_idx:20}");
         let block_id: BlockId = content_id.clone().into();
 
         self.client
diff --git a/object_store/src/client/backoff.rs b/object_store/src/client/backoff.rs
index 5a6126cc4..a4ca9765e 100644
--- a/object_store/src/client/backoff.rs
+++ b/object_store/src/client/backoff.rs
@@ -123,7 +123,7 @@ mod tests {
         };
 
         let assert_fuzzy_eq =
-            |a: f64, b: f64| assert!((b - a).abs() < 0.0001, "{} != {}", a, b);
+            |a: f64, b: f64| assert!((b - a).abs() < 0.0001, "{a} != {b}");
 
         // Create a static rng that takes the minimum of the range
         let rng = Box::new(StepRng::new(0, 0));
diff --git a/object_store/src/client/retry.rs b/object_store/src/client/retry.rs
index cee86b344..e6dd2eb81 100644
--- a/object_store/src/client/retry.rs
+++ b/object_store/src/client/retry.rs
@@ -41,7 +41,7 @@ impl std::fmt::Display for Error {
             self.message, self.retries
         )?;
         if let Some(source) = &self.source {
-            write!(f, ": {}", source)?;
+            write!(f, ": {source}")?;
         }
         Ok(())
     }
@@ -171,7 +171,7 @@ impl RetryExt for reqwest::RequestBuilder {
                                     true => match r.text().await {
                                         Ok(message) if !message.is_empty() => message,
                                         Ok(_) => "No Body".to_string(),
-                                        Err(e) => format!("error getting response body: {}", e)
+                                        Err(e) => format!("error getting response body: {e}")
                                     }
                                     false => status.to_string(),
                                 };
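
Note that the `write!` for `self.message` and `self.retries` above keeps positional arguments: only bare identifiers can be captured, not field accesses or method calls. A small sketch of that limitation, using a hypothetical RetryError type:

    use std::fmt;

    struct RetryError {
        message: String,
        retries: usize,
    }

    impl fmt::Display for RetryError {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            // `{self.message}` is not valid format syntax; expressions stay positional.
            write!(f, "{} after {} retries", self.message, self.retries)
        }
    }

    fn main() {
        let e = RetryError { message: "timeout".into(), retries: 3 };
        println!("{e}"); // "timeout after 3 retries"
    }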
diff --git a/object_store/src/gcp/credential.rs b/object_store/src/gcp/credential.rs
index 56468568b..c12b37cdd 100644
--- a/object_store/src/gcp/credential.rs
+++ b/object_store/src/gcp/credential.rs
@@ -352,8 +352,7 @@ async fn make_metadata_request(
     audience: &str,
 ) -> Result<TokenResponse> {
     let url = format!(
-        "http://{}/computeMetadata/v1/instance/service-accounts/default/token",
-        hostname
+        "http://{hostname}/computeMetadata/v1/instance/service-accounts/default/token"
     );
     let response: TokenResponse = client
         .request(Method::GET, url)
diff --git a/object_store/src/gcp/mod.rs b/object_store/src/gcp/mod.rs
index 871413b43..97f44446f 100644
--- a/object_store/src/gcp/mod.rs
+++ b/object_store/src/gcp/mod.rs
@@ -1271,8 +1271,7 @@ mod test {
 
         assert!(
             matches!(err, ObjectStoreError::NotFound { .. }),
-            "unexpected error type: {}",
-            err
+            "unexpected error type: {err}"
         );
     }
 
@@ -1291,8 +1290,7 @@ mod test {
 
         assert!(
             matches!(err, ObjectStoreError::NotFound { .. }),
-            "unexpected error type: {}",
-            err
+            "unexpected error type: {err}"
         );
     }
 
@@ -1305,8 +1303,7 @@ mod test {
         let err = integration.delete(&location).await.unwrap_err();
         assert!(
             matches!(err, ObjectStoreError::NotFound { .. }),
-            "unexpected error type: {}",
-            err
+            "unexpected error type: {err}"
         );
     }
 
@@ -1322,8 +1319,7 @@ mod test {
         let err = integration.delete(&location).await.unwrap_err();
         assert!(
             matches!(err, ObjectStoreError::NotFound { .. }),
-            "unexpected error type: {}",
-            err
+            "unexpected error type: {err}"
         );
     }
 
@@ -1352,7 +1348,7 @@ mod test {
     #[tokio::test]
     async fn gcs_test_proxy_url() {
         let mut tfile = NamedTempFile::new().unwrap();
-        write!(tfile, "{}", FAKE_KEY).unwrap();
+        write!(tfile, "{FAKE_KEY}").unwrap();
         let service_account_path = tfile.path();
         let gcs = GoogleCloudStorageBuilder::new()
             .with_service_account_path(service_account_path.to_str().unwrap())
@@ -1400,7 +1396,7 @@ mod test {
     #[test]
     fn gcs_test_service_account_key_and_path() {
         let mut tfile = NamedTempFile::new().unwrap();
-        write!(tfile, "{}", FAKE_KEY).unwrap();
+        write!(tfile, "{FAKE_KEY}").unwrap();
         let _ = GoogleCloudStorageBuilder::new()
             .with_service_account_key(FAKE_KEY)
             .with_service_account_path(tfile.path().to_str().unwrap())
diff --git a/object_store/src/lib.rs b/object_store/src/lib.rs
index 4ec58c387..8c202886b 100644
--- a/object_store/src/lib.rs
+++ b/object_store/src/lib.rs
@@ -441,11 +441,9 @@ impl GetResult {
                         }
                     })?;
 
-                    file.seek(SeekFrom::Start(0)).map_err(|source| {
-                        local::Error::Seek {
-                            source,
-                            path: path.clone(),
-                        }
+                    file.rewind().map_err(|source| local::Error::Seek {
+                        source,
+                        path: path.clone(),
                     })?;
 
                     let mut buffer = Vec::with_capacity(len as usize);
@@ -611,8 +609,7 @@ mod tests {
         let content_list = flatten_list_stream(storage, None).await.unwrap();
         assert!(
             content_list.is_empty(),
-            "Expected list to be empty; found: {:?}",
-            content_list
+            "Expected list to be empty; found: {content_list:?}"
         );
 
         let location = Path::from("test_dir/test_file.json");
@@ -815,7 +812,7 @@ mod tests {
         storage.delete(&path).await.unwrap();
 
         let files = flatten_list_stream(storage, None).await.unwrap();
-        assert!(files.is_empty(), "{:?}", files);
+        assert!(files.is_empty(), "{files:?}");
     }
 
     fn get_vec_of_bytes(chunk_length: usize, num_chunks: usize) -> Vec<Bytes> {
@@ -900,8 +897,7 @@ mod tests {
         let content_list = flatten_list_stream(storage, None).await.unwrap();
         assert!(
             content_list.is_empty(),
-            "Expected list to be empty; found: {:?}",
-            content_list
+            "Expected list to be empty; found: {content_list:?}"
         );
 
         let location1 = Path::from("foo/x.json");
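
The `file.rewind()` change above is a separate cleanup: `Seek::rewind`, stable since Rust 1.55, is the idiomatic spelling of `seek(SeekFrom::Start(0))`. A self-contained sketch using an in-memory cursor instead of a real file:

    use std::io::{Cursor, Read, Seek, Write};

    fn main() -> std::io::Result<()> {
        let mut f = Cursor::new(Vec::new());
        f.write_all(b"hello")?;
        f.rewind()?; // equivalent to f.seek(SeekFrom::Start(0))?
        let mut buffer = String::new();
        f.read_to_string(&mut buffer)?;
        assert_eq!(buffer, "hello");
        Ok(())
    }
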
diff --git a/object_store/src/local.rs b/object_store/src/local.rs
index 2ef87adbb..9a518ba47 100644
--- a/object_store/src/local.rs
+++ b/object_store/src/local.rs
@@ -555,7 +555,7 @@ impl ObjectStore for LocalFileSystem {
 
 fn get_upload_stage_path(dest: &std::path::Path, multipart_id: &MultipartId) -> PathBuf {
     let mut staging_path = dest.as_os_str().to_owned();
-    staging_path.push(format!("#{}", multipart_id));
+    staging_path.push(format!("#{multipart_id}"));
     staging_path.into()
 }
 
@@ -607,7 +607,7 @@ impl AsyncWrite for LocalUpload {
             |condition: &str| -> std::task::Poll<Result<usize, io::Error>> {
                 Poll::Ready(Err(io::Error::new(
                     io::ErrorKind::InvalidInput,
-                    format!("Tried to write to file {}.", condition),
+                    format!("Tried to write to file {condition}."),
                 )))
             };
 
@@ -1040,12 +1040,11 @@ mod tests {
             let source_variant = source.downcast_ref::<std::io::Error>();
             assert!(
                 matches!(source_variant, Some(std::io::Error { .. }),),
-                "got: {:?}",
-                source_variant
+                "got: {source_variant:?}"
             );
             assert!(path.ends_with(NON_EXISTENT_NAME), "{}", path);
         } else {
-            panic!("unexpected error type: {:?}", err);
+            panic!("unexpected error type: {err:?}");
         }
     }
 
diff --git a/object_store/src/memory.rs b/object_store/src/memory.rs
index e4be5b2af..372164c2b 100644
--- a/object_store/src/memory.rs
+++ b/object_store/src/memory.rs
@@ -365,12 +365,11 @@ mod tests {
             let source_variant = source.downcast_ref::<Error>();
             assert!(
                 matches!(source_variant, Some(Error::NoDataInMemory { .. }),),
-                "got: {:?}",
-                source_variant
+                "got: {source_variant:?}"
             );
             assert_eq!(path, NON_EXISTENT_NAME);
         } else {
-            panic!("unexpected error type: {:?}", err);
+            panic!("unexpected error type: {err:?}");
         }
     }
 }
diff --git a/object_store/src/multipart.rs b/object_store/src/multipart.rs
index 65427d1f2..0606fb51e 100644
--- a/object_store/src/multipart.rs
+++ b/object_store/src/multipart.rs
@@ -222,7 +222,7 @@ where
                 part.ok_or_else(|| {
                     io::Error::new(
                         io::ErrorKind::Other,
-                        format!("Missing information for upload part {}", idx),
+                        format!("Missing information for upload part {idx}"),
                     )
                 })
             })
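
The multipart hunk shows the usual `Option`-to-`io::Error` conversion with a captured part index. A minimal standalone sketch of that pattern, with a hypothetical `require_part` helper:

    use std::io;

    fn require_part(part: Option<u64>, idx: usize) -> io::Result<u64> {
        // ok_or_else defers building the error until a part is actually missing.
        part.ok_or_else(|| {
            io::Error::new(
                io::ErrorKind::Other,
                format!("Missing information for upload part {idx}"),
            )
        })
    }

    fn main() {
        assert!(require_part(None, 2).is_err());
        assert_eq!(require_part(Some(7), 0).unwrap(), 7);
    }
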
diff --git a/object_store/src/path/mod.rs b/object_store/src/path/mod.rs
index 020e5f58e..4b0862e44 100644
--- a/object_store/src/path/mod.rs
+++ b/object_store/src/path/mod.rs
@@ -454,63 +454,49 @@ mod tests {
         // self starts with self
         assert!(
             haystack.prefix_matches(&haystack),
-            "{:?} should have started with {:?}",
-            haystack,
-            haystack
+            "{haystack:?} should have started with {haystack:?}"
         );
 
         // a longer prefix doesn't match
         let needle = needle.child("longer now");
         assert!(
             !haystack.prefix_matches(&needle),
-            "{:?} shouldn't have started with {:?}",
-            haystack,
-            needle
+            "{haystack:?} shouldn't have started with {needle:?}"
         );
 
         // one dir prefix matches
         let needle = Path::from_iter(["foo/bar"]);
         assert!(
             haystack.prefix_matches(&needle),
-            "{:?} should have started with {:?}",
-            haystack,
-            needle
+            "{haystack:?} should have started with {needle:?}"
         );
 
         // two dir prefix matches
         let needle = needle.child("baz%2Ftest");
         assert!(
             haystack.prefix_matches(&needle),
-            "{:?} should have started with {:?}",
-            haystack,
-            needle
+            "{haystack:?} should have started with {needle:?}"
         );
 
         // partial dir prefix doesn't match
         let needle = Path::from_iter(["f"]);
         assert!(
             !haystack.prefix_matches(&needle),
-            "{:?} should not have started with {:?}",
-            haystack,
-            needle
+            "{haystack:?} should not have started with {needle:?}"
         );
 
         // one dir and one partial dir doesn't match
         let needle = Path::from_iter(["foo/bar", "baz"]);
         assert!(
             !haystack.prefix_matches(&needle),
-            "{:?} should not have started with {:?}",
-            haystack,
-            needle
+            "{haystack:?} should not have started with {needle:?}"
         );
 
         // empty prefix matches
         let needle = Path::from("");
         assert!(
             haystack.prefix_matches(&needle),
-            "{:?} should have started with {:?}",
-            haystack,
-            needle
+            "{haystack:?} should have started with {needle:?}"
         );
     }
 
@@ -524,9 +510,7 @@ mod tests {
 
         assert!(
             !haystack.prefix_matches(&needle),
-            "{:?} should not have started with {:?}",
-            haystack,
-            needle
+            "{haystack:?} should not have started with {needle:?}"
         );
 
         // All directories match but file name is not a prefix
@@ -534,9 +518,7 @@ mod tests {
 
         assert!(
             !haystack.prefix_matches(&needle),
-            "{:?} should not have started with {:?}",
-            haystack,
-            needle
+            "{haystack:?} should not have started with {needle:?}"
         );
 
         // Not all directories match; file name is a prefix of the next directory; this
@@ -545,9 +527,7 @@ mod tests {
 
         assert!(
             !haystack.prefix_matches(&needle),
-            "{:?} should not have started with {:?}",
-            haystack,
-            needle
+            "{haystack:?} should not have started with {needle:?}"
         );
 
         // Not all directories match; file name is NOT a prefix of the next directory;
@@ -556,9 +536,7 @@ mod tests {
 
         assert!(
             !haystack.prefix_matches(&needle),
-            "{:?} should not have started with {:?}",
-            haystack,
-            needle
+            "{haystack:?} should not have started with {needle:?}"
         );
     }
 
diff --git a/parquet/benches/arrow_reader.rs b/parquet/benches/arrow_reader.rs
index d8a7f07fb..f6f65bea8 100644
--- a/parquet/benches/arrow_reader.rs
+++ b/parquet/benches/arrow_reader.rs
@@ -281,7 +281,7 @@ fn build_plain_encoded_string_page_iterator(
                 };
                 if def_level == max_def_level {
                     let string_value =
-                        format!("Test value {}, row group: {}, page: {}", k, i, j);
+                        format!("Test value {k}, row group: {i}, page: {j}");
                     values
                         .push(parquet::data_type::ByteArray::from(string_value.as_str()));
                 }
@@ -312,7 +312,7 @@ fn build_dictionary_encoded_string_page_iterator(
     // generate 1% unique values
     const NUM_UNIQUE_VALUES: usize = VALUES_PER_PAGE / 100;
     let unique_values = (0..NUM_UNIQUE_VALUES)
-        .map(|x| format!("Dictionary value {}", x))
+        .map(|x| format!("Dictionary value {x}"))
         .collect::<Vec<_>>();
     let mut rng = seedable_rng();
     let mut pages: Vec<Vec<parquet::column::page::Page>> = Vec::new();
diff --git a/parquet/examples/async_read_parquet.rs b/parquet/examples/async_read_parquet.rs
index 9b4b6d4ff..f600cd0d1 100644
--- a/parquet/examples/async_read_parquet.rs
+++ b/parquet/examples/async_read_parquet.rs
@@ -27,7 +27,7 @@ use tokio::fs::File;
 async fn main() -> Result<()> {
     // Create parquet file that will be read.
     let testdata = arrow::util::test_util::parquet_test_data();
-    let path = format!("{}/alltypes_plain.parquet", testdata);
+    let path = format!("{testdata}/alltypes_plain.parquet");
     let file = File::open(path).await.unwrap();
 
     // Create a async parquet reader builder with batch_size.
diff --git a/parquet/examples/read_parquet.rs b/parquet/examples/read_parquet.rs
index 3d6d70aee..f374fcd2e 100644
--- a/parquet/examples/read_parquet.rs
+++ b/parquet/examples/read_parquet.rs
@@ -23,7 +23,7 @@ use std::fs::File;
 fn main() -> Result<()> {
     // Create parquet file that will be read.
     let testdata = arrow::util::test_util::parquet_test_data();
-    let path = format!("{}/alltypes_plain.parquet", testdata);
+    let path = format!("{testdata}/alltypes_plain.parquet");
     let file = File::open(path).unwrap();
 
     // Create a sync parquet reader with batch_size.
diff --git a/parquet/src/arrow/arrow_reader/mod.rs b/parquet/src/arrow/arrow_reader/mod.rs
index 312f01407..87165ef8e 100644
--- a/parquet/src/arrow/arrow_reader/mod.rs
+++ b/parquet/src/arrow/arrow_reader/mod.rs
@@ -1161,7 +1161,7 @@ mod tests {
             ("int64", 10),
         ];
         for (prefix, target_precision) in file_variants {
-            let path = format!("{}/{}_decimal.parquet", testdata, prefix);
+            let path = format!("{testdata}/{prefix}_decimal.parquet");
             let file = File::open(path).unwrap();
             let mut record_reader = ParquetRecordBatchReader::try_new(file, 32).unwrap();
 
@@ -1726,7 +1726,7 @@ mod tests {
         // a column that has the same name as one of the struct fields
         // (see: ARROW-11452)
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/nested_structs.rust.parquet", testdata);
+        let path = format!("{testdata}/nested_structs.rust.parquet");
         let file = File::open(&path).unwrap();
         let record_batch_reader = ParquetRecordBatchReader::try_new(file, 60).unwrap();
 
@@ -1776,7 +1776,7 @@ mod tests {
     #[test]
     fn test_read_maps() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/nested_maps.snappy.parquet", testdata);
+        let path = format!("{testdata}/nested_maps.snappy.parquet");
         let file = File::open(path).unwrap();
         let record_batch_reader = ParquetRecordBatchReader::try_new(file, 60).unwrap();
 
@@ -1968,7 +1968,7 @@ mod tests {
     #[test]
     fn test_read_null_list() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/null_list.parquet", testdata);
+        let path = format!("{testdata}/null_list.parquet");
         let file = File::open(path).unwrap();
         let mut record_batch_reader =
             ParquetRecordBatchReader::try_new(file, 60).unwrap();
@@ -1993,7 +1993,7 @@ mod tests {
     #[test]
     fn test_null_schema_inference() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/null_list.parquet", testdata);
+        let path = format!("{testdata}/null_list.parquet");
         let file = File::open(path).unwrap();
 
         let arrow_field = Field::new(
@@ -2084,7 +2084,7 @@ mod tests {
     #[test]
     fn test_empty_projection() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/alltypes_plain.parquet", testdata);
+        let path = format!("{testdata}/alltypes_plain.parquet");
         let file = File::open(path).unwrap();
 
         let builder = ParquetRecordBatchReaderBuilder::try_new(file).unwrap();
@@ -2256,7 +2256,7 @@ mod tests {
     #[test]
     fn test_scan_row_with_selection() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/alltypes_tiny_pages_plain.parquet", testdata);
+        let path = format!("{testdata}/alltypes_tiny_pages_plain.parquet");
         let test_file = File::open(&path).unwrap();
 
         let mut serial_reader =
@@ -2273,10 +2273,7 @@ mod tests {
                 assert_eq!(
                     skip_reader.collect::<Result<Vec<_>, _>>().unwrap(),
                     expected,
-                    "batch_size: {}, selection_len: {}, skip_first: {}",
-                    batch_size,
-                    selection_len,
-                    skip_first
+                    "batch_size: {batch_size}, selection_len: {selection_len}, skip_first: {skip_first}"
                 );
             }
         };
@@ -2315,7 +2312,7 @@ mod tests {
     fn test_batch_size_overallocate() {
         let testdata = arrow::util::test_util::parquet_test_data();
        // `alltypes_plain.parquet` only has 8 rows
-        let path = format!("{}/alltypes_plain.parquet", testdata);
+        let path = format!("{testdata}/alltypes_plain.parquet");
         let test_file = File::open(path).unwrap();
 
         let builder = ParquetRecordBatchReaderBuilder::try_new(test_file).unwrap();
@@ -2394,7 +2391,7 @@ mod tests {
     #[test]
     fn test_read_lz4_raw() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/lz4_raw_compressed.parquet", testdata);
+        let path = format!("{testdata}/lz4_raw_compressed.parquet");
         let file = File::open(path).unwrap();
 
         let batches = ParquetRecordBatchReader::try_new(file, 1024)
@@ -2438,7 +2435,7 @@ mod tests {
             "non_hadoop_lz4_compressed.parquet",
         ] {
             let testdata = arrow::util::test_util::parquet_test_data();
-            let path = format!("{}/{}", testdata, file);
+            let path = format!("{testdata}/{file}");
             let file = File::open(path).unwrap();
             let expected_rows = 4;
 
@@ -2470,7 +2467,7 @@ mod tests {
     #[test]
     fn test_read_lz4_hadoop_large() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/hadoop_lz4_compressed_larger.parquet", testdata);
+        let path = format!("{testdata}/hadoop_lz4_compressed_larger.parquet");
         let file = File::open(path).unwrap();
         let expected_rows = 10000;
 
@@ -2496,7 +2493,7 @@ mod tests {
     #[cfg(feature = "snap")]
     fn test_read_nested_lists() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/nested_lists.snappy.parquet", testdata);
+        let path = format!("{testdata}/nested_lists.snappy.parquet");
         let file = File::open(path).unwrap();
 
         let f = file.try_clone().unwrap();
diff --git a/parquet/src/arrow/arrow_writer/mod.rs b/parquet/src/arrow/arrow_writer/mod.rs
index c459d40d7..87b4ebc2b 100644
--- a/parquet/src/arrow/arrow_writer/mod.rs
+++ b/parquet/src/arrow/arrow_writer/mod.rs
@@ -363,8 +363,7 @@ fn write_leaves<W: Write>(
         ArrowDataType::FixedSizeList(_, _) | ArrowDataType::Union(_, _, _) | ArrowDataType::RunEndEncoded(_, _) => {
             Err(ParquetError::NYI(
                 format!(
-                    "Attempting to write an Arrow type {:?} to parquet that is not yet implemented",
-                    data_type
+                    "Attempting to write an Arrow type {data_type:?} to parquet that is not yet implemented"
                 )
             ))
         }
@@ -499,8 +498,7 @@ fn write_leaf(
                     _ => {
                         return Err(ParquetError::NYI(
                             format!(
-                                "Attempting to write an Arrow interval type {:?} to parquet that is not yet implemented",
-                                interval_unit
+                                "Attempting to write an Arrow interval type {interval_unit:?} to parquet that is not yet implemented"
                             )
                         ));
                     }
@@ -536,8 +534,8 @@ fn write_leaf(
     Ok(written as i64)
 }
 
-fn write_primitive<'a, T: DataType>(
-    writer: &mut ColumnWriterImpl<'a, T>,
+fn write_primitive<T: DataType>(
+    writer: &mut ColumnWriterImpl<'_, T>,
     values: &[T::T],
     levels: LevelInfo,
 ) -> Result<usize> {
@@ -1197,8 +1195,7 @@ mod tests {
         assert_eq!(
             offset_index.len(),
             10,
-            "Expected 9 pages but got {:#?}",
-            offset_index
+            "Expected 9 pages but got {offset_index:#?}"
         );
     }
 
@@ -1422,10 +1419,10 @@ mod tests {
                     {
                         bloom_filters.push(sbbf.clone());
                     } else {
-                        panic!("No bloom filter for column named {} found", file_column);
+                        panic!("No bloom filter for column named {file_column} found");
                     }
                 } else {
-                    panic!("No column named {} found", file_column);
+                    panic!("No column named {file_column} found");
                 }
             }
 
diff --git a/parquet/src/arrow/async_reader/metadata.rs b/parquet/src/arrow/async_reader/metadata.rs
index 9c96d0650..7470814fa 100644
--- a/parquet/src/arrow/async_reader/metadata.rs
+++ b/parquet/src/arrow/async_reader/metadata.rs
@@ -49,8 +49,7 @@ where
 {
     if file_size < 8 {
         return Err(ParquetError::EOF(format!(
-            "file size of {} is less than footer",
-            file_size
+            "file size of {file_size} is less than footer"
         )));
     }
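
For context on the guard above: a Parquet file ends with an 8-byte footer, a 4-byte little-endian metadata length followed by the 4-byte `PAR1` magic, so anything smaller cannot be a valid file. A sketch of just that check:

    fn check_footer(file_size: u64) -> Result<(), String> {
        // 4-byte metadata length + 4-byte "PAR1" magic.
        const FOOTER_SIZE: u64 = 8;
        if file_size < FOOTER_SIZE {
            return Err(format!("file size of {file_size} is less than footer"));
        }
        Ok(())
    }

    fn main() {
        assert!(check_footer(4).is_err());
        assert!(check_footer(1024).is_ok());
    }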
 
diff --git a/parquet/src/arrow/async_reader/mod.rs b/parquet/src/arrow/async_reader/mod.rs
index 780ba6f3b..0397df206 100644
--- a/parquet/src/arrow/async_reader/mod.rs
+++ b/parquet/src/arrow/async_reader/mod.rs
@@ -692,8 +692,7 @@ impl<'a> RowGroupCollection for InMemoryRowGroup<'a> {
     fn column_chunks(&self, i: usize) -> Result<Box<dyn PageIterator>> {
         match &self.column_chunks[i] {
             None => Err(ParquetError::General(format!(
-                "Invalid column index {}, column was not fetched",
-                i
+                "Invalid column index {i}, column was not fetched"
             ))),
             Some(data) => {
                 let page_locations = self
@@ -757,8 +756,7 @@ impl ChunkReader for ColumnChunkData {
                 .map(|idx| data[idx].1.slice(0..length))
                 .map_err(|_| {
                     ParquetError::General(format!(
-                        "Invalid offset in sparse column chunk data: {}",
-                        start
+                        "Invalid offset in sparse column chunk data: {start}"
                     ))
                 }),
             ColumnChunkData::Dense { offset, data } => {
@@ -831,7 +829,7 @@ mod tests {
     #[tokio::test]
     async fn test_async_reader() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/alltypes_plain.parquet", testdata);
+        let path = format!("{testdata}/alltypes_plain.parquet");
         let data = Bytes::from(std::fs::read(path).unwrap());
 
         let metadata = parse_metadata(&data).unwrap();
@@ -886,7 +884,7 @@ mod tests {
     #[tokio::test]
     async fn test_async_reader_with_index() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/alltypes_tiny_pages_plain.parquet", testdata);
+        let path = format!("{testdata}/alltypes_tiny_pages_plain.parquet");
         let data = Bytes::from(std::fs::read(path).unwrap());
 
         let metadata = parse_metadata(&data).unwrap();
@@ -948,7 +946,7 @@ mod tests {
     #[tokio::test]
     async fn test_async_reader_skip_pages() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/alltypes_tiny_pages_plain.parquet", testdata);
+        let path = format!("{testdata}/alltypes_tiny_pages_plain.parquet");
         let data = Bytes::from(std::fs::read(path).unwrap());
 
         let metadata = parse_metadata(&data).unwrap();
@@ -1005,7 +1003,7 @@ mod tests {
     #[tokio::test]
     async fn test_fuzz_async_reader_selection() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/alltypes_tiny_pages_plain.parquet", testdata);
+        let path = format!("{testdata}/alltypes_tiny_pages_plain.parquet");
         let data = Bytes::from(std::fs::read(path).unwrap());
 
         let metadata = parse_metadata(&data).unwrap();
@@ -1072,7 +1070,7 @@ mod tests {
     async fn test_async_reader_zero_row_selector() {
        // See https://github.com/apache/arrow-rs/issues/2669
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/alltypes_tiny_pages_plain.parquet", testdata);
+        let path = format!("{testdata}/alltypes_tiny_pages_plain.parquet");
         let data = Bytes::from(std::fs::read(path).unwrap());
 
         let metadata = parse_metadata(&data).unwrap();
@@ -1209,7 +1207,7 @@ mod tests {
     #[tokio::test]
     async fn test_row_filter_with_index() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/alltypes_tiny_pages_plain.parquet", testdata);
+        let path = format!("{testdata}/alltypes_tiny_pages_plain.parquet");
         let data = Bytes::from(std::fs::read(path).unwrap());
 
         let metadata = parse_metadata(&data).unwrap();
@@ -1259,7 +1257,7 @@ mod tests {
     #[tokio::test]
     async fn test_in_memory_row_group_sparse() {
         let testdata = arrow::util::test_util::parquet_test_data();
-        let path = format!("{}/alltypes_tiny_pages.parquet", testdata);
+        let path = format!("{testdata}/alltypes_tiny_pages.parquet");
         let data = Bytes::from(std::fs::read(path).unwrap());
 
         let metadata = parse_metadata(&data).unwrap();
@@ -1345,7 +1343,7 @@ mod tests {
     async fn test_batch_size_overallocate() {
         let testdata = arrow::util::test_util::parquet_test_data();
        // `alltypes_plain.parquet` only has 8 rows
-        let path = format!("{}/alltypes_plain.parquet", testdata);
+        let path = format!("{testdata}/alltypes_plain.parquet");
         let data = Bytes::from(std::fs::read(path).unwrap());
 
         let metadata = parse_metadata(&data).unwrap();
diff --git a/parquet/src/arrow/async_reader/store.rs b/parquet/src/arrow/async_reader/store.rs
index e5de8eae6..eb64b11b9 100644
--- a/parquet/src/arrow/async_reader/store.rs
+++ b/parquet/src/arrow/async_reader/store.rs
@@ -62,7 +62,7 @@ impl AsyncFileReader for ParquetObjectReader {
         self.store
             .get_range(&self.meta.location, range)
             .map_err(|e| {
-                ParquetError::General(format!("AsyncChunkReader::get_bytes error: {}", e))
+                ParquetError::General(format!("AsyncChunkReader::get_bytes error: {e}"))
             })
             .boxed()
     }
@@ -80,8 +80,7 @@ impl AsyncFileReader for ParquetObjectReader {
                 .await
                 .map_err(|e| {
                     ParquetError::General(format!(
-                        "ParquetObjectReader::get_byte_ranges error: {}",
-                        e
+                        "ParquetObjectReader::get_byte_ranges error: {e}"
                     ))
                 })
         }
@@ -96,8 +95,7 @@ impl AsyncFileReader for ParquetObjectReader {
                         .get_range(&self.meta.location, range)
                         .map_err(|e| {
                             ParquetError::General(format!(
-                                "ParquetObjectReader::get_metadata error: {}",
-                                e
+                                "ParquetObjectReader::get_metadata error: {e}"
                             ))
                         })
                 },
diff --git a/parquet/src/arrow/buffer/bit_util.rs b/parquet/src/arrow/buffer/bit_util.rs
index 34a0a4b83..278119033 100644
--- a/parquet/src/arrow/buffer/bit_util.rs
+++ b/parquet/src/arrow/buffer/bit_util.rs
@@ -53,7 +53,7 @@ pub fn iter_set_bits_rev(bytes: &[u8]) -> impl Iterator<Item = usize> + '_ {
 
 /// Performs big endian sign extension
 pub fn sign_extend_be<const N: usize>(b: &[u8]) -> [u8; N] {
-    assert!(b.len() <= N, "Array too large, expected less than {}", N);
+    assert!(b.len() <= N, "Array too large, expected less than {N}");
     let is_negative = (b[0] & 128u8) == 128u8;
     let mut result = if is_negative { [255u8; N] } else { [0u8; N] };
     for (d, s) in result.iter_mut().skip(N - b.len()).zip(b) {
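
The hunk above truncates `sign_extend_be` mid-loop; completing the loop body with the obvious byte copy (an assumption, though it matches what the signature and setup require) gives a runnable picture of big-endian two's-complement widening:

    pub fn sign_extend_be<const N: usize>(b: &[u8]) -> [u8; N] {
        assert!(b.len() <= N, "Array too large, expected less than {N}");
        let is_negative = (b[0] & 128u8) == 128u8;
        let mut result = if is_negative { [255u8; N] } else { [0u8; N] };
        for (d, s) in result.iter_mut().skip(N - b.len()).zip(b) {
            *d = *s; // copy the source bytes into the low-order positions
        }
        result
    }

    fn main() {
        // -1 widened from 1 byte to 4 stays -1 in two's complement.
        assert_eq!(sign_extend_be::<4>(&[0xff]), [0xff; 4]);
        assert_eq!(sign_extend_be::<4>(&[0x01]), [0, 0, 0, 1]);
    }
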
diff --git a/parquet/src/arrow/decoder/delta_byte_array.rs b/parquet/src/arrow/decoder/delta_byte_array.rs
index af73f4f25..dd4a8fa87 100644
--- a/parquet/src/arrow/decoder/delta_byte_array.rs
+++ b/parquet/src/arrow/decoder/delta_byte_array.rs
@@ -49,8 +49,7 @@ impl DeltaByteArrayDecoder {
 
         if num_prefix != num_suffix {
             return Err(general_err!(format!(
-                "inconsistent DELTA_BYTE_ARRAY lengths, prefixes: {}, suffixes: {}",
-                num_prefix, num_suffix
+                "inconsistent DELTA_BYTE_ARRAY lengths, prefixes: {num_prefix}, suffixes: {num_suffix}"
             )));
         }
 
diff --git a/parquet/src/arrow/schema/mod.rs b/parquet/src/arrow/schema/mod.rs
index d81d6a69b..a000a4656 100644
--- a/parquet/src/arrow/schema/mod.rs
+++ b/parquet/src/arrow/schema/mod.rs
@@ -852,7 +852,7 @@ mod tests {
 
         assert_eq!(arrow_fields.len(), converted_fields.len());
         for i in 0..arrow_fields.len() {
-            assert_eq!(arrow_fields[i], converted_fields[i], "{}", i);
+            assert_eq!(arrow_fields[i], converted_fields[i], "{i}");
         }
     }
 
diff --git a/parquet/src/basic.rs b/parquet/src/basic.rs
index bdc203b74..e971c8632 100644
--- a/parquet/src/basic.rs
+++ b/parquet/src/basic.rs
@@ -456,49 +456,49 @@ impl ColumnOrder {
 
 impl fmt::Display for Type {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }
 
 impl fmt::Display for ConvertedType {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }
 
 impl fmt::Display for Repetition {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }
 
 impl fmt::Display for Encoding {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }
 
 impl fmt::Display for Compression {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }
 
 impl fmt::Display for PageType {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }
 
 impl fmt::Display for SortOrder {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }
 
 impl fmt::Display for ColumnOrder {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }
 
@@ -735,7 +735,7 @@ impl From<Option<LogicalType>> for ConvertedType {
                     (16, false) => ConvertedType::UINT_16,
                     (32, false) => ConvertedType::UINT_32,
                     (64, false) => ConvertedType::UINT_64,
-                    t => panic!("Integer type {:?} is not supported", t),
+                    t => panic!("Integer type {t:?} is not supported"),
                 },
                 LogicalType::Unknown => ConvertedType::NONE,
                 LogicalType::Json => ConvertedType::JSON,
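
The `Display` impls above all delegate to the derived `Debug` via `{self:?}`, a compact way to keep the two renderings in sync. The same shape on a toy enum (the `Codec` type below is illustrative, not parquet's):

    use std::fmt;

    #[derive(Debug)]
    enum Codec {
        Uncompressed,
        Snappy,
    }

    impl fmt::Display for Codec {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            write!(f, "{self:?}") // Display mirrors the Debug rendering
        }
    }

    fn main() {
        assert_eq!(Codec::Snappy.to_string(), "Snappy");
        assert_eq!(format!("{}", Codec::Uncompressed), "Uncompressed");
    }
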
diff --git a/parquet/src/bin/parquet-fromcsv.rs b/parquet/src/bin/parquet-fromcsv.rs
index 23913f0ea..b1de492f5 100644
--- a/parquet/src/bin/parquet-fromcsv.rs
+++ b/parquet/src/bin/parquet-fromcsv.rs
@@ -133,13 +133,13 @@ impl ParquetFromCsvError {
 impl Display for ParquetFromCsvError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            ParquetFromCsvError::CommandLineParseError(e) => write!(f, "{}", e),
-            ParquetFromCsvError::IoError(e) => write!(f, "{}", e),
-            ParquetFromCsvError::ArrowError(e) => write!(f, "{}", e),
-            ParquetFromCsvError::ParquetError(e) => write!(f, "{}", e),
+            ParquetFromCsvError::CommandLineParseError(e) => write!(f, "{e}"),
+            ParquetFromCsvError::IoError(e) => write!(f, "{e}"),
+            ParquetFromCsvError::ArrowError(e) => write!(f, "{e}"),
+            ParquetFromCsvError::ParquetError(e) => write!(f, "{e}"),
             ParquetFromCsvError::WithContext(c, e) => {
-                writeln!(f, "{}", e)?;
-                write!(f, "context: {}", c)
+                writeln!(f, "{e}")?;
+                write!(f, "context: {c}")
             }
         }
     }
@@ -219,7 +219,7 @@ fn compression_from_str(cmp: &str) -> Result<Compression, String> {
         "LZ4" => Ok(Compression::LZ4),
         "ZSTD" => Ok(Compression::ZSTD),
         v => Err(
-            format!("Unknown compression {0} : possible values UNCOMPRESSED, SNAPPY, GZIP, LZO, BROTLI, LZ4, ZSTD \n\nFor more information try --help",v)
+            format!("Unknown compression {v} : possible values UNCOMPRESSED, SNAPPY, GZIP, LZO, BROTLI, LZ4, ZSTD \n\nFor more information try --help")
         )
     }
 }
@@ -228,10 +228,7 @@ fn writer_version_from_str(cmp: &str) -> Result<WriterVersion, String> {
     match cmp.to_uppercase().as_str() {
         "1" => Ok(WriterVersion::PARQUET_1_0),
         "2" => Ok(WriterVersion::PARQUET_2_0),
-        v => Err(format!(
-            "Unknown writer version {0} : possible values 1, 2",
-            v
-        )),
+        v => Err(format!("Unknown writer version {v} : possible values 1, 2")),
     }
 }
 
@@ -397,7 +394,7 @@ fn main() -> Result<(), ParquetFromCsvError> {
 #[cfg(test)]
 mod tests {
     use std::{
-        io::{Seek, SeekFrom, Write},
+        io::{Seek, Write},
         path::{Path, PathBuf},
     };
 
@@ -424,8 +421,7 @@ mod tests {
         actual = actual[pos..].to_string();
         assert_eq!(
             expected, actual,
-            "help text not match. please update to \n---\n{}\n---\n",
-            actual
+            "help text not match. please update to \n---\n{actual}\n---\n"
         )
     }
 
@@ -527,18 +523,16 @@ mod tests {
         match parse_args(vec!["--parquet-compression", "zip"]) {
             Ok(_) => panic!("unexpected success"),
             Err(e) => assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "error: invalid value 'zip' for '--parquet-compression <PARQUET_COMPRESSION>': Unknown compression ZIP : possible values UNCOMPRESSED, SNAPPY, GZIP, LZO, BROTLI, LZ4, ZSTD \n\nFor more information try --help\n"),
         }
     }
 
     fn assert_debug_text(debug_text: &str, name: &str, value: &str) {
-        let pattern = format!(" {}: {}", name, value);
+        let pattern = format!(" {name}: {value}");
         assert!(
             debug_text.contains(&pattern),
-            "\"{}\" not contains \"{}\"",
-            debug_text,
-            pattern
+            "\"{debug_text}\" not contains \"{pattern}\""
         )
     }
 
@@ -571,7 +565,7 @@ mod tests {
         ]));
 
         let reader_builder = configure_reader_builder(&args, arrow_schema);
-        let builder_debug = format!("{:?}", reader_builder);
+        let builder_debug = format!("{reader_builder:?}");
         assert_debug_text(&builder_debug, "has_header", "false");
         assert_debug_text(&builder_debug, "delimiter", "Some(44)");
         assert_debug_text(&builder_debug, "quote", "Some(34)");
@@ -605,7 +599,7 @@ mod tests {
             Field::new("field5", DataType::Utf8, false),
         ]));
         let reader_builder = configure_reader_builder(&args, arrow_schema);
-        let builder_debug = format!("{:?}", reader_builder);
+        let builder_debug = format!("{reader_builder:?}");
         assert_debug_text(&builder_debug, "has_header", "true");
         assert_debug_text(&builder_debug, "delimiter", "Some(9)");
         assert_debug_text(&builder_debug, "quote", "None");
@@ -627,10 +621,10 @@ mod tests {
         {
             let csv = input_file.as_file_mut();
             for index in 1..2000 {
-                write!(csv, "{},\"name_{}\"\r\n", index, index).unwrap();
+                write!(csv, "{index},\"name_{index}\"\r\n").unwrap();
             }
             csv.flush().unwrap();
-            csv.seek(SeekFrom::Start(0)).unwrap();
+            csv.rewind().unwrap();
         }
         let output_parquet = NamedTempFile::new().unwrap();
 
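Aside: `csv.rewind()` above is `std::io::Seek::rewind` (stable since Rust 1.55), shorthand for seeking to the start of a stream; with it, the `SeekFrom` import removed earlier in this file becomes unused. A minimal sketch of the equivalence, assuming the `tempfile` crate:

    use std::io::{Read, Seek, SeekFrom, Write};

    fn main() -> std::io::Result<()> {
        let mut f = tempfile::tempfile()?; // hypothetical scratch file
        writeln!(f, "hello")?;
        f.seek(SeekFrom::Start(0))?; // the old spelling
        f.rewind()?;                 // the equivalent clippy-preferred spelling
        let mut s = String::new();
        f.read_to_string(&mut s)?;
        assert_eq!(s, "hello\n");
        Ok(())
    }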
diff --git a/parquet/src/bin/parquet-index.rs b/parquet/src/bin/parquet-index.rs
index a924ef373..d8a72dd79 100644
--- a/parquet/src/bin/parquet-index.rs
+++ b/parquet/src/bin/parquet-index.rs
@@ -86,11 +86,10 @@ impl Args {
             .zip(reader.metadata().row_groups())
             .enumerate()
         {
-            println!("Row Group: {}", row_group_idx);
+            println!("Row Group: {row_group_idx}");
             let offset_index = offset_indices.get(column_idx).ok_or_else(|| {
                 ParquetError::General(format!(
-                    "No offset index for row group {} column chunk {}",
-                    row_group_idx, column_idx
+                    "No offset index for row group {row_group_idx} column chunk {column_idx}"
                 ))
             })?;
 
@@ -156,12 +155,12 @@ fn print_index<T: std::fmt::Display>(
             idx, o.offset, o.compressed_page_size, row_count
         );
         match &c.min {
-            Some(m) => print!(", min {:>10}", m),
+            Some(m) => print!(", min {m:>10}"),
             None => print!(", min {:>10}", "NONE"),
         }
 
         match &c.max {
-            Some(m) => print!(", max {:>10}", m),
+            Some(m) => print!(", max {m:>10}"),
             None => print!(", max {:>10}", "NONE"),
         }
         println!()
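The hunks above are representative of the whole commit: positional `format!`-style arguments are rewritten as inline captured identifiers. Format specs such as alignment and width carry over verbatim, so the two forms print identically. A minimal sketch (values are illustrative):

    fn main() {
        let m = 42;
        // `:>10` (right-align, width 10) applies the same either way
        assert_eq!(format!("min {:>10}", m), format!("min {m:>10}"));
    }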
diff --git a/parquet/src/bin/parquet-read.rs b/parquet/src/bin/parquet-read.rs
index 117f9ee0b..c1e08387a 100644
--- a/parquet/src/bin/parquet-read.rs
+++ b/parquet/src/bin/parquet-read.rs
@@ -102,6 +102,6 @@ fn print_row(row: &Row, json: bool) {
     if json {
         println!("{}", row.to_json_value())
     } else {
-        println!("{}", row);
+        println!("{row}");
     }
 }
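Inline capture only accepts bare identifiers, which is why `println!("{}", row.to_json_value())` above is left positional while `println!("{}", row)` becomes `println!("{row}")`. A sketch of the rule:

    fn main() {
        let row = 7;
        println!("{row}");               // bare identifier: can be captured
        println!("{}", row.to_string()); // expression: must stay positional
        // println!("{row.to_string()}") would not compile
    }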
diff --git a/parquet/src/bin/parquet-rowcount.rs b/parquet/src/bin/parquet-rowcount.rs
index 5069d4b25..45eb1c9a4 100644
--- a/parquet/src/bin/parquet-rowcount.rs
+++ b/parquet/src/bin/parquet-rowcount.rs
@@ -67,6 +67,6 @@ fn main() {
             total_num_rows += group_metadata.num_rows();
         }
 
-        eprintln!("File {}: rowcount={}", filename, total_num_rows);
+        eprintln!("File {filename}: rowcount={total_num_rows}");
     }
 }
diff --git a/parquet/src/bin/parquet-schema.rs b/parquet/src/bin/parquet-schema.rs
index ff7798a91..ae79fe429 100644
--- a/parquet/src/bin/parquet-schema.rs
+++ b/parquet/src/bin/parquet-schema.rs
@@ -60,7 +60,7 @@ fn main() {
     let verbose = args.verbose;
 
     match SerializedFileReader::new(file) {
-        Err(e) => panic!("Error when parsing Parquet file: {}", e),
+        Err(e) => panic!("Error when parsing Parquet file: {e}"),
         Ok(parquet_reader) => {
             let metadata = parquet_reader.metadata();
             println!("Metadata for file: {}", &filename);
diff --git a/parquet/src/bin/parquet-show-bloom-filter.rs b/parquet/src/bin/parquet-show-bloom-filter.rs
index ca8f558a6..77e29c6fb 100644
--- a/parquet/src/bin/parquet-show-bloom-filter.rs
+++ b/parquet/src/bin/parquet-show-bloom-filter.rs
@@ -80,7 +80,7 @@ fn main() {
     .expect("Unable to open file as Parquet");
     let metadata = file_reader.metadata();
     for (ri, row_group) in metadata.row_groups().iter().enumerate() {
-        println!("Row group #{}", ri);
+        println!("Row group #{ri}");
         println!("{}", "=".repeat(80));
         if let Some((column_index, _)) = row_group
             .columns()
diff --git a/parquet/src/bloom_filter/mod.rs b/parquet/src/bloom_filter/mod.rs
index e255a8dc1..4d2040b7f 100644
--- a/parquet/src/bloom_filter/mod.rs
+++ b/parquet/src/bloom_filter/mod.rs
@@ -156,7 +156,7 @@ fn read_bloom_filter_header_and_length(
     let mut buf_reader = buffer.reader();
     let mut prot = TCompactInputProtocol::new(&mut buf_reader);
     let header = BloomFilterHeader::read_from_in_protocol(&mut prot).map_err(|e| {
-        ParquetError::General(format!("Could not read bloom filter header: {}", e))
+        ParquetError::General(format!("Could not read bloom filter header: {e}"))
     })?;
     Ok((
         header,
@@ -190,8 +190,7 @@ impl Sbbf {
     pub(crate) fn new_with_ndv_fpp(ndv: u64, fpp: f64) -> Result<Self, ParquetError> {
         if !(0.0..1.0).contains(&fpp) {
             return Err(ParquetError::General(format!(
-                "False positive probability must be between 0.0 and 1.0, got {}",
-                fpp
+                "False positive probability must be between 0.0 and 1.0, got {fpp}"
             )));
         }
         let num_bits = num_of_bits_from_ndv_fpp(ndv, fpp);
@@ -227,7 +226,7 @@ impl Sbbf {
         let mut protocol = TCompactOutputProtocol::new(&mut writer);
         let header = self.header();
         header.write_to_out_protocol(&mut protocol).map_err(|e| {
-            ParquetError::General(format!("Could not write bloom filter header: {}", e))
+            ParquetError::General(format!("Could not write bloom filter header: {e}"))
         })?;
         protocol.flush()?;
         self.write_bitset(&mut writer)?;
@@ -241,8 +240,7 @@ impl Sbbf {
                 .write_all(block.to_le_bytes().as_slice())
                 .map_err(|e| {
                     ParquetError::General(format!(
-                        "Could not write bloom filter bit set: {}",
-                        e
+                        "Could not write bloom filter bit set: {e}"
                     ))
                 })?;
         }
@@ -389,7 +387,7 @@ mod tests {
         ];
         let sbbf = Sbbf::new(bitset);
         for a in 0..10i64 {
-            let value = format!("a{}", a);
+            let value = format!("a{a}");
             assert!(sbbf.check(&value.as_str()));
         }
     }
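The lint driving these changes is `clippy::uninlined_format_args`, which became warn-by-default around the Rust 1.67 clippy release, so CI running clippy with `-D warnings` starts failing on the old style; most hunks here are the mechanical fix (`cargo clippy --fix` can apply it). Where the older style is preferred, it can be allowed per item:

    // opting out locally, if the inlined style is not wanted:
    #[allow(clippy::uninlined_format_args)]
    fn legacy() {
        let name = "arrow";
        println!("{}", name);
    }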
diff --git a/parquet/src/column/page.rs b/parquet/src/column/page.rs
index ddb6d243e..bd3568d13 100644
--- a/parquet/src/column/page.rs
+++ b/parquet/src/column/page.rs
@@ -222,8 +222,7 @@ impl TryFrom<&PageHeader> for PageMetadata {
                 is_dict: false,
             }),
             other => Err(ParquetError::General(format!(
-                "page type {:?} cannot be converted to PageMetadata",
-                other
+                "page type {other:?} cannot be converted to PageMetadata"
             ))),
         }
     }
diff --git a/parquet/src/column/reader/decoder.rs b/parquet/src/column/reader/decoder.rs
index da7fa78fe..f57b3e16d 100644
--- a/parquet/src/column/reader/decoder.rs
+++ b/parquet/src/column/reader/decoder.rs
@@ -245,7 +245,7 @@ impl<T: DataType> ColumnValueDecoder for ColumnValueDecoderImpl<T> {
         let current_decoder = self
             .decoders
             .get_mut(&encoding)
-            .unwrap_or_else(|| panic!("decoder for encoding {} should be set", encoding));
+            .unwrap_or_else(|| panic!("decoder for encoding {encoding} should be set"));
 
         current_decoder.get(&mut out[range])
     }
@@ -258,7 +258,7 @@ impl<T: DataType> ColumnValueDecoder for ColumnValueDecoderImpl<T> {
         let current_decoder = self
             .decoders
             .get_mut(&encoding)
-            .unwrap_or_else(|| panic!("decoder for encoding {} should be set", encoding));
+            .unwrap_or_else(|| panic!("decoder for encoding {encoding} should be set"));
 
         current_decoder.skip(num_values)
     }
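`unwrap_or_else(|| panic!(...))` keeps the panic message lazy: the string is only built if the lookup fails. The eager alternative, `expect(&format!(...))`, formats on every call and is what `clippy::expect_fun_call` rejects. A minimal sketch with a hypothetical decoder map:

    use std::collections::HashMap;

    fn get_decoder(decoders: &HashMap<u8, String>, encoding: u8) -> &String {
        decoders
            .get(&encoding)
            // message formatted only on the failure path
            .unwrap_or_else(|| panic!("decoder for encoding {encoding} should be set"))
    }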
diff --git a/parquet/src/column/writer/mod.rs b/parquet/src/column/writer/mod.rs
index f2417900d..51e261499 100644
--- a/parquet/src/column/writer/mod.rs
+++ b/parquet/src/column/writer/mod.rs
@@ -1137,7 +1137,7 @@ mod tests {
         assert!(res.is_err());
         if let Err(err) = res {
             assert_eq!(
-                format!("{}", err),
+                format!("{err}"),
                 "Parquet error: Inconsistent length of definition and repetition levels: 3 != 2"
             );
         }
@@ -1152,7 +1152,7 @@ mod tests {
         assert!(res.is_err());
         if let Err(err) = res {
             assert_eq!(
-                format!("{}", err),
+                format!("{err}"),
                 "Parquet error: Definition levels are required, because max definition level = 1"
             );
         }
@@ -1167,7 +1167,7 @@ mod tests {
         assert!(res.is_err());
         if let Err(err) = res {
             assert_eq!(
-                format!("{}", err),
+                format!("{err}"),
                 "Parquet error: Repetition levels are required, because max repetition level = 1"
             );
         }
@@ -1182,7 +1182,7 @@ mod tests {
         assert!(res.is_err());
         if let Err(err) = res {
             assert_eq!(
-                format!("{}", err),
+                format!("{err}"),
                 "Parquet error: Expected to write 4 values, but have only 2"
             );
         }
@@ -1907,7 +1907,7 @@ mod tests {
             assert_eq!(stats.min(), &false);
             assert_eq!(stats.max(), &true);
         } else {
-            panic!("expecting Statistics::Boolean, got {:?}", stats);
+            panic!("expecting Statistics::Boolean, got {stats:?}");
         }
     }
 
@@ -1920,7 +1920,7 @@ mod tests {
             assert_eq!(stats.min(), &-2);
             assert_eq!(stats.max(), &3);
         } else {
-            panic!("expecting Statistics::Int32, got {:?}", stats);
+            panic!("expecting Statistics::Int32, got {stats:?}");
         }
     }
 
@@ -1933,7 +1933,7 @@ mod tests {
             assert_eq!(stats.min(), &-2);
             assert_eq!(stats.max(), &3);
         } else {
-            panic!("expecting Statistics::Int64, got {:?}", stats);
+            panic!("expecting Statistics::Int64, got {stats:?}");
         }
     }
 
@@ -1955,7 +1955,7 @@ mod tests {
             assert_eq!(stats.min(), &Int96::from(vec![0, 20, 30]));
             assert_eq!(stats.max(), &Int96::from(vec![3, 20, 10]));
         } else {
-            panic!("expecting Statistics::Int96, got {:?}", stats);
+            panic!("expecting Statistics::Int96, got {stats:?}");
         }
     }
 
@@ -1968,7 +1968,7 @@ mod tests {
             assert_eq!(stats.min(), &-2.0);
             assert_eq!(stats.max(), &3.0);
         } else {
-            panic!("expecting Statistics::Float, got {:?}", stats);
+            panic!("expecting Statistics::Float, got {stats:?}");
         }
     }
 
@@ -1981,7 +1981,7 @@ mod tests {
             assert_eq!(stats.min(), &-2.0);
             assert_eq!(stats.max(), &3.0);
         } else {
-            panic!("expecting Statistics::Double, got {:?}", stats);
+            panic!("expecting Statistics::Double, got {stats:?}");
         }
     }
 
@@ -1999,7 +1999,7 @@ mod tests {
             assert_eq!(stats.min(), &ByteArray::from("aaw"));
             assert_eq!(stats.max(), &ByteArray::from("zz"));
         } else {
-            panic!("expecting Statistics::ByteArray, got {:?}", stats);
+            panic!("expecting Statistics::ByteArray, got {stats:?}");
         }
     }
 
@@ -2022,7 +2022,7 @@ mod tests {
             let expected_max: FixedLenByteArray = ByteArray::from("zz   ").into();
             assert_eq!(stats.max(), &expected_max);
         } else {
-            panic!("expecting Statistics::FixedLenByteArray, got {:?}", stats);
+            panic!("expecting Statistics::FixedLenByteArray, got {stats:?}");
         }
     }
 
diff --git a/parquet/src/encodings/levels.rs b/parquet/src/encodings/levels.rs
index cf1da20b6..0727935c3 100644
--- a/parquet/src/encodings/levels.rs
+++ b/parquet/src/encodings/levels.rs
@@ -40,7 +40,7 @@ pub fn max_buffer_size(
     match encoding {
         Encoding::RLE => RleEncoder::max_buffer_size(bit_width, num_buffered_values),
         Encoding::BIT_PACKED => ceil(num_buffered_values * bit_width as usize, 8),
-        _ => panic!("Unsupported encoding type {}", encoding),
+        _ => panic!("Unsupported encoding type {encoding}"),
     }
 }
 
@@ -76,7 +76,7 @@ impl LevelEncoder {
                 // `max_buffer_size()` method.
                 LevelEncoder::BitPacked(bit_width, BitWriter::new_from_buf(buffer))
             }
-            _ => panic!("Unsupported encoding type {}", encoding),
+            _ => panic!("Unsupported encoding type {encoding}"),
         }
     }
 
@@ -160,7 +160,7 @@ impl LevelDecoder {
             Encoding::BIT_PACKED => {
                 LevelDecoder::BitPacked(None, bit_width, BitReader::from(Vec::new()))
             }
-            _ => panic!("Unsupported encoding type {}", encoding),
+            _ => panic!("Unsupported encoding type {encoding}"),
         }
     }
 
diff --git a/parquet/src/errors.rs b/parquet/src/errors.rs
index 703ff51f4..62f7656f1 100644
--- a/parquet/src/errors.rs
+++ b/parquet/src/errors.rs
@@ -48,16 +48,16 @@ impl std::fmt::Display for ParquetError {
     fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
         match &self {
             ParquetError::General(message) => {
-                write!(fmt, "Parquet error: {}", message)
+                write!(fmt, "Parquet error: {message}")
             }
-            ParquetError::NYI(message) => write!(fmt, "NYI: {}", message),
-            ParquetError::EOF(message) => write!(fmt, "EOF: {}", message),
+            ParquetError::NYI(message) => write!(fmt, "NYI: {message}"),
+            ParquetError::EOF(message) => write!(fmt, "EOF: {message}"),
             #[cfg(feature = "arrow")]
-            ParquetError::ArrowError(message) => write!(fmt, "Arrow: {}", message),
+            ParquetError::ArrowError(message) => write!(fmt, "Arrow: {message}"),
             ParquetError::IndexOutOfBound(index, ref bound) => {
-                write!(fmt, "Index {} out of bound: {}", index, bound)
+                write!(fmt, "Index {index} out of bound: {bound}")
             }
-            ParquetError::External(e) => write!(fmt, "External: {}", e),
+            ParquetError::External(e) => write!(fmt, "External: {e}"),
         }
     }
 }
@@ -157,6 +157,6 @@ macro_rules! arrow_err {
 #[cfg(feature = "arrow")]
 impl From<ParquetError> for ArrowError {
     fn from(p: ParquetError) -> Self {
-        Self::ParquetError(format!("{}", p))
+        Self::ParquetError(format!("{p}"))
     }
 }
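The same rewrite applies inside `Display` impls, and `format!("{p}")` is just `p.to_string()` spelled through the formatter. A condensed sketch with a made-up error enum:

    use std::fmt;

    enum MyError {
        General(String),
        Eof(String),
    }

    impl fmt::Display for MyError {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            match self {
                MyError::General(message) => write!(f, "error: {message}"),
                MyError::Eof(message) => write!(f, "EOF: {message}"),
            }
        }
    }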
diff --git a/parquet/src/file/footer.rs b/parquet/src/file/footer.rs
index 760caa977..a14b3ce4d 100644
--- a/parquet/src/file/footer.rs
+++ b/parquet/src/file/footer.rs
@@ -72,7 +72,7 @@ pub fn decode_metadata(metadata_read: &[u8]) -> Result<ParquetMetaData> {
     // TODO: row group filtering
     let mut prot = TCompactInputProtocol::new(metadata_read);
     let t_file_metadata: TFileMetaData = TFileMetaData::read_from_in_protocol(&mut prot)
-        .map_err(|e| ParquetError::General(format!("Could not parse metadata: {}", e)))?;
+        .map_err(|e| ParquetError::General(format!("Could not parse metadata: {e}")))?;
     let schema = types::from_thrift(&t_file_metadata.schema)?;
     let schema_descr = Arc::new(SchemaDescriptor::new(schema));
     let mut row_groups = Vec::new();
diff --git a/parquet/src/file/metadata.rs b/parquet/src/file/metadata.rs
index 51a5264e3..0696b2901 100644
--- a/parquet/src/file/metadata.rs
+++ b/parquet/src/file/metadata.rs
@@ -996,7 +996,7 @@ mod tests {
         assert!(row_group_meta.is_err());
         if let Err(e) = row_group_meta {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: Column length mismatch: 2 != 0"
             );
         }
diff --git a/parquet/src/file/properties.rs b/parquet/src/file/properties.rs
index 7d20b736e..cbd31f9a1 100644
--- a/parquet/src/file/properties.rs
+++ b/parquet/src/file/properties.rs
@@ -690,8 +690,7 @@ impl ColumnProperties {
     fn set_bloom_filter_fpp(&mut self, value: f64) {
         assert!(
             value > 0. && value < 1.0,
-            "fpp must be between 0 and 1 exclusive, got {}",
-            value
+            "fpp must be between 0 and 1 exclusive, got {value}"
         );
 
         self.bloom_filter_properties
diff --git a/parquet/src/file/serialized_reader.rs b/parquet/src/file/serialized_reader.rs
index 8ee37352b..95108ad58 100644
--- a/parquet/src/file/serialized_reader.rs
+++ b/parquet/src/file/serialized_reader.rs
@@ -921,7 +921,7 @@ mod tests {
 
                 r.into_iter().project(proj).unwrap()
             })
-            .map(|r| format!("{}", r))
+            .map(|r| format!("{r}"))
             .collect::<Vec<_>>()
             .join(",");
 
diff --git a/parquet/src/file/statistics.rs b/parquet/src/file/statistics.rs
index 76885fdbf..939ce037f 100644
--- a/parquet/src/file/statistics.rs
+++ b/parquet/src/file/statistics.rs
@@ -126,8 +126,7 @@ pub fn from_thrift(
             let null_count = stats.null_count.unwrap_or(0);
             assert!(
                 null_count >= 0,
-                "Statistics null count is negative ({})",
-                null_count
+                "Statistics null count is negative ({null_count})"
             );
 
             // Generic null count.
@@ -399,14 +398,14 @@ impl Statistics {
 impl fmt::Display for Statistics {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            Statistics::Boolean(typed) => write!(f, "{}", typed),
-            Statistics::Int32(typed) => write!(f, "{}", typed),
-            Statistics::Int64(typed) => write!(f, "{}", typed),
-            Statistics::Int96(typed) => write!(f, "{}", typed),
-            Statistics::Float(typed) => write!(f, "{}", typed),
-            Statistics::Double(typed) => write!(f, "{}", typed),
-            Statistics::ByteArray(typed) => write!(f, "{}", typed),
-            Statistics::FixedLenByteArray(typed) => write!(f, "{}", typed),
+            Statistics::Boolean(typed) => write!(f, "{typed}"),
+            Statistics::Int32(typed) => write!(f, "{typed}"),
+            Statistics::Int64(typed) => write!(f, "{typed}"),
+            Statistics::Int96(typed) => write!(f, "{typed}"),
+            Statistics::Float(typed) => write!(f, "{typed}"),
+            Statistics::Double(typed) => write!(f, "{typed}"),
+            Statistics::ByteArray(typed) => write!(f, "{typed}"),
+            Statistics::FixedLenByteArray(typed) => write!(f, "{typed}"),
         }
     }
 }
@@ -536,17 +535,17 @@ impl<T: ParquetValueType> fmt::Display for ValueStatistics<T> {
         write!(f, "{{")?;
         write!(f, "min: ")?;
         match self.min {
-            Some(ref value) => write!(f, "{}", value)?,
+            Some(ref value) => write!(f, "{value}")?,
             None => write!(f, "N/A")?,
         }
         write!(f, ", max: ")?;
         match self.max {
-            Some(ref value) => write!(f, "{}", value)?,
+            Some(ref value) => write!(f, "{value}")?,
             None => write!(f, "N/A")?,
         }
         write!(f, ", distinct_count: ")?;
         match self.distinct_count {
-            Some(value) => write!(f, "{}", value)?,
+            Some(value) => write!(f, "{value}")?,
             None => write!(f, "N/A")?,
         }
         write!(f, ", null_count: {}", self.null_count)?;
@@ -619,14 +618,14 @@ mod tests {
     fn test_statistics_debug() {
         let stats = Statistics::int32(Some(1), Some(12), None, 12, true);
         assert_eq!(
-            format!("{:?}", stats),
+            format!("{stats:?}"),
             "Int32({min: Some(1), max: Some(12), distinct_count: None, null_count: 12, \
              min_max_deprecated: true, min_max_backwards_compatible: true})"
         );
 
         let stats = Statistics::int32(None, None, None, 7, false);
         assert_eq!(
-            format!("{:?}", stats),
+            format!("{stats:?}"),
             "Int32({min: None, max: None, distinct_count: None, null_count: 7, \
              min_max_deprecated: false, min_max_backwards_compatible: false})"
         )
@@ -636,13 +635,13 @@ mod tests {
     fn test_statistics_display() {
         let stats = Statistics::int32(Some(1), Some(12), None, 12, true);
         assert_eq!(
-            format!("{}", stats),
+            format!("{stats}"),
             "{min: 1, max: 12, distinct_count: N/A, null_count: 12, min_max_deprecated: true}"
         );
 
         let stats = Statistics::int64(None, None, None, 7, false);
         assert_eq!(
-            format!("{}", stats),
+            format!("{stats}"),
             "{min: N/A, max: N/A, distinct_count: N/A, null_count: 7, min_max_deprecated: \
              false}"
         );
@@ -655,7 +654,7 @@ mod tests {
             true,
         );
         assert_eq!(
-            format!("{}", stats),
+            format!("{stats}"),
             "{min: [1, 0, 0], max: [2, 3, 4], distinct_count: N/A, null_count: 3, \
              min_max_deprecated: true}"
         );
@@ -668,7 +667,7 @@ mod tests {
             false,
         );
         assert_eq!(
-            format!("{}", stats),
+            format!("{stats}"),
             "{min: [1], max: [2], distinct_count: 5, null_count: 7, min_max_deprecated: false}"
         );
     }
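As the tests above show, capture works with any format spec, including `{stats:?}` for `Debug` and plain `{stats}` for `Display`. An illustrative example with a stand-in type:

    #[derive(Debug)]
    struct Stats {
        min: i32,
        max: i32,
    }

    fn main() {
        let stats = Stats { min: 1, max: 12 };
        assert_eq!(format!("{stats:?}"), "Stats { min: 1, max: 12 }");
    }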
diff --git a/parquet/src/file/writer.rs b/parquet/src/file/writer.rs
index 65f254185..4983ed55f 100644
--- a/parquet/src/file/writer.rs
+++ b/parquet/src/file/writer.rs
@@ -754,7 +754,7 @@ mod tests {
         assert!(res.is_err());
         if let Err(err) = res {
             assert_eq!(
-                format!("{}", err),
+                format!("{err}"),
                 "Parquet error: Column length mismatch: 1 != 0"
             );
         }
diff --git a/parquet/src/record/api.rs b/parquet/src/record/api.rs
index 8c942cb44..f3511c03d 100644
--- a/parquet/src/record/api.rs
+++ b/parquet/src/record/api.rs
@@ -245,7 +245,7 @@ pub fn make_row(fields: Vec<(String, Field)>) -> Row {
 impl fmt::Display for Row {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{{")?;
-        for (i, &(ref key, ref value)) in self.fields.iter().enumerate() {
+        for (i, (key, value)) in self.fields.iter().enumerate() {
             key.fmt(f)?;
             write!(f, ": ")?;
             value.fmt(f)?;
@@ -724,37 +724,37 @@ impl fmt::Display for Field {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match *self {
             Field::Null => write!(f, "null"),
-            Field::Bool(value) => write!(f, "{}", value),
-            Field::Byte(value) => write!(f, "{}", value),
-            Field::Short(value) => write!(f, "{}", value),
-            Field::Int(value) => write!(f, "{}", value),
-            Field::Long(value) => write!(f, "{}", value),
-            Field::UByte(value) => write!(f, "{}", value),
-            Field::UShort(value) => write!(f, "{}", value),
-            Field::UInt(value) => write!(f, "{}", value),
-            Field::ULong(value) => write!(f, "{}", value),
+            Field::Bool(value) => write!(f, "{value}"),
+            Field::Byte(value) => write!(f, "{value}"),
+            Field::Short(value) => write!(f, "{value}"),
+            Field::Int(value) => write!(f, "{value}"),
+            Field::Long(value) => write!(f, "{value}"),
+            Field::UByte(value) => write!(f, "{value}"),
+            Field::UShort(value) => write!(f, "{value}"),
+            Field::UInt(value) => write!(f, "{value}"),
+            Field::ULong(value) => write!(f, "{value}"),
             Field::Float(value) => {
                 if !(1e-15..=1e19).contains(&value) {
-                    write!(f, "{:E}", value)
+                    write!(f, "{value:E}")
                 } else if value.trunc() == value {
-                    write!(f, "{}.0", value)
+                    write!(f, "{value}.0")
                 } else {
-                    write!(f, "{}", value)
+                    write!(f, "{value}")
                 }
             }
             Field::Double(value) => {
                 if !(1e-15..=1e19).contains(&value) {
-                    write!(f, "{:E}", value)
+                    write!(f, "{value:E}")
                 } else if value.trunc() == value {
-                    write!(f, "{}.0", value)
+                    write!(f, "{value}.0")
                 } else {
-                    write!(f, "{}", value)
+                    write!(f, "{value}")
                 }
             }
             Field::Decimal(ref value) => {
                 write!(f, "{}", convert_decimal_to_string(value))
             }
-            Field::Str(ref value) => write!(f, "\"{}\"", value),
+            Field::Str(ref value) => write!(f, "\"{value}\""),
             Field::Bytes(ref value) => write!(f, "{:?}", value.data()),
             Field::Date(value) => write!(f, "{}", convert_date_to_string(value)),
             Field::TimestampMillis(value) => {
@@ -763,7 +763,7 @@ impl fmt::Display for Field {
             Field::TimestampMicros(value) => {
                 write!(f, "{}", convert_timestamp_micros_to_string(value))
             }
-            Field::Group(ref fields) => write!(f, "{}", fields),
+            Field::Group(ref fields) => write!(f, "{fields}"),
             Field::ListInternal(ref list) => {
                 let elems = &list.elements;
                 write!(f, "[")?;
@@ -778,7 +778,7 @@ impl fmt::Display for Field {
             Field::MapInternal(ref map) => {
                 let entries = &map.entries;
                 write!(f, "{{")?;
-                for (i, &(ref key, ref value)) in entries.iter().enumerate() {
+                for (i, (key, value)) in entries.iter().enumerate() {
                     key.fmt(f)?;
                     write!(f, " -> ")?;
                     value.fmt(f)?;
@@ -1248,7 +1248,7 @@ mod tests {
             ("a".to_string(), Field::Str("abc".to_string())),
         ];
         let row = Field::Group(make_row(fields));
-        assert_eq!(format!("{}", row), "{x: null, Y: 2, z: 3.1, a: \"abc\"}");
+        assert_eq!(format!("{row}"), "{x: null, Y: 2, z: 3.1, a: \"abc\"}");
 
         let row = Field::ListInternal(make_list(vec![
             Field::Int(2),
@@ -1256,14 +1256,14 @@ mod tests {
             Field::Null,
             Field::Int(12),
         ]));
-        assert_eq!(format!("{}", row), "[2, 1, null, 12]");
+        assert_eq!(format!("{row}"), "[2, 1, null, 12]");
 
         let row = Field::MapInternal(make_map(vec![
             (Field::Int(1), Field::Float(1.2)),
             (Field::Int(2), Field::Float(4.5)),
             (Field::Int(3), Field::Float(2.3)),
         ]));
-        assert_eq!(format!("{}", row), "{1 -> 1.2, 2 -> 4.5, 3 -> 2.3}");
+        assert_eq!(format!("{row}"), "{1 -> 1.2, 2 -> 4.5, 3 -> 2.3}");
     }
 
     #[test]
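The pattern change from `&(ref key, ref value)` to `(key, value)` relies on match ergonomics: matching a plain tuple pattern against a `&(K, V)` binds both fields by reference automatically, so the explicit `&`/`ref` are redundant (clippy's `needless_borrowed_reference` flags this form). Sketch:

    fn main() {
        let fields: Vec<(String, i32)> = vec![("x".into(), 1), ("y".into(), 2)];
        for (i, (key, value)) in fields.iter().enumerate() {
            // key: &String, value: &i32, bound by match ergonomics
            println!("{i}: {key} = {value}");
        }
    }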
diff --git a/parquet/src/record/reader.rs b/parquet/src/record/reader.rs
index a84693536..eb16c13f6 100644
--- a/parquet/src/record/reader.rs
+++ b/parquet/src/record/reader.rs
@@ -150,16 +150,14 @@ impl TreeBuilder {
                     assert_eq!(
                         field.get_fields().len(),
                         1,
-                        "Invalid list type {:?}",
-                        field
+                        "Invalid list type {field:?}"
                     );
 
                     let repeated_field = field.get_fields()[0].clone();
                     assert_eq!(
                         repeated_field.get_basic_info().repetition(),
                         Repetition::REPEATED,
-                        "Invalid list type {:?}",
-                        field
+                        "Invalid list type {field:?}"
                     );
 
                     if Reader::is_element_type(&repeated_field) {
@@ -208,27 +206,23 @@ impl TreeBuilder {
                     assert_eq!(
                         field.get_fields().len(),
                         1,
-                        "Invalid map type: {:?}",
-                        field
+                        "Invalid map type: {field:?}"
                     );
                     assert!(
                         !field.get_fields()[0].is_primitive(),
-                        "Invalid map type: {:?}",
-                        field
+                        "Invalid map type: {field:?}"
                     );
 
                     let key_value_type = field.get_fields()[0].clone();
                     assert_eq!(
                         key_value_type.get_basic_info().repetition(),
                         Repetition::REPEATED,
-                        "Invalid map type: {:?}",
-                        field
+                        "Invalid map type: {field:?}"
                     );
                     assert_eq!(
                         key_value_type.get_fields().len(),
                         2,
-                        "Invalid map type: {:?}",
-                        field
+                        "Invalid map type: {field:?}"
                     );
 
                     path.push(String::from(key_value_type.name()));
@@ -236,8 +230,7 @@ impl TreeBuilder {
                     let key_type = &key_value_type.get_fields()[0];
                     assert!(
                         key_type.is_primitive(),
-                        "Map key type is expected to be a primitive type, but found {:?}",
-                        key_type
+                        "Map key type is expected to be a primitive type, but found {key_type:?}"
                     );
                     let key_reader = self.reader_tree(
                         key_type.clone(),
@@ -411,7 +404,7 @@ impl Reader {
                 }
                 make_row(fields)
             }
-            _ => panic!("Cannot call read() on {}", self),
+            _ => panic!("Cannot call read() on {self}"),
         }
     }
 
@@ -611,7 +604,7 @@ impl fmt::Display for Reader {
             Reader::RepeatedReader(..) => "RepeatedReader",
             Reader::KeyValueReader(..) => "KeyValueReader",
         };
-        write!(f, "{}", s)
+        write!(f, "{s}")
     }
 }
 
diff --git a/parquet/src/record/triplet.rs b/parquet/src/record/triplet.rs
index b4b4ea2f4..b7318b3d3 100644
--- a/parquet/src/record/triplet.rs
+++ b/parquet/src/record/triplet.rs
@@ -200,8 +200,7 @@ impl<T: DataType> TypedTripletIter<T> {
     fn new(descr: ColumnDescPtr, batch_size: usize, column_reader: ColumnReader) -> Self {
         assert!(
             batch_size > 0,
-            "Expected positive batch size, found: {}",
-            batch_size
+            "Expected positive batch size, found: {batch_size}"
         );
 
         let max_def_level = descr.max_def_level();
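`assert!` messages are ordinary format strings, so inline capture works there too, and like `panic!` the message is only formatted when the assertion actually fails:

    fn check_batch_size(batch_size: usize) {
        assert!(
            batch_size > 0,
            "Expected positive batch size, found: {batch_size}"
        );
    }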
diff --git a/parquet/src/schema/printer.rs b/parquet/src/schema/printer.rs
index 5cfd30dd9..d90dc423c 100644
--- a/parquet/src/schema/printer.rs
+++ b/parquet/src/schema/printer.rs
@@ -62,7 +62,7 @@ pub fn print_parquet_metadata(out: &mut dyn io::Write, metadata: &ParquetMetaDat
     writeln!(out, "row groups:");
     writeln!(out);
     for (i, rg) in metadata.row_groups().iter().enumerate() {
-        writeln!(out, "row group {}:", i);
+        writeln!(out, "row group {i}:");
         print_dashes(out, 80);
         print_row_group_metadata(out, rg);
     }
@@ -75,7 +75,7 @@ pub fn print_file_metadata(out: &mut dyn io::Write, file_metadata: &FileMetaData
     writeln!(out, "version: {}", file_metadata.version());
     writeln!(out, "num of rows: {}", file_metadata.num_rows());
     if let Some(created_by) = file_metadata.created_by().as_ref() {
-        writeln!(out, "created by: {}", created_by);
+        writeln!(out, "created by: {created_by}");
     }
     if let Some(metadata) = file_metadata.key_value_metadata() {
         writeln!(out, "metadata:");
@@ -102,7 +102,7 @@ pub fn print_schema(out: &mut dyn io::Write, tp: &Type) {
         let mut printer = Printer::new(&mut s);
         printer.print(tp);
     }
-    writeln!(out, "{}", s);
+    writeln!(out, "{s}");
 }
 
 #[allow(unused_must_use)]
@@ -114,7 +114,7 @@ fn print_row_group_metadata(out: &mut dyn io::Write, rg_metadata: &RowGroupMetaD
     writeln!(out, "columns: ");
     for (i, cc) in rg_metadata.columns().iter().enumerate() {
         writeln!(out);
-        writeln!(out, "column {}:", i);
+        writeln!(out, "column {i}:");
         print_dashes(out, 80);
         print_column_chunk_metadata(out, cc);
     }
@@ -130,11 +130,11 @@ fn print_column_chunk_metadata(
     let encoding_strs: Vec<_> = cc_metadata
         .encodings()
         .iter()
-        .map(|e| format!("{}", e))
+        .map(|e| format!("{e}"))
         .collect();
     writeln!(out, "encodings: {}", encoding_strs.join(" "));
     let file_path_str = cc_metadata.file_path().unwrap_or("N/A");
-    writeln!(out, "file path: {}", file_path_str);
+    writeln!(out, "file path: {file_path_str}");
     writeln!(out, "file offset: {}", cc_metadata.file_offset());
     writeln!(out, "num of values: {}", cc_metadata.num_values());
     writeln!(
@@ -152,42 +152,42 @@ fn print_column_chunk_metadata(
         None => "N/A".to_owned(),
         Some(ipo) => ipo.to_string(),
     };
-    writeln!(out, "index page offset: {}", index_page_offset_str);
+    writeln!(out, "index page offset: {index_page_offset_str}");
     let dict_page_offset_str = match cc_metadata.dictionary_page_offset() {
         None => "N/A".to_owned(),
         Some(dpo) => dpo.to_string(),
     };
-    writeln!(out, "dictionary page offset: {}", dict_page_offset_str);
+    writeln!(out, "dictionary page offset: {dict_page_offset_str}");
     let statistics_str = match cc_metadata.statistics() {
         None => "N/A".to_owned(),
         Some(stats) => stats.to_string(),
     };
-    writeln!(out, "statistics: {}", statistics_str);
+    writeln!(out, "statistics: {statistics_str}");
     let bloom_filter_offset_str = match cc_metadata.bloom_filter_offset() {
         None => "N/A".to_owned(),
         Some(bfo) => bfo.to_string(),
     };
-    writeln!(out, "bloom filter offset: {}", bloom_filter_offset_str);
+    writeln!(out, "bloom filter offset: {bloom_filter_offset_str}");
     let offset_index_offset_str = match cc_metadata.offset_index_offset() {
         None => "N/A".to_owned(),
         Some(oio) => oio.to_string(),
     };
-    writeln!(out, "offset index offset: {}", offset_index_offset_str);
+    writeln!(out, "offset index offset: {offset_index_offset_str}");
     let offset_index_length_str = match cc_metadata.offset_index_length() {
         None => "N/A".to_owned(),
         Some(oil) => oil.to_string(),
     };
-    writeln!(out, "offset index length: {}", offset_index_length_str);
+    writeln!(out, "offset index length: {offset_index_length_str}");
     let column_index_offset_str = match cc_metadata.column_index_offset() {
         None => "N/A".to_owned(),
         Some(cio) => cio.to_string(),
     };
-    writeln!(out, "column index offset: {}", column_index_offset_str);
+    writeln!(out, "column index offset: {column_index_offset_str}");
     let column_index_length_str = match cc_metadata.column_index_length() {
         None => "N/A".to_owned(),
         Some(cil) => cil.to_string(),
     };
-    writeln!(out, "column index length: {}", column_index_length_str);
+    writeln!(out, "column index length: {column_index_length_str}");
     writeln!(out);
 }
 
@@ -242,10 +242,10 @@ fn print_logical_and_converted(
                 bit_width,
                 is_signed,
             } => {
-                format!("INTEGER({},{})", bit_width, is_signed)
+                format!("INTEGER({bit_width},{is_signed})")
             }
             LogicalType::Decimal { scale, precision } => {
-                format!("DECIMAL({},{})", precision, scale)
+                format!("DECIMAL({precision},{scale})")
             }
             LogicalType::Timestamp {
                 is_adjusted_to_u_t_c,
@@ -283,15 +283,15 @@ fn print_logical_and_converted(
                     // DECIMAL(9) - DECIMAL
                     let precision_scale = match (precision, scale) {
                         (p, s) if p > 0 && s > 0 => {
-                            format!("({},{})", p, s)
+                            format!("({p},{s})")
                         }
-                        (p, 0) if p > 0 => format!("({})", p),
+                        (p, 0) if p > 0 => format!("({p})"),
                         _ => String::new(),
                     };
-                    format!("{}{}", decimal, precision_scale)
+                    format!("{decimal}{precision_scale}")
                 }
                 other_converted_type => {
-                    format!("{}", other_converted_type)
+                    format!("{other_converted_type}")
                 }
             }
         }
@@ -313,9 +313,9 @@ impl<'a> Printer<'a> {
                 let phys_type_str = match physical_type {
                     PhysicalType::FIXED_LEN_BYTE_ARRAY => {
                         // We need to include length for fixed byte array
-                        format!("{} ({})", physical_type, type_length)
+                        format!("{physical_type} ({type_length})")
                     }
-                    _ => format!("{}", physical_type),
+                    _ => format!("{physical_type}"),
                 };
                 // Also print logical type if it is available
                 // If there is a logical type, do not print converted type
@@ -358,7 +358,7 @@ impl<'a> Printer<'a> {
                         0,
                     );
                     if !logical_str.is_empty() {
-                        write!(self.output, "({}) ", logical_str);
+                        write!(self.output, "({logical_str}) ");
                     }
                     writeln!(self.output, "{{");
                 } else {
diff --git a/parquet/src/schema/types.rs b/parquet/src/schema/types.rs
index 1b966b414..151f2b69f 100644
--- a/parquet/src/schema/types.rs
+++ b/parquet/src/schema/types.rs
@@ -949,9 +949,10 @@ impl SchemaDescriptor {
             self.leaves.len()
         );
 
-        *self.leaf_to_base.get(leaf).unwrap_or_else(|| {
-            panic!("Expected a value for index {} but found None", leaf)
-        })
+        *self
+            .leaf_to_base
+            .get(leaf)
+            .unwrap_or_else(|| panic!("Expected a value for index {leaf} but found None"))
     }
 
     fn column_root_of(&self, i: usize) -> &TypePtr {
@@ -1279,7 +1280,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: Cannot annotate Integer { bit_width: 8, is_signed: true } from INT64 for field 'foo'"
             );
         }
@@ -1292,7 +1293,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: BSON cannot annotate field 'foo' because it is not a BYTE_ARRAY field"
             );
         }
@@ -1306,7 +1307,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: DECIMAL can only annotate INT32, INT64, BYTE_ARRAY and FIXED_LEN_BYTE_ARRAY"
             );
         }
@@ -1323,7 +1324,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: DECIMAL logical type scale 32 must match self.scale -1 for field 'foo'"
             );
         }
@@ -1337,7 +1338,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: Invalid DECIMAL precision: -1"
             );
         }
@@ -1351,7 +1352,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: Invalid DECIMAL precision: 0"
             );
         }
@@ -1364,7 +1365,7 @@ mod tests {
             .build();
         assert!(result.is_err());
         if let Err(e) = result {
-            assert_eq!(format!("{}", e), "Parquet error: Invalid DECIMAL scale: -1");
+            assert_eq!(format!("{e}"), "Parquet error: Invalid DECIMAL scale: -1");
         }
 
         result = Type::primitive_type_builder("foo", PhysicalType::BYTE_ARRAY)
@@ -1376,7 +1377,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: Invalid DECIMAL: scale (2) cannot be greater than precision (1)"
             );
         }
@@ -1399,7 +1400,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: Cannot represent INT32 as DECIMAL with precision 18"
             );
         }
@@ -1413,7 +1414,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: Cannot represent INT64 as DECIMAL with precision 32"
             );
         }
@@ -1428,7 +1429,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: Cannot represent FIXED_LEN_BYTE_ARRAY as DECIMAL with length 5 and precision 12. The max precision can only be 11"
             );
         }
@@ -1440,7 +1441,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: UINT_8 cannot annotate field 'foo' because it is not a INT32 field"
             );
         }
@@ -1452,7 +1453,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: TIME_MICROS cannot annotate field 'foo' because it is not a INT64 field"
             );
         }
@@ -1464,7 +1465,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: INTERVAL cannot annotate field 'foo' because it is not a FIXED_LEN_BYTE_ARRAY(12) field"
             );
         }
@@ -1477,7 +1478,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: INTERVAL cannot annotate field 'foo' because it is not a FIXED_LEN_BYTE_ARRAY(12) field"
             );
         }
@@ -1489,7 +1490,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: ENUM cannot annotate field 'foo' because it is not a BYTE_ARRAY field"
             );
         }
@@ -1501,7 +1502,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: MAP cannot be applied to primitive field 'foo'"
             );
         }
@@ -1514,7 +1515,7 @@ mod tests {
         assert!(result.is_err());
         if let Err(e) = result {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: Invalid FIXED_LEN_BYTE_ARRAY length: -1 for field 'foo'"
             );
         }
@@ -1660,8 +1661,8 @@ mod tests {
 
         for i in 0..nleaves {
             let col = descr.column(i);
-            assert_eq!(col.max_def_level(), ex_max_def_levels[i], "{}", i);
-            assert_eq!(col.max_rep_level(), ex_max_rep_levels[i], "{}", i);
+            assert_eq!(col.max_def_level(), ex_max_def_levels[i], "{i}");
+            assert_eq!(col.max_rep_level(), ex_max_rep_levels[i], "{i}");
         }
 
         assert_eq!(descr.column(0).path().string(), "a");
@@ -1989,7 +1990,7 @@ mod tests {
         assert!(thrift_schema.is_err());
         if let Err(e) = thrift_schema {
             assert_eq!(
-                format!("{}", e),
+                format!("{e}"),
                 "Parquet error: Root schema must be Group type"
             );
         }
diff --git a/parquet/src/schema/visitor.rs b/parquet/src/schema/visitor.rs
index 9d28fa5e8..f83782c63 100644
--- a/parquet/src/schema/visitor.rs
+++ b/parquet/src/schema/visitor.rs
@@ -49,7 +49,7 @@ pub trait TypeVisitor<R, C> {
     fn visit_list(&mut self, list_type: TypePtr, context: C) -> Result<R> {
         match list_type.as_ref() {
             Type::PrimitiveType { .. } => {
-                panic!("{:?} is a list type and must be a group type", list_type)
+                panic!("{list_type:?} is a list type and must be a group type")
             }
             Type::GroupType {
                 basic_info: _,
diff --git a/parquet/src/util/bit_pack.rs b/parquet/src/util/bit_pack.rs
index 8cea20de2..94ab9578b 100644
--- a/parquet/src/util/bit_pack.rs
+++ b/parquet/src/util/bit_pack.rs
@@ -106,7 +106,7 @@ mod tests {
             let mut output = [0; 8];
             unpack8(&input, &mut output, i);
             for (idx, out) in output.iter().enumerate() {
-                assert_eq!(out.trailing_ones() as usize, i, "out[{}] = {}", idx, out);
+                assert_eq!(out.trailing_ones() as usize, i, "out[{idx}] = {out}");
             }
         }
 
@@ -114,7 +114,7 @@ mod tests {
             let mut output = [0; 16];
             unpack16(&input, &mut output, i);
             for (idx, out) in output.iter().enumerate() {
-                assert_eq!(out.trailing_ones() as usize, i, "out[{}] = {}", idx, out);
+                assert_eq!(out.trailing_ones() as usize, i, "out[{idx}] = {out}");
             }
         }
 
@@ -122,7 +122,7 @@ mod tests {
             let mut output = [0; 32];
             unpack32(&input, &mut output, i);
             for (idx, out) in output.iter().enumerate() {
-                assert_eq!(out.trailing_ones() as usize, i, "out[{}] = {}", idx, out);
+                assert_eq!(out.trailing_ones() as usize, i, "out[{idx}] = {out}");
             }
         }
 
@@ -130,7 +130,7 @@ mod tests {
             let mut output = [0; 64];
             unpack64(&input, &mut output, i);
             for (idx, out) in output.iter().enumerate() {
-                assert_eq!(out.trailing_ones() as usize, i, "out[{}] = {}", idx, out);
+                assert_eq!(out.trailing_ones() as usize, i, "out[{idx}] = {out}");
             }
         }
     }
diff --git a/parquet/src/util/bit_util.rs b/parquet/src/util/bit_util.rs
index c229ea3da..597190a46 100644
--- a/parquet/src/util/bit_util.rs
+++ b/parquet/src/util/bit_util.rs
@@ -638,8 +638,7 @@ impl BitReader {
             shift += 7;
             assert!(
                 shift <= MAX_VLQ_BYTE_LEN * 7,
-                "Num of bytes exceed MAX_VLQ_BYTE_LEN ({})",
-                MAX_VLQ_BYTE_LEN
+                "Num of bytes exceed MAX_VLQ_BYTE_LEN ({MAX_VLQ_BYTE_LEN})"
             );
             if byte & 0x80 == 0 {
                 return Some(v);
diff --git a/parquet/src/util/test_common/rand_gen.rs b/parquet/src/util/test_common/rand_gen.rs
index 4e54aa799..c36b9060c 100644
--- a/parquet/src/util/test_common/rand_gen.rs
+++ b/parquet/src/util/test_common/rand_gen.rs
@@ -194,7 +194,7 @@ pub fn make_pages<T: DataType>(
             Encoding::PLAIN => {
                 pb.add_values::<T>(encoding, &values[value_range]);
             }
-            enc => panic!("Unexpected encoding {}", enc),
+            enc => panic!("Unexpected encoding {enc}"),
         }
 
         let data_page = pb.consume();
diff --git a/parquet/tests/arrow_writer_layout.rs b/parquet/tests/arrow_writer_layout.rs
index bf24950e9..0c66fcd10 100644
--- a/parquet/tests/arrow_writer_layout.rs
+++ b/parquet/tests/arrow_writer_layout.rs
@@ -98,8 +98,7 @@ fn assert_layout(file_reader: &Bytes, meta: &ParquetMetaData, layout: &Layout) {
                 assert_eq!(
                     page.compressed_page_size as usize,
                     page_layout.compressed_size + page_layout.page_header_size,
-                    "index page {} size mismatch",
-                    idx
+                    "index page {idx} size mismatch"
                 );
                 let next_first_row_index = column_index
                     .get(idx + 1)
@@ -109,8 +108,7 @@ fn assert_layout(file_reader: &Bytes, meta: &ParquetMetaData, layout: &Layout) {
                 let num_rows = next_first_row_index - page.first_row_index;
                 assert_eq!(
                     num_rows as usize, page_layout.rows,
-                    "index page {} row count",
-                    idx
+                    "index page {idx} row count"
                 );
             }
         }
@@ -146,8 +144,7 @@ fn assert_layout(file_reader: &Bytes, meta: &ParquetMetaData, layout: &Layout) {
                 pages.len(),
                 column_layout.pages.len()
                     + column_layout.dictionary_page.is_some() as usize,
-                "page {} count mismatch",
-                idx
+                "page {idx} count mismatch"
             );
 
             let page_layouts = column_layout
@@ -160,8 +157,7 @@ fn assert_layout(file_reader: &Bytes, meta: &ParquetMetaData, layout: &Layout) {
                 assert_eq!(
                     page.buffer().len(),
                     page_layout.compressed_size,
-                    "page {} size mismatch",
-                    idx
+                    "page {idx} size mismatch"
                 );
                 assert_eq!(page.page_type(), page_layout.page_type);
             }
@@ -345,7 +341,7 @@ fn test_primitive() {
 #[test]
 fn test_string() {
     let array = Arc::new(StringArray::from_iter_values(
-        (0..2000).map(|x| format!("{:04}", x)),
+        (0..2000).map(|x| format!("{x:04}")),
     )) as _;
     let batch = RecordBatch::try_from_iter([("col", array)]).unwrap();
     let props = WriterProperties::builder()