Posted to commits@arrow.apache.org by al...@apache.org on 2023/04/01 10:21:50 UTC

[arrow-datafusion] branch main updated: Support `round()` function with two parameters (#5807)

This is an automated email from the ASF dual-hosted git repository.

alamb pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow-datafusion.git


The following commit(s) were added to refs/heads/main by this push:
     new 771c20ce2 Support `round()` function with two parameters (#5807)
771c20ce2 is described below

commit 771c20ce2f0ade29a2d334e4e8494e9fbd7a5940
Author: Liang-Chi Hsieh <vi...@gmail.com>
AuthorDate: Sat Apr 1 03:21:44 2023 -0700

    Support `round()` function with two parameters (#5807)
    
    * Physical round expression supports two parameters
    
    * fix
    
    * Update datafusion/physical-expr/src/math_expressions.rs
    
    Co-authored-by: Andrew Lamb <an...@nerdnetworks.org>
    
    * fix format
    
    * Add sqllogictests test for math
    
    ---------
    
    Co-authored-by: Andrew Lamb <an...@nerdnetworks.org>
---
 .../core/tests/sqllogictests/test_files/math.slt   |  89 ++++++++++++++
 datafusion/expr/src/expr_fn.rs                     |   5 +-
 datafusion/physical-expr/src/functions.rs          |   4 +-
 datafusion/physical-expr/src/math_expressions.rs   | 102 +++++++++++++++-
 datafusion/proto/src/logical_plan/from_proto.rs    |   7 +-
 datafusion/sql/tests/integration_test.rs           | 128 ++++++++++-----------
 6 files changed, 266 insertions(+), 69 deletions(-)

diff --git a/datafusion/core/tests/sqllogictests/test_files/math.slt b/datafusion/core/tests/sqllogictests/test_files/math.slt
new file mode 100644
index 000000000..84f0d4988
--- /dev/null
+++ b/datafusion/core/tests/sqllogictests/test_files/math.slt
@@ -0,0 +1,89 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+
+#   http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+##########
+## Math expression Tests
+##########
+
+statement ok
+CREATE external table aggregate_simple(c1 real, c2 double, c3 boolean) STORED as CSV WITH HEADER ROW LOCATION 'tests/data/aggregate_simple.csv';
+
+# Round
+query R
+SELECT ROUND(c1) FROM aggregate_simple
+----
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+
+# Round
+query R
+SELECT round(c1/3, 2) FROM aggregate_simple order by c1
+----
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+
+# Round
+query R
+SELECT round(c1, 4) FROM aggregate_simple order by c1
+----
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0.0001
+0.0001
+0.0001
+0.0001
+0.0001
+
+# Round
+query RRRRRRRR
+SELECT round(125.2345, -3), round(125.2345, -2), round(125.2345, -1), round(125.2345), round(125.2345, 0), round(125.2345, 1), round(125.2345, 2), round(125.2345, 3)
+----
+0 100 130 125 125 125.2 125.23 125.235
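
For context, the two-argument form exercised above can also be run end to end
through the public API. A minimal sketch, assuming the standard `SessionContext`
from `datafusion::prelude` and a tokio runtime (neither is part of this commit):

    use datafusion::prelude::*;

    #[tokio::main]
    async fn main() -> datafusion::error::Result<()> {
        let ctx = SessionContext::new();
        // Negative decimal places round to tens/hundreds, matching the
        // expected values in math.slt above (100 and 125.23).
        let df = ctx
            .sql("SELECT round(125.2345, -2), round(125.2345, 2)")
            .await?;
        df.show().await?;
        Ok(())
    }
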
diff --git a/datafusion/expr/src/expr_fn.rs b/datafusion/expr/src/expr_fn.rs
index b20629946..abf0e1be9 100644
--- a/datafusion/expr/src/expr_fn.rs
+++ b/datafusion/expr/src/expr_fn.rs
@@ -464,7 +464,7 @@ scalar_expr!(
     num,
     "nearest integer greater than or equal to argument"
 );
-scalar_expr!(Round, round, num, "round to nearest integer");
+nary_scalar_expr!(Round, round, "round to nearest integer");
 scalar_expr!(Trunc, trunc, num, "truncate toward zero");
 scalar_expr!(Abs, abs, num, "absolute value");
 scalar_expr!(Signum, signum, num, "sign of the argument (-1, 0, +1) ");
@@ -766,7 +766,8 @@ mod test {
         test_unary_scalar_expr!(Atan, atan);
         test_unary_scalar_expr!(Floor, floor);
         test_unary_scalar_expr!(Ceil, ceil);
-        test_unary_scalar_expr!(Round, round);
+        test_nary_scalar_expr!(Round, round, input);
+        test_nary_scalar_expr!(Round, round, input, decimal_places);
         test_unary_scalar_expr!(Trunc, trunc);
         test_unary_scalar_expr!(Abs, abs);
         test_unary_scalar_expr!(Signum, signum);
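
With the switch to `nary_scalar_expr!` above, the `round` builder now takes a
`Vec<Expr>` rather than a single expression (the `from_proto` change later in
this diff collects the parsed arguments into the same shape). A minimal sketch
of a two-argument call; the exact re-export path is an assumption here and
depends on how `expr_fn` is surfaced from the crate root:

    use datafusion_expr::{col, expr_fn::round, lit, Expr};

    // Sketch: the logical expression `round(c1, 2)`; passing a one-element
    // Vec keeps the old single-argument behaviour.
    fn round_c1_to_two_places() -> Expr {
        round(vec![col("c1"), lit(2_i64)])
    }
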
diff --git a/datafusion/physical-expr/src/functions.rs b/datafusion/physical-expr/src/functions.rs
index 0d4c895b4..035e72136 100644
--- a/datafusion/physical-expr/src/functions.rs
+++ b/datafusion/physical-expr/src/functions.rs
@@ -347,7 +347,9 @@ pub fn create_physical_fun(
         BuiltinScalarFunction::Log10 => Arc::new(math_expressions::log10),
         BuiltinScalarFunction::Log2 => Arc::new(math_expressions::log2),
         BuiltinScalarFunction::Random => Arc::new(math_expressions::random),
-        BuiltinScalarFunction::Round => Arc::new(math_expressions::round),
+        BuiltinScalarFunction::Round => {
+            Arc::new(|args| make_scalar_function(math_expressions::round)(args))
+        }
         BuiltinScalarFunction::Signum => Arc::new(math_expressions::signum),
         BuiltinScalarFunction::Sin => Arc::new(math_expressions::sin),
         BuiltinScalarFunction::Sqrt => Arc::new(math_expressions::sqrt),
diff --git a/datafusion/physical-expr/src/math_expressions.rs b/datafusion/physical-expr/src/math_expressions.rs
index 7de887b2e..ea89c2cd4 100644
--- a/datafusion/physical-expr/src/math_expressions.rs
+++ b/datafusion/physical-expr/src/math_expressions.rs
@@ -113,6 +113,18 @@ macro_rules! make_function_inputs2 {
             })
             .collect::<$ARRAY_TYPE>()
     }};
+    ($ARG1: expr, $ARG2: expr, $NAME1:expr, $NAME2: expr, $ARRAY_TYPE1:ident, $ARRAY_TYPE2:ident, $FUNC: block) => {{
+        let arg1 = downcast_arg!($ARG1, $NAME1, $ARRAY_TYPE1);
+        let arg2 = downcast_arg!($ARG2, $NAME2, $ARRAY_TYPE2);
+
+        arg1.iter()
+            .zip(arg2.iter())
+            .map(|(a1, a2)| match (a1, a2) {
+                (Some(a1), Some(a2)) => Some($FUNC(a1, a2.try_into().ok()?)),
+                _ => None,
+            })
+            .collect::<$ARRAY_TYPE1>()
+    }};
 }
 
 math_unary_function!("sqrt", sqrt);
@@ -124,7 +136,6 @@ math_unary_function!("acos", acos);
 math_unary_function!("atan", atan);
 math_unary_function!("floor", floor);
 math_unary_function!("ceil", ceil);
-math_unary_function!("round", round);
 math_unary_function!("trunc", trunc);
 math_unary_function!("abs", abs);
 math_unary_function!("signum", signum);
@@ -149,6 +160,59 @@ pub fn random(args: &[ColumnarValue]) -> Result<ColumnarValue> {
     Ok(ColumnarValue::Array(Arc::new(array)))
 }
 
+/// Round SQL function
+pub fn round(args: &[ArrayRef]) -> Result<ArrayRef> {
+    if args.len() != 1 && args.len() != 2 {
+        return Err(DataFusionError::Internal(format!(
+            "round function requires one or two arguments, got {}",
+            args.len()
+        )));
+    }
+
+    let mut decimal_places =
+        &(Arc::new(Int64Array::from_value(0, args[0].len())) as ArrayRef);
+
+    if args.len() == 2 {
+        decimal_places = &args[1];
+    }
+
+    match args[0].data_type() {
+        DataType::Float64 => Ok(Arc::new(make_function_inputs2!(
+            &args[0],
+            decimal_places,
+            "value",
+            "decimal_places",
+            Float64Array,
+            Int64Array,
+            {
+                |value: f64, decimal_places: i64| {
+                    (value * 10.0_f64.powi(decimal_places.try_into().unwrap())).round()
+                        / 10.0_f64.powi(decimal_places.try_into().unwrap())
+                }
+            }
+        )) as ArrayRef),
+
+        DataType::Float32 => Ok(Arc::new(make_function_inputs2!(
+            &args[0],
+            decimal_places,
+            "value",
+            "decimal_places",
+            Float32Array,
+            Int64Array,
+            {
+                |value: f32, decimal_places: i64| {
+                    (value * 10.0_f32.powi(decimal_places.try_into().unwrap())).round()
+                        / 10.0_f32.powi(decimal_places.try_into().unwrap())
+                }
+            }
+        )) as ArrayRef),
+
+        other => Err(DataFusionError::Internal(format!(
+            "Unsupported data type {other:?} for function round"
+        ))),
+    }
+}
+
 /// Power SQL function
 pub fn power(args: &[ArrayRef]) -> Result<ArrayRef> {
     match args[0].data_type() {
@@ -365,4 +429,40 @@ mod tests {
         assert_eq!(floats.value(2), 4.0);
         assert_eq!(floats.value(3), 4.0);
     }
+
+    #[test]
+    fn test_round_f32() {
+        let args: Vec<ArrayRef> = vec![
+            Arc::new(Float32Array::from(vec![125.2345; 10])), // input
+            Arc::new(Int64Array::from(vec![0, 1, 2, 3, 4, 5, -1, -2, -3, -4])), // decimal_places
+        ];
+
+        let result = round(&args).expect("failed to initialize function round");
+        let floats =
+            as_float32_array(&result).expect("failed to initialize function round");
+
+        let expected = Float32Array::from(vec![
+            125.0, 125.2, 125.23, 125.235, 125.2345, 125.2345, 130.0, 100.0, 0.0, 0.0,
+        ]);
+
+        assert_eq!(floats, &expected);
+    }
+
+    #[test]
+    fn test_round_f64() {
+        let args: Vec<ArrayRef> = vec![
+            Arc::new(Float64Array::from(vec![125.2345; 10])), // input
+            Arc::new(Int64Array::from(vec![0, 1, 2, 3, 4, 5, -1, -2, -3, -4])), // decimal_places
+        ];
+
+        let result = round(&args).expect("failed to initialize function round");
+        let floats =
+            as_float64_array(&result).expect("failed to initialize function round");
+
+        let expected = Float64Array::from(vec![
+            125.0, 125.2, 125.23, 125.235, 125.2345, 125.2345, 130.0, 100.0, 0.0, 0.0,
+        ]);
+
+        assert_eq!(floats, &expected);
+    }
 }
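
For reference, the kernel added above uses the classic scale-round-descale
trick: multiply by 10^d, round to the nearest integer, then divide by 10^d,
so a negative d rounds to tens, hundreds, and so on. A standalone sketch of
the same arithmetic (plain Rust, no DataFusion types):

    // Same formula as the Float64 branch above, for a single value.
    fn round_to(value: f64, decimal_places: i32) -> f64 {
        let factor = 10.0_f64.powi(decimal_places);
        (value * factor).round() / factor
    }

    fn main() {
        // Mirrors the expectations in math.slt and test_round_f64 above.
        assert_eq!(round_to(125.2345, 2), 125.23);
        assert_eq!(round_to(125.2345, -2), 100.0);
        assert_eq!(round_to(125.2345, -3), 0.0);
    }
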
diff --git a/datafusion/proto/src/logical_plan/from_proto.rs b/datafusion/proto/src/logical_plan/from_proto.rs
index 845cb60d1..755b942e4 100644
--- a/datafusion/proto/src/logical_plan/from_proto.rs
+++ b/datafusion/proto/src/logical_plan/from_proto.rs
@@ -1139,7 +1139,12 @@ pub fn parse_expr(
                 ScalarFunction::Log10 => Ok(log10(parse_expr(&args[0], registry)?)),
                 ScalarFunction::Floor => Ok(floor(parse_expr(&args[0], registry)?)),
                 ScalarFunction::Ceil => Ok(ceil(parse_expr(&args[0], registry)?)),
-                ScalarFunction::Round => Ok(round(parse_expr(&args[0], registry)?)),
+                ScalarFunction::Round => Ok(round(
+                    args.to_owned()
+                        .iter()
+                        .map(|expr| parse_expr(expr, registry))
+                        .collect::<Result<Vec<_>, _>>()?,
+                )),
                 ScalarFunction::Trunc => Ok(trunc(parse_expr(&args[0], registry)?)),
                 ScalarFunction::Abs => Ok(abs(parse_expr(&args[0], registry)?)),
                 ScalarFunction::Signum => Ok(signum(parse_expr(&args[0], registry)?)),
diff --git a/datafusion/sql/tests/integration_test.rs b/datafusion/sql/tests/integration_test.rs
index ef6142305..ea5b35937 100644
--- a/datafusion/sql/tests/integration_test.rs
+++ b/datafusion/sql/tests/integration_test.rs
@@ -17,22 +17,23 @@
 
 #[cfg(test)]
 use std::collections::HashMap;
-use std::sync::Arc;
-use std::vec;
+use std::{sync::Arc, vec};
 
 use arrow_schema::*;
 use sqlparser::dialect::{Dialect, GenericDialect, HiveDialect, MySqlDialect};
 
-use datafusion_common::config::ConfigOptions;
-use datafusion_common::TableReference;
-use datafusion_common::{assert_contains, ScalarValue};
-use datafusion_common::{DataFusionError, Result};
-use datafusion_expr::logical_plan::LogicalPlan;
-use datafusion_expr::logical_plan::Prepare;
-use datafusion_expr::TableSource;
-use datafusion_expr::{AggregateUDF, ScalarUDF};
-use datafusion_sql::parser::DFParser;
-use datafusion_sql::planner::{ContextProvider, ParserOptions, SqlToRel};
+use datafusion_common::{
+    assert_contains, config::ConfigOptions, DataFusionError, Result, ScalarValue,
+    TableReference,
+};
+use datafusion_expr::{
+    logical_plan::{LogicalPlan, Prepare},
+    AggregateUDF, ScalarUDF, TableSource,
+};
+use datafusion_sql::{
+    parser::DFParser,
+    planner::{ContextProvider, ParserOptions, SqlToRel},
+};
 
 use rstest::rstest;
 
@@ -386,8 +387,7 @@ fn select_compound_filter() {
 
 #[test]
 fn test_timestamp_filter() {
-    let sql =
-            "SELECT state FROM person WHERE birth_date < CAST (158412331400600000 as timestamp)";
+    let sql = "SELECT state FROM person WHERE birth_date < CAST (158412331400600000 as timestamp)";
 
     let expected = "Projection: person.state\
             \n  Filter: person.birth_date < CAST(Int64(158412331400600000) AS Timestamp(Nanosecond, None))\
@@ -505,9 +505,9 @@ fn table_with_column_alias_number_cols() {
                    FROM lineitem l (a, b)";
     let err = logical_plan(sql).expect_err("query should have failed");
     assert_eq!(
-            "Plan(\"Source table contains 3 columns but only 2 names given as column alias\")",
-            format!("{err:?}")
-        );
+        "Plan(\"Source table contains 3 columns but only 2 names given as column alias\")",
+        format!("{err:?}")
+    );
 }
 
 #[test]
@@ -622,7 +622,8 @@ fn using_join_multiple_keys_subquery() {
 #[test]
 fn using_join_multiple_keys_qualified_wildcard_select() {
     let sql = "SELECT a.* FROM person a join person b using (id, age)";
-    let expected = "Projection: a.id, a.first_name, a.last_name, a.age, a.state, a.salary, a.birth_date, a.😀\
+    let expected =
+        "Projection: a.id, a.first_name, a.last_name, a.age, a.state, a.salary, a.birth_date, a.😀\
                         \n  Inner Join: Using a.id = b.id, a.age = b.age\
                         \n    SubqueryAlias: a\
                         \n      TableScan: person\
@@ -930,8 +931,7 @@ fn select_aggregate_compound_aliased_with_group_by_with_having_referencing_compo
                    FROM person
                    GROUP BY first_name
                    HAVING max_age_plus_one > 100";
-    let expected =
-            "Projection: person.first_name, MAX(person.age) + Int64(1) AS max_age_plus_one\
+    let expected = "Projection: person.first_name, MAX(person.age) + Int64(1) AS max_age_plus_one\
                         \n  Filter: MAX(person.age) + Int64(1) > Int64(100)\
                         \n    Aggregate: groupBy=[[person.first_name]], aggr=[[MAX(person.age)]]\
                         \n      TableScan: person";
@@ -1080,11 +1080,11 @@ fn select_simple_aggregate_repeated_aggregate_with_repeated_aliases() {
 #[test]
 fn select_simple_aggregate_with_groupby() {
     quick_test(
-            "SELECT state, MIN(age), MAX(age) FROM person GROUP BY state",
-            "Projection: person.state, MIN(person.age), MAX(person.age)\
+        "SELECT state, MIN(age), MAX(age) FROM person GROUP BY state",
+        "Projection: person.state, MIN(person.age), MAX(person.age)\
             \n  Aggregate: groupBy=[[person.state]], aggr=[[MIN(person.age), MAX(person.age)]]\
             \n    TableScan: person",
-        );
+    );
 }
 
 #[test]
@@ -1110,11 +1110,11 @@ fn select_simple_aggregate_with_groupby_with_aliases_repeated() {
 #[test]
 fn select_simple_aggregate_with_groupby_column_unselected() {
     quick_test(
-            "SELECT MIN(age), MAX(age) FROM person GROUP BY state",
-            "Projection: MIN(person.age), MAX(person.age)\
+        "SELECT MIN(age), MAX(age) FROM person GROUP BY state",
+        "Projection: MIN(person.age), MAX(person.age)\
              \n  Aggregate: groupBy=[[person.state]], aggr=[[MIN(person.age), MAX(person.age)]]\
              \n    TableScan: person",
-        );
+    );
 }
 
 #[test]
@@ -1184,27 +1184,27 @@ fn select_array_non_literal_type() {
 #[test]
 fn select_simple_aggregate_with_groupby_and_column_is_in_aggregate_and_groupby() {
     quick_test(
-            "SELECT MAX(first_name) FROM person GROUP BY first_name",
-            "Projection: MAX(person.first_name)\
+        "SELECT MAX(first_name) FROM person GROUP BY first_name",
+        "Projection: MAX(person.first_name)\
              \n  Aggregate: groupBy=[[person.first_name]], aggr=[[MAX(person.first_name)]]\
              \n    TableScan: person",
-        );
+    );
 }
 
 #[test]
 fn select_simple_aggregate_with_groupby_can_use_positions() {
     quick_test(
-            "SELECT state, age AS b, COUNT(1) FROM person GROUP BY 1, 2",
-            "Projection: person.state, person.age AS b, COUNT(Int64(1))\
+        "SELECT state, age AS b, COUNT(1) FROM person GROUP BY 1, 2",
+        "Projection: person.state, person.age AS b, COUNT(Int64(1))\
              \n  Aggregate: groupBy=[[person.state, person.age]], aggr=[[COUNT(Int64(1))]]\
              \n    TableScan: person",
-        );
+    );
     quick_test(
-            "SELECT state, age AS b, COUNT(1) FROM person GROUP BY 2, 1",
-            "Projection: person.state, person.age AS b, COUNT(Int64(1))\
+        "SELECT state, age AS b, COUNT(1) FROM person GROUP BY 2, 1",
+        "Projection: person.state, person.age AS b, COUNT(Int64(1))\
              \n  Aggregate: groupBy=[[person.age, person.state]], aggr=[[COUNT(Int64(1))]]\
              \n    TableScan: person",
-        );
+    );
 }
 
 #[test]
@@ -1257,27 +1257,27 @@ fn select_simple_aggregate_with_groupby_aggregate_repeated_and_one_has_alias() {
 #[test]
 fn select_simple_aggregate_with_groupby_non_column_expression_unselected() {
     quick_test(
-            "SELECT MIN(first_name) FROM person GROUP BY age + 1",
-            "Projection: MIN(person.first_name)\
+        "SELECT MIN(first_name) FROM person GROUP BY age + 1",
+        "Projection: MIN(person.first_name)\
              \n  Aggregate: groupBy=[[person.age + Int64(1)]], aggr=[[MIN(person.first_name)]]\
              \n    TableScan: person",
-        );
+    );
 }
 
 #[test]
 fn select_simple_aggregate_with_groupby_non_column_expression_selected_and_resolvable() {
     quick_test(
-            "SELECT age + 1, MIN(first_name) FROM person GROUP BY age + 1",
-            "Projection: person.age + Int64(1), MIN(person.first_name)\
+        "SELECT age + 1, MIN(first_name) FROM person GROUP BY age + 1",
+        "Projection: person.age + Int64(1), MIN(person.first_name)\
              \n  Aggregate: groupBy=[[person.age + Int64(1)]], aggr=[[MIN(person.first_name)]]\
              \n    TableScan: person",
-        );
+    );
     quick_test(
-            "SELECT MIN(first_name), age + 1 FROM person GROUP BY age + 1",
-            "Projection: MIN(person.first_name), person.age + Int64(1)\
+        "SELECT MIN(first_name), age + 1 FROM person GROUP BY age + 1",
+        "Projection: MIN(person.first_name), person.age + Int64(1)\
              \n  Aggregate: groupBy=[[person.age + Int64(1)]], aggr=[[MIN(person.first_name)]]\
              \n    TableScan: person",
-        );
+    );
 }
 
 #[test]
@@ -1294,8 +1294,7 @@ fn select_simple_aggregate_with_groupby_non_column_expression_nested_and_resolva
 fn select_simple_aggregate_with_groupby_non_column_expression_nested_and_not_resolvable()
 {
     // The query should fail, because age + 9 is not in the group by.
-    let sql =
-            "SELECT ((age + 1) / 2) * (age + 9), MIN(first_name) FROM person GROUP BY age + 1";
+    let sql = "SELECT ((age + 1) / 2) * (age + 9), MIN(first_name) FROM person GROUP BY age + 1";
     let err = logical_plan(sql).expect_err("query should have failed");
     assert_eq!(
             "Plan(\"Projection references non-aggregate values: Expression person.age could not be resolved from available columns: person.age + Int64(1), MIN(person.first_name)\")",
@@ -1573,11 +1572,11 @@ fn select_7480_2() {
     let sql = "SELECT c1, c13, MIN(c12) FROM aggregate_test_100 GROUP BY c1";
     let err = logical_plan(sql).expect_err("query should have failed");
     assert_eq!(
-            "Plan(\"Projection references non-aggregate values: \
+        "Plan(\"Projection references non-aggregate values: \
             Expression aggregate_test_100.c13 could not be resolved from available columns: \
             aggregate_test_100.c1, MIN(aggregate_test_100.c12)\")",
-            format!("{err:?}")
-        );
+        format!("{err:?}")
+    );
 }
 
 #[test]
@@ -1638,11 +1637,11 @@ fn create_external_table_with_compression_type() {
 
     // negative case
     let sqls = vec![
-            "CREATE EXTERNAL TABLE t STORED AS AVRO COMPRESSION TYPE GZIP LOCATION 'foo.avro'",
-            "CREATE EXTERNAL TABLE t STORED AS AVRO COMPRESSION TYPE BZIP2 LOCATION 'foo.avro'",
-            "CREATE EXTERNAL TABLE t STORED AS PARQUET COMPRESSION TYPE GZIP LOCATION 'foo.parquet'",
-            "CREATE EXTERNAL TABLE t STORED AS PARQUET COMPRESSION TYPE BZIP2 LOCATION 'foo.parquet'",
-        ];
+        "CREATE EXTERNAL TABLE t STORED AS AVRO COMPRESSION TYPE GZIP LOCATION 'foo.avro'",
+        "CREATE EXTERNAL TABLE t STORED AS AVRO COMPRESSION TYPE BZIP2 LOCATION 'foo.avro'",
+        "CREATE EXTERNAL TABLE t STORED AS PARQUET COMPRESSION TYPE GZIP LOCATION 'foo.parquet'",
+        "CREATE EXTERNAL TABLE t STORED AS PARQUET COMPRESSION TYPE BZIP2 LOCATION 'foo.parquet'",
+    ];
     for sql in sqls {
         let err = logical_plan(sql).expect_err("query should have failed");
         assert_eq!(
@@ -1981,9 +1980,9 @@ fn union_with_incompatible_data_types() {
     let sql = "SELECT 'a' a UNION ALL SELECT true a";
     let err = logical_plan(sql).expect_err("query should have failed");
     assert_eq!(
-            "Plan(\"UNION Column a (type: Boolean) is not compatible with column a (type: Utf8)\")",
-            format!("{err:?}")
-        );
+        "Plan(\"UNION Column a (type: Boolean) is not compatible with column a (type: Utf8)\")",
+        format!("{err:?}")
+    );
 }
 
 #[test]
@@ -2039,8 +2038,7 @@ fn empty_over_plus() {
 
 #[test]
 fn empty_over_multiple() {
-    let sql =
-            "SELECT order_id, MAX(qty) OVER (), min(qty) over (), aVg(qty) OVER () from orders";
+    let sql = "SELECT order_id, MAX(qty) OVER (), min(qty) over (), aVg(qty) OVER () from orders";
     let expected = "\
         Projection: orders.order_id, MAX(orders.qty) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING, MIN(orders.qty) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING, AVG(orders.qty) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING\
         \n  WindowAggr: windowExpr=[[MAX(orders.qty) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING, MIN(orders.qty) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING, AVG(orders.qty) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING]]\
@@ -2182,7 +2180,6 @@ fn over_order_by_sort_keys_sorting() {
 ///                     Sort Key: order_id, qty
 ///                     ->  Seq Scan on orders  (cost=0.00..20.00 rows=1000 width=8)
 /// ```
-///
 #[test]
 fn over_order_by_sort_keys_sorting_prefix_compacting() {
     let sql = "SELECT order_id, MAX(qty) OVER (ORDER BY order_id), SUM(qty) OVER (), MIN(qty) OVER (ORDER BY order_id, qty) from orders";
@@ -2256,7 +2253,7 @@ fn over_partition_by_order_by() {
 #[test]
 fn over_partition_by_order_by_no_dup() {
     let sql =
-            "SELECT order_id, MAX(qty) OVER (PARTITION BY order_id, qty ORDER BY qty) from orders";
+        "SELECT order_id, MAX(qty) OVER (PARTITION BY order_id, qty ORDER BY qty) from orders";
     let expected = "\
         Projection: orders.order_id, MAX(orders.qty) PARTITION BY [orders.order_id, orders.qty] ORDER BY [orders.qty ASC NULLS LAST] RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW\
         \n  WindowAggr: windowExpr=[[MAX(orders.qty) PARTITION BY [orders.order_id, orders.qty] ORDER BY [orders.qty ASC NULLS LAST] RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW]]\
@@ -2730,7 +2727,8 @@ fn not_in_subquery_correlated() {
 
 #[test]
 fn scalar_subquery() {
-    let sql = "SELECT p.id, (SELECT MAX(id) FROM person WHERE last_name = p.last_name) FROM person p";
+    let sql =
+        "SELECT p.id, (SELECT MAX(id) FROM person WHERE last_name = p.last_name) FROM person p";
 
     let expected = "Projection: p.id, (<subquery>)\
         \n  Subquery:\
@@ -2951,7 +2949,8 @@ fn hive_aggregate_with_filter() -> Result<()> {
     let plan = logical_plan_with_dialect(sql, dialect)?;
     let expected = "Projection: SUM(person.age) FILTER (WHERE age > Int64(4))\
         \n  Aggregate: groupBy=[[]], aggr=[[SUM(person.age) FILTER (WHERE age > Int64(4))]]\
-        \n    TableScan: person".to_string();
+        \n    TableScan: person"
+        .to_string();
     assert_eq!(plan.display_indent().to_string(), expected);
     Ok(())
 }
@@ -3045,7 +3044,8 @@ fn test_distribute_by() {
 
 #[test]
 fn test_double_quoted_literal_string() {
-    // Assert double quoted literal string is parsed correctly like single quoted one in specific dialect.
+    // Assert double quoted literal string is parsed correctly like single quoted one in specific
+    // dialect.
     let dialect = &MySqlDialect {};
     let single_quoted_res = format!(
         "{:?}",