Posted to commits@arrow.apache.org by al...@apache.org on 2022/02/17 14:02:07 UTC

[arrow-datafusion] branch master updated: Fix warnings with default features disabled (#1845)

This is an automated email from the ASF dual-hosted git repository.

alamb pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow-datafusion.git


The following commit(s) were added to refs/heads/master by this push:
     new 67fe623  Fix warnings with default features disabled (#1845)
67fe623 is described below

commit 67fe623bd5882d0ce567e3aa3145a96fdf563443
Author: Andrew Lamb <an...@nerdnetworks.org>
AuthorDate: Thu Feb 17 09:02:03 2022 -0500

    Fix warnings with default features disabled (#1845)
    
    * Fix warnings with default features disabled
    
    * do not run tests that require unicode
    
    * more fixing up
---
 datafusion/src/sql/planner.rs           | 80 ++++++++++++++++-----------------
 datafusion/tests/dataframe_functions.rs | 15 +++++++
 datafusion/tests/sql/mod.rs             | 23 +---------
 datafusion/tests/sql/unicode.rs         | 21 +++++++++
 4 files changed, 77 insertions(+), 62 deletions(-)
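
For context: unicode_expressions is part of datafusion's default feature set, so these
warnings only surface when default features are turned off (something along the lines of
cargo check --no-default-features inside the datafusion crate should reproduce them).
The planner was the main offender, because the Substring match arm bound its fields
before deciding, via cfg blocks, whether it could actually use them. A trimmed-down
sketch of that before-state, with a made-up enum and simplified types (the feature would
need to be declared in the sketch crate's Cargo.toml):

    enum Op {
        Substring { from: i64 },
    }

    fn plan_before(op: Op) -> Result<i64, String> {
        match op {
            Op::Substring { from } => {
                // With the feature off, this block is stripped, but the `from`
                // binding above still exists and is never read, so the build
                // emits an "unused variable" warning.
                #[cfg(feature = "unicode_expressions")]
                {
                    Ok(from)
                }
                #[cfg(not(feature = "unicode_expressions"))]
                {
                    Err("substring requires unicode_expressions".to_string())
                }
            }
        }
    }

    fn main() {
        println!("{:?}", plan_before(Op::Substring { from: 2 }));
    }
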

diff --git a/datafusion/src/sql/planner.rs b/datafusion/src/sql/planner.rs
index 382dbf1..8b59ccd 100644
--- a/datafusion/src/sql/planner.rs
+++ b/datafusion/src/sql/planner.rs
@@ -1581,52 +1581,52 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
                 ref right,
             } => self.parse_sql_binary_op(left, op, right, schema),
 
+            #[cfg(feature = "unicode_expressions")]
             SQLExpr::Substring {
                 expr,
                 substring_from,
                 substring_for,
             } => {
-                #[cfg(feature = "unicode_expressions")]
-                {
-                    let arg = self.sql_expr_to_logical_expr(expr, schema)?;
-                    let args = match (substring_from, substring_for) {
-                        (Some(from_expr), Some(for_expr)) => {
-                            let from_logic =
-                                self.sql_expr_to_logical_expr(from_expr, schema)?;
-                            let for_logic =
-                                self.sql_expr_to_logical_expr(for_expr, schema)?;
-                            vec![arg, from_logic, for_logic]
-                        }
-                        (Some(from_expr), None) => {
-                            let from_logic =
-                                self.sql_expr_to_logical_expr(from_expr, schema)?;
-                            vec![arg, from_logic]
-                        }
-                        (None, Some(for_expr)) => {
-                            let from_logic = Expr::Literal(ScalarValue::Int64(Some(1)));
-                            let for_logic =
-                                self.sql_expr_to_logical_expr(for_expr, schema)?;
-                            vec![arg, from_logic, for_logic]
-                        }
-                        _ => {
-                            return Err(DataFusionError::Plan(format!(
-                                "Substring without for/from is not valid {:?}",
-                                sql
-                            )))
-                        }
-                    };
-                    Ok(Expr::ScalarFunction {
-                        fun: functions::BuiltinScalarFunction::Substr,
-                        args,
-                    })
-                }
+                let arg = self.sql_expr_to_logical_expr(expr, schema)?;
+                let args = match (substring_from, substring_for) {
+                    (Some(from_expr), Some(for_expr)) => {
+                        let from_logic =
+                            self.sql_expr_to_logical_expr(from_expr, schema)?;
+                        let for_logic =
+                            self.sql_expr_to_logical_expr(for_expr, schema)?;
+                        vec![arg, from_logic, for_logic]
+                    }
+                    (Some(from_expr), None) => {
+                        let from_logic =
+                            self.sql_expr_to_logical_expr(from_expr, schema)?;
+                        vec![arg, from_logic]
+                    }
+                    (None, Some(for_expr)) => {
+                        let from_logic = Expr::Literal(ScalarValue::Int64(Some(1)));
+                        let for_logic =
+                            self.sql_expr_to_logical_expr(for_expr, schema)?;
+                        vec![arg, from_logic, for_logic]
+                    }
+                    _ => {
+                        return Err(DataFusionError::Plan(format!(
+                            "Substring without for/from is not valid {:?}",
+                            sql
+                        )))
+                    }
+                };
+                Ok(Expr::ScalarFunction {
+                    fun: functions::BuiltinScalarFunction::Substr,
+                    args,
+                })
+            }
 
-                #[cfg(not(feature = "unicode_expressions"))]
-                {
-                    Err(DataFusionError::Internal(
-                        "statement substring requires compilation with feature flag: unicode_expressions.".to_string()
-                    ))
-                }
+            #[cfg(not(feature = "unicode_expressions"))]
+            SQLExpr::Substring {
+                ..
+            } => {
+                Err(DataFusionError::Internal(
+                    "statement substring requires compilation with feature flag: unicode_expressions.".to_string()
+                ))
             }
 
             SQLExpr::Trim { expr, trim_where } => {
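
The key move in planner.rs is that the cfg attributes now sit on the match arms themselves
rather than on blocks inside a single arm. Rust allows attributes on match arms, so when
unicode_expressions is disabled the whole Substring arm, including its pattern bindings, is
compiled out, and the fallback arm uses a `..` pattern that binds nothing; either way there
is nothing left for the compiler to flag as unused. A minimal, self-contained sketch of the
same pattern (hypothetical enum, not datafusion's API; the feature would need to be declared
in the sketch crate's Cargo.toml):

    enum Op {
        Substring { input: String, from: i64 },
        Other,
    }

    fn describe(op: Op) -> Result<String, String> {
        match op {
            // Compiled only when the feature is on; the bindings exist only here.
            #[cfg(feature = "unicode_expressions")]
            Op::Substring { input, from } => Ok(format!("substr({}, {})", input, from)),

            // Compiled only when the feature is off; `..` creates no bindings,
            // so there is nothing to warn about.
            #[cfg(not(feature = "unicode_expressions"))]
            Op::Substring { .. } => {
                Err("substring requires the unicode_expressions feature".to_string())
            }

            Op::Other => Ok("other".to_string()),
        }
    }

    fn main() {
        println!("{:?}", describe(Op::Substring { input: "hello".to_string(), from: 2 }));
        println!("{:?}", describe(Op::Other));
    }
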
diff --git a/datafusion/tests/dataframe_functions.rs b/datafusion/tests/dataframe_functions.rs
index d5118b3..ae521a0 100644
--- a/datafusion/tests/dataframe_functions.rs
+++ b/datafusion/tests/dataframe_functions.rs
@@ -174,6 +174,7 @@ async fn test_fn_approx_percentile_cont() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_character_length() -> Result<()> {
     let expr = character_length(col("a"));
 
@@ -231,6 +232,7 @@ async fn test_fn_initcap() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_left() -> Result<()> {
     let expr = left(col("a"), lit(3));
 
@@ -271,6 +273,7 @@ async fn test_fn_lower() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_lpad() -> Result<()> {
     let expr = lpad(vec![col("a"), lit(10)]);
 
@@ -291,6 +294,7 @@ async fn test_fn_lpad() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_lpad_with_string() -> Result<()> {
     let expr = lpad(vec![col("a"), lit(10), lit("*")]);
 
@@ -348,6 +352,7 @@ async fn test_fn_ltrim_with_columns() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_md5() -> Result<()> {
     let expr = md5(col("a"));
 
@@ -371,6 +376,7 @@ async fn test_fn_md5() -> Result<()> {
 //       https://github.com/apache/arrow-datafusion/issues/1429
 //       g flag doesn't compile
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_regexp_match() -> Result<()> {
     let expr = regexp_match(vec![col("a"), lit("[a-z]")]);
     // The below will fail
@@ -393,6 +399,7 @@ async fn test_fn_regexp_match() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_regexp_replace() -> Result<()> {
     let expr = regexp_replace(vec![col("a"), lit("[a-z]"), lit("x"), lit("g")]);
 
@@ -453,6 +460,7 @@ async fn test_fn_repeat() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_reverse() -> Result<()> {
     let expr = reverse(col("a"));
 
@@ -473,6 +481,7 @@ async fn test_fn_reverse() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_right() -> Result<()> {
     let expr = right(col("a"), lit(3));
 
@@ -493,6 +502,7 @@ async fn test_fn_right() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_rpad() -> Result<()> {
     let expr = rpad(vec![col("a"), lit(11)]);
 
@@ -513,6 +523,7 @@ async fn test_fn_rpad() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_rpad_with_characters() -> Result<()> {
     let expr = rpad(vec![col("a"), lit(11), lit("x")]);
 
@@ -533,6 +544,7 @@ async fn test_fn_rpad_with_characters() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_sha224() -> Result<()> {
     let expr = sha224(col("a"));
 
@@ -592,6 +604,7 @@ async fn test_fn_starts_with() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_strpos() -> Result<()> {
     let expr = strpos(col("a"), lit("f"));
 
@@ -611,6 +624,7 @@ async fn test_fn_strpos() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_substr() -> Result<()> {
     let expr = substr(col("a"), lit(2));
 
@@ -649,6 +663,7 @@ async fn test_fn_to_hex() -> Result<()> {
 }
 
 #[tokio::test]
+#[cfg(feature = "unicode_expressions")]
 async fn test_fn_translate() -> Result<()> {
     let expr = translate(col("a"), lit("bc"), lit("xx"));
 
diff --git a/datafusion/tests/sql/mod.rs b/datafusion/tests/sql/mod.rs
index d9088f5..a548d61 100644
--- a/datafusion/tests/sql/mod.rs
+++ b/datafusion/tests/sql/mod.rs
@@ -99,7 +99,7 @@ pub mod window;
 mod explain;
 pub mod information_schema;
 mod partitioned_csv;
-#[cfg_attr(not(feature = "unicode_expressions"), ignore)]
+#[cfg(feature = "unicode_expressions")]
 pub mod unicode;
 
 fn assert_float_eq<T>(expected: &[Vec<T>], received: &[Vec<String>])
@@ -601,27 +601,6 @@ fn result_vec(results: &[RecordBatch]) -> Vec<Vec<String>> {
     result
 }
 
-async fn generic_query_length<T: 'static + Array + From<Vec<&'static str>>>(
-    datatype: DataType,
-) -> Result<()> {
-    let schema = Arc::new(Schema::new(vec![Field::new("c1", datatype, false)]));
-
-    let data = RecordBatch::try_new(
-        schema.clone(),
-        vec![Arc::new(T::from(vec!["", "a", "aa", "aaa"]))],
-    )?;
-
-    let table = MemTable::try_new(schema, vec![vec![data]])?;
-
-    let mut ctx = ExecutionContext::new();
-    ctx.register_table("test", Arc::new(table))?;
-    let sql = "SELECT length(c1) FROM test";
-    let actual = execute(&mut ctx, sql).await;
-    let expected = vec![vec!["0"], vec!["1"], vec!["2"], vec!["3"]];
-    assert_eq!(expected, actual);
-    Ok(())
-}
-
 async fn register_simple_aggregate_csv_with_decimal_by_sql(ctx: &mut ExecutionContext) {
     let df = ctx
         .sql(
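
In tests/sql/mod.rs the earlier attempt used cfg_attr to attach #[ignore] to the unicode
module when the feature was off. ignore is meant for test functions rather than modules,
and in any case it does not stop the module from being compiled, so its contents could
still warn; once the module is properly gated, a helper used only by its tests would also
become dead code in mod.rs, which is presumably why generic_query_length moves into
unicode.rs below. Gating the module declaration with #[cfg(feature = "unicode_expressions")]
removes it from the build entirely when the feature is off. A small self-contained sketch
with an inline module (again assuming the feature is declared in Cargo.toml):

    // With the feature off, this module and its tests simply do not exist,
    // so nothing in it can warn or fail.
    #[cfg(feature = "unicode_expressions")]
    mod unicode_tests {
        #[test]
        fn counts_unicode_chars() {
            assert_eq!("héllo".chars().count(), 5);
        }
    }
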
diff --git a/datafusion/tests/sql/unicode.rs b/datafusion/tests/sql/unicode.rs
index 28a0c83..55747f2 100644
--- a/datafusion/tests/sql/unicode.rs
+++ b/datafusion/tests/sql/unicode.rs
@@ -103,3 +103,24 @@ async fn test_unicode_expressions() -> Result<()> {
     test_expression!("translate('12345', '143', NULL)", "NULL");
     Ok(())
 }
+
+async fn generic_query_length<T: 'static + Array + From<Vec<&'static str>>>(
+    datatype: DataType,
+) -> Result<()> {
+    let schema = Arc::new(Schema::new(vec![Field::new("c1", datatype, false)]));
+
+    let data = RecordBatch::try_new(
+        schema.clone(),
+        vec![Arc::new(T::from(vec!["", "a", "aa", "aaa"]))],
+    )?;
+
+    let table = MemTable::try_new(schema, vec![vec![data]])?;
+
+    let mut ctx = ExecutionContext::new();
+    ctx.register_table("test", Arc::new(table))?;
+    let sql = "SELECT length(c1) FROM test";
+    let actual = execute(&mut ctx, sql).await;
+    let expected = vec![vec!["0"], vec!["1"], vec!["2"], vec!["3"]];
+    assert_eq!(expected, actual);
+    Ok(())
+}
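
Finally, generic_query_length moves out of tests/sql/mod.rs and into tests/sql/unicode.rs
unchanged. Keeping the helper next to the tests that call it means a --no-default-features
build no longer carries an unused helper around. The call sites are not part of this diff;
they would look roughly like the following (the test names and array types here are an
assumption, not taken from the commit):

    // Hypothetical callers inside tests/sql/unicode.rs:
    #[tokio::test]
    async fn query_length_utf8() -> Result<()> {
        generic_query_length::<StringArray>(DataType::Utf8).await
    }

    #[tokio::test]
    async fn query_length_large_utf8() -> Result<()> {
        generic_query_length::<LargeStringArray>(DataType::LargeUtf8).await
    }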