Posted to commits@spark.apache.org by do...@apache.org on 2019/07/14 06:38:21 UTC

[spark] branch master updated: [SPARK-28343][SQL][TEST] Enabling cartesian product and ansi mode for PostgreSQL testing

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 76079fa  [SPARK-28343][SQL][TEST] Enabling cartesian product and ansi mode for PostgreSQL testing
76079fa is described below

commit 76079fab5c643705046badd4ea1b595bd1404454
Author: Yuming Wang <yu...@ebay.com>
AuthorDate: Sat Jul 13 23:37:58 2019 -0700

    [SPARK-28343][SQL][TEST] Enabling cartesian product and ansi mode for PostgreSQL testing
    
    ## What changes were proposed in this pull request?
    
    This PR enables `spark.sql.crossJoin.enabled` and `spark.sql.parser.ansi.enabled` for the PostgreSQL tests.
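    
    For reference, a minimal sketch of the equivalent manual settings in a `spark-sql` session (the patch itself applies them at the suite level in `SQLQueryTestSuite` instead of per test file):
    ```sql
    -- Allow implicit cartesian products (joins written without a join condition).
    SET spark.sql.crossJoin.enabled=true;
    -- Follow ANSI SQL in the parser, e.g. treat WITH as a reserved word.
    SET spark.sql.parser.ansi.enabled=true;
    ```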
    
    ## How was this patch tested?
    
    Manual tests:
    Run `test.sql` in the [pgSQL](https://github.com/apache/spark/tree/master/sql/core/src/test/resources/sql-tests/inputs/pgSQL) directory and in the [inputs](https://github.com/apache/spark/tree/master/sql/core/src/test/resources/sql-tests/inputs) directory:
    ```sql
    cat <<EOF > test.sql
    create or replace temporary view t1 as
    select * from (values(1), (2)) as v (val);
    
    create or replace temporary view t2 as
    select * from (values(2), (1)) as v (val);
    
    select t1.*, t2.* from t1 join t2;
    EOF
    ```
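    
    With `spark.sql.crossJoin.enabled=true` the last query is a plain cartesian product of the two 2-row views, so it is expected to return 4 rows (a sketch; row order may vary):
    ```sql
    -- t1.val  t2.val
    -- 1       2
    -- 1       1
    -- 2       2
    -- 2       1
    ```
    Without that setting, the implicit cartesian product would be rejected with an `AnalysisException`.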
    
    Closes #25109 from wangyum/SPARK-28343.
    
    Authored-by: Yuming Wang <yu...@ebay.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../resources/sql-tests/inputs/pgSQL/boolean.sql   |  27 +-
 .../test/resources/sql-tests/inputs/pgSQL/case.sql |   4 -
 .../test/resources/sql-tests/inputs/pgSQL/int4.sql |   3 +-
 .../test/resources/sql-tests/inputs/pgSQL/int8.sql |   5 +-
 .../test/resources/sql-tests/inputs/pgSQL/with.sql |   5 -
 .../sql-tests/results/pgSQL/boolean.sql.out        |  26 +-
 .../resources/sql-tests/results/pgSQL/case.sql.out | 190 +++++++-------
 .../resources/sql-tests/results/pgSQL/int4.sql.out |   2 +-
 .../resources/sql-tests/results/pgSQL/int8.sql.out |   4 +-
 .../resources/sql-tests/results/pgSQL/with.sql.out | 278 ++++++++++-----------
 .../org/apache/spark/sql/SQLQueryTestSuite.scala   |   3 +
 11 files changed, 252 insertions(+), 295 deletions(-)

diff --git a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/boolean.sql b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/boolean.sql
index 4e621c6..fd0d299 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/boolean.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/boolean.sql
@@ -17,19 +17,20 @@ SELECT 1 AS one;
 
 SELECT true AS true;
 
-SELECT false AS false;
+-- [SPARK-28349] We do not need to follow PostgreSQL to support reserved words in column alias
+SELECT false AS `false`;
 
 SELECT boolean('t') AS true;
 
 -- [SPARK-27931] Trim the string when cast string type to boolean type
-SELECT boolean('   f           ') AS false;
+SELECT boolean('   f           ') AS `false`;
 
 SELECT boolean('true') AS true;
 
 -- [SPARK-27923] PostgreSQL does not accept 'test' but Spark SQL accepts it and sets it to NULL
 SELECT boolean('test') AS error;
 
-SELECT boolean('false') AS false;
+SELECT boolean('false') AS `false`;
 
 -- [SPARK-27923] PostgreSQL does not accept 'foo' but Spark SQL accepts it and sets it to NULL
 SELECT boolean('foo') AS error;
@@ -41,9 +42,9 @@ SELECT boolean('yes') AS true;
 -- [SPARK-27923] PostgreSQL does not accept 'yeah' but Spark SQL accepts it and sets it to NULL
 SELECT boolean('yeah') AS error;
 
-SELECT boolean('n') AS false;
+SELECT boolean('n') AS `false`;
 
-SELECT boolean('no') AS false;
+SELECT boolean('no') AS `false`;
 
 -- [SPARK-27923] PostgreSQL does not accept 'nay' but Spark SQL accepts it and sets it to NULL
 SELECT boolean('nay') AS error;
@@ -51,10 +52,10 @@ SELECT boolean('nay') AS error;
 -- [SPARK-27931] Accept 'on' and 'off' as input for boolean data type
 SELECT boolean('on') AS true;
 
-SELECT boolean('off') AS false;
+SELECT boolean('off') AS `false`;
 
 -- [SPARK-27931] Accept unique prefixes thereof
-SELECT boolean('of') AS false;
+SELECT boolean('of') AS `false`;
 
 -- [SPARK-27923] PostgreSQL does not accept 'o' but Spark SQL accepts it and sets it to NULL
 SELECT boolean('o') AS error;
@@ -70,7 +71,7 @@ SELECT boolean('1') AS true;
 -- [SPARK-27923] PostgreSQL does not accept '11' but Spark SQL accepts it and sets it to NULL
 SELECT boolean('11') AS error;
 
-SELECT boolean('0') AS false;
+SELECT boolean('0') AS `false`;
 
 -- [SPARK-27923] PostgreSQL does not accept '000' but Spark SQL accepts it and sets it to NULL
 SELECT boolean('000') AS error;
@@ -82,11 +83,11 @@ SELECT boolean('') AS error;
 
 SELECT boolean('t') or boolean('f') AS true;
 
-SELECT boolean('t') and boolean('f') AS false;
+SELECT boolean('t') and boolean('f') AS `false`;
 
 SELECT not boolean('f') AS true;
 
-SELECT boolean('t') = boolean('f') AS false;
+SELECT boolean('t') = boolean('f') AS `false`;
 
 SELECT boolean('t') <> boolean('f') AS true;
 
@@ -99,11 +100,11 @@ SELECT boolean('f') < boolean('t') AS true;
 SELECT boolean('f') <= boolean('t') AS true;
 
 -- explicit casts to/from text
-SELECT boolean(string('TrUe')) AS true, boolean(string('fAlse')) AS false;
+SELECT boolean(string('TrUe')) AS true, boolean(string('fAlse')) AS `false`;
 -- [SPARK-27931] Trim the string when cast to boolean type
 SELECT boolean(string('    true   ')) AS true,
-       boolean(string('     FALSE')) AS false;
-SELECT string(boolean(true)) AS true, string(boolean(false)) AS false;
+       boolean(string('     FALSE')) AS `false`;
+SELECT string(boolean(true)) AS true, string(boolean(false)) AS `false`;
 
 -- [SPARK-27923] PostgreSQL does not accept '  tru e ' but Spark SQL accepts it and sets it to NULL
 SELECT boolean(string('  tru e ')) AS invalid;    -- error
diff --git a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/case.sql b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/case.sql
index 7bb425d..6d9c44c 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/case.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/case.sql
@@ -6,9 +6,6 @@
 -- https://github.com/postgres/postgres/blob/REL_12_BETA2/src/test/regress/sql/case.sql
 -- Test the CASE statement
 --
--- This test suite contains two Cartesian products without using explicit CROSS JOIN syntax.
--- Thus, we set spark.sql.crossJoin.enabled to true.
-set spark.sql.crossJoin.enabled=true;
 CREATE TABLE CASE_TBL (
   i integer,
   f double
@@ -264,4 +261,3 @@ SELECT CASE
 
 DROP TABLE CASE_TBL;
 DROP TABLE CASE2_TBL;
-set spark.sql.crossJoin.enabled=false;
diff --git a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/int4.sql b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/int4.sql
index cbd5878..675636e 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/int4.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/int4.sql
@@ -125,7 +125,8 @@ SELECT int('2') * smallint('2') = smallint('16') / int('4') AS true;
 
 SELECT smallint('2') * int('2') = int('16') / smallint('4') AS true;
 
-SELECT int('1000') < int('999') AS false;
+-- [SPARK-28349] We do not need to follow PostgreSQL to support reserved words in column alias
+SELECT int('1000') < int('999') AS `false`;
 
 -- [SPARK-28027] Our ! and !! has different meanings
 -- SELECT 4! AS twenty_four;
diff --git a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/int8.sql b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/int8.sql
index 31eef6f..32ac877 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/int8.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/int8.sql
@@ -67,10 +67,11 @@ SELECT * FROM INT8_TBL WHERE smallint('123') <= q1;
 SELECT * FROM INT8_TBL WHERE smallint('123') >= q1;
 
 
-SELECT '' AS five, q1 AS plus, -q1 AS minus FROM INT8_TBL;
+-- [SPARK-28349] We do not need to follow PostgreSQL to support reserved words in column alias
+SELECT '' AS five, q1 AS plus, -q1 AS `minus` FROM INT8_TBL;
 
 SELECT '' AS five, q1, q2, q1 + q2 AS plus FROM INT8_TBL;
-SELECT '' AS five, q1, q2, q1 - q2 AS minus FROM INT8_TBL;
+SELECT '' AS five, q1, q2, q1 - q2 AS `minus` FROM INT8_TBL;
 SELECT '' AS three, q1, q2, q1 * q2 AS multiply FROM INT8_TBL;
 SELECT '' AS three, q1, q2, q1 * q2 AS multiply FROM INT8_TBL
  WHERE q1 < 1000 or (q2 > 0 and q2 < 1000);
diff --git a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/with.sql b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/with.sql
index ff6055f..83c6fd8 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/with.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/with.sql
@@ -5,9 +5,6 @@
 -- WITH
 -- https://github.com/postgres/postgres/blob/REL_12_BETA2/src/test/regress/sql/with.sql
 --
--- This test suite contains two Cartesian products without using explicit CROSS JOIN syntax.
--- Thus, we set spark.sql.crossJoin.enabled to true.
-set spark.sql.crossJoin.enabled=true;
 -- This test uses the generate_series(...) function which is rewritten to EXPLODE(SEQUENCE(...)) as
 -- it's feature tracking ticket SPARK-27767 is closed as Won't Do.
 
@@ -1182,10 +1179,8 @@ SELECT * FROM parent;
 --DROP RULE y_rule ON y;
 
 -- check that parser lookahead for WITH doesn't cause any odd behavior
-set spark.sql.parser.ansi.enabled=true;
 create table foo (with baz);  -- fail, WITH is a reserved word
 create table foo (with ordinality);  -- fail, WITH is a reserved word
-set spark.sql.parser.ansi.enabled=false;
 with ordinality as (select 1 as x) select * from ordinality;
 
 -- check sane response to attempt to modify CTE relation
diff --git a/sql/core/src/test/resources/sql-tests/results/pgSQL/boolean.sql.out b/sql/core/src/test/resources/sql-tests/results/pgSQL/boolean.sql.out
index 99c42ec..b7cf3a9 100644
--- a/sql/core/src/test/resources/sql-tests/results/pgSQL/boolean.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/pgSQL/boolean.sql.out
@@ -19,7 +19,7 @@ true
 
 
 -- !query 2
-SELECT false AS false
+SELECT false AS `false`
 -- !query 2 schema
 struct<false:boolean>
 -- !query 2 output
@@ -35,7 +35,7 @@ true
 
 
 -- !query 4
-SELECT boolean('   f           ') AS false
+SELECT boolean('   f           ') AS `false`
 -- !query 4 schema
 struct<false:boolean>
 -- !query 4 output
@@ -59,7 +59,7 @@ NULL
 
 
 -- !query 7
-SELECT boolean('false') AS false
+SELECT boolean('false') AS `false`
 -- !query 7 schema
 struct<false:boolean>
 -- !query 7 output
@@ -99,7 +99,7 @@ NULL
 
 
 -- !query 12
-SELECT boolean('n') AS false
+SELECT boolean('n') AS `false`
 -- !query 12 schema
 struct<false:boolean>
 -- !query 12 output
@@ -107,7 +107,7 @@ false
 
 
 -- !query 13
-SELECT boolean('no') AS false
+SELECT boolean('no') AS `false`
 -- !query 13 schema
 struct<false:boolean>
 -- !query 13 output
@@ -131,7 +131,7 @@ NULL
 
 
 -- !query 16
-SELECT boolean('off') AS false
+SELECT boolean('off') AS `false`
 -- !query 16 schema
 struct<false:boolean>
 -- !query 16 output
@@ -139,7 +139,7 @@ NULL
 
 
 -- !query 17
-SELECT boolean('of') AS false
+SELECT boolean('of') AS `false`
 -- !query 17 schema
 struct<false:boolean>
 -- !query 17 output
@@ -187,7 +187,7 @@ NULL
 
 
 -- !query 23
-SELECT boolean('0') AS false
+SELECT boolean('0') AS `false`
 -- !query 23 schema
 struct<false:boolean>
 -- !query 23 output
@@ -219,7 +219,7 @@ true
 
 
 -- !query 27
-SELECT boolean('t') and boolean('f') AS false
+SELECT boolean('t') and boolean('f') AS `false`
 -- !query 27 schema
 struct<false:boolean>
 -- !query 27 output
@@ -235,7 +235,7 @@ true
 
 
 -- !query 29
-SELECT boolean('t') = boolean('f') AS false
+SELECT boolean('t') = boolean('f') AS `false`
 -- !query 29 schema
 struct<false:boolean>
 -- !query 29 output
@@ -283,7 +283,7 @@ true
 
 
 -- !query 35
-SELECT boolean(string('TrUe')) AS true, boolean(string('fAlse')) AS false
+SELECT boolean(string('TrUe')) AS true, boolean(string('fAlse')) AS `false`
 -- !query 35 schema
 struct<true:boolean,false:boolean>
 -- !query 35 output
@@ -292,7 +292,7 @@ true	false
 
 -- !query 36
 SELECT boolean(string('    true   ')) AS true,
-       boolean(string('     FALSE')) AS false
+       boolean(string('     FALSE')) AS `false`
 -- !query 36 schema
 struct<true:boolean,false:boolean>
 -- !query 36 output
@@ -300,7 +300,7 @@ NULL	NULL
 
 
 -- !query 37
-SELECT string(boolean(true)) AS true, string(boolean(false)) AS false
+SELECT string(boolean(true)) AS true, string(boolean(false)) AS `false`
 -- !query 37 schema
 struct<true:string,false:string>
 -- !query 37 output
diff --git a/sql/core/src/test/resources/sql-tests/results/pgSQL/case.sql.out b/sql/core/src/test/resources/sql-tests/results/pgSQL/case.sql.out
index dbd775e..9b20b31 100644
--- a/sql/core/src/test/resources/sql-tests/results/pgSQL/case.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/pgSQL/case.sql.out
@@ -1,19 +1,22 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 37
+-- Number of queries: 35
 
 
 -- !query 0
-set spark.sql.crossJoin.enabled=true
+CREATE TABLE CASE_TBL (
+  i integer,
+  f double
+) USING parquet
 -- !query 0 schema
-struct<key:string,value:string>
+struct<>
 -- !query 0 output
-spark.sql.crossJoin.enabled	true
+
 
 
 -- !query 1
-CREATE TABLE CASE_TBL (
+CREATE TABLE CASE2_TBL (
   i integer,
-  f double
+  j integer
 ) USING parquet
 -- !query 1 schema
 struct<>
@@ -22,10 +25,7 @@ struct<>
 
 
 -- !query 2
-CREATE TABLE CASE2_TBL (
-  i integer,
-  j integer
-) USING parquet
+INSERT INTO CASE_TBL VALUES (1, 10.1)
 -- !query 2 schema
 struct<>
 -- !query 2 output
@@ -33,7 +33,7 @@ struct<>
 
 
 -- !query 3
-INSERT INTO CASE_TBL VALUES (1, 10.1)
+INSERT INTO CASE_TBL VALUES (2, 20.2)
 -- !query 3 schema
 struct<>
 -- !query 3 output
@@ -41,7 +41,7 @@ struct<>
 
 
 -- !query 4
-INSERT INTO CASE_TBL VALUES (2, 20.2)
+INSERT INTO CASE_TBL VALUES (3, -30.3)
 -- !query 4 schema
 struct<>
 -- !query 4 output
@@ -49,7 +49,7 @@ struct<>
 
 
 -- !query 5
-INSERT INTO CASE_TBL VALUES (3, -30.3)
+INSERT INTO CASE_TBL VALUES (4, NULL)
 -- !query 5 schema
 struct<>
 -- !query 5 output
@@ -57,7 +57,7 @@ struct<>
 
 
 -- !query 6
-INSERT INTO CASE_TBL VALUES (4, NULL)
+INSERT INTO CASE2_TBL VALUES (1, -1)
 -- !query 6 schema
 struct<>
 -- !query 6 output
@@ -65,7 +65,7 @@ struct<>
 
 
 -- !query 7
-INSERT INTO CASE2_TBL VALUES (1, -1)
+INSERT INTO CASE2_TBL VALUES (2, -2)
 -- !query 7 schema
 struct<>
 -- !query 7 output
@@ -73,7 +73,7 @@ struct<>
 
 
 -- !query 8
-INSERT INTO CASE2_TBL VALUES (2, -2)
+INSERT INTO CASE2_TBL VALUES (3, -3)
 -- !query 8 schema
 struct<>
 -- !query 8 output
@@ -81,7 +81,7 @@ struct<>
 
 
 -- !query 9
-INSERT INTO CASE2_TBL VALUES (3, -3)
+INSERT INTO CASE2_TBL VALUES (2, -4)
 -- !query 9 schema
 struct<>
 -- !query 9 output
@@ -89,7 +89,7 @@ struct<>
 
 
 -- !query 10
-INSERT INTO CASE2_TBL VALUES (2, -4)
+INSERT INTO CASE2_TBL VALUES (1, NULL)
 -- !query 10 schema
 struct<>
 -- !query 10 output
@@ -97,7 +97,7 @@ struct<>
 
 
 -- !query 11
-INSERT INTO CASE2_TBL VALUES (1, NULL)
+INSERT INTO CASE2_TBL VALUES (NULL, -6)
 -- !query 11 schema
 struct<>
 -- !query 11 output
@@ -105,148 +105,140 @@ struct<>
 
 
 -- !query 12
-INSERT INTO CASE2_TBL VALUES (NULL, -6)
--- !query 12 schema
-struct<>
--- !query 12 output
-
-
-
--- !query 13
 SELECT '3' AS `One`,
   CASE
     WHEN 1 < 2 THEN 3
   END AS `Simple WHEN`
--- !query 13 schema
+-- !query 12 schema
 struct<One:string,Simple WHEN:int>
--- !query 13 output
+-- !query 12 output
 3	3
 
 
--- !query 14
+-- !query 13
 SELECT '<NULL>' AS `One`,
   CASE
     WHEN 1 > 2 THEN 3
   END AS `Simple default`
--- !query 14 schema
+-- !query 13 schema
 struct<One:string,Simple default:int>
--- !query 14 output
+-- !query 13 output
 <NULL>	NULL
 
 
--- !query 15
+-- !query 14
 SELECT '3' AS `One`,
   CASE
     WHEN 1 < 2 THEN 3
     ELSE 4
   END AS `Simple ELSE`
--- !query 15 schema
+-- !query 14 schema
 struct<One:string,Simple ELSE:int>
--- !query 15 output
+-- !query 14 output
 3	3
 
 
--- !query 16
+-- !query 15
 SELECT '4' AS `One`,
   CASE
     WHEN 1 > 2 THEN 3
     ELSE 4
   END AS `ELSE default`
--- !query 16 schema
+-- !query 15 schema
 struct<One:string,ELSE default:int>
--- !query 16 output
+-- !query 15 output
 4	4
 
 
--- !query 17
+-- !query 16
 SELECT '6' AS `One`,
   CASE
     WHEN 1 > 2 THEN 3
     WHEN 4 < 5 THEN 6
     ELSE 7
   END AS `Two WHEN with default`
--- !query 17 schema
+-- !query 16 schema
 struct<One:string,Two WHEN with default:int>
--- !query 17 output
+-- !query 16 output
 6	6
 
 
--- !query 18
+-- !query 17
 SELECT '7' AS `None`,
   CASE WHEN rand() < 0 THEN 1
   END AS `NULL on no matches`
--- !query 18 schema
+-- !query 17 schema
 struct<None:string,NULL on no matches:int>
--- !query 18 output
+-- !query 17 output
 7	NULL
 
 
--- !query 19
+-- !query 18
 SELECT CASE WHEN 1=0 THEN 1/0 WHEN 1=1 THEN 1 ELSE 2/0 END
--- !query 19 schema
+-- !query 18 schema
 struct<CASE WHEN (1 = 0) THEN (CAST(1 AS DOUBLE) / CAST(0 AS DOUBLE)) WHEN (1 = 1) THEN CAST(1 AS DOUBLE) ELSE (CAST(2 AS DOUBLE) / CAST(0 AS DOUBLE)) END:double>
--- !query 19 output
+-- !query 18 output
 1.0
 
 
--- !query 20
+-- !query 19
 SELECT CASE 1 WHEN 0 THEN 1/0 WHEN 1 THEN 1 ELSE 2/0 END
--- !query 20 schema
+-- !query 19 schema
 struct<CASE WHEN (1 = 0) THEN (CAST(1 AS DOUBLE) / CAST(0 AS DOUBLE)) WHEN (1 = 1) THEN CAST(1 AS DOUBLE) ELSE (CAST(2 AS DOUBLE) / CAST(0 AS DOUBLE)) END:double>
--- !query 20 output
+-- !query 19 output
 1.0
 
 
--- !query 21
+-- !query 20
 SELECT CASE WHEN i > 100 THEN 1/0 ELSE 0 END FROM case_tbl
--- !query 21 schema
+-- !query 20 schema
 struct<CASE WHEN (i > 100) THEN (CAST(1 AS DOUBLE) / CAST(0 AS DOUBLE)) ELSE CAST(0 AS DOUBLE) END:double>
--- !query 21 output
+-- !query 20 output
 0.0
 0.0
 0.0
 0.0
 
 
--- !query 22
+-- !query 21
 SELECT CASE 'a' WHEN 'a' THEN 1 ELSE 2 END
--- !query 22 schema
+-- !query 21 schema
 struct<CASE WHEN (a = a) THEN 1 ELSE 2 END:int>
--- !query 22 output
+-- !query 21 output
 1
 
 
--- !query 23
+-- !query 22
 SELECT '' AS `Five`,
   CASE
     WHEN i >= 3 THEN i
   END AS `>= 3 or Null`
   FROM CASE_TBL
--- !query 23 schema
+-- !query 22 schema
 struct<Five:string,>= 3 or Null:int>
--- !query 23 output
+-- !query 22 output
 3
 	4
 	NULL
 	NULL
 
 
--- !query 24
+-- !query 23
 SELECT '' AS `Five`,
   CASE WHEN i >= 3 THEN (i + i)
        ELSE i
   END AS `Simplest Math`
   FROM CASE_TBL
--- !query 24 schema
+-- !query 23 schema
 struct<Five:string,Simplest Math:int>
--- !query 24 output
+-- !query 23 output
 1
 	2
 	6
 	8
 
 
--- !query 25
+-- !query 24
 SELECT '' AS `Five`, i AS `Value`,
   CASE WHEN (i < 0) THEN 'small'
        WHEN (i = 0) THEN 'zero'
@@ -255,16 +247,16 @@ SELECT '' AS `Five`, i AS `Value`,
        ELSE 'big'
   END AS `Category`
   FROM CASE_TBL
--- !query 25 schema
+-- !query 24 schema
 struct<Five:string,Value:int,Category:string>
--- !query 25 output
+-- !query 24 output
 1	one
 	2	two
 	3	big
 	4	big
 
 
--- !query 26
+-- !query 25
 SELECT '' AS `Five`,
   CASE WHEN ((i < 0) or (i < 0)) THEN 'small'
        WHEN ((i = 0) or (i = 0)) THEN 'zero'
@@ -273,37 +265,37 @@ SELECT '' AS `Five`,
        ELSE 'big'
   END AS `Category`
   FROM CASE_TBL
--- !query 26 schema
+-- !query 25 schema
 struct<Five:string,Category:string>
--- !query 26 output
+-- !query 25 output
 big
 	big
 	one
 	two
 
 
--- !query 27
+-- !query 26
 SELECT * FROM CASE_TBL WHERE COALESCE(f,i) = 4
--- !query 27 schema
+-- !query 26 schema
 struct<i:int,f:double>
--- !query 27 output
+-- !query 26 output
 4	NULL
 
 
--- !query 28
+-- !query 27
 SELECT * FROM CASE_TBL WHERE NULLIF(f,i) = 2
--- !query 28 schema
+-- !query 27 schema
 struct<i:int,f:double>
--- !query 28 output
+-- !query 27 output
 
 
 
--- !query 29
+-- !query 28
 SELECT COALESCE(a.f, b.i, b.j)
   FROM CASE_TBL a, CASE2_TBL b
--- !query 29 schema
+-- !query 28 schema
 struct<coalesce(f, CAST(i AS DOUBLE), CAST(j AS DOUBLE)):double>
--- !query 29 output
+-- !query 28 output
 -30.3
 -30.3
 -30.3
@@ -330,24 +322,24 @@ struct<coalesce(f, CAST(i AS DOUBLE), CAST(j AS DOUBLE)):double>
 3.0
 
 
--- !query 30
+-- !query 29
 SELECT *
   FROM CASE_TBL a, CASE2_TBL b
   WHERE COALESCE(a.f, b.i, b.j) = 2
--- !query 30 schema
+-- !query 29 schema
 struct<i:int,f:double,i:int,j:int>
--- !query 30 output
+-- !query 29 output
 4	NULL	2	-2
 4	NULL	2	-4
 
 
--- !query 31
+-- !query 30
 SELECT '' AS Five, NULLIF(a.i,b.i) AS `NULLIF(a.i,b.i)`,
   NULLIF(b.i, 4) AS `NULLIF(b.i,4)`
   FROM CASE_TBL a, CASE2_TBL b
--- !query 31 schema
+-- !query 30 schema
 struct<Five:string,NULLIF(a.i,b.i):int,NULLIF(b.i,4):int>
--- !query 31 output
+-- !query 30 output
 1	2
 	1	2
 	1	3
@@ -374,18 +366,18 @@ struct<Five:string,NULLIF(a.i,b.i):int,NULLIF(b.i,4):int>
 	NULL	3
 
 
--- !query 32
+-- !query 31
 SELECT '' AS `Two`, *
   FROM CASE_TBL a, CASE2_TBL b
   WHERE COALESCE(f,b.i) = 2
--- !query 32 schema
+-- !query 31 schema
 struct<Two:string,i:int,f:double,i:int,j:int>
--- !query 32 output
+-- !query 31 output
 4	NULL	2	-2
 	4	NULL	2	-4
 
 
--- !query 33
+-- !query 32
 SELECT CASE
   (CASE vol('bar')
     WHEN 'foo' THEN 'it was foo!'
@@ -395,31 +387,23 @@ SELECT CASE
   WHEN 'it was foo!' THEN 'foo recognized'
   WHEN 'it was bar!' THEN 'bar recognized'
   ELSE 'unrecognized' END
--- !query 33 schema
+-- !query 32 schema
 struct<CASE WHEN (CASE WHEN (vol(bar) = foo) THEN it was foo! WHEN (vol(bar) = vol(null)) THEN null input WHEN (vol(bar) = bar) THEN it was bar! END = it was foo!) THEN foo recognized WHEN (CASE WHEN (vol(bar) = foo) THEN it was foo! WHEN (vol(bar) = vol(null)) THEN null input WHEN (vol(bar) = bar) THEN it was bar! END = it was bar!) THEN bar recognized ELSE unrecognized END:string>
--- !query 33 output
+-- !query 32 output
 bar recognized
 
 
--- !query 34
+-- !query 33
 DROP TABLE CASE_TBL
--- !query 34 schema
+-- !query 33 schema
 struct<>
--- !query 34 output
+-- !query 33 output
 
 
 
--- !query 35
+-- !query 34
 DROP TABLE CASE2_TBL
--- !query 35 schema
+-- !query 34 schema
 struct<>
--- !query 35 output
-
-
+-- !query 34 output
 
--- !query 36
-set spark.sql.crossJoin.enabled=false
--- !query 36 schema
-struct<key:string,value:string>
--- !query 36 output
-spark.sql.crossJoin.enabled	false
diff --git a/sql/core/src/test/resources/sql-tests/results/pgSQL/int4.sql.out b/sql/core/src/test/resources/sql-tests/results/pgSQL/int4.sql.out
index 9c17e9a..08fb4d4 100644
--- a/sql/core/src/test/resources/sql-tests/results/pgSQL/int4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/pgSQL/int4.sql.out
@@ -417,7 +417,7 @@ true
 
 
 -- !query 42
-SELECT int('1000') < int('999') AS false
+SELECT int('1000') < int('999') AS `false`
 -- !query 42 schema
 struct<false:boolean>
 -- !query 42 output
diff --git a/sql/core/src/test/resources/sql-tests/results/pgSQL/int8.sql.out b/sql/core/src/test/resources/sql-tests/results/pgSQL/int8.sql.out
index 13bc748..880b257 100644
--- a/sql/core/src/test/resources/sql-tests/results/pgSQL/int8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/pgSQL/int8.sql.out
@@ -351,7 +351,7 @@ struct<q1:bigint,q2:bigint>
 
 
 -- !query 37
-SELECT '' AS five, q1 AS plus, -q1 AS minus FROM INT8_TBL
+SELECT '' AS five, q1 AS plus, -q1 AS `minus` FROM INT8_TBL
 -- !query 37 schema
 struct<five:string,plus:bigint,minus:bigint>
 -- !query 37 output
@@ -375,7 +375,7 @@ struct<five:string,q1:bigint,q2:bigint,plus:bigint>
 
 
 -- !query 39
-SELECT '' AS five, q1, q2, q1 - q2 AS minus FROM INT8_TBL
+SELECT '' AS five, q1, q2, q1 - q2 AS `minus` FROM INT8_TBL
 -- !query 39 schema
 struct<five:string,q1:bigint,q2:bigint,minus:bigint>
 -- !query 39 output
diff --git a/sql/core/src/test/resources/sql-tests/results/pgSQL/with.sql.out b/sql/core/src/test/resources/sql-tests/results/pgSQL/with.sql.out
index c68ee0e..366b65f 100644
--- a/sql/core/src/test/resources/sql-tests/results/pgSQL/with.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/pgSQL/with.sql.out
@@ -1,43 +1,43 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 54
+-- Number of queries: 51
 
 
 -- !query 0
-set spark.sql.crossJoin.enabled=true
--- !query 0 schema
-struct<key:string,value:string>
--- !query 0 output
-spark.sql.crossJoin.enabled	true
-
-
--- !query 1
 WITH q1(x,y) AS (SELECT 1,2)
 SELECT * FROM q1, q1 AS q2
--- !query 1 schema
+-- !query 0 schema
 struct<x:int,y:int,x:int,y:int>
--- !query 1 output
+-- !query 0 output
 1	2	1	2
 
 
--- !query 2
+-- !query 1
 SELECT count(*) FROM (
   WITH q1(x) AS (SELECT rand() FROM (SELECT EXPLODE(SEQUENCE(1, 5))))
     SELECT * FROM q1
   UNION
     SELECT * FROM q1
 ) ss
--- !query 2 schema
+-- !query 1 schema
 struct<count(1):bigint>
--- !query 2 output
+-- !query 1 output
 10
 
 
--- !query 3
+-- !query 2
 CREATE TABLE department (
 	id INTEGER,  -- department ID
 	parent_department INTEGER, -- upper department ID
 	name string -- department name
 ) USING parquet
+-- !query 2 schema
+struct<>
+-- !query 2 output
+
+
+
+-- !query 3
+INSERT INTO department VALUES (0, NULL, 'ROOT')
 -- !query 3 schema
 struct<>
 -- !query 3 output
@@ -45,7 +45,7 @@ struct<>
 
 
 -- !query 4
-INSERT INTO department VALUES (0, NULL, 'ROOT')
+INSERT INTO department VALUES (1, 0, 'A')
 -- !query 4 schema
 struct<>
 -- !query 4 output
@@ -53,7 +53,7 @@ struct<>
 
 
 -- !query 5
-INSERT INTO department VALUES (1, 0, 'A')
+INSERT INTO department VALUES (2, 1, 'B')
 -- !query 5 schema
 struct<>
 -- !query 5 output
@@ -61,7 +61,7 @@ struct<>
 
 
 -- !query 6
-INSERT INTO department VALUES (2, 1, 'B')
+INSERT INTO department VALUES (3, 2, 'C')
 -- !query 6 schema
 struct<>
 -- !query 6 output
@@ -69,7 +69,7 @@ struct<>
 
 
 -- !query 7
-INSERT INTO department VALUES (3, 2, 'C')
+INSERT INTO department VALUES (4, 2, 'D')
 -- !query 7 schema
 struct<>
 -- !query 7 output
@@ -77,7 +77,7 @@ struct<>
 
 
 -- !query 8
-INSERT INTO department VALUES (4, 2, 'D')
+INSERT INTO department VALUES (5, 0, 'E')
 -- !query 8 schema
 struct<>
 -- !query 8 output
@@ -85,7 +85,7 @@ struct<>
 
 
 -- !query 9
-INSERT INTO department VALUES (5, 0, 'E')
+INSERT INTO department VALUES (6, 4, 'F')
 -- !query 9 schema
 struct<>
 -- !query 9 output
@@ -93,7 +93,7 @@ struct<>
 
 
 -- !query 10
-INSERT INTO department VALUES (6, 4, 'F')
+INSERT INTO department VALUES (7, 5, 'G')
 -- !query 10 schema
 struct<>
 -- !query 10 output
@@ -101,7 +101,10 @@ struct<>
 
 
 -- !query 11
-INSERT INTO department VALUES (7, 5, 'G')
+CREATE TABLE tree(
+    id INTEGER,
+    parent_id INTEGER
+) USING parquet
 -- !query 11 schema
 struct<>
 -- !query 11 output
@@ -109,10 +112,9 @@ struct<>
 
 
 -- !query 12
-CREATE TABLE tree(
-    id INTEGER,
-    parent_id INTEGER
-) USING parquet
+INSERT INTO tree
+VALUES (1, NULL), (2, 1), (3,1), (4,2), (5,2), (6,2), (7,3), (8,3),
+       (9,4), (10,4), (11,7), (12,7), (13,7), (14, 9), (15,11), (16,11)
 -- !query 12 schema
 struct<>
 -- !query 12 output
@@ -120,9 +122,7 @@ struct<>
 
 
 -- !query 13
-INSERT INTO tree
-VALUES (1, NULL), (2, 1), (3,1), (4,2), (5,2), (6,2), (7,3), (8,3),
-       (9,4), (10,4), (11,7), (12,7), (13,7), (14, 9), (15,11), (16,11)
+create table graph( f int, t int, label string ) USING parquet
 -- !query 13 schema
 struct<>
 -- !query 13 output
@@ -130,14 +130,6 @@ struct<>
 
 
 -- !query 14
-create table graph( f int, t int, label string ) USING parquet
--- !query 14 schema
-struct<>
--- !query 14 output
-
-
-
--- !query 15
 insert into graph values
 	(1, 2, 'arc 1 -> 2'),
 	(1, 3, 'arc 1 -> 3'),
@@ -145,6 +137,14 @@ insert into graph values
 	(1, 4, 'arc 1 -> 4'),
 	(4, 5, 'arc 4 -> 5'),
 	(5, 1, 'arc 5 -> 1')
+-- !query 14 schema
+struct<>
+-- !query 14 output
+
+
+
+-- !query 15
+CREATE TABLE y (a INTEGER) USING parquet
 -- !query 15 schema
 struct<>
 -- !query 15 output
@@ -152,7 +152,7 @@ struct<>
 
 
 -- !query 16
-CREATE TABLE y (a INTEGER) USING parquet
+INSERT INTO y SELECT EXPLODE(SEQUENCE(1, 10))
 -- !query 16 schema
 struct<>
 -- !query 16 output
@@ -160,7 +160,7 @@ struct<>
 
 
 -- !query 17
-INSERT INTO y SELECT EXPLODE(SEQUENCE(1, 10))
+DROP TABLE y
 -- !query 17 schema
 struct<>
 -- !query 17 output
@@ -168,7 +168,7 @@ struct<>
 
 
 -- !query 18
-DROP TABLE y
+CREATE TABLE y (a INTEGER) USING parquet
 -- !query 18 schema
 struct<>
 -- !query 18 output
@@ -176,7 +176,7 @@ struct<>
 
 
 -- !query 19
-CREATE TABLE y (a INTEGER) USING parquet
+INSERT INTO y SELECT EXPLODE(SEQUENCE(1, 10))
 -- !query 19 schema
 struct<>
 -- !query 19 output
@@ -184,22 +184,14 @@ struct<>
 
 
 -- !query 20
-INSERT INTO y SELECT EXPLODE(SEQUENCE(1, 10))
--- !query 20 schema
-struct<>
--- !query 20 output
-
-
-
--- !query 21
 with cte(foo) as ( select 42 ) select * from ((select foo from cte)) q
--- !query 21 schema
+-- !query 20 schema
 struct<foo:int>
--- !query 21 output
+-- !query 20 output
 42
 
 
--- !query 22
+-- !query 21
 WITH outermost(x) AS (
   SELECT 1
   UNION (WITH innermost as (SELECT 2)
@@ -207,15 +199,15 @@ WITH outermost(x) AS (
          UNION SELECT 3)
 )
 SELECT * FROM outermost ORDER BY 1
--- !query 22 schema
+-- !query 21 schema
 struct<x:int>
--- !query 22 output
+-- !query 21 output
 1
 2
 3
 
 
--- !query 23
+-- !query 22
 WITH outermost(x) AS (
   SELECT 1
   UNION (WITH innermost as (SELECT 2)
@@ -223,26 +215,26 @@ WITH outermost(x) AS (
          UNION SELECT * FROM innermost)
 )
 SELECT * FROM outermost ORDER BY 1
--- !query 23 schema
+-- !query 22 schema
 struct<>
--- !query 23 output
+-- !query 22 output
 org.apache.spark.sql.AnalysisException
 Table or view not found: outermost; line 4 pos 23
 
 
--- !query 24
+-- !query 23
 CREATE TABLE withz USING parquet AS SELECT i AS k, CAST(i || ' v' AS string) v FROM (SELECT EXPLODE(SEQUENCE(1, 16, 3)) i)
--- !query 24 schema
+-- !query 23 schema
 struct<>
--- !query 24 output
+-- !query 23 output
 
 
 
--- !query 25
+-- !query 24
 SELECT * FROM withz ORDER BY k
--- !query 25 schema
+-- !query 24 schema
 struct<k:int,v:string>
--- !query 25 output
+-- !query 24 output
 1	1 v
 4	4 v
 7	7 v
@@ -251,8 +243,16 @@ struct<k:int,v:string>
 16	16 v
 
 
--- !query 26
+-- !query 25
 DROP TABLE withz
+-- !query 25 schema
+struct<>
+-- !query 25 output
+
+
+
+-- !query 26
+TRUNCATE TABLE y
 -- !query 26 schema
 struct<>
 -- !query 26 output
@@ -260,7 +260,7 @@ struct<>
 
 
 -- !query 27
-TRUNCATE TABLE y
+INSERT INTO y SELECT EXPLODE(SEQUENCE(1, 3))
 -- !query 27 schema
 struct<>
 -- !query 27 output
@@ -268,7 +268,7 @@ struct<>
 
 
 -- !query 28
-INSERT INTO y SELECT EXPLODE(SEQUENCE(1, 3))
+CREATE TABLE yy (a INTEGER) USING parquet
 -- !query 28 schema
 struct<>
 -- !query 28 output
@@ -276,51 +276,51 @@ struct<>
 
 
 -- !query 29
-CREATE TABLE yy (a INTEGER) USING parquet
+SELECT * FROM y
 -- !query 29 schema
-struct<>
+struct<a:int>
 -- !query 29 output
-
+1
+2
+3
 
 
 -- !query 30
-SELECT * FROM y
+SELECT * FROM yy
 -- !query 30 schema
 struct<a:int>
 -- !query 30 output
-1
-2
-3
+
 
 
 -- !query 31
-SELECT * FROM yy
+SELECT * FROM y
 -- !query 31 schema
 struct<a:int>
 -- !query 31 output
-
+1
+2
+3
 
 
 -- !query 32
-SELECT * FROM y
+SELECT * FROM yy
 -- !query 32 schema
 struct<a:int>
 -- !query 32 output
-1
-2
-3
+
 
 
 -- !query 33
-SELECT * FROM yy
+CREATE TABLE parent ( id int, val string ) USING parquet
 -- !query 33 schema
-struct<a:int>
+struct<>
 -- !query 33 output
 
 
 
 -- !query 34
-CREATE TABLE parent ( id int, val string ) USING parquet
+INSERT INTO parent VALUES ( 1, 'p1' )
 -- !query 34 schema
 struct<>
 -- !query 34 output
@@ -328,11 +328,11 @@ struct<>
 
 
 -- !query 35
-INSERT INTO parent VALUES ( 1, 'p1' )
+SELECT * FROM parent
 -- !query 35 schema
-struct<>
+struct<id:int,val:string>
 -- !query 35 output
-
+1	p1
 
 
 -- !query 36
@@ -344,26 +344,10 @@ struct<id:int,val:string>
 
 
 -- !query 37
-SELECT * FROM parent
--- !query 37 schema
-struct<id:int,val:string>
--- !query 37 output
-1	p1
-
-
--- !query 38
-set spark.sql.parser.ansi.enabled=true
--- !query 38 schema
-struct<key:string,value:string>
--- !query 38 output
-spark.sql.parser.ansi.enabled	true
-
-
--- !query 39
 create table foo (with baz)
--- !query 39 schema
+-- !query 37 schema
 struct<>
--- !query 39 output
+-- !query 37 output
 org.apache.spark.sql.catalyst.parser.ParseException
 
 no viable alternative at input 'with'(line 1, pos 18)
@@ -373,12 +357,12 @@ create table foo (with baz)
 ------------------^^^
 
 
--- !query 40
+-- !query 38
 -- fail, WITH is a reserved word
 create table foo (with ordinality)
--- !query 40 schema
+-- !query 38 schema
 struct<>
--- !query 40 output
+-- !query 38 output
 org.apache.spark.sql.catalyst.parser.ParseException
 
 no viable alternative at input 'with'(line 2, pos 18)
@@ -389,107 +373,99 @@ create table foo (with ordinality)
 ------------------^^^
 
 
--- !query 41
+-- !query 39
 -- fail, WITH is a reserved word
-set spark.sql.parser.ansi.enabled=false
--- !query 41 schema
-struct<key:string,value:string>
--- !query 41 output
-spark.sql.parser.ansi.enabled	false
-
-
--- !query 42
 with ordinality as (select 1 as x) select * from ordinality
--- !query 42 schema
+-- !query 39 schema
 struct<x:int>
--- !query 42 output
+-- !query 39 output
 1
 
 
--- !query 43
+-- !query 40
 WITH test AS (SELECT 42) INSERT INTO test VALUES (1)
--- !query 43 schema
+-- !query 40 schema
 struct<>
--- !query 43 output
+-- !query 40 output
 org.apache.spark.sql.AnalysisException
 Table not found: test;
 
 
--- !query 44
+-- !query 41
 create table test (i int) USING parquet
--- !query 44 schema
+-- !query 41 schema
 struct<>
--- !query 44 output
+-- !query 41 output
 
 
 
--- !query 45
+-- !query 42
 with test as (select 42) insert into test select * from test
--- !query 45 schema
+-- !query 42 schema
 struct<>
--- !query 45 output
+-- !query 42 output
 
 
 
--- !query 46
+-- !query 43
 select * from test
--- !query 46 schema
+-- !query 43 schema
 struct<i:int>
--- !query 46 output
+-- !query 43 output
 42
 
 
--- !query 47
+-- !query 44
 drop table test
--- !query 47 schema
+-- !query 44 schema
 struct<>
--- !query 47 output
+-- !query 44 output
 
 
 
--- !query 48
+-- !query 45
 DROP TABLE department
--- !query 48 schema
+-- !query 45 schema
 struct<>
--- !query 48 output
+-- !query 45 output
 
 
 
--- !query 49
+-- !query 46
 DROP TABLE tree
--- !query 49 schema
+-- !query 46 schema
 struct<>
--- !query 49 output
+-- !query 46 output
 
 
 
--- !query 50
+-- !query 47
 DROP TABLE graph
--- !query 50 schema
+-- !query 47 schema
 struct<>
--- !query 50 output
+-- !query 47 output
 
 
 
--- !query 51
+-- !query 48
 DROP TABLE y
--- !query 51 schema
+-- !query 48 schema
 struct<>
--- !query 51 output
+-- !query 48 output
 
 
 
--- !query 52
+-- !query 49
 DROP TABLE yy
--- !query 52 schema
+-- !query 49 schema
 struct<>
--- !query 52 output
+-- !query 49 output
 
 
 
--- !query 53
+-- !query 50
 DROP TABLE parent
--- !query 53 schema
+-- !query 50 schema
 struct<>
--- !query 53 output
+-- !query 50 output
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
index c8a187b..6e40fcf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
@@ -265,6 +265,9 @@ class SQLQueryTestSuite extends QueryTest with SharedSQLContext {
         localSparkSession.udf.register("boolne", (b1: Boolean, b2: Boolean) => b1 != b2)
         // vol used by boolean.sql and case.sql.
         localSparkSession.udf.register("vol", (s: String) => s)
+        // PostgreSQL enabled cartesian product by default.
+        localSparkSession.conf.set(SQLConf.CROSS_JOINS_ENABLED.key, true)
+        localSparkSession.conf.set(SQLConf.ANSI_SQL_PARSER.key, true)
       case _ => // Don't add UDFs in Regular tests.
     }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org