Posted to commits@calcite.apache.org by hy...@apache.org on 2019/05/27 03:03:17 UTC

[calcite] branch master updated: [CALCITE-3077] Rewrite CUBE&ROLLUP queries in SparkSqlDialect (DonnyZone)

This is an automated email from the ASF dual-hosted git repository.

hyuan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/calcite.git


The following commit(s) were added to refs/heads/master by this push:
     new 4e1b68e  [CALCITE-3077] Rewrite CUBE&ROLLUP queries in SparkSqlDialect (DonnyZone)
4e1b68e is described below

commit 4e1b68e3c0b2a90832cc790c6166cebb14ae2970
Author: wellfengzhu <we...@gmail.com>
AuthorDate: Fri May 24 13:52:41 2019 +0800

    [CALCITE-3077] Rewrite CUBE&ROLLUP queries in SparkSqlDialect (DonnyZone)
---
 .../java/org/apache/calcite/sql/SqlDialect.java    |  7 +++++
 .../calcite/sql/dialect/SparkSqlDialect.java       |  7 +++++
 .../apache/calcite/sql/fun/SqlRollupOperator.java  | 29 ++++++++++++++------
 .../calcite/rel/rel2sql/RelToSqlConverterTest.java | 32 ++++++++++++++++++++++
 4 files changed, 67 insertions(+), 8 deletions(-)

diff --git a/core/src/main/java/org/apache/calcite/sql/SqlDialect.java b/core/src/main/java/org/apache/calcite/sql/SqlDialect.java
index 5a1b14f..723eb36 100644
--- a/core/src/main/java/org/apache/calcite/sql/SqlDialect.java
+++ b/core/src/main/java/org/apache/calcite/sql/SqlDialect.java
@@ -895,6 +895,13 @@ public class SqlDialect {
     return false;
   }
 
+  /**
+   * Returns whether this dialect supports "WITH CUBE" in "GROUP BY" clause.
+   */
+  public boolean supportsGroupByWithCube() {
+    return false;
+  }
+
   /** Returns how NULL values are sorted if an ORDER BY item does not contain
    * NULLS ASCENDING or NULLS DESCENDING. */
   public NullCollation getNullCollation() {
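
In SqlDialect, supportsGroupByWithCube joins the existing supportsGroupByWithRollup hook; both default to false and are opt-in per dialect. A minimal sketch of a dialect opting into the keyword syntax (the class name is hypothetical and not part of this commit):

    import org.apache.calcite.sql.SqlDialect;

    /** Hypothetical dialect, shown only to illustrate the new hooks;
     * the class name is an assumption, not code from this commit. */
    public class KeywordGroupingDialect extends SqlDialect {
      public KeywordGroupingDialect(Context context) {
        super(context);
      }

      // Opt into "GROUP BY x, y WITH ROLLUP" instead of "GROUP BY ROLLUP(x, y)".
      @Override public boolean supportsGroupByWithRollup() {
        return true;
      }

      // Opt into "GROUP BY x, y WITH CUBE" instead of "GROUP BY CUBE(x, y)".
      @Override public boolean supportsGroupByWithCube() {
        return true;
      }
    }
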
diff --git a/core/src/main/java/org/apache/calcite/sql/dialect/SparkSqlDialect.java b/core/src/main/java/org/apache/calcite/sql/dialect/SparkSqlDialect.java
index e7edb08..8a750da 100644
--- a/core/src/main/java/org/apache/calcite/sql/dialect/SparkSqlDialect.java
+++ b/core/src/main/java/org/apache/calcite/sql/dialect/SparkSqlDialect.java
@@ -65,6 +65,13 @@ public class SparkSqlDialect extends SqlDialect {
     return JoinType.CROSS;
   }
 
+  @Override public boolean supportsGroupByWithRollup() {
+    return true;
+  }
+
+  @Override public boolean supportsGroupByWithCube() {
+    return true;
+  }
 
   @Override public void unparseOffsetFetch(SqlWriter writer, SqlNode offset,
       SqlNode fetch) {
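
Together with the pre-existing supportsAggregateFunction(kind) check, these two flags decide which GROUP BY syntax is emitted. A condensed, purely illustrative helper mirroring that dispatch (the class and method names are assumptions; the real logic lives in SqlRollupOperator.unparse, shown next):

    import org.apache.calcite.sql.SqlDialect;
    import org.apache.calcite.sql.SqlKind;

    /** Illustrative helper mirroring the dispatch in SqlRollupOperator;
     * not part of the commit. */
    final class GroupingSyntax {
      private GroupingSyntax() {}

      /** Returns the trailing keyword ("WITH ROLLUP" / "WITH CUBE"), or null
       * if the dialect should get the standard ROLLUP(...) / CUBE(...) form. */
      static String trailingKeyword(SqlDialect dialect, SqlKind kind) {
        switch (kind) {
        case ROLLUP:
          if (!dialect.supportsAggregateFunction(kind)
              && dialect.supportsGroupByWithRollup()) {
            return "WITH ROLLUP";
          }
          break;
        case CUBE:
          if (!dialect.supportsAggregateFunction(kind)
              && dialect.supportsGroupByWithCube()) {
            return "WITH CUBE";
          }
          break;
        default:
          break;
        }
        return null;
      }
    }
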
diff --git a/core/src/main/java/org/apache/calcite/sql/fun/SqlRollupOperator.java b/core/src/main/java/org/apache/calcite/sql/fun/SqlRollupOperator.java
index 1bffe06..75098a7 100644
--- a/core/src/main/java/org/apache/calcite/sql/fun/SqlRollupOperator.java
+++ b/core/src/main/java/org/apache/calcite/sql/fun/SqlRollupOperator.java
@@ -41,20 +41,33 @@ class SqlRollupOperator extends SqlInternalOperator {
         // MySQL version 5: generate "GROUP BY x, y WITH ROLLUP".
         // MySQL version 8 and higher is SQL-compliant,
         // so generate "GROUP BY ROLLUP(x, y)"
-        final SqlWriter.Frame groupFrame =
-            writer.startList(SqlWriter.FrameTypeEnum.GROUP_BY_LIST);
-        for (SqlNode operand : call.getOperandList()) {
-          writer.sep(",");
-          operand.unparse(writer, 2, 3);
-        }
-        writer.endList(groupFrame);
-        writer.keyword("WITH ROLLUP");
+        unparseKeyword(writer, call, "WITH ROLLUP");
+        return;
+      }
+      break;
+    case CUBE:
+      if (!writer.getDialect().supportsAggregateFunction(kind)
+          && writer.getDialect().supportsGroupByWithCube()) {
+        // Spark SQL: generate "GROUP BY x, y WITH CUBE".
+        unparseKeyword(writer, call, "WITH CUBE");
         return;
       }
+      break;
     }
     unparseCube(writer, call);
   }
 
+  private void unparseKeyword(SqlWriter writer, SqlCall call, String keyword) {
+    final SqlWriter.Frame groupFrame =
+        writer.startList(SqlWriter.FrameTypeEnum.GROUP_BY_LIST);
+    for (SqlNode operand : call.getOperandList()) {
+      writer.sep(",");
+      operand.unparse(writer, 2, 3);
+    }
+    writer.endList(groupFrame);
+    writer.keyword(keyword);
+  }
+
   private static void unparseCube(SqlWriter writer, SqlCall call) {
     writer.keyword(call.getOperator().getName());
     final SqlWriter.Frame frame =
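
For dialects on the keyword path, unparseKeyword writes the grouping columns as a plain comma-separated list and then the trailing keyword. A standalone sketch of the resulting text shape, assuming simple string concatenation rather than the SqlWriter frames used by the real code:

    import java.util.Arrays;
    import java.util.List;

    /** Illustration only: shows the text shape produced for the keyword form. */
    public final class KeywordFormExample {
      private KeywordFormExample() {}

      static String keywordGroupBy(List<String> columns, String keyword) {
        // e.g. [product_id, product_class_id] + "WITH CUBE"
        //   -> "GROUP BY product_id, product_class_id WITH CUBE"
        return "GROUP BY " + String.join(", ", columns) + " " + keyword;
      }

      public static void main(String[] args) {
        List<String> cols = Arrays.asList("product_id", "product_class_id");
        System.out.println(keywordGroupBy(cols, "WITH CUBE"));
        System.out.println(keywordGroupBy(cols, "WITH ROLLUP"));
      }
    }

Running the sketch prints the same GROUP BY clauses that the Spark expectations in the new tests below assert.
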
diff --git a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
index 337c09c..bd2d119 100644
--- a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
+++ b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
@@ -3319,6 +3319,38 @@ public class RelToSqlConverterTest {
     sql(query).ok(expected);
   }
 
+  @Test public void testCubeInSpark() {
+    final String query = "select count(*) "
+        + "from \"foodmart\".\"product\" "
+        + "group by cube(\"product_id\",\"product_class_id\")";
+    final String expected = "SELECT COUNT(*)\n"
+        + "FROM \"foodmart\".\"product\"\n"
+        + "GROUP BY CUBE(\"product_id\", \"product_class_id\")";
+    final String expectedInSpark = "SELECT COUNT(*)\n"
+        + "FROM foodmart.product\n"
+        + "GROUP BY product_id, product_class_id WITH CUBE";
+    sql(query)
+        .ok(expected)
+        .withSpark()
+        .ok(expectedInSpark);
+  }
+
+  @Test public void testRollupInSpark() {
+    final String query = "select count(*) "
+        + "from \"foodmart\".\"product\" "
+        + "group by rollup(\"product_id\",\"product_class_id\")";
+    final String expected = "SELECT COUNT(*)\n"
+        + "FROM \"foodmart\".\"product\"\n"
+        + "GROUP BY ROLLUP(\"product_id\", \"product_class_id\")";
+    final String expectedInSpark = "SELECT COUNT(*)\n"
+        + "FROM foodmart.product\n"
+        + "GROUP BY product_id, product_class_id WITH ROLLUP";
+    sql(query)
+        .ok(expected)
+        .withSpark()
+        .ok(expectedInSpark);
+  }
+
   @Test public void testJsonType() {
     String query = "select json_type(\"product_name\") from \"product\"";
     final String expected = "SELECT "