You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ku...@apache.org on 2018/11/22 10:01:25 UTC
carbondata git commit: [CARBONDATA-3115] Fix CodeGen error in
preaggregate table and codegen display issue in oldstores
Repository: carbondata
Updated Branches:
refs/heads/master 0fa0a96c4 -> 697eee3de
[CARBONDATA-3115] Fix CodeGen error in preaggregate table and codegen display issue in oldstores
Problem:
1. While querying a preaggregate table, a codegen error is displayed.
2. In old stores, the generated code is displayed while executing queries.
This closes #2939
Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/697eee3d
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/697eee3d
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/697eee3d
Branch: refs/heads/master
Commit: 697eee3de7eb1147fd75452d10acfe087a0566ba
Parents: 0fa0a96
Author: Indhumathi27 <in...@gmail.com>
Authored: Wed Nov 21 17:23:25 2018 +0530
Committer: kumarvishal09 <ku...@gmail.com>
Committed: Thu Nov 22 15:31:00 2018 +0530
----------------------------------------------------------------------
.../preaggregate/TestPreAggCreateCommand.scala | 23 ++++++++++++++++++++
.../spark/sql/CarbonDictionaryDecoder.scala | 12 +++++-----
2 files changed, 29 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/carbondata/blob/697eee3d/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
index 9fbdff7..7851bd1 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
@@ -438,6 +438,29 @@ class TestPreAggCreateCommand extends QueryTest with BeforeAndAfterAll {
}
}
+ test("test codegen issue with preaggregate") {
+ sql("DROP TABLE IF EXISTS PreAggMain")
+ sql("CREATE TABLE PreAggMain (id Int, date date, country string, phonetype string, " +
+ "serialname String,salary int ) STORED BY 'org.apache.carbondata.format' " +
+ "tblproperties('dictionary_include'='country')")
+ sql("create datamap PreAggSum on table PreAggMain using 'preaggregate' as " +
+ "select country,sum(salary) as sum from PreAggMain group by country")
+ sql("create datamap PreAggAvg on table PreAggMain using 'preaggregate' as " +
+ "select country,avg(salary) as avg from PreAggMain group by country")
+ sql("create datamap PreAggCount on table PreAggMain using 'preaggregate' as " +
+ "select country,count(salary) as count from PreAggMain group by country")
+ sql("create datamap PreAggMin on table PreAggMain using 'preaggregate' as " +
+ "select country,min(salary) as min from PreAggMain group by country")
+ sql("create datamap PreAggMax on table PreAggMain using 'preaggregate' as " +
+ "select country,max(salary) as max from PreAggMain group by country")
+ sql(s"LOAD DATA INPATH '$integrationPath/spark-common-test/src/test/resources/source.csv' " +
+ s"into table PreAggMain")
+ checkExistence(sql("select t1.country,sum(id) from PreAggMain t1 join (select " +
+ "country as newcountry,sum(salary) as sum from PreAggMain group by country)" +
+ "t2 on t1.country=t2.newcountry group by country"), true, "france")
+ sql("DROP TABLE IF EXISTS PreAggMain")
+ }
+
// TODO: Need to Fix
ignore("test creation of multiple preaggregate of same name concurrently") {
sql("DROP TABLE IF EXISTS tbl_concurr")
http://git-wip-us.apache.org/repos/asf/carbondata/blob/697eee3d/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
index 95ab29d..3b20c2f 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
@@ -248,34 +248,34 @@ case class CarbonDictionaryDecoder(
|org.apache.spark.sql.DictTuple $value = $decodeDecimal($dictRef, ${ev.value});
""".stripMargin
ExprCode(code, s"$value.getIsNull()",
- s"(org.apache.spark.sql.types.Decimal)$value.getValue()")
+ s"((org.apache.spark.sql.types.Decimal)$value.getValue())")
} else {
getDictionaryColumnIds(index)._3.getDataType match {
case CarbonDataTypes.INT => code +=
s"""
|org.apache.spark.sql.DictTuple $value = $decodeInt($dictRef, ${ ev.value });
""".stripMargin
- ExprCode(code, s"$value.getIsNull()", s"(Integer)$value.getValue()")
+ ExprCode(code, s"$value.getIsNull()", s"((Integer)$value.getValue())")
case CarbonDataTypes.SHORT => code +=
s"""
|org.apache.spark.sql.DictTuple $value = $decodeShort($dictRef, ${ ev.value });
""".stripMargin
- ExprCode(code, s"$value.getIsNull()", s"(Short)$value.getValue()")
+ ExprCode(code, s"$value.getIsNull()", s"((Short)$value.getValue())")
case CarbonDataTypes.DOUBLE => code +=
s"""
|org.apache.spark.sql.DictTuple $value = $decodeDouble($dictRef, ${ ev.value });
""".stripMargin
- ExprCode(code, s"$value.getIsNull()", s"(Double)$value.getValue()")
+ ExprCode(code, s"$value.getIsNull()", s"((Double)$value.getValue())")
case CarbonDataTypes.LONG => code +=
s"""
|org.apache.spark.sql.DictTuple $value = $decodeLong($dictRef, ${ ev.value });
""".stripMargin
- ExprCode(code, s"$value.getIsNull()", s"(Long)$value.getValue()")
+ ExprCode(code, s"$value.getIsNull()", s"((Long)$value.getValue())")
case _ => code +=
s"""
|org.apache.spark.sql.DictTuple $value = $decodeStr($dictRef, ${ev.value});
""".stripMargin
- ExprCode(code, s"$value.getIsNull()", s"(UTF8String)$value.getValue()")
+ ExprCode(code, s"$value.getIsNull()", s"((UTF8String)$value.getValue())")
}
}