You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lens.apache.org by ra...@apache.org on 2015/10/14 04:28:24 UTC
[1/2] lens git commit: LENS-607: Fix test failures in Java-8
Repository: lens
Updated Branches:
refs/heads/master a6d6294d8 -> a079ad3f3
http://git-wip-us.apache.org/repos/asf/lens/blob/a079ad3f/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
index db09a4b..6b5fb6a 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
@@ -26,10 +26,12 @@ import java.net.URLClassLoader;
import java.util.*;
import org.apache.lens.cube.parse.HQLParser;
+import org.apache.lens.cube.parse.TestQuery;
import org.apache.lens.server.api.LensConfConstants;
import org.apache.lens.server.api.error.LensException;
import org.apache.commons.lang3.StringUtils;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
@@ -90,28 +92,8 @@ public class TestColumnarSQLRewriter {
* @param expected the expected
* @param actual the actual
*/
- private void compareQueries(String expected, String actual) {
- if (expected == null && actual == null) {
- return;
- } else if (expected == null) {
- Assert.fail();
- } else if (actual == null) {
- Assert.fail("Rewritten query is null");
- }
- String expectedTrimmed = expected.replaceAll("\\W", "");
- String actualTrimmed = actual.replaceAll("\\W", "");
-
- if (!expectedTrimmed.equalsIgnoreCase(actualTrimmed)) {
- String method = null;
- for (StackTraceElement trace : Thread.currentThread().getStackTrace()) {
- if (trace.getMethodName().startsWith("test")) {
- method = trace.getMethodName() + ":" + trace.getLineNumber();
- }
- }
-
- System.err.println("__FAILED__ " + method + "\n\tExpected: " + expected + "\n\t---------\n\tActual: " + actual);
- }
- Assert.assertTrue(expectedTrimmed.equalsIgnoreCase(actualTrimmed));
+ private void compareQueries(String actual, String expected) {
+ assertEquals(new TestQuery(actual), new TestQuery(expected));
}
/*
@@ -154,7 +136,7 @@ public class TestColumnarSQLRewriter {
public void setup() throws Exception {
qtest.init(conf);
- List<FieldSchema> factColumns = new ArrayList<FieldSchema>();
+ List<FieldSchema> factColumns = new ArrayList<>();
factColumns.add(new FieldSchema("item_key", "int", ""));
factColumns.add(new FieldSchema("branch_key", "int", ""));
factColumns.add(new FieldSchema("location_key", "int", ""));
@@ -270,9 +252,9 @@ public class TestColumnarSQLRewriter {
SessionState.start(hconf);
String rwq = qtest.rewrite(query, conf, hconf);
- String expected = "inner join (select day_of_week, day, time_key from time_dim) time_dim___time_dim "
+ String expected = "inner join (select time_key, day_of_week, day from time_dim) time_dim___time_dim "
+ "on (( sales_fact___fact . time_key ) = "
- + "( time_dim___time_dim . time_key )) inner join (select location_name, location_key from location_dim) "
+ + "( time_dim___time_dim . time_key )) inner join (select location_key, location_name from location_dim) "
+ "location_dim___location_dim on "
+ "((( sales_fact___fact . location_key ) = ( location_dim___location_dim . location_key )) "
+ "and (( location_dim___location_dim . location_name ) = 'test123' ))";
@@ -437,12 +419,12 @@ public class TestColumnarSQLRewriter {
+ "format(avg(( sales_fact___fact . dollars_sold )), '##################.###' ), "
+ "min(( sales_fact___fact . dollars_sold )), max(( sales_fact___fact . dollars_sold )) "
+ "from sales_fact sales_fact___fact inner join "
- + "(select day_of_week, day, time_key from time_dim) time_dim___time_dim on "
+ + "(select time_key, day_of_week, day from time_dim) time_dim___time_dim on "
+ "(( sales_fact___fact . time_key ) = ( time_dim___time_dim . time_key )) inner join "
- + "(select location_name, location_key from location_dim) location_dim___location_dim "
+ + "(select location_key, location_name from location_dim) location_dim___location_dim "
+ "on (( sales_fact___fact . location_key ) = "
- + "( location_dim___location_dim . location_key )) inner join (select item_name, "
- + "item_key from item_dim) item_dim___item_dim "
+ + "( location_dim___location_dim . location_key )) inner join (select item_key, "
+ + "item_name from item_dim) item_dim___item_dim "
+ "on ((( sales_fact___fact . item_key ) = ( item_dim___item_dim . item_key )) and "
+ "(( location_dim___location_dim . location_name ) = 'test123' )) where "
+ "(( time_dim___time_dim . time_key ) between date_add( '2013-01-01' , interval 1 day) "
@@ -491,9 +473,9 @@ public class TestColumnarSQLRewriter {
String expected = "select ( sales_fact___fact . time_key ), ( time_dim___time_dim . day_of_week ), "
+ "( time_dim___time_dim . day ), case when (sum(( sales_fact___fact . dollars_sold )) = 0 ) "
+ "then 0.0 else sum(( sales_fact___fact . dollars_sold )) end dollars_sold from sales_fact s"
- + "ales_fact___fact inner join (select day_of_week, day, time_key from time_dim) "
+ + "ales_fact___fact inner join (select time_key, day_of_week, day from time_dim) "
+ "time_dim___time_dim on (( sales_fact___fact . time_key ) "
- + "= ( time_dim___time_dim . time_key )) inner join (select location_name, location_key from location_dim) "
+ + "= ( time_dim___time_dim . time_key )) inner join (select location_key, location_name from location_dim) "
+ "location_dim___location_dim on "
+ "((( sales_fact___fact . location_key ) = ( location_dim___location_dim . location_key )) and "
+ "(( location_dim___location_dim . location_name ) = 'test123' )) where ( time_dim___time_dim . time_key ) "
@@ -502,10 +484,10 @@ public class TestColumnarSQLRewriter {
+ "asc union all select ( sales_fact___fact . time_key ), ( time_dim___time_dim . day_of_week ), "
+ "( time_dim___time_dim . day ), case when (sum(( sales_fact___fact . dollars_sold )) = 0 ) then 0.0 "
+ "else sum(( sales_fact___fact . dollars_sold )) end dollars_sold from sales_fact sales_fact___fact "
- + "inner join (select day_of_week, day, time_key from time_dim) time_dim___time_dim "
+ + "inner join (select time_key, day_of_week, day from time_dim) time_dim___time_dim "
+ "on (( sales_fact___fact . time_key ) = "
- + "( time_dim___time_dim . time_key )) inner join (select location_name, "
- + "location_key from location_dim) location_dim___location_dim on "
+ + "( time_dim___time_dim . time_key )) inner join (select location_key, "
+ + "location_name from location_dim) location_dim___location_dim on "
+ "((( sales_fact___fact . location_key ) = ( location_dim___location_dim . location_key )) and "
+ "(( location_dim___location_dim . location_name ) = 'test123' )) where ( time_dim___time_dim . time_key ) "
+ "between '2013-02-01' and '2013-02-05' group by ( sales_fact___fact . time_key ), "
@@ -513,9 +495,9 @@ public class TestColumnarSQLRewriter {
+ "select ( sales_fact___fact . time_key ), ( time_dim___time_dim . day_of_week ), "
+ "( time_dim___time_dim . day ), case when (sum(( sales_fact___fact . dollars_sold )) = 0 ) then 0.0 "
+ "else sum(( sales_fact___fact . dollars_sold )) end dollars_sold from sales_fact sales_fact___fact "
- + "inner join (select day_of_week, day, time_key from time_dim) "
+ + "inner join (select time_key, day_of_week, day from time_dim) "
+ "time_dim___time_dim on (( sales_fact___fact . time_key ) = "
- + "( time_dim___time_dim . time_key )) inner join (select location_name, location_key from location_dim) "
+ + "( time_dim___time_dim . time_key )) inner join (select location_key, location_name from location_dim) "
+ "location_dim___location_dim on "
+ "((( sales_fact___fact . location_key ) = ( location_dim___location_dim . location_key )) and "
+ "(( location_dim___location_dim . location_name ) = 'test123' )) where "
@@ -567,13 +549,13 @@ public class TestColumnarSQLRewriter {
+ "dollars_sold , round(sum(( sales_fact___fact . units_sold )), 2 ), "
+ "avg(( sales_fact___fact . dollars_sold )), "
+ "min(( sales_fact___fact . dollars_sold )), max(( sales_fact___fact . dollars_sold )), location_name , "
- + " from sales_fact sales_fact___fact inner join (select day_of_week, day, time_key "
+ " from sales_fact sales_fact___fact inner join (select time_key, day_of_week, day "
+ "from time_dim) time_dim___time_dim "
+ "on (( sales_fact___fact . time_key ) = ( time_dim___time_dim . time_key )) "
- + "inner join (select location_name, location_key from location_dim) "
+ + "inner join (select location_key, location_name from location_dim) "
+ "location_dim___location_dim on (( sales_fact___fact . location_key ) = "
- + "( location_dim___location_dim . location_key )) inner join (select item_name, "
- + "item_key from item_dim) item_dim___item_dim on "
+ + "( location_dim___location_dim . location_key )) inner join (select item_key, "
+ + "item_name from item_dim) item_dim___item_dim on "
+ "((( sales_fact___fact . item_key ) = ( item_dim___item_dim . item_key )) and "
+ "(( location_dim___location_dim . location_name ) = 'test123' )) where (( time_dim___time_dim . time_key ) "
+ "between '2013-01-01' and '2013-01-31' and (( item_dim___item_dim . item_name ) = 'item_1' )) "
@@ -606,8 +588,8 @@ public class TestColumnarSQLRewriter {
+ "( time_dim___time_dim . day ), ( item_dim___item_dim . item_key ), sum(alias1) dollars_sold , "
+ "round(sum(alias2), 2 ), avg(alias6) avg_dollars_sold, min(alias4), max(alias5) max_dollars_sold, "
+ "location_name , (avg(alias6) / 1.0 ) "
- + "from (select sales_fact___fact.time_key, sales_fact___fact.dollars_sold, sales_fact___fact.location_key, "
- + "sales_fact___fact.item_key,sum( case when (( sales_fact___fact . dollars_sold ) = 0 ) then 0.0 end ) "
+ + "from (select sales_fact___fact.time_key, sales_fact___fact.location_key, sales_fact___fact.item_key,"
+ "sales_fact___fact.dollars_sold, sum( case when (( sales_fact___fact . dollars_sold ) = 0 ) then 0.0 end ) "
+ "as alias1, sum(( sales_fact___fact . units_sold )) as alias2, avg(( sales_fact___fact . dollars_sold )) "
+ "as alias3, min(( sales_fact___fact . dollars_sold )) as alias4, "
+ "max(( sales_fact___fact . dollars_sold )) as alias5, "
@@ -617,14 +599,14 @@ public class TestColumnarSQLRewriter {
+ "and sales_fact___fact.location_key in ( select location_dim .location_key from "
+ "location_dim where (( location_dim. location_name ) = 'test123' ) ) and sales_fact___fact.item_key in "
+ "( select item_dim .item_key from item_dim where (( item_dim. item_name ) = 'item_1' ) ) "
- + "group by sales_fact___fact.time_key, sales_fact___fact.dollars_sold, "
- + "sales_fact___fact.location_key, sales_fact___fact.item_key) sales_fact___fact "
- + "inner join (select day_of_week, day, time_key from time_dim) "
+ + "group by sales_fact___fact.time_key, sales_fact___fact.location_key, "
+ + "sales_fact___fact.item_key, sales_fact___fact.dollars_sold) sales_fact___fact "
+ + "inner join (select time_key, day_of_week, day from time_dim) "
+ "time_dim___time_dim on (( sales_fact___fact . time_key ) = "
- + "( time_dim___time_dim . time_key )) inner join (select location_name, "
- + "location_key from location_dim) location_dim___location_dim "
+ + "( time_dim___time_dim . time_key )) inner join (select location_key, "
+ + "location_name from location_dim) location_dim___location_dim "
+ "on (( sales_fact___fact . location_key ) = ( location_dim___location_dim . location_key )) "
- + "inner join (select item_name, item_key from item_dim) item_dim___item_dim "
+ + "inner join (select item_key, item_name from item_dim) item_dim___item_dim "
+ "on ((( sales_fact___fact . item_key ) = "
+ "( item_dim___item_dim . item_key )) and (( location_dim___location_dim . location_name ) = 'test123' )) "
+ "where (( time_dim___time_dim . time_key ) between '2013-01-01' and '2013-01-31' "
@@ -662,9 +644,9 @@ public class TestColumnarSQLRewriter {
+ "sales_fact___fact.location_key, sales_fact___fact.item_key) sales_fact___fact inner "
+ "join (select time_key from time_dim) time_dim___time_dim on (( sales_fact___fact . time_key ) = "
+ "( time_dim___time_dim . time_key )) inner join "
- + "(select location_name, location_key from location_dim) location_dim___location_dim on "
+ + "(select location_key, location_name from location_dim) location_dim___location_dim on "
+ "(( sales_fact___fact . location_key ) = ( location_dim___location_dim . location_key )) "
- + "inner join (select item_name, item_key from item_dim) item_dim___item_dim "
+ + "inner join (select item_key, item_name from item_dim) item_dim___item_dim "
+ "on ((( sales_fact___fact . item_key ) = "
+ "( item_dim___item_dim . item_key )) and inner ) inner join "
+ "(select branch_key from branch_dim) branch_dim___branch_dim on "
@@ -715,9 +697,9 @@ public class TestColumnarSQLRewriter {
+ "( time_dim___time_dim . day ), case when (sum(( sales_fact__db_sales_fact_fact . dollars_sold )) = 0 ) "
+ "then 0.0 else sum(( sales_fact__db_sales_fact_fact . dollars_sold )) end dollars_sold "
+ "from db.sales_fact sales_fact__db_sales_fact_fact inner join "
- + "(select day_of_week, day, time_key from time_dim) time_dim___time_dim "
+ + "(select time_key, day_of_week, day from time_dim) time_dim___time_dim "
+ "on (( sales_fact__db_sales_fact_fact . time_key ) = ( time_dim___time_dim . time_key )) "
- + "inner join (select location_name, location_key from db.location_dim) location_dim__db_location_dim_ld on "
+ + "inner join (select location_key, location_name from db.location_dim) location_dim__db_location_dim_ld on "
+ "((( sales_fact__db_sales_fact_fact . location_key ) = ( location_dim__db_location_dim_ld . location_key )) "
+ "and (( location_dim__db_location_dim_ld . location_name ) = 'test123' )) where "
+ "( time_dim___time_dim . time_key ) between '2013-01-01' and '2013-01-31' "
@@ -743,10 +725,10 @@ public class TestColumnarSQLRewriter {
String actual = qtest.rewrite(query, conf, hconf);
String expected = "select ( dim1___dim1 . date ) date , sum(alias1) msr1 , ( dim2___dim2 . name ) dim2_name , "
+ "( dim3___dim3 . name ) dim3_name , ( dim4___dim4 . name ) dim4_name "
- + "from (select fact___f.dim2_id, fact___f.dim1_id, fact___f.dim3_id,sum(( fact___f . msr1 )) "
+ + "from (select fact___f.dim1_id, fact___f.dim2_id, fact___f.dim3_id,sum(( fact___f . msr1 )) "
+ "as alias1 from fact fact___f where fact___f.dim1_id in ( select dim1 .id from dim1 where "
+ "(( dim1. date ) = '2014-11-25 00:00:00' ) ) "
- + "group by fact___f.dim2_id, fact___f.dim1_id, fact___f.dim3_id) "
+ + "group by fact___f.dim1_id, fact___f.dim2_id, fact___f.dim3_id) "
+ "fact___f inner join (select id, date from dim1) "
+ "dim1___dim1 on (( fact___f . dim1_id ) = ( dim1___dim1 . id )) "
+ "inner join (select id, id_2, name from dim2) dim2___dim2 "
@@ -773,11 +755,11 @@ public class TestColumnarSQLRewriter {
String actual = qtest.rewrite(query, conf, hconf);
String expected = "select ( dim1___dim1 . date ) date , sum(alias1) msr1 , ( dim2___dim2 . name ) "
- + "dim2_name from (select fact___f.dim2_id, fact___f.dim1_id, fact___f.m4, fact___f.m3, "
- + "fact___f.m2,sum(( fact___f . msr1 )) as alias1 from fact fact___f where ( fact___f . m4 ) "
+ + "dim2_name from (select fact___f.dim1_id, fact___f.m2, fact___f.dim2_id, fact___f.m3, fact___f.m4, "
+ + "sum(( fact___f . msr1 )) as alias1 from fact fact___f where ( fact___f . m4 ) "
+ "is not null and (( fact___f . m2 ) = '1234' ) and (( fact___f . m3 ) > 3000 ) and "
+ "fact___f.dim1_id in ( select dim1 .id from dim1 where (( dim1. date ) = '2014-11-25 00:00:00' ) ) "
- + "group by fact___f.dim2_id, fact___f.dim1_id, fact___f.m4, fact___f.m3, fact___f.m2) fact___f "
+ + "group by fact___f.dim1_id, fact___f.m2, fact___f.dim2_id, fact___f.m3, fact___f.m4) fact___f "
+ "inner join (select id, date from dim1) dim1___dim1 on ((( fact___f . dim1_id ) = ( dim1___dim1 . id )) and "
+ "(( fact___f . m2 ) = '1234' )) inner join (select id, name from dim2) "
+ "dim2___dim2 on ((( fact___f . dim2_id ) = "
@@ -800,13 +782,14 @@ public class TestColumnarSQLRewriter {
SessionState.start(hconf);
String actual = qtest.rewrite(query, conf, hconf);
+
String expected = "select ( dim1___dim1 . date ) dim1_date , sum(alias1) msr1 , "
+ "( dim2___dim2 . name ) dim2_name "
- + "from (select fact___f.dim2_id, fact___f.dim1_id, fact___f.m4, fact___f.m3, fact___f.m2,"
+ "from (select fact___f.dim1_id, fact___f.m2, fact___f.dim2_id, fact___f.m3, fact___f.m4,"
+ "sum(( fact___f . msr1 )) as alias1 from fact fact___f where ( fact___f . m4 ) "
+ "is not null and (( fact___f . m2 ) = '1234' ) and (( fact___f . m3 ) > 3000 ) "
+ "and fact___f.dim1_id in ( select dim1 .id from dim1 where (( dim1. date ) = '2014-11-25 00:00:00' ) ) "
- + "group by fact___f.dim2_id, fact___f.dim1_id, fact___f.m4, fact___f.m3, fact___f.m2) fact___f "
+ + "group by fact___f.dim1_id, fact___f.m2, fact___f.dim2_id, fact___f.m3, fact___f.m4) fact___f "
+ "inner join (select id, date from dim1) dim1___dim1 on ((( fact___f . dim1_id ) = ( dim1___dim1 . id )) "
+ "and (( fact___f . m2 ) = '1234' )) inner join (select id, name from dim2) "
+ "dim2___dim2 on ((( fact___f . dim2_id ) "
@@ -830,11 +813,12 @@ public class TestColumnarSQLRewriter {
String actual = qtest.rewrite(query, conf, hconf);
String expected = "select ( dim1___dim1 . date ) dim1_date , sum(alias1) msr1 , "
- + "( dim2___dim2 . name ) dim2_name from (select fact___f.dim2_id, fact___f.dim1_id, fact___f.dim3_id, "
- + "fact___f.m4, fact___f.m2,sum(( fact___f . msr1 )) as alias1 from fact fact___f where ( fact___f . m4 ) "
+ + "( dim2___dim2 . name ) dim2_name from (select fact___f.dim1_id, fact___f.m2, fact___f.dim2_id,"
+ + "fact___f.dim3_id, "
+ + "fact___f.m4, sum(( fact___f . msr1 )) as alias1 from fact fact___f where ( fact___f . m4 ) "
+ "is not null and (( fact___f . m2 ) = '1234' ) and fact___f.dim1_id in ( select dim1 .id from dim1 "
- + "where (( dim1. date ) = '2014-11-25 00:00:00' ) ) group by fact___f.dim2_id, fact___f.dim1_id, "
- + "fact___f.dim3_id, fact___f.m4, fact___f.m2) fact___f inner join (select id, date from dim1) dim1___dim1 on "
+ + "where (( dim1. date ) = '2014-11-25 00:00:00' ) ) group by fact___f.dim1_id, fact___f.m2, fact___f.dim2_id,"
+ + "fact___f.dim3_id, fact___f.m4) fact___f inner join (select id, date from dim1) dim1___dim1 on "
+ "((( fact___f . dim1_id ) = ( dim1___dim1 . id )) and (( fact___f . m2 ) = '1234' )) "
+ "inner join (select id, name from dim2) dim2___dim2 on ((( fact___f . dim2_id ) = ( dim2___dim2 . id )) "
+ "and (( fact___f . dim3_id ) = ( dim2___dim2 . id ))) where ((( dim1___dim1 . date ) = "
[2/2] lens git commit: LENS-607: Fix test failures in Java-8
Posted by ra...@apache.org.
LENS-607: Fix test failures in Java-8
Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/a079ad3f
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/a079ad3f
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/a079ad3f
Branch: refs/heads/master
Commit: a079ad3f360c169965ea353997be9c991e6c654e
Parents: a6d6294
Author: raju <ra...@apache.org>
Authored: Wed Oct 14 07:58:07 2015 +0530
Committer: raju <ra...@apache.org>
Committed: Wed Oct 14 07:58:07 2015 +0530
----------------------------------------------------------------------
.../apache/lens/cli/TestLensCubeCommands.java | 7 +-
.../lens/cube/parse/TestBaseCubeQueries.java | 40 +++-
.../lens/cube/parse/TestCubeRewriter.java | 215 +++++++++---------
.../cube/parse/TestDenormalizationResolver.java | 71 +++---
.../lens/cube/parse/TestExpressionResolver.java | 4 +-
.../org/apache/lens/cube/parse/TestQuery.java | 218 +++++++++++++++++++
.../apache/lens/cube/parse/TestStorageUtil.java | 17 +-
.../lens/cube/parse/TestTimeRangeResolver.java | 19 +-
lens-driver-jdbc/pom.xml | 7 +
.../lens/driver/jdbc/ColumnarSQLRewriter.java | 31 +--
.../driver/jdbc/TestColumnarSQLRewriter.java | 110 ++++------
11 files changed, 505 insertions(+), 234 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lens/blob/a079ad3f/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java
index b553739..43d0722 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java
@@ -32,6 +32,7 @@ import org.apache.lens.client.LensClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+
import org.testng.annotations.Test;
/**
@@ -122,7 +123,11 @@ public class TestLensCubeCommands extends LensCliApplicationTest {
chain2.setDestTable("test_dim");
chains.getJoinChain().add(chain2);
chains.getJoinChain().add(chain1);
- assertEquals(joinChains, new XJoinChainTable(chains).toString());
+ XJoinChains chainsInDiffOrder = new XJoinChains();
+ chainsInDiffOrder.getJoinChain().add(chain1);
+ chainsInDiffOrder.getJoinChain().add(chain2);
+ assertTrue(joinChains.equals(new XJoinChainTable(chains).toString())
+ || joinChains.equals(new XJoinChainTable(chainsInDiffOrder).toString()));
}
private void testFields(LensCubeCommands command) {
http://git-wip-us.apache.org/repos/asf/lens/blob/a079ad3f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 0f76c76..e6c3be0 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -46,6 +46,8 @@ import org.testng.Assert;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
+import com.google.common.base.Splitter;
+
import lombok.Getter;
public class TestBaseCubeQueries extends TestQueryRewrite {
@@ -56,7 +58,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
@BeforeTest
public void setupDriver() throws Exception {
conf = new Configuration();
- conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2");
+ conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
conf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, true);
conf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, true);
@@ -84,14 +86,40 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
String columnSetsStr = matcher.group(1);
assertNotEquals(columnSetsStr.indexOf("test_time_dim"), -1, columnSetsStr);
assertNotEquals(columnSetsStr.indexOf("msr3, msr13"), -1);
- assertEquals(pruneCauses.getDetails().get("testfact3_base,testfact1_raw_base,testfact3_raw_base"),
- Arrays.asList(CandidateTablePruneCause.columnNotFound("test_time_dim")));
- assertEquals(pruneCauses.getDetails().get("testfact_deprecated,testfact2_raw_base,testfact2_base"),
- Arrays.asList(CandidateTablePruneCause.columnNotFound("msr3", "msr13")));
+
+ /**
+ * Verifying the BriefAndDetailedError:
+ * 1. Check for missing columns(COLUMN_NOT_FOUND)
+ * and check the respective tables for each COLUMN_NOT_FOUND
+ * 2. check for ELEMENT_IN_SET_PRUNED
+ *
+ */
+ boolean columnNotFound = false;
+ List<String> testTimeDimFactTables = Arrays.asList("testfact3_base", "testfact1_raw_base", "testfact3_raw_base");
+ List<String> factTablesForMeasures = Arrays.asList("testfact_deprecated", "testfact2_raw_base", "testfact2_base");
+ for (Map.Entry<String, List<CandidateTablePruneCause>> entry : pruneCauses.getDetails().entrySet()) {
+ if (entry.getValue().contains(CandidateTablePruneCause.columnNotFound("test_time_dim"))) {
+ columnNotFound = true;
+ compareStrings(testTimeDimFactTables, entry);
+ }
+ if (entry.getValue().contains(CandidateTablePruneCause.columnNotFound("msr3", "msr13"))) {
+ columnNotFound = true;
+ compareStrings(factTablesForMeasures, entry);
+ }
+ }
+ Assert.assertTrue(columnNotFound);
assertEquals(pruneCauses.getDetails().get("testfact1_base"),
Arrays.asList(new CandidateTablePruneCause(CandidateTablePruneCode.ELEMENT_IN_SET_PRUNED)));
}
+ private void compareStrings(List<String> factTablesList, Map.Entry<String, List<CandidateTablePruneCause>> entry) {
+ String factTablesString = entry.getKey();
+ Iterable<String> factTablesIterator = Splitter.on(',').split(factTablesString);
+ for (String factTable : factTablesIterator) {
+ Assert.assertTrue(factTablesList.contains(factTable), "Not selecting" + factTable + "fact table");
+ }
+ }
+
@Test
public void testCommonDimensions() throws Exception {
String hqlQuery = rewrite("select dim1, SUM(msr1) from basecube" + " where " + TWO_DAYS_RANGE, conf);
@@ -425,7 +453,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
@Test
public void testFallbackPartCol() throws Exception {
- Configuration conf = getConfWithStorages("C1,C2,C3,C4");
+ Configuration conf = getConfWithStorages("C1");
conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, false);
String hql, expected;
// Prefer fact that has a storage with part col on queried time dim
http://git-wip-us.apache.org/repos/asf/lens/blob/a079ad3f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index a58f5fe..f5ff49a 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -37,6 +37,7 @@ import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode;
import org.apache.lens.server.api.error.LensException;
import org.apache.commons.lang.time.DateUtils;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -48,8 +49,10 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
+import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
+
import lombok.extern.slf4j.Slf4j;
@Slf4j
@@ -105,7 +108,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
+ " WHERE ((( testcube . dt ) between '" + from + "' and '" + to + "' ))";
System.out.println("rewrittenQuery.toHQL() " + rewrittenQuery.toHQL());
System.out.println("expected " + expected);
- compareQueries(expected, rewrittenQuery.toHQL());
+ compareQueries(rewrittenQuery.toHQL(), expected);
//test with msr2 on different fact
rewrittenQuery = rewriteCtx("select SUM(msr2) from testCube where" + " time_range_in(d_time, '"
@@ -114,7 +117,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
+ " WHERE ((( testcube . dt ) between '" + from + "' and '" + to + "' ))";
System.out.println("rewrittenQuery.toHQL() " + rewrittenQuery.toHQL());
System.out.println("expected " + expected);
- compareQueries(expected, rewrittenQuery.toHQL());
+ compareQueries(rewrittenQuery.toHQL(), expected);
//from date 4 days back
qCal.setTime(BEFORE_4_DAYS_START);
@@ -144,7 +147,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
String expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, rewrittenQuery.toHQL());
+ compareQueries(rewrittenQuery.toHQL(), expected);
System.out.println("Non existing parts:" + rewrittenQuery.getNonExistingParts());
assertNotNull(rewrittenQuery.getNonExistingParts());
}
@@ -177,7 +180,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
String expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
Configuration conf = getConfWithStorages("C1");
conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
@@ -185,7 +188,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
getWhereForHourly2days("c1_testfact2"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
conf.setBoolean(CubeQueryConfUtil.LIGHTEST_FACT_FIRST, true);
@@ -210,7 +213,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
String expected =
getExpectedQuery(CubeTestSetup.DERIVED_CUBE_NAME, "select sum(derivedCube.msr2) FROM ", null, null,
getWhereForDailyAndHourly2days(CubeTestSetup.DERIVED_CUBE_NAME, "C2_testfact"));
- compareQueries(expected, rewrittenQuery.toHQL());
+ compareQueries(rewrittenQuery.toHQL(), expected);
System.out.println("Non existing parts:" + rewrittenQuery.getNonExistingParts());
assertNotNull(rewrittenQuery.getNonExistingParts());
@@ -229,7 +232,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
+ getDbName() + "c1_testdim2tbl testdim2 ON derivedCube.dim2 = "
+ " testdim2.id and (testdim2.dt = 'latest') ", null, "group by (testdim2.name)", null,
getWhereForDailyAndHourly2days(CubeTestSetup.DERIVED_CUBE_NAME, "c1_summary2"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
// Test that explicit join query passes with join resolver disabled
conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, true);
@@ -243,7 +246,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
" inner JOIN " + getDbName() + "c1_testdim2tbl testdim2 ON derivedCube.dim2 = " + " testdim2.id ", null,
"group by (testdim2.name)", joinWhereConds,
getWhereForDailyAndHourly2days(CubeTestSetup.DERIVED_CUBE_NAME, "c1_summary2"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
@Test
@@ -255,59 +258,37 @@ public class TestCubeRewriter extends TestQueryRewrite {
Map<String, String> wh = getWhereForDailyAndHourly2days(cubeName, "C2_testfact");
String expected = "insert overwrite directory 'target/test' "
+ getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, wh);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("insert overwrite directory" + " 'target/test' cube select SUM(msr2) from testCube where "
+ TWO_DAYS_RANGE, conf);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("insert overwrite local directory" + " 'target/test' select SUM(msr2) from testCube where "
+ TWO_DAYS_RANGE, conf);
wh = getWhereForDailyAndHourly2days(cubeName, "C2_testfact");
expected = "insert overwrite local directory 'target/test' "
+ getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, wh);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("insert overwrite local directory" + " 'target/test' cube select SUM(msr2) from testCube where "
+ TWO_DAYS_RANGE, conf);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("insert overwrite table temp" + " select SUM(msr2) from testCube where " + TWO_DAYS_RANGE,
conf);
wh = getWhereForDailyAndHourly2days(cubeName, "C2_testfact");
expected = "insert overwrite table temp "
+ getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, wh);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("insert overwrite table temp" + " cube select SUM(msr2) from testCube where " + TWO_DAYS_RANGE,
conf);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
static void compareQueries(String actual, String expected) {
- if (expected == null && actual == null) {
- return;
- } else if (expected == null) {
- fail();
- } else if (actual == null) {
- fail("Rewritten query is null");
- }
- String expectedTrimmed = expected.replaceAll("\\W", "");
- String actualTrimmed = actual.replaceAll("\\W", "");
-
- if (!expectedTrimmed.equalsIgnoreCase(actualTrimmed)) {
- String method = null;
- for (StackTraceElement trace : Thread.currentThread().getStackTrace()) {
- if (trace.getMethodName().startsWith("test")) {
- method = trace.getMethodName() + ":" + trace.getLineNumber();
- }
- }
-
- System.err.println("__FAILED__ " + method + "\n\tExpected: " + expected + "\n\t---------\n\tActual: " + actual);
- }
- log.info("expectedTrimmed " + expectedTrimmed);
- log.info("actualTrimmed " + actualTrimmed);
- assertTrue(expectedTrimmed.equalsIgnoreCase(actualTrimmed));
+ assertEquals(new TestQuery(actual), new TestQuery(expected));
}
static void compareContains(String expected, String actual) {
@@ -341,7 +322,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
// Test with partition existence
Configuration conf = getConf();
@@ -350,7 +331,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
getWhereForHourly2days("c1_testfact2"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, false);
// Tests for valid tables
@@ -360,7 +341,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
getWhereForDailyAndHourly2days(cubeName, "C1_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact");
@@ -368,7 +349,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact2");
@@ -376,7 +357,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
getWhereForHourly2days("c1_testfact2"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact2");
@@ -385,7 +366,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
getWhereForHourly2days("c1_testfact2"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact");
conf.set(CubeQueryConfUtil.getValidStorageTablesKey("testfact"), "C1_testFact");
@@ -393,7 +374,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c1_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
conf.set(CubeQueryConfUtil.getValidStorageTablesKey("testfact"), "C2_testFact");
@@ -401,7 +382,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
// max interval test
conf = new Configuration();
@@ -410,7 +391,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c1_testfact2"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
@Test
@@ -564,17 +545,17 @@ public class TestCubeRewriter extends TestQueryRewrite {
getExpectedQuery(cubeName, "select sum(testcube.msr2)" + " FROM ", " INNER JOIN " + getDbName()
+ "c2_citytable citydim ON" + " testCube.cityid = citydim.id", null, null, joinWhereConds,
getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select SUM(msr2) from testCube" + " join citydim on cityid = citydim.id" + " where " + TWO_DAYS_RANGE,
conf);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select SUM(msr2) from testCube" + " join citydim on cityid = id" + " where " + TWO_DAYS_RANGE,
getConfWithStorages("C2"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
// q2
hqlQuery =
@@ -591,7 +572,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
+ "(statedim.dt = 'latest') RIGHT OUTER JOIN " + getDbName() + "c1_ziptable"
+ " zipdim ON citydim.zipcode = zipdim.code", null, " group by" + " statedim.name ", joinWhereConds,
getWhereForHourly2days(cubeName, "C1_testfact2"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
// q3
hqlQuery =
@@ -607,7 +588,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
+ " ON st.id = ct.stateid and (st.dt = 'latest') " + "RIGHT OUTER JOIN " + getDbName() + "c1_ziptable"
+ " zt ON ct.zipcode = zt.code", null, " group by" + " st.name ", joinWhereConds,
getWhereForHourly2days("tc", "C1_testfact2"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
// q4
hqlQuery =
@@ -620,7 +601,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
+ " LEFT OUTER JOIN " + getDbName() + "c1_ziptable" + " zipdim ON citydim.zipcode = zipdim.code AND "
+ "(zipdim.dt = 'latest')", null, " group by" + " citydim.name ", null,
getWhereForHourly2days(cubeName, "C1_testfact2"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select SUM(msr2) from testCube" + " join countrydim on testCube.countryid = countrydim.id" + " where "
@@ -629,7 +610,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " INNER JOIN " + getDbName()
+ "c1_countrytable countrydim ON testCube.countryid = " + " countrydim.id", null, null, null,
getWhereForMonthly2months("c2_testfactmonthly"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
LensException th = getLensExceptionInRewrite(
"select name, SUM(msr2) from testCube" + " join citydim" + " where " + TWO_DAYS_RANGE
@@ -647,7 +628,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
String expected1 = getExpectedQuery(cubeName, "select testcube.cityid, 99, \"placeHolder\", -1001,"
+ " sum(testcube.msr2) FROM ", null, " group by testcube.cityid ",
getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected1, hqlQuery1);
+ compareQueries(hqlQuery1, expected1);
// check constants with expression
String hqlQuery2 = rewrite(
@@ -658,7 +639,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
+ " -1001, sum(testcube.msr2) FROM ", null,
" group by testcube.cityid, case when testcube.stateid = 'za' then \"Not Available\" end ",
getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected2, hqlQuery2);
+ compareQueries(hqlQuery2, expected2);
// check expression with boolean and numeric constants
String hqlQuery3 = rewrite(
@@ -676,7 +657,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
+ "999 - testcube.stateid, "
+ " case when testcube.stateid='za' then 99 else -1001 end ",
getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected3, hqlQuery3);
+ compareQueries(hqlQuery3, expected3);
}
@Test
@@ -693,7 +674,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
getExpectedQuery(cubeName, "select citydim.name," + " sum(testcube.msr2) FROM ", "INNER JOIN " + getDbName()
+ "c2_citytable citydim ON" + " testCube.cityid = citydim.id", null, " group by citydim.name ",
joinWhereConds, getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select SUM(msr2) from testCube" + " join citydim on testCube.cityid = citydim.id" + " where "
@@ -717,7 +698,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery(cubeName, "select round(testcube.zipcode)," + " sum(testcube.msr2) FROM ", null,
" group by round(testcube.zipcode) ", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select round(cityid), SUM(msr2) from" + " testCube where " + TWO_DAYS_RANGE + " group by zipcode",
@@ -725,13 +706,13 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery(cubeName, "select " + " round(testcube.cityid), sum(testcube.msr2) FROM ", null,
" group by testcube.zipcode", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("select round(cityid), SUM(msr2) from" + " testCube where " + TWO_DAYS_RANGE, conf);
expected =
getExpectedQuery(cubeName, "select " + " round(testcube.cityid), sum(testcube.msr2) FROM ", null,
" group by round(testcube.cityid)", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select cityid, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE + " group by round(zipcode)",
@@ -739,21 +720,21 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery(cubeName, "select " + " testcube.cityid, sum(testcube.msr2) FROM ", null,
" group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE + " group by round(zipcode)", conf);
expected =
getExpectedQuery(cubeName, "select round(testcube.zipcode)," + " sum(testcube.msr2) FROM ", null,
" group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select cityid, msr2 from testCube" + " where " + TWO_DAYS_RANGE + " group by round(zipcode)", conf);
expected =
getExpectedQuery(cubeName, "select " + " testcube.cityid, sum(testcube.msr2) FROM ", null,
" group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select round(zipcode) rzc," + " msr2 from testCube where " + TWO_DAYS_RANGE + " group by zipcode"
@@ -761,7 +742,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery(cubeName, "select round(testcube.zipcode) rzc," + " sum(testcube.msr2) FROM ", null,
" group by testcube.zipcode order by rzc asc", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
// rewrite with expressions
conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
@@ -845,7 +826,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
+ " and (( citydim.id ) == 12 )))), ( zipdim.f1 ) HAVING ((sum(( testcube.msr1 )) >= 1000 ) "
+ "and (sum(( testcube.msr2 )) >= 0.01 ))",
null, getWhereForHourly2days("c1_testfact2_raw"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite(
@@ -893,7 +874,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
+ " AND ( citydim.name == \"X\" OR citydim.name == \"Y\" ))"
+ " OR ((zipdim.f1=\"api\" OR zipdim.f1==\"uk\" OR (zipdim.f1==\"adc\" AND zipdim.f1!=\"js\")) AND"
+ " citydim.id==12))," + " zipdim.f1 " + "HAVING (SUM(msr1) >=1000) AND (SUM(msr2)>=0.01)", conf);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
@Test
@@ -937,33 +918,33 @@ public class TestCubeRewriter extends TestQueryRewrite {
String expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2)" + " m2 FROM ", null, null,
getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("select SUM(msr2) from testCube mycube" + " where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
expected =
getExpectedQuery("mycube", "select sum(mycube.msr2) FROM ", null, null,
getWhereForDailyAndHourly2days("mycube", "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select SUM(testCube.msr2) from testCube" + " where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("select mycube.msr2 m2 from testCube" + " mycube where " + TWO_DAYS_RANGE, getConfWithStorages(
"C2"));
expected =
getExpectedQuery("mycube", "select sum(mycube.msr2) m2 FROM ", null, null,
getWhereForDailyAndHourly2days("mycube", "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("select testCube.msr2 m2 from testCube" + " where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) m2 FROM ", null, null,
getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
@Test
@@ -973,7 +954,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
String expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
getWhereForMonthlyDailyAndHourly2months("C2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
@Test
@@ -991,8 +972,18 @@ public class TestCubeRewriter extends TestQueryRewrite {
MISSING_PARTITIONS.errorFormat.substring(0,
MISSING_PARTITIONS.errorFormat.length() - 3));
- assertEquals(pruneCauses.getDetails().get("summary1,summary2,testfact2_raw,summary3,testfact").iterator()
- .next().getCause(), MISSING_PARTITIONS);
+ Set<String> expectedSet =
+ Sets.newTreeSet(Arrays.asList("summary1", "summary2", "testfact2_raw", "summary3", "testfact"));
+ boolean missingPartitionCause = false;
+ for (String key : pruneCauses.getDetails().keySet()) {
+ Set<String> actualKeySet = Sets.newTreeSet(Splitter.on(',').split(key));
+ if (expectedSet.equals(actualKeySet)) {
+ assertEquals(pruneCauses.getDetails().get(key).iterator()
+ .next().getCause(), MISSING_PARTITIONS);
+ missingPartitionCause = true;
+ }
+ }
+ assertTrue(missingPartitionCause, MISSING_PARTITIONS + " error does not occur for facttables set " + expectedSet);
assertEquals(pruneCauses.getDetails().get("testfactmonthly").iterator().next().getCause(),
NO_FACT_UPDATE_PERIODS_FOR_GIVEN_RANGE);
assertEquals(pruneCauses.getDetails().get("testfact2").iterator().next().getCause(),
@@ -1011,7 +1002,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
String expected =
getExpectedQuery(cubeName, "select testcube.cityid," + " sum(testcube.msr2) FROM ", null,
"group by testcube.cityid", getWhereForMonthly2months("c2_testfact"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
@Test
@@ -1019,7 +1010,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
String hqlQuery = rewrite("select name, stateid from" + " citydim", getConf());
String expected =
getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null, "c1_citytable", true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
Configuration conf = getConf();
// should pick up c2 storage when 'fail on partial data' enabled
@@ -1027,7 +1018,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
hqlQuery = rewrite("select name, stateid from" + " citydim", conf);
expected =
getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null, "c2_citytable", false);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
// state table is present on c1 with partition dumps and partitions added
LensException e = getLensExceptionInRewrite("select name, capital from statedim ", conf);
@@ -1060,7 +1051,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
CubeQueryContext rewrittenQuery = rewriteCtx("select name, capital from statedim ", conf);
expected =
getExpectedQuery("statedim", "select statedim.name," + " statedim.capital from ", null, "c1_statetable", true);
- compareQueries(expected, rewrittenQuery.toHQL());
+ compareQueries(rewrittenQuery.toHQL(), expected);
assertNotNull(rewrittenQuery.getNonExistingParts());
// run a query with time range function
@@ -1068,79 +1059,79 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null, TWO_DAYS_RANGE, null,
"c1_citytable", true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
// query with alias
hqlQuery = rewrite("select name, c.stateid from citydim" + " c", conf);
expected = getExpectedQuery("c", "select c.name, c.stateid from ", null, "c1_citytable", true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
// query with where clause
hqlQuery = rewrite("select name, c.stateid from citydim" + " c where name != 'xyz' ", conf);
expected =
getExpectedQuery("c", "select c.name, c.stateid from ", null, " c.name != 'xyz' ", null, "c1_citytable", true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
// query with orderby
hqlQuery = rewrite("select name, c.stateid from citydim" + " c where name != 'xyz' order by name", conf);
expected =
getExpectedQuery("c", "select c.name, c.stateid from ", null, " c.name != 'xyz' ", " order by c.name asc",
"c1_citytable", true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
// query with where and orderby
hqlQuery = rewrite("select name, c.stateid from citydim" + " c where name != 'xyz' order by name", conf);
expected =
getExpectedQuery("c", "select c.name, c.stateid from ", null, " c.name != 'xyz' ", " order by c.name asc ",
"c1_citytable", true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
// query with orderby with order specified
hqlQuery = rewrite("select name, c.stateid from citydim" + " c where name != 'xyz' order by name desc ", conf);
expected =
getExpectedQuery("c", "select c.name, c.stateid from ", null, " c.name != 'xyz' ", " order by c.name desc",
"c1_citytable", true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
hqlQuery = rewrite("select name, stateid from citydim", conf);
expected =
getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null, "c2_citytable", false);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
hqlQuery = rewrite("select name, stateid from citydim", conf);
expected =
getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null, "c1_citytable", true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "");
conf.set(CubeQueryConfUtil.VALID_STORAGE_DIM_TABLES, "C1_citytable");
hqlQuery = rewrite("select name, stateid from citydim", conf);
expected =
getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null, "c1_citytable", true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "");
conf.set(CubeQueryConfUtil.VALID_STORAGE_DIM_TABLES, "C2_citytable");
hqlQuery = rewrite("select name, stateid from citydim", conf);
expected =
getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null, "c2_citytable", false);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("select name n, count(1) from citydim" + " group by name order by n ", conf);
expected =
getExpectedQuery("citydim", "select citydim.name n," + " count(1) from ",
"groupby citydim.name order by n asc", "c2_citytable", false);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("select name n, count(1) from citydim" + " order by n ", conf);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("select count(1) from citydim" + " group by name order by name ", conf);
expected =
getExpectedQuery("citydim", "select citydim.name," + " count(1) from ",
"groupby citydim.name order by citydim.name asc ", "c2_citytable", false);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
@Test
@@ -1150,19 +1141,19 @@ public class TestCubeRewriter extends TestQueryRewrite {
String expected =
getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", " limit 100", "c1_citytable",
true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
hqlQuery = rewrite("select name, stateid from citydim " + "limit 100", conf);
expected =
getExpectedQuery("citydim", "select citydim.name," + "citydim.stateid from ", " limit 100", "c2_citytable",
false);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
hqlQuery = rewrite("select name, stateid from citydim" + " limit 100", conf);
expected =
getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", " limit 100", "c1_citytable",
true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
@Test
@@ -1205,7 +1196,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
for (int i = 0; i < queries.length; i++) {
String hql = rewrite(queries[i], conf);
- compareQueries(expectedQueries[i], hql);
+ compareQueries(hql, expectedQueries[i]);
}
}
@@ -1216,7 +1207,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
String expected =
getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
" group by testcube.dim1", getWhereForDailyAndHourly2days(cubeName, "C1_summary1"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select dim1, dim2, COUNT(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE,
getConfWithStorages("C1"));
@@ -1224,7 +1215,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
+ " sum(testcube.msr2), max(testcube.msr3) FROM ", null, " group by testcube.dim1, testcube.dim2",
getWhereForDailyAndHourly2days(cubeName, "C1_summary2"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select dim1, dim2, cityid, msr4," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE,
getConfWithStorages("C1"));
@@ -1233,7 +1224,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
+ " count(testcube.msr4), sum(testcube.msr2), max(testcube.msr3) FROM ", null,
" group by testcube.dim1, testcube.dim2, testcube.cityid",
getWhereForDailyAndHourly2days(cubeName, "C1_summary3"));
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
@Test
@@ -1248,7 +1239,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
" group by testcube.dim1", getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary1"),
null);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select dim1, dim2, COUNT(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + twoDaysITRange,
getConf());
@@ -1257,7 +1248,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
+ " sum(testcube.msr2), max(testcube.msr3) FROM ", null, " group by testcube.dim1, testcube.dim2",
getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary2"),
null);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select dim1, dim2, cityid, count(msr4)," + " SUM(msr2), msr3 from testCube" + " where "
+ twoDaysITRange, getConf());
@@ -1267,7 +1258,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
" group by testcube.dim1, testcube.dim2, testcube.cityid",
getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary3"),
null);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
// Disabling this as querying on part column directly is not allowed as of
@@ -1285,7 +1276,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
null, "or (( testcube.it ) == 'default')) and ((testcube.dim1) > 1000)" + " group by testcube.dim1",
getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary1"),
null);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE + " OR ("
+ CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS + " AND dt='default')", getConf());
@@ -1297,7 +1288,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, " AND testcube.dt='default'",
expecteddtRangeWhere1, "c2_testfact");
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
String expecteddtRangeWhere2 =
"("
@@ -1310,7 +1301,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, " AND testcube.dt='default'",
expecteddtRangeWhere2, "c2_testfact");
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
String twoDaysPTRange =
"time_range_in(pt, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
@@ -1325,7 +1316,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
"AND testcube.it == 'default' and testcube.dim1 > 1000 group by testcube.dim1", expectedITPTrange,
"C2_summary1");
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
@Test
@@ -1385,14 +1376,14 @@ public class TestCubeRewriter extends TestQueryRewrite {
+ getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "dt", BEFORE_4_DAYS_START, BEFORE_4_DAYS_END);
String expected =
getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, expectedRangeWhere, "c2_testfact");
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE + " OR "
+ CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
expected =
getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
" group by testcube.dim1", expectedRangeWhere, "C1_summary1");
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select dim1, dim2, COUNT(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE
+ " OR " + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
@@ -1400,7 +1391,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
+ " sum(testcube.msr2), max(testcube.msr3) FROM ", null, " group by testcube.dim1, testcube.dim2",
expectedRangeWhere, "C1_summary2");
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery =
rewrite("select dim1, dim2, cityid, count(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE
+ " OR " + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
@@ -1408,7 +1399,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, testcube.cityid,"
+ " count(testcube.msr4), sum(testcube.msr2), max(testcube.msr3) FROM ", null,
" group by testcube.dim1, testcube.dim2, testcube.cityid", expectedRangeWhere, "C1_summary3");
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
@Test
@@ -1417,17 +1408,17 @@ public class TestCubeRewriter extends TestQueryRewrite {
String expected =
getExpectedQuery("citydim", "select DISTINCT" + " citydim.name, citydim.stateid from ", null, "c1_citytable",
true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("select id, sum(distinct id) from" + " citydim group by id", getConf());
expected =
getExpectedQuery("citydim", "select citydim.id," + " sum(DISTINCT citydim.id) from ", "group by citydim.id",
"c1_citytable", true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
hqlQuery = rewrite("select count(distinct id) from" + " citydim", getConf());
expected = getExpectedQuery("citydim", "select count(DISTINCT" + " citydim.id) from ", null, "c1_citytable", true);
- compareQueries(expected, hqlQuery);
+ compareQueries(hqlQuery, expected);
}
@Test
http://git-wip-us.apache.org/repos/asf/lens/blob/a079ad3f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
index bde4edd..64b1ac6 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
@@ -21,11 +21,7 @@ package org.apache.lens.cube.parse;
import static org.apache.lens.cube.parse.CubeTestSetup.*;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
import org.apache.lens.server.api.error.LensException;
@@ -38,6 +34,9 @@ import org.testng.Assert;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Sets;
+
public class TestDenormalizationResolver extends TestQueryRewrite {
private Configuration conf;
@@ -83,8 +82,10 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
null);
TestCubeRewriter.compareQueries(expecteddim2big2, hqlQuery);
+ Configuration conf2 = new Configuration(conf);
+ conf2.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
hqlQuery =
- rewrite("select testdim3.name, dim2big1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf);
+ rewrite("select testdim3.name, dim2big1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf2);
String expected =
getExpectedQuery(cubeName,
"select testdim3.name, testcube.dim2big1, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
@@ -95,8 +96,6 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
null);
TestCubeRewriter.compareQueries(expected, hqlQuery);
- Configuration conf2 = new Configuration(conf);
- conf2.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
hqlQuery = rewrite("select dim2big1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf2);
TestCubeRewriter.compareQueries(expecteddim2big1, hqlQuery);
hqlQuery = rewrite("select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf2);
@@ -151,28 +150,44 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
TestCubeRewriter.compareQueries(expected, hqlQuery);
LensException e = getLensExceptionInRewrite(
"select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE, tconf);
- Assert.assertEquals(extractPruneCause(e), new PruneCauses.BriefAndDetailedError(
- CandidateTablePruneCode.NO_CANDIDATE_STORAGES.errorFormat,
- new HashMap<String, List<CandidateTablePruneCause>>() {
- {
- put("summary2,testfact2_raw,summary3",
- Arrays.asList(new CandidateTablePruneCause(CandidateTablePruneCode.INVALID_DENORM_TABLE)));
- put("testfact_continuous",
- Arrays.asList(CandidateTablePruneCause.columnNotFound("msr2", "msr3")));
- put("summary4", Arrays.asList(CandidateTablePruneCause.noCandidateStorages(
- new HashMap<String, CandidateTablePruneCause.SkipStorageCause>() {
- {
- put("C2", new CandidateTablePruneCause.SkipStorageCause(
- CandidateTablePruneCause.SkipStorageCode.UNSUPPORTED));
- }
- }))
- );
- put("summary1,cheapfact,testfactmonthly,testfact2,testfact",
- Arrays.asList(CandidateTablePruneCause.columnNotFound("dim2big2")));
- }
+ PruneCauses.BriefAndDetailedError error = extractPruneCause(e);
+ Assert.assertEquals(error.getBrief(), CandidateTablePruneCode.NO_CANDIDATE_STORAGES.errorFormat);
+
+ HashMap<String, List<CandidateTablePruneCause>> details = error.getDetails();
+
+ for (Map.Entry<String, List<CandidateTablePruneCause>> entry : details.entrySet()) {
+ if (entry.getValue().equals(Arrays.asList(CandidateTablePruneCause.columnNotFound("dim2big2")))) {
+ Set<String> expectedKeySet =
+ Sets.newTreeSet(Splitter.on(',').split("summary1,cheapfact,testfactmonthly,testfact2,testfact"));
+ Assert.assertTrue(expectedKeySet.equals(Sets.newTreeSet(Splitter.on(',').split(entry.getKey()))));
}
- ));
+
+ if (entry.getValue().equals(
+ Arrays.asList(new CandidateTablePruneCause(CandidateTablePruneCode.INVALID_DENORM_TABLE)))) {
+ Set<String> expectedKeySet =
+ Sets.newTreeSet(Splitter.on(',').split("summary2,testfact2_raw,summary3"));
+ Assert.assertTrue(expectedKeySet.equals(Sets.newTreeSet(Splitter.on(',').split(entry.getKey()))));
+ }
+
+ if (entry.getKey().equals("testfact_continuous")) {
+ Assert.assertTrue(entry.getValue().equals(
+ Arrays.asList(CandidateTablePruneCause.columnNotFound("msr2", "msr3")))
+ || entry.getValue().equals(Arrays.asList(CandidateTablePruneCause.columnNotFound("msr3", "msr2"))));
+ }
+
+ if (entry.getKey().equals("summary4")) {
+ List<CandidateTablePruneCause> expectedPruneCauses = Arrays.asList(CandidateTablePruneCause.noCandidateStorages(
+ new HashMap<String, CandidateTablePruneCause.SkipStorageCause>() {
+ {
+ put("C2", new CandidateTablePruneCause.SkipStorageCause(
+ CandidateTablePruneCause.SkipStorageCode.UNSUPPORTED));
+ }
+ }));
+ Assert.assertTrue(entry.getValue().equals(expectedPruneCauses));
+ }
+ }
}
+
@Test
public void testCubeQueryWithExpressionHavingDenormColumnComingAsDirectColumn() throws Exception {
String twoDaysITRange =
http://git-wip-us.apache.org/repos/asf/lens/blob/a079ad3f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
index 493b8d6..b2a15a0 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
@@ -41,7 +41,7 @@ public class TestExpressionResolver extends TestQueryRewrite {
@BeforeTest
public void setupDriver() throws Exception {
conf = new Configuration();
- conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2");
+ conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
conf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, true);
conf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, true);
@@ -289,6 +289,7 @@ public class TestExpressionResolver extends TestQueryRewrite {
@Test
public void testMultipleExpressionsPickingFirstExpression() throws Exception {
Configuration newConf = new Configuration(conf);
+ newConf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
newConf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact");
String hqlQuery = rewrite("select equalsums from testCube where " + TWO_DAYS_RANGE, newConf);
String expected =
@@ -317,6 +318,7 @@ public class TestExpressionResolver extends TestQueryRewrite {
@Test
public void testMaterializedExpressionPickingMaterializedValue() throws Exception {
Configuration newConf = new Configuration(conf);
+ newConf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
newConf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact");
String hqlQuery = rewrite("select msr5 from testCube where " + TWO_DAYS_RANGE, newConf);
String expected = getExpectedQuery(cubeName, "select testcube.msr5 FROM ", null, null,
http://git-wip-us.apache.org/repos/asf/lens/blob/a079ad3f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
new file mode 100644
index 0000000..2707c4c
--- /dev/null
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
@@ -0,0 +1,218 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lens.cube.parse;
+
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang3.StringUtils;
+
+import com.google.common.base.Objects;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
+import lombok.Getter;
+import lombok.Setter;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class TestQuery {
+
+ private String actualQuery;
+ private String joinQueryPart = null;
+
+ private String trimmedQuery = null;
+
+ private Map<JoinType, Set<String>> joinTypeStrings = Maps.newTreeMap();
+
+ private String preJoinQueryPart = null;
+
+ private String postJoinQueryPart = null;
+
+ public enum JoinType {
+ INNERJOIN,
+ LEFTOUTERJOIN,
+ RIGHTOUTERJOIN,
+ FULLOUTERJOIN,
+ UNIQUE,
+ LEFTSEMIJOIN,
+ JOIN;
+ }
+
+ public enum Clause {
+ WHERE,
+ GROUPBY,
+ HAVING,
+ ORDEREDBY;
+ }
+
+ public TestQuery(String query) {
+ this.actualQuery = query;
+ this.trimmedQuery = getTrimmedQuery(query);
+ this.joinQueryPart = extractJoinStringFromQuery(trimmedQuery);
+ /**
+ * Get the join query part, pre-join query part, and post-join query part from the trimmed query.
+ *
+ */
+ if (trimmedQuery.indexOf(joinQueryPart) != -1) {
+ this.preJoinQueryPart = trimmedQuery.substring(0, trimmedQuery.indexOf(joinQueryPart));
+ if (getMinIndexOfClause() != -1) {
+ this.postJoinQueryPart = trimmedQuery.substring(getMinIndexOfClause());
+ }
+ prepareJoinStrings(trimmedQuery);
+ }
+ }
+
+ private String getTrimmedQuery(String query) {
+ return query.toUpperCase().replaceAll("\\W", "");
+ }
+
+ private void prepareJoinStrings(String query) {
+ while (true) {
+ JoinDetails joinDetails = getNextJoinTypeDetails(query);
+ int nextJoinIndex = joinDetails.getIndex();
+ if (joinDetails.getJoinType() == null) {
+ log.info("Parsing joinQuery completed");
+ return;
+ }
+ Set<String> joinStrings = joinTypeStrings.get(joinDetails.getJoinType());
+ if (joinStrings == null) {
+ joinStrings = Sets.newTreeSet();
+ joinTypeStrings.put(joinDetails.getJoinType(), joinStrings);
+ }
+ joinStrings.add(joinDetails.getJoinString());
+ // Pass the remaining query for finding next join query
+ query = query.substring(nextJoinIndex + joinDetails.getJoinType().name().length());
+ }
+ }
+
+ private class JoinDetails {
+ @Setter @Getter private JoinType joinType;
+ @Setter @Getter private int index;
+ @Setter @Getter private String joinString;
+ }
+
+ /**
+ * Get the next join query details from a given query
+ */
+ private JoinDetails getNextJoinTypeDetails(String query) {
+ int nextJoinIndex = Integer.MAX_VALUE;
+ JoinType nextJoinTypePart = null;
+ for (JoinType joinType : JoinType.values()) {
+ int joinIndex = StringUtils.indexOf(query, joinType.name(), 1);
+ if (joinIndex < nextJoinIndex && joinIndex > 0) {
+ nextJoinIndex = joinIndex;
+ nextJoinTypePart = joinType;
+ }
+ }
+ JoinDetails joinDetails = new JoinDetails();
+ joinDetails.setIndex(nextJoinIndex);
+ if (nextJoinIndex != Integer.MAX_VALUE) {
+ joinDetails.setJoinString(
+ getJoinString(query.substring(nextJoinIndex + nextJoinTypePart.name().length())));
+ }
+ joinDetails.setJoinType(nextJoinTypePart);
+ return joinDetails;
+ }
+
+ private String getJoinString(String joinQueryStr) {
+ int nextJoinIndex = Integer.MAX_VALUE;
+ for (JoinType joinType : JoinType.values()) {
+ int joinIndex = StringUtils.indexOf(joinQueryStr, joinType.name());
+ if (joinIndex < nextJoinIndex && joinIndex > 0) {
+ nextJoinIndex = joinIndex;
+ }
+ }
+ if (nextJoinIndex == Integer.MAX_VALUE) {
+ int minClauseIndex = getMinIndexOfClause(joinQueryStr);
+ // return join query completely if there is no Clause in the query
+ return minClauseIndex == -1 ? joinQueryStr : joinQueryStr.substring(0, minClauseIndex);
+ }
+ return joinQueryStr.substring(0, nextJoinIndex);
+ }
+
+ private int getMinIndexOfClause() {
+ return getMinIndexOfClause(trimmedQuery);
+ }
+
+ private int getMinIndexOfClause(String query) {
+ int minClauseIndex = Integer.MAX_VALUE;
+ for (Clause clause : Clause.values()) {
+ int clauseIndex = StringUtils.indexOf(query, clause.name());
+ if (clauseIndex == -1) {
+ continue;
+ }
+ minClauseIndex = clauseIndex < minClauseIndex ? clauseIndex : minClauseIndex;
+ }
+ return minClauseIndex == Integer.MAX_VALUE ? -1 : minClauseIndex;
+ }
+
+ private int getMinIndexOfJoinType() {
+ int minJoinTypeIndex = Integer.MAX_VALUE;
+ for (JoinType joinType : JoinType.values()) {
+ int joinIndex = StringUtils.indexOf(trimmedQuery, joinType.name());
+ if (joinIndex == -1) {
+ continue;
+ }
+ minJoinTypeIndex = joinIndex < minJoinTypeIndex ? joinIndex : minJoinTypeIndex;
+ }
+ return minJoinTypeIndex == Integer.MAX_VALUE ? -1 : minJoinTypeIndex;
+ }
+
+ private String extractJoinStringFromQuery(String queryTrimmed) {
+ int joinStartIndex = getMinIndexOfJoinType();
+ int joinEndIndex = getMinIndexOfClause();
+ if (joinStartIndex == -1 && joinEndIndex == -1) {
+ return queryTrimmed;
+ }
+ return StringUtils.substring(queryTrimmed, joinStartIndex, joinEndIndex);
+ }
+
+ @Override
+ public boolean equals(Object query) {
+ TestQuery expected = (TestQuery) query;
+ if (this == expected) {
+ return true;
+ }
+ if (this.actualQuery == null && expected.actualQuery == null) {
+ return true;
+ } else if (this.actualQuery == null) {
+ return false;
+ } else if (expected.actualQuery == null) {
+ return false;
+ }
+ return Objects.equal(this.joinTypeStrings, expected.joinTypeStrings)
+ && Objects.equal(this.preJoinQueryPart, expected.preJoinQueryPart)
+ && Objects.equal(this.postJoinQueryPart, expected.postJoinQueryPart);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hashCode(actualQuery, joinQueryPart, trimmedQuery, joinTypeStrings);
+ }
+
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("Actual Query: " + actualQuery).append("\n");
+ sb.append("JoinQueryString: " + joinTypeStrings);
+ return sb.toString();
+ }
+}
http://git-wip-us.apache.org/repos/asf/lens/blob/a079ad3f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java
index 81f515b..73c3338 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java
@@ -184,7 +184,7 @@ public class TestStorageUtil {
Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
}
- // {s1, s2}, {s2, s3}, {s3,s4} -> {s2,s3}
+ // {s1, s2}, {s2, s3}, {s3,s4} -> {s2,s3} or {s1, s3} or {s2, s4}
answeringParts = new ArrayList<FactPartition>();
answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s23));
@@ -193,9 +193,16 @@ public class TestStorageUtil {
StorageUtil.getMinimalAnsweringTables(answeringParts, result);
System.out.println("results:" + result);
Assert.assertEquals(2, result.size());
- Assert.assertTrue(result.keySet().contains("S2"));
- Assert.assertTrue(result.keySet().contains("S3"));
- coveredParts = result.get("S2");
+ Set<String> actualSet = result.keySet();
+ Assert.assertTrue(
+ actualSet.contains("S2") && actualSet.contains("S3")
+ || actualSet.contains("S1") && actualSet.contains("S3")
+ || actualSet.contains("S1") && actualSet.contains("S4"));
+ /**
+ * Commenting the following asserts as there are multiple sets that can cover the given partitions
+ */
+ //Assert.assertTrue(result.keySet().contains("S3"));
+ /* coveredParts = result.get("S2");
Assert.assertTrue(coveredParts.size() >= 1);
Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
if (coveredParts.size() == 2) {
@@ -208,7 +215,7 @@ public class TestStorageUtil {
if (coveredParts.size() == 2) {
Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
Assert.assertEquals(1, result.get("S2").size());
- }
+ }*/
// {s1, s2}, {s2}, {s1} -> {s1,s2}
answeringParts = new ArrayList<FactPartition>();
http://git-wip-us.apache.org/repos/asf/lens/blob/a079ad3f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
index cb27d50..1fc8bc8 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
@@ -19,12 +19,16 @@
package org.apache.lens.cube.parse;
+import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.COLUMN_NOT_FOUND;
import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.FACT_NOT_AVAILABLE_IN_RANGE;
import static org.apache.lens.cube.parse.CubeTestSetup.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
+import java.util.List;
+import java.util.Set;
+
import org.apache.lens.server.api.error.LensException;
import org.apache.hadoop.conf.Configuration;
@@ -34,6 +38,8 @@ import org.apache.hadoop.hive.ql.parse.ParseException;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
+import com.google.common.collect.Sets;
+
public class TestTimeRangeResolver extends TestQueryRewrite {
private final String cubeName = CubeTestSetup.TEST_CUBE_NAME;
@@ -63,9 +69,16 @@ public class TestTimeRangeResolver extends TestQueryRewrite {
PruneCauses.BriefAndDetailedError causes = extractPruneCause(e);
assertTrue(causes.getBrief().contains("Columns [msr2] are not present in any table"));
assertEquals(causes.getDetails().size(), 2);
- assertEquals(causes.getDetails().values().iterator().next().size(), 1);
- assertEquals(causes.getDetails().values().iterator().next().iterator().next().getCause(),
- FACT_NOT_AVAILABLE_IN_RANGE);
+
+ Set<CandidateTablePruneCause.CandidateTablePruneCode> expectedPruneCodes = Sets.newTreeSet();
+ expectedPruneCodes.add(FACT_NOT_AVAILABLE_IN_RANGE);
+ expectedPruneCodes.add(COLUMN_NOT_FOUND);
+ Set<CandidateTablePruneCause.CandidateTablePruneCode> actualPruneCodes = Sets.newTreeSet();
+ for (List<CandidateTablePruneCause> cause : causes.getDetails().values()) {
+ assertEquals(cause.size(), 1);
+ actualPruneCodes.add(cause.iterator().next().getCause());
+ }
+ assertEquals(actualPruneCodes, expectedPruneCodes);
}
@Test
http://git-wip-us.apache.org/repos/asf/lens/blob/a079ad3f/lens-driver-jdbc/pom.xml
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/pom.xml b/lens-driver-jdbc/pom.xml
index 3186f31..f84513b 100644
--- a/lens-driver-jdbc/pom.xml
+++ b/lens-driver-jdbc/pom.xml
@@ -48,6 +48,13 @@
<version>${project.version}</version>
</dependency>
<dependency>
+ <groupId>org.apache.lens</groupId>
+ <artifactId>lens-cube</artifactId>
+ <version>${project.version}</version>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
<groupId>com.mchange</groupId>
<artifactId>c3p0</artifactId>
</dependency>
http://git-wip-us.apache.org/repos/asf/lens/blob/a079ad3f/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
index 295b476..8a98c57 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
@@ -80,7 +80,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
protected StringBuilder allSubQueries = new StringBuilder();
/** The fact keys. */
- Set<String> factKeys = new HashSet<String>();
+ Set<String> factKeys = new LinkedHashSet<String>();
/** The rewritten query. */
protected StringBuilder rewrittenQuery = new StringBuilder();
@@ -92,7 +92,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
protected StringBuilder factFilterPush = new StringBuilder();
/** The join list. */
- protected ArrayList<String> joinList = new ArrayList<String>();
+ protected List<String> joinList = new ArrayList<String>();
/** The join condition. */
protected StringBuilder joinCondition = new StringBuilder();
@@ -113,19 +113,20 @@ public class ColumnarSQLRewriter implements QueryRewriter {
private String leftFilter;
/** The map agg tab alias. */
- private final Map<String, String> mapAggTabAlias = new HashMap<String, String>();
+ private final Map<String, String> mapAggTabAlias = new LinkedHashMap<String, String>();
/** The map aliases. */
- private final Map<String, String> mapAliases = new HashMap<String, String>();
+ private final Map<String, String> mapAliases = new LinkedHashMap<String, String>();
/** The table to alias map. */
- private final Map<String, String> tableToAliasMap = new HashMap<String, String>();
+ private final Map<String, String> tableToAliasMap = new LinkedHashMap<String, String>();
/** The tables to accessed column map. */
- private final Map<String, HashSet<String>> tableToAccessedColMap = new HashMap<String, HashSet<String>>();
+ private final Map<String, LinkedHashSet<String>> tableToAccessedColMap =
+ new LinkedHashMap<String, LinkedHashSet<String>>();
/** The dimension table to subquery map. */
- private final Map<String, String> dimTableToSubqueryMap = new HashMap<String, String>();
+ private final Map<String, String> dimTableToSubqueryMap = new LinkedHashMap<String, String>();
/** The where tree. */
private String whereTree;
@@ -552,9 +553,9 @@ public class ColumnarSQLRewriter implements QueryRewriter {
String alias = tableToAliasMap.get(tab);
if ((table.equals(tab) || table.equals(alias)) && column != null) {
- HashSet<String> cols;
+ LinkedHashSet<String> cols;
if (!tableToAccessedColMap.containsKey(tab)) {
- cols = new HashSet<String>();
+ cols = new LinkedHashSet<String>();
cols.add(column);
tableToAccessedColMap.put(tab, cols);
} else {
@@ -580,7 +581,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
while (iterator.hasNext()) {
StringBuilder query = new StringBuilder();
String tab = (String) iterator.next();
- HashSet<String> cols = tableToAccessedColMap.get(tab);
+ LinkedHashSet<String> cols = tableToAccessedColMap.get(tab);
query.append("(").append("select ").append(StringUtils.join(cols, ","))
.append(" from ").append(tab).append(")");
dimTableToSubqueryMap.put(tab, query.toString());
@@ -638,7 +639,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
getAllFilters(whereAST);
rightFilter.add(leftFilter);
- Set<String> setAllFilters = new HashSet<String>(rightFilter);
+ Set<String> setAllFilters = new LinkedHashSet<String>(rightFilter);
// Check the occurrence of dimension table in the filter list and
// combine all filters of same dimension table with and .
@@ -827,7 +828,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
public String getFactNameAlias(ASTNode fromAST) {
String factTable;
String factAlias;
- ArrayList<String> allTables = new ArrayList<String>();
+ ArrayList<String> allTables = new ArrayList<>();
getAllTablesfromFromAST(fromAST, allTables);
String[] keys = allTables.get(0).trim().split(" +");
@@ -897,7 +898,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
* @return the string
*/
public String replaceUDFForDB(String query) {
- Map<String, String> imputnmatch = new HashMap<String, String>();
+ Map<String, String> imputnmatch = new LinkedHashMap<String, String>();
imputnmatch.put("to_date", "date");
imputnmatch.put("format_number", "format");
imputnmatch.put("date_sub\\((.*?),\\s*([0-9]+\\s*)\\)", "date_sub($1, interval $2 day)");
@@ -1199,7 +1200,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
@NoArgsConstructor
private static class NativeTableInfo {
- private Map<String, String> columnMapping = new HashMap<>();
+ private Map<String, String> columnMapping = new LinkedHashMap<>();
NativeTableInfo(Table tbl) {
String columnMappingProp = tbl.getProperty(LensConfConstants.NATIVE_TABLE_COLUMN_MAPPING);
if (StringUtils.isNotBlank(columnMappingProp)) {
@@ -1216,7 +1217,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
}
}
- private Map<String, NativeTableInfo> aliasToNativeTableInfo = new HashMap<>();
+ private Map<String, NativeTableInfo> aliasToNativeTableInfo = new LinkedHashMap<>();
/**
* Replace with underlying storage.