You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lens.apache.org by am...@apache.org on 2014/11/13 12:49:33 UTC
[06/50] incubator-lens git commit: Fix test failures and explain plan
parsing in hive
Fix test failures and explain plan parsing in hive
Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/79a9a373
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/79a9a373
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/79a9a373
Branch: refs/heads/toapache
Commit: 79a9a37398278027576b8074e17321089982290a
Parents: e1ee94e
Author: Amareshwari Sriramdasu <am...@inmobi.com>
Authored: Thu Nov 6 17:51:07 2014 +0530
Committer: Amareshwari Sriramdasu <am...@inmobi.com>
Committed: Thu Nov 6 17:51:07 2014 +0530
----------------------------------------------------------------------
.../org/apache/lens/driver/hive/HiveDriver.java | 6 +-
.../apache/lens/driver/hive/HiveQueryPlan.java | 63 +++----
.../apache/lens/driver/hive/TestHiveDriver.java | 167 ++++++++++++++-----
.../lens/driver/hive/TestRemoteHiveDriver.java | 71 ++------
.../driver/jdbc/TestColumnarSQLRewriter.java | 25 +--
5 files changed, 193 insertions(+), 139 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/79a9a373/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index 84341fb..5ea20b2 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -94,7 +94,8 @@ public class HiveDriver implements LensDriver {
// connections need to be separate for each user and each thread
/** The thread connections. */
- private final Map<String, Map<Long, ExpirableConnection>> threadConnections = new HashMap<String, Map<Long, ExpirableConnection>>();
+ private final Map<String, Map<Long, ExpirableConnection>> threadConnections =
+ new HashMap<String, Map<Long, ExpirableConnection>>();
/** The thrift conn expiry queue. */
private final DelayQueue<ExpirableConnection> thriftConnExpiryQueue = new DelayQueue<ExpirableConnection>();
@@ -274,7 +275,6 @@ public class HiveDriver implements LensDriver {
@Override
public void configure(Configuration conf) throws LensException {
this.driverConf = new HiveConf(conf, HiveDriver.class);
- ;
this.driverConf.addResource("hivedriver-default.xml");
this.driverConf.addResource("hivedriver-site.xml");
connectionClass = this.driverConf.getClass(HIVE_CONNECTION_CLASS, EmbeddedThriftConnection.class,
@@ -294,7 +294,7 @@ public class HiveDriver implements LensDriver {
Configuration explainConf = new Configuration(conf);
explainConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
String explainQuery = "EXPLAIN EXTENDED " + query;
- QueryContext explainQueryCtx = new QueryContext(explainQuery, null, explainConf);
+ QueryContext explainQueryCtx = new QueryContext(explainQuery, SessionState.get().getUserName(), explainConf);
// Get result set of explain
HiveInMemoryResultSet inMemoryResultSet = (HiveInMemoryResultSet) execute(explainQueryCtx);
List<String> explainOutput = new ArrayList<String>();
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/79a9a373/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
index ac8cbee..78f776a 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
@@ -143,18 +143,7 @@ public class HiveQueryPlan extends DriverQueryPlan {
}
break;
case TABLE_SCAN:
- if (tr.startsWith("name:")) {
- String tableName = tr.replace("name:", "").trim();
- tablesQueried.add(tableName);
- Table tbl = metastore.getTable(tableName);
- String costStr = tbl.getParameters().get(LensConfConstants.STORAGE_COST);
-
- Double weight = 1d;
- if (costStr != null) {
- weight = Double.parseDouble(costStr);
- }
- tableWeights.put(tableName, weight);
- }
+ // no op
break;
case JOIN:
if (tr.equals("condition map:")) {
@@ -177,37 +166,49 @@ public class HiveQueryPlan extends DriverQueryPlan {
}
break;
case PARTITION:
- if (tr.equals("partition values:")) {
- i++;
- List<String> partVals = new ArrayList<String>();
- // Look ahead until we reach partition properties
- String lineAhead = null;
- for (; i < explainOutput.size(); i++) {
- if (explainOutput.get(i).trim().equals("properties:")) {
- break;
+ String partConditionStr = null;
+ for (; i < explainOutput.size(); i++) {
+ if (explainOutput.get(i).trim().equals("partition values:")) {
+ List<String> partVals = new ArrayList<String>();
+ // Look ahead until we reach partition properties
+ String lineAhead = null;
+ for (; i < explainOutput.size(); i++) {
+ if (explainOutput.get(i).trim().equals("properties:")) {
+ break;
+ }
+ lineAhead = explainOutput.get(i).trim();
+ partVals.add(lineAhead);
}
- lineAhead = explainOutput.get(i).trim();
- partVals.add(lineAhead);
- }
-
- String partConditionStr = StringUtils.join(partVals, ";");
+ partConditionStr = StringUtils.join(partVals, ";");
+ }
// Now seek table name
- for (; i < explainOutput.size(); i++) {
- if (explainOutput.get(i).trim().startsWith("name:")) {
- String table = explainOutput.get(i).trim().substring("name:".length()).trim();
+ if (explainOutput.get(i).trim().startsWith("name:")) {
+ String table = explainOutput.get(i).trim().substring("name:".length()).trim();
+ // update tables queried and weights
+ if (!tablesQueried.contains(table)) {
+ tablesQueried.add(table);
+ Table tbl = metastore.getTable(table);
+ String costStr = tbl.getParameters().get(LensConfConstants.STORAGE_COST);
+
+ Double weight = 1d;
+ if (costStr != null) {
+ weight = Double.parseDouble(costStr);
+ }
+ tableWeights.put(table, weight);
+ }
+
+ if (partConditionStr != null) {
List<String> tablePartitions = partitions.get(table);
if (tablePartitions == null) {
tablePartitions = new ArrayList<String>();
partitions.put(table, tablePartitions);
}
tablePartitions.add(partConditionStr);
-
- break;
}
+ break;
}
}
-
break;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/79a9a373/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
index f5312a0..45cebde 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
@@ -25,6 +25,7 @@ import java.util.*;
import junit.framework.Assert;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -51,7 +52,10 @@ import org.apache.lens.server.api.driver.LensResultSetMetadata;
import org.apache.lens.server.api.driver.DriverQueryStatus.DriverQueryState;
import org.apache.lens.server.api.query.PreparedQueryContext;
import org.apache.lens.server.api.query.QueryContext;
-import org.testng.annotations.*;
+import org.testng.annotations.AfterTest;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
/**
* The Class TestHiveDriver.
@@ -65,13 +69,16 @@ public class TestHiveDriver {
public final String TEST_OUTPUT_DIR = "target/" + this.getClass().getSimpleName() + "/test-output";
/** The conf. */
- protected HiveConf conf = new HiveConf();
+ protected HiveConf conf;
/** The driver. */
protected HiveDriver driver;
/** The data base. */
- public final String DATA_BASE = this.getClass().getSimpleName();
+ public String DATA_BASE = this.getClass().getSimpleName().toLowerCase();
+
+ protected String sessionid;
+ protected SessionState ss;
/**
* Before test.
@@ -84,27 +91,50 @@ public class TestHiveDriver {
// Check if hadoop property set
System.out.println("###HADOOP_PATH " + System.getProperty("hadoop.bin.path"));
assertNotNull(System.getProperty("hadoop.bin.path"));
- conf.addResource("hivedriver-site.xml");
- conf.setClass(HiveDriver.HIVE_CONNECTION_CLASS, EmbeddedThriftConnection.class, ThriftConnection.class);
- conf.set("hive.lock.manager", "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
- SessionState ss = new SessionState(conf, "testuser");
+
+ createDriver();
+ ss = new SessionState(conf, "testuser");
SessionState.start(ss);
Hive client = Hive.get(conf);
Database database = new Database();
- database.setName(TestHiveDriver.class.getSimpleName());
+ database.setName(DATA_BASE);
client.createDatabase(database, true);
- SessionState.get().setCurrentDatabase(TestHiveDriver.class.getSimpleName());
+ SessionState.get().setCurrentDatabase(DATA_BASE);
+ sessionid = SessionState.get().getSessionId();
- driver = new HiveDriver();
- driver.configure(conf);
conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
- QueryContext context = new QueryContext("USE " + TestHiveDriver.class.getSimpleName(), null, conf);
+ QueryContext context = createContext("USE " + DATA_BASE, conf);
driver.execute(context);
conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
- System.out.println("Driver created");
- Assert.assertEquals(0, driver.getHiveHandleSize());
+ }
+
+ protected void createDriver() throws LensException {
+ conf = new HiveConf();
+ conf.addResource("hivedriver-site.xml");
+ conf.setClass(HiveDriver.HIVE_CONNECTION_CLASS, EmbeddedThriftConnection.class, ThriftConnection.class);
+ conf.set("hive.lock.manager", "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
+ driver = new HiveDriver();
+ driver.configure(conf);
+ System.out.println("TestHiveDriver created");
+ }
+
+ @BeforeMethod
+ public void setDB() {
+ SessionState.get().setCurrentDatabase(DATA_BASE);
+ }
+
+ protected QueryContext createContext(String query, Configuration conf) {
+ QueryContext context = new QueryContext(query, "testuser", conf);
+ context.setLensSessionIdentifier(sessionid);
+ return context;
+ }
+
+ protected QueryContext createContext(PreparedQueryContext query, Configuration conf) {
+ QueryContext context = new QueryContext(query, "testuser", conf);
+ context.setLensSessionIdentifier(sessionid);
+ return context;
}
/**
@@ -116,7 +146,7 @@ public class TestHiveDriver {
@AfterTest
public void afterTest() throws Exception {
driver.close();
- Hive.get(conf).dropDatabase(TestHiveDriver.class.getSimpleName(), true, true, true);
+ Hive.get(conf).dropDatabase(DATA_BASE, true, true, true);
}
/**
@@ -133,18 +163,45 @@ public class TestHiveDriver {
+ LensConfConstants.STORAGE_COST + "'='500')";
conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
// Create again
- QueryContext context = new QueryContext(createTable, null, conf);
+ QueryContext context = createContext(createTable, conf);
LensResultSet resultSet = driver.execute(context);
assertNull(resultSet);
// Load some data into the table
String dataLoad = "LOAD DATA LOCAL INPATH '" + TEST_DATA_FILE + "' OVERWRITE INTO TABLE " + tableName;
- context = new QueryContext(dataLoad, null, conf);
+ context = createContext(dataLoad, conf);
resultSet = driver.execute(context);
assertNull(resultSet);
Assert.assertEquals(0, driver.getHiveHandleSize());
}
+ /**
+ * Creates a partitioned test table.
+ *
+ * @param tableName
+ * the table name
+ * @throws Exception
+ * the exception
+ */
+ protected void createPartitionedTable(String tableName) throws Exception {
+ System.out.println("Hadoop Location: " + System.getProperty("hadoop.bin.path"));
+ String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(ID STRING)" + " PARTITIONED BY (dt string) TBLPROPERTIES ('"
+ + LensConfConstants.STORAGE_COST + "'='500')";
+ conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
+ // Create again
+ QueryContext context = createContext(createTable, conf);
+ LensResultSet resultSet = driver.execute(context);
+ assertNull(resultSet);
+
+ // Load some data into the table
+ String dataLoad = "LOAD DATA LOCAL INPATH '" + TEST_DATA_FILE + "' OVERWRITE INTO TABLE " + tableName
+ + " partition (dt='today')";
+ context = createContext(dataLoad, conf);
+ resultSet = driver.execute(context);
+ assertNull(resultSet);
+ Assert.assertEquals(0, driver.getHiveHandleSize());
+ }
+
// Tests
/**
* Test insert overwrite conf.
@@ -157,7 +214,7 @@ public class TestHiveDriver {
createTestTable("test_insert_overwrite");
conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
String query = "SELECT ID FROM test_insert_overwrite";
- QueryContext context = new QueryContext(query, null, conf);
+ QueryContext context = createContext(query, conf);
driver.addPersistentPath(context);
assertEquals(context.getUserQuery(), query);
assertNotNull(context.getDriverQuery());
@@ -176,14 +233,14 @@ public class TestHiveDriver {
conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
Hive.get(conf).dropTable("test_temp_output");
String query = "CREATE TABLE test_temp_output AS SELECT ID FROM test_temp";
- QueryContext context = new QueryContext(query, null, conf);
+ QueryContext context = createContext(query, conf);
LensResultSet resultSet = driver.execute(context);
assertNull(resultSet);
Assert.assertEquals(0, driver.getHiveHandleSize());
// fetch results from temp table
String select = "SELECT * FROM test_temp_output";
- context = new QueryContext(select, null, conf);
+ context = createContext(select, conf);
resultSet = driver.execute(context);
Assert.assertEquals(0, driver.getHiveHandleSize());
validateInMemoryResult(resultSet, "test_temp_output");
@@ -203,11 +260,11 @@ public class TestHiveDriver {
// Execute a select query
conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
String select = "SELECT ID FROM test_execute";
- QueryContext context = new QueryContext(select, null, conf);
+ QueryContext context = createContext(select, conf);
resultSet = driver.execute(context);
validateInMemoryResult(resultSet);
conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
- context = new QueryContext(select, null, conf);
+ context = createContext(select, conf);
resultSet = driver.execute(context);
validatePersistentResult(resultSet, TEST_DATA_FILE, context.getHDFSResultDir(), false);
conf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
@@ -215,7 +272,7 @@ public class TestHiveDriver {
+ " WITH SERDEPROPERTIES ('serialization.null.format'='-NA-',"
+ " 'field.delim'=',' ) STORED AS TEXTFILE ");
select = "SELECT ID, null, ID FROM test_execute";
- context = new QueryContext(select, null, conf);
+ context = createContext(select, conf);
resultSet = driver.execute(context);
validatePersistentResult(resultSet, TEST_DATA_FILE, context.getHDFSResultDir(), true);
Assert.assertEquals(0, driver.getHiveHandleSize());
@@ -327,7 +384,7 @@ public class TestHiveDriver {
String expectFail = "SELECT ID FROM test_execute_sync";
conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
conf.set("hive.exec.driver.run.hooks", FailHook.class.getCanonicalName());
- QueryContext context = new QueryContext(expectFail, null, conf);
+ QueryContext context = createContext(expectFail, conf);
driver.executeAsync(context);
Assert.assertEquals(1, driver.getHiveHandleSize());
validateExecuteAsync(context, DriverQueryState.FAILED, true, false);
@@ -339,7 +396,7 @@ public class TestHiveDriver {
// Async select query
String select = "SELECT ID FROM test_execute_sync";
conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
- context = new QueryContext(select, null, conf);
+ context = createContext(select, conf);
driver.executeAsync(context);
Assert.assertEquals(1, driver.getHiveHandleSize());
validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, false, false);
@@ -347,7 +404,7 @@ public class TestHiveDriver {
Assert.assertEquals(0, driver.getHiveHandleSize());
conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
- context = new QueryContext(select, null, conf);
+ context = createContext(select, conf);
driver.executeAsync(context);
Assert.assertEquals(1, driver.getHiveHandleSize());
validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, true, false);
@@ -359,7 +416,7 @@ public class TestHiveDriver {
+ " WITH SERDEPROPERTIES ('serialization.null.format'='-NA-',"
+ " 'field.delim'=',' ) STORED AS TEXTFILE ");
select = "SELECT ID, null, ID FROM test_execute_sync";
- context = new QueryContext(select, null, conf);
+ context = createContext(select, conf);
driver.executeAsync(context);
Assert.assertEquals(1, driver.getHiveHandleSize());
validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, true, true);
@@ -433,7 +490,7 @@ public class TestHiveDriver {
public void testCancelAsyncQuery() throws Exception {
createTestTable("test_cancel_async");
conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
- QueryContext context = new QueryContext("SELECT ID FROM test_cancel_async", null, conf);
+ QueryContext context = createContext("SELECT ID FROM test_cancel_async", conf);
driver.executeAsync(context);
driver.cancelQuery(context.getQueryHandle());
driver.updateStatus(context);
@@ -525,12 +582,12 @@ public class TestHiveDriver {
conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
conf.set(LensConfConstants.RESULT_SET_PARENT_DIR, TEST_OUTPUT_DIR);
- QueryContext ctx = new QueryContext("SELECT ID FROM test_persistent_result_set", null, conf);
+ QueryContext ctx = createContext("SELECT ID FROM test_persistent_result_set", conf);
LensResultSet resultSet = driver.execute(ctx);
validatePersistentResult(resultSet, TEST_DATA_FILE, ctx.getHDFSResultDir(), false);
Assert.assertEquals(0, driver.getHiveHandleSize());
- ctx = new QueryContext("SELECT ID FROM test_persistent_result_set", null, conf);
+ ctx = createContext("SELECT ID FROM test_persistent_result_set", conf);
driver.executeAsync(ctx);
Assert.assertEquals(1, driver.getHiveHandleSize());
validateExecuteAsync(ctx, DriverQueryState.SUCCESSFUL, true, false);
@@ -541,14 +598,14 @@ public class TestHiveDriver {
"ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'"
+ " WITH SERDEPROPERTIES ('serialization.null.format'='-NA-',"
+ " 'field.delim'=',' ) STORED AS TEXTFILE ");
- ctx = new QueryContext("SELECT ID, null, ID FROM test_persistent_result_set", null, conf);
+ ctx = createContext("SELECT ID, null, ID FROM test_persistent_result_set", conf);
resultSet = driver.execute(ctx);
Assert.assertEquals(0, driver.getHiveHandleSize());
validatePersistentResult(resultSet, TEST_DATA_FILE, ctx.getHDFSResultDir(), true);
driver.closeQuery(ctx.getQueryHandle());
Assert.assertEquals(0, driver.getHiveHandleSize());
- ctx = new QueryContext("SELECT ID, null, ID FROM test_persistent_result_set", null, conf);
+ ctx = createContext("SELECT ID, null, ID FROM test_persistent_result_set", conf);
driver.executeAsync(ctx);
Assert.assertEquals(1, driver.getHiveHandleSize());
validateExecuteAsync(ctx, DriverQueryState.SUCCESSFUL, true, true);
@@ -592,21 +649,23 @@ public class TestHiveDriver {
@Test
public void testExplain() throws Exception {
createTestTable("test_explain");
+ SessionState.setCurrentSessionState(ss);
DriverQueryPlan plan = driver.explain("SELECT ID FROM test_explain", conf);
assertTrue(plan instanceof HiveQueryPlan);
- assertEquals(plan.getTableWeight("test_explain"), 500.0);
+ assertEquals(plan.getTableWeight(DATA_BASE + ".test_explain"), 500.0);
Assert.assertEquals(0, driver.getHiveHandleSize());
// test execute prepare
PreparedQueryContext pctx = new PreparedQueryContext("SELECT ID FROM test_explain", null, conf);
+ SessionState.setCurrentSessionState(ss);
plan = driver.explainAndPrepare(pctx);
- QueryContext qctx = new QueryContext(pctx, null, conf);
+ QueryContext qctx = createContext(pctx, conf);
LensResultSet result = driver.execute(qctx);
Assert.assertEquals(0, driver.getHiveHandleSize());
validateInMemoryResult(result);
// test execute prepare async
- qctx = new QueryContext(pctx, null, conf);
+ qctx = createContext(pctx, conf);
driver.executeAsync(qctx);
assertNotNull(qctx.getDriverOpHandle());
validateExecuteAsync(qctx, DriverQueryState.SUCCESSFUL, false, false);
@@ -616,14 +675,14 @@ public class TestHiveDriver {
Assert.assertEquals(0, driver.getHiveHandleSize());
// for backward compatibility
- qctx = new QueryContext(pctx, null, conf);
+ qctx = createContext(pctx, conf);
qctx.setQueryHandle(new QueryHandle(pctx.getPrepareHandle().getPrepareHandleId()));
result = driver.execute(qctx);
assertNotNull(qctx.getDriverOpHandle());
Assert.assertEquals(0, driver.getHiveHandleSize());
validateInMemoryResult(result);
// test execute prepare async
- qctx = new QueryContext(pctx, null, conf);
+ qctx = createContext(pctx, conf);
qctx.setQueryHandle(new QueryHandle(pctx.getPrepareHandle().getPrepareHandleId()));
driver.executeAsync(qctx);
Assert.assertEquals(1, driver.getHiveHandleSize());
@@ -635,6 +694,30 @@ public class TestHiveDriver {
}
/**
+ * Test explain on a partitioned table.
+ *
+ * @throws Exception
+ * the exception
+ */
+ @Test
+ public void testExplainPartitionedTable() throws Exception {
+ createPartitionedTable("test_part_table");
+ // acquire
+ SessionState.setCurrentSessionState(ss);
+ DriverQueryPlan plan = driver.explain("SELECT ID FROM test_part_table", conf);
+ Assert.assertEquals(0, driver.getHiveHandleSize());
+ assertTrue(plan instanceof HiveQueryPlan);
+ assertNotNull(plan.getTablesQueried());
+ assertEquals(plan.getTablesQueried().size(), 1);
+ System.out.println("Tables:" + plan.getTablesQueried());
+ assertEquals(plan.getTableWeight(DATA_BASE + ".test_part_table"), 500.0);
+ System.out.println("Parts:" + plan.getPartitions());
+ assertFalse(plan.getPartitions().isEmpty());
+ assertTrue(plan.getPartitions().get(DATA_BASE + ".test_part_table").get(0).contains("today"));
+ assertTrue(plan.getPartitions().get(DATA_BASE + ".test_part_table").get(0).contains("dt"));
+ }
+
+ /**
* Test explain output.
*
* @throws Exception
@@ -645,6 +728,7 @@ public class TestHiveDriver {
createTestTable("explain_test_1");
createTestTable("explain_test_2");
+ SessionState.setCurrentSessionState(ss);
DriverQueryPlan plan = driver.explain("SELECT explain_test_1.ID, count(1) FROM "
+ " explain_test_1 join explain_test_2 on explain_test_1.ID = explain_test_2.ID"
+ " WHERE explain_test_1.ID = 'foo' or explain_test_2.ID = 'bar'" + " GROUP BY explain_test_1.ID", conf);
@@ -654,8 +738,8 @@ public class TestHiveDriver {
assertNotNull(plan.getTablesQueried());
assertEquals(plan.getTablesQueried().size(), 2);
assertNotNull(plan.getTableWeights());
- assertTrue(plan.getTableWeights().containsKey("explain_test_1"));
- assertTrue(plan.getTableWeights().containsKey("explain_test_2"));
+ assertTrue(plan.getTableWeights().containsKey(DATA_BASE + ".explain_test_1"));
+ assertTrue(plan.getTableWeights().containsKey(DATA_BASE + ".explain_test_2"));
assertEquals(plan.getNumJoins(), 1);
assertTrue(plan.getPlan() != null && !plan.getPlan().isEmpty());
driver.closeQuery(plan.getHandle());
@@ -671,6 +755,7 @@ public class TestHiveDriver {
public void testExplainOutputPersistent() throws Exception {
createTestTable("explain_test_1");
conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
+ SessionState.setCurrentSessionState(ss);
String query2 = "SELECT DISTINCT ID FROM explain_test_1";
PreparedQueryContext pctx = new PreparedQueryContext(query2, null, conf);
DriverQueryPlan plan2 = driver.explainAndPrepare(pctx);
@@ -678,9 +763,9 @@ public class TestHiveDriver {
Assert.assertEquals(0, driver.getHiveHandleSize());
assertNotNull(plan2.getTablesQueried());
assertEquals(plan2.getTablesQueried().size(), 1);
- assertTrue(plan2.getTableWeights().containsKey("explain_test_1"));
+ assertTrue(plan2.getTableWeights().containsKey(DATA_BASE + ".explain_test_1"));
assertEquals(plan2.getNumSels(), 1);
- QueryContext ctx = new QueryContext(pctx, null, conf);
+ QueryContext ctx = createContext(pctx, conf);
LensResultSet resultSet = driver.execute(ctx);
Assert.assertEquals(0, driver.getHiveHandleSize());
HivePersistentResultSet persistentResultSet = (HivePersistentResultSet) resultSet;
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/79a9a373/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
index 8f792f6..340e00a 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
@@ -30,8 +30,6 @@ import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.server.HiveServer2;
import org.apache.lens.api.LensException;
@@ -46,9 +44,7 @@ import org.apache.lens.server.api.driver.DriverQueryStatus.DriverQueryState;
import org.apache.lens.server.api.query.QueryContext;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
-import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/**
@@ -80,14 +76,6 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
@BeforeClass
public static void setupTest() throws Exception {
createHS2Service();
-
- SessionState ss = new SessionState(remoteConf, "testuser");
- SessionState.start(ss);
- Hive client = Hive.get(remoteConf);
- Database database = new Database();
- database.setName(TestRemoteHiveDriver.class.getSimpleName());
- client.createDatabase(database, true);
- SessionState.get().setCurrentDatabase(TestRemoteHiveDriver.class.getSimpleName());
}
/**
@@ -123,7 +111,6 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
@AfterClass
public static void cleanupTest() throws Exception {
stopHS2Service();
- Hive.get(remoteConf).dropDatabase(TestRemoteHiveDriver.class.getSimpleName(), true, true, true);
}
/**
@@ -140,39 +127,13 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
}
}
- /*
- * (non-Javadoc)
- *
- * @see org.apache.lens.driver.hive.TestHiveDriver#beforeTest()
- */
- @BeforeMethod
- @Override
- public void beforeTest() throws Exception {
+ protected void createDriver() throws LensException {
+ DATA_BASE = TestRemoteHiveDriver.class.getSimpleName().toLowerCase();
conf = new HiveConf(remoteConf);
conf.addResource("hivedriver-site.xml");
- // Check if hadoop property set
- System.out.println("###HADOOP_PATH " + System.getProperty("hadoop.bin.path"));
- Assert.assertNotNull(System.getProperty("hadoop.bin.path"));
driver = new HiveDriver();
driver.configure(conf);
- conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
- conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
- driver.execute(new QueryContext("USE " + TestRemoteHiveDriver.class.getSimpleName(), null, conf));
- conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
- conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
- Assert.assertEquals(0, driver.getHiveHandleSize());
- }
-
- /*
- * (non-Javadoc)
- *
- * @see org.apache.lens.driver.hive.TestHiveDriver#afterTest()
- */
- @AfterMethod
- @Override
- public void afterTest() throws Exception {
- LOG.info("Test finished, closing driver");
- driver.close();
+ System.out.println("TestRemoteHiveDriver created");
}
/**
@@ -190,7 +151,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
thConf.setLong(HiveDriver.HS2_CONNECTION_EXPIRY_DELAY, 10000);
final HiveDriver thrDriver = new HiveDriver();
thrDriver.configure(thConf);
- QueryContext ctx = new QueryContext("USE " + TestRemoteHiveDriver.class.getSimpleName(), null, conf);
+ QueryContext ctx = createContext("USE " + DATA_BASE, conf);
thrDriver.execute(ctx);
// Launch a select query
@@ -203,7 +164,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
for (int q = 0; q < QUERIES; q++) {
final QueryContext qctx;
try {
- qctx = new QueryContext("SELECT * FROM test_multithreads", null, conf);
+ qctx = createContext("SELECT * FROM test_multithreads", conf);
thrDriver.executeAsync(qctx);
} catch (LensException e) {
errCount.incrementAndGet();
@@ -270,7 +231,8 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
@Test
public void testHiveDriverPersistence() throws Exception {
System.out.println("@@@@ start_persistence_test");
- HiveConf driverConf = new HiveConf(conf, TestRemoteHiveDriver.class);
+ HiveConf driverConf = new HiveConf(remoteConf, TestRemoteHiveDriver.class);
+ driverConf.addResource("hivedriver-site.xml");
driverConf.setLong(HiveDriver.HS2_CONNECTION_EXPIRY_DELAY, 10000);
final HiveDriver oldDriver = new HiveDriver();
@@ -278,7 +240,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
driverConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
- QueryContext ctx = new QueryContext("USE " + TestRemoteHiveDriver.class.getSimpleName(), null, driverConf);
+ QueryContext ctx = createContext("USE " + DATA_BASE, driverConf);
oldDriver.execute(ctx);
Assert.assertEquals(0, oldDriver.getHiveHandleSize());
@@ -286,20 +248,20 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
// Create some ops with a driver
String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(ID STRING)";
- ctx = new QueryContext(createTable, null, driverConf);
+ ctx = createContext(createTable, driverConf);
oldDriver.execute(ctx);
// Load some data into the table
String dataLoad = "LOAD DATA LOCAL INPATH '" + TEST_DATA_FILE + "' OVERWRITE INTO TABLE " + tableName;
- ctx = new QueryContext(dataLoad, null, driverConf);
+ ctx = createContext(dataLoad, driverConf);
oldDriver.execute(ctx);
driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
driverConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
// Fire two queries
- QueryContext ctx1 = new QueryContext("SELECT * FROM " + tableName, null, driverConf);
+ QueryContext ctx1 = createContext("SELECT * FROM " + tableName, driverConf);
oldDriver.executeAsync(ctx1);
- QueryContext ctx2 = new QueryContext("SELECT ID FROM " + tableName, null, driverConf);
+ QueryContext ctx2 = createContext("SELECT ID FROM " + tableName, driverConf);
oldDriver.executeAsync(ctx2);
Assert.assertEquals(2, oldDriver.getHiveHandleSize());
@@ -404,8 +366,8 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
- QueryContext ctx = new QueryContext("CREATE EXTERNAL TABLE IF NOT EXISTS " + tableName
- + " (ID STRING) PARTITIONED BY (DT STRING, ET STRING)", null, conf);
+ QueryContext ctx = createContext("CREATE EXTERNAL TABLE IF NOT EXISTS " + tableName
+ + " (ID STRING) PARTITIONED BY (DT STRING, ET STRING)", conf);
driver.execute(ctx);
Assert.assertEquals(0, driver.getHiveHandleSize());
@@ -426,8 +388,8 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
FileUtils.writeLines(data, Arrays.asList("one", "two", "three", "four", "five"));
System.out.println("@@ Adding partition " + i);
- QueryContext partCtx = new QueryContext("ALTER TABLE " + tableName + " ADD IF NOT EXISTS PARTITION (DT='p" + i
- + "', ET='1') LOCATION '" + partDir.getPath() + "'", null, conf);
+ QueryContext partCtx = createContext("ALTER TABLE " + tableName + " ADD IF NOT EXISTS PARTITION (DT='p" + i
+ + "', ET='1') LOCATION '" + partDir.getPath() + "'", conf);
driver.execute(partCtx);
}
}
@@ -450,6 +412,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
+ "WHERE table_1.DT='p0' OR table_1.DT='p1' OR table_1.DT='p2' OR table_1.DT='p3' OR table_1.DT='p4' "
+ "AND table_1.ET='1'";
+ SessionState.setCurrentSessionState(ss);
DriverQueryPlan plan = driver.explain(explainQuery, conf);
Assert.assertEquals(0, driver.getHiveHandleSize());
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/79a9a373/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
index 3533146..b5088f8 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
@@ -22,6 +22,7 @@ import java.util.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -30,7 +31,6 @@ import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.lens.api.LensException;
-import org.apache.lens.cube.metadata.CubeMetastoreClient;
import org.apache.lens.cube.metadata.MetastoreConstants;
import org.apache.lens.cube.parse.HQLParser;
import org.apache.lens.driver.jdbc.ColumnarSQLRewriter;
@@ -593,8 +593,11 @@ public class TestColumnarSQLRewriter {
SessionState.start(conf);
// Create test table
- createTable("default", "mytable", "testDB", "testTable_1");
- createTable("default", "mytable_2", "testDB", "testTable_2");
+ Database database = new Database();
+ database.setName("mydb");
+ Hive.get().createDatabase(database);
+ createTable("mydb", "mytable", "testDB", "testTable_1");
+ createTable("mydb", "mytable_2", "testDB", "testTable_2");
createTable("default", "mytable_3", "testDB", "testTable_3");
String query = "SELECT * FROM mydb.mytable t1 JOIN mydb.mytable_2 t2 ON t1.t2id = t2.id "
@@ -609,7 +612,6 @@ public class TestColumnarSQLRewriter {
System.out.println(joinTreeBeforeRewrite);
// Rewrite
- CubeMetastoreClient client = CubeMetastoreClient.getInstance(conf);
rewriter.replaceWithUnderlyingStorage(rewriter.fromAST);
String joinTreeAfterRewrite = HQLParser.getString(rewriter.fromAST);
System.out.println(joinTreeAfterRewrite);
@@ -628,7 +630,7 @@ public class TestColumnarSQLRewriter {
&& joinTreeAfterRewrite.contains("testtable_3"));
// Rewrite one more query where table and db name is not set
- createTable("default", "mytable_4", null, null);
+ createTable("mydb", "mytable_4", null, null);
String query2 = "SELECT * FROM mydb.mytable_4 WHERE a = 100";
rewriter = new ColumnarSQLRewriter();
rewriter.ast = HQLParser.parseHQL(query2);
@@ -647,8 +649,11 @@ public class TestColumnarSQLRewriter {
assertEquals(joinTreeAfterRewrite, joinTreeBeforeRewrite);
// Test a query with default db
- Hive.get().dropTable("default", "mytable");
- createTable("default", "mytable", "default", null);
+ Hive.get().dropTable("mydb", "mytable");
+ database = new Database();
+ database.setName("examples");
+ Hive.get().createDatabase(database);
+ createTable("examples", "mytable", "default", null);
String defaultQuery = "SELECT * FROM examples.mytable t1 WHERE A = 100";
rewriter = new ColumnarSQLRewriter();
@@ -662,10 +667,10 @@ public class TestColumnarSQLRewriter {
assertFalse(joinTreeAfterRewrite.contains("examples"), joinTreeAfterRewrite);
System.out.println("default case: " + joinTreeAfterRewrite);
- Hive.get().dropTable("default", "mytable");
- Hive.get().dropTable("default", "mytable_2");
+ Hive.get().dropTable("examples", "mytable");
+ Hive.get().dropTable("mydb", "mytable_2");
Hive.get().dropTable("default", "mytable_3");
- Hive.get().dropTable("default", "mytable_4");
+ Hive.get().dropTable("mydb", "mytable_4");
}
/**