Posted to commits@hive.apache.org by pr...@apache.org on 2014/10/14 21:07:05 UTC

svn commit: r1631841 [5/42] - in /hive/branches/llap: ./ accumulo-handler/ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/ accumulo-handler/src/java/org/apache/hadoop/hive...

Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java Tue Oct 14 19:06:45 2014
@@ -33,10 +33,13 @@ import java.util.List;
 
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.commons.lang3.tuple.Pair;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
 import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator;
@@ -52,10 +55,12 @@ import org.mockito.Mockito;
  * Test HiveAuthorizer api invocation
  */
 public class TestHiveAuthorizerCheckInvocation {
+  private final Log LOG = LogFactory.getLog(this.getClass().getName());
   protected static HiveConf conf;
   protected static Driver driver;
   private static final String tableName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
       + "Table";
+  private static final String acidTableName = tableName + "_acid";
   private static final String dbName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
       + "Db";
   static HiveAuthorizer mockedAuthorizer;
@@ -82,14 +87,18 @@ public class TestHiveAuthorizerCheckInvo
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
     conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
     conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
+    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, true);
+    conf.setVar(ConfVars.HIVE_TXN_MANAGER, DbTxnManager.class.getName());
 
     SessionState.start(conf);
     driver = new Driver(conf);
     runCmd("create table " + tableName
         + " (i int, j int, k string) partitioned by (city string, date string) ");
     runCmd("create database " + dbName);
+    // Need a separate table for ACID testing since it has to be bucketed and
+    // stored as ORC with transactional=true. It also carries the k and l
+    // columns that the update tests below reference.
+    runCmd("create table " + acidTableName + " (i int, j int, k int, l int) " +
+        "clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')");
   }
 
   private static void runCmd(String cmd) throws CommandNeedRetryException {
@@ -99,6 +108,10 @@ public class TestHiveAuthorizerCheckInvo
 
   @AfterClass
   public static void afterTests() throws Exception {
+    // Drop the tables when we're done, so repeated runs inside an IDE start clean.
+    runCmd("drop table if exists " + acidTableName);
+    runCmd("drop table if exists " + tableName);
+    runCmd("drop database if exists " + dbName);
     driver.close();
   }
 
@@ -244,6 +257,63 @@ public class TestHiveAuthorizerCheckInvo
     assertEquals("db name", null, funcObj.getDbname());
   }
 
+  @Test
+  public void testUpdateSomeColumnsUsed() throws HiveAuthzPluginException,
+      HiveAccessControlException, CommandNeedRetryException {
+    reset(mockedAuthorizer);
+    int status = driver.compile("update " + acidTableName + " set i = 5 where j = 3");
+    assertEquals(0, status);
+
+    Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
+    List<HivePrivilegeObject> outputs = io.getRight();
+    HivePrivilegeObject tableObj = outputs.get(0);
+    LOG.debug("Got privilege object " + tableObj);
+    assertEquals("no of columns used", 1, tableObj.getColumns().size());
+    assertEquals("Column used", "i", tableObj.getColumns().get(0));
+    List<HivePrivilegeObject> inputs = io.getLeft();
+    assertEquals(1, inputs.size());
+    tableObj = inputs.get(0);
+    assertEquals(1, tableObj.getColumns().size());
+    assertEquals("j", tableObj.getColumns().get(0));
+  }
+
+  @Test
+  public void testUpdateSomeColumnsUsedExprInSet() throws HiveAuthzPluginException,
+      HiveAccessControlException, CommandNeedRetryException {
+    reset(mockedAuthorizer);
+    int status = driver.compile("update " + acidTableName + " set i = 5, l = k where j = 3");
+    assertEquals(0, status);
+
+    Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
+    List<HivePrivilegeObject> outputs = io.getRight();
+    HivePrivilegeObject tableObj = outputs.get(0);
+    LOG.debug("Got privilege object " + tableObj);
+    assertEquals("no of columns used", 2, tableObj.getColumns().size());
+    assertEquals("Columns used", Arrays.asList("i", "l"),
+        getSortedList(tableObj.getColumns()));
+    List<HivePrivilegeObject> inputs = io.getLeft();
+    assertEquals(1, inputs.size());
+    tableObj = inputs.get(0);
+    assertEquals(2, tableObj.getColumns().size());
+    assertEquals("Columns used", Arrays.asList("j", "k"),
+        getSortedList(tableObj.getColumns()));
+  }
+
+  @Test
+  public void testDelete() throws HiveAuthzPluginException,
+      HiveAccessControlException, CommandNeedRetryException {
+    reset(mockedAuthorizer);
+    int status = driver.compile("delete from " + acidTableName + " where j = 3");
+    assertEquals(0, status);
+
+    Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
+    List<HivePrivilegeObject> inputs = io.getLeft();
+    assertEquals(1, inputs.size());
+    HivePrivilegeObject tableObj = inputs.get(0);
+    assertEquals(1, tableObj.getColumns().size());
+    assertEquals("j", tableObj.getColumns().get(0));
+  }
+
   private void checkSingleTableInput(List<HivePrivilegeObject> inputs) {
     assertEquals("number of inputs", 1, inputs.size());
 

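The update/delete tests above recover the inputs and outputs handed to the mocked authorizer through getHivePrivilegeObjectInputs(). A minimal sketch of how such a helper can capture them with Mockito ArgumentCaptors (the checkPrivileges signature and the ArgumentCaptor import are assumptions based on this plugin API; the real helper may differ):

    @SuppressWarnings("unchecked")
    private Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>>
        getHivePrivilegeObjectInputs() throws HiveAuthzPluginException, HiveAccessControlException {
      // Capture the input/output privilege-object lists passed to the mock.
      ArgumentCaptor<List> inputsCapturer = ArgumentCaptor.forClass(List.class);
      ArgumentCaptor<List> outputsCapturer = ArgumentCaptor.forClass(List.class);
      Mockito.verify(mockedAuthorizer).checkPrivileges(Mockito.any(HiveOperationType.class),
          inputsCapturer.capture(), outputsCapturer.capture(),
          Mockito.any(HiveAuthzContext.class));
      return new ImmutablePair<List<HivePrivilegeObject>, List<HivePrivilegeObject>>(
          inputsCapturer.getValue(), outputsCapturer.getValue());
    }
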
Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java Tue Oct 14 19:06:45 2014
@@ -230,8 +230,9 @@ public class TestCompactor {
     t.setThreadId((int) t.getId());
     t.setHiveConf(conf);
     MetaStoreThread.BooleanPointer stop = new MetaStoreThread.BooleanPointer();
+    MetaStoreThread.BooleanPointer looped = new MetaStoreThread.BooleanPointer();
     stop.boolVal = true;
-    t.init(stop);
+    t.init(stop, looped);
     t.run();
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
     List<ShowCompactResponseElement> compacts = rsp.getCompacts();

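The second BooleanPointer gives callers a way to observe that the worker has completed a full loop. This test drives run() directly, but a test that starts the thread could wait on the flag; a sketch (the polling loop is illustrative, not from this commit):

    MetaStoreThread.BooleanPointer stop = new MetaStoreThread.BooleanPointer();
    MetaStoreThread.BooleanPointer looped = new MetaStoreThread.BooleanPointer();
    t.init(stop, looped);
    t.start();
    // Block until the compactor signals it has finished one full cycle.
    while (!looped.boolVal) {
      Thread.sleep(100);
    }
    stop.boolVal = true;  // then ask the thread to exit
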
Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java Tue Oct 14 19:06:45 2014
@@ -37,7 +37,7 @@ public class TestDBTokenStore extends Te
   public void testDBTokenStore() throws TokenStoreException, MetaException, IOException {
 
     DelegationTokenStore ts = new DBTokenStore();
-    ts.setStore(new HMSHandler("Test handler"));
+    ts.setStore(new HMSHandler("Test handler").getMS());
     assertEquals(0, ts.getMasterKeys().length);
     assertEquals(false,ts.removeMasterKey(-1));
     try{

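The token store is now wired to the metastore's underlying object store (HMSHandler.getMS()) rather than the handler itself. A sketch of the resulting round trip, assuming addMasterKey(String) on the DelegationTokenStore interface returns the new key's sequence number:

    DelegationTokenStore ts = new DBTokenStore();
    ts.setStore(new HMSHandler("Test handler").getMS());
    assertEquals(0, ts.getMasterKeys().length);
    int keySeq = ts.addMasterKey("master-key-bytes");  // assumed signature
    assertEquals(1, ts.getMasterKeys().length);
    assertTrue(ts.removeMasterKey(keySeq));
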
Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java Tue Oct 14 19:06:45 2014
@@ -210,7 +210,7 @@ public class TestBeeLineWithArgs {
     }
     scriptFile.delete();
   }
-  
+
   /**
    * Test that BeeLine will read comment lines that start with whitespace
    * @throws Throwable
@@ -477,4 +477,31 @@ public class TestBeeLineWithArgs {
     final String EXPECTED_PATTERN = "embedded_table";
     testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
   }
+
+  /**
+   * Test Beeline could show the query progress for time-consuming query.
+   * @throws Throwable
+   */
+  @Test
+  public void testQueryProgress() throws Throwable {
+    final String TEST_NAME = "testQueryProgress";
+    final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
+        "select count(*) from " + tableName + ";\n";
+    final String EXPECTED_PATTERN = "Parsing command";
+    testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(JDBC_URL));
+  }
+
+  /**
+   * Test Beeline will hide the query progress when silent option is set.
+   * @throws Throwable
+   */
+  @Test
+  public void testQueryProgressHidden() throws Throwable {
+    final String TEST_NAME = "testQueryProgressHidden";
+    final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
+        "!set silent true\n" +
+        "select count(*) from " + tableName + ";\n";
+    final String EXPECTED_PATTERN = "Parsing command";
+    testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, false, getBaseArgs(JDBC_URL));
+  }
 }

Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java Tue Oct 14 19:06:45 2014
@@ -262,10 +262,9 @@ public class TestJdbcDriver2 {
   private void checkBadUrl(String url) throws SQLException {
     try{
       DriverManager.getConnection(url, "", "");
-      fail("should have thrown IllegalArgumentException but did not ");
-    } catch(SQLException i) {
-      assertTrue(i.getMessage().contains("Bad URL format. Hostname not found "
-          + " in authority part of the url"));
+      fail("Should have thrown JdbcUriParseException but did not ");
+    } catch(JdbcUriParseException e) {
+      assertTrue(e.getMessage().contains("Bad URL format"));
     }
   }
 
@@ -736,7 +735,7 @@ public class TestJdbcDriver2 {
     assertTrue(res.next());
     // skip the last (partitioning) column since it is always non-null
     for (int i = 1; i < meta.getColumnCount(); i++) {
-      assertNull(res.getObject(i));
+      assertNull("Column " + i + " should be null", res.getObject(i));
     }
     // getXXX returns 0 for numeric types, false for boolean and null for other
     assertEquals(0, res.getInt(1));
@@ -1319,6 +1318,42 @@ public class TestJdbcDriver2 {
   }
 
   @Test
+  public void testResultSetColumnNameCaseInsensitive() throws SQLException {
+    Statement stmt = con.createStatement();
+    ResultSet res;
+
+    res = stmt.executeQuery("select c1 from " + dataTypeTableName + " limit 1");
+    try {
+      int count = 0;
+      while (res.next()) {
+        res.findColumn("c1");
+        res.findColumn("C1");
+        count++;
+      }
+      assertEquals(1, count);
+    } catch (Exception e) {
+      String msg = "Unexpected exception: " + e;
+      LOG.info(msg, e);
+      fail(msg);
+    }
+
+    res = stmt.executeQuery("select c1 C1 from " + dataTypeTableName + " limit 1");
+    try {
+      int count = 0;
+      while (res.next()) {
+        res.findColumn("c1");
+        res.findColumn("C1");
+        count++;
+      }
+      assertEquals(1, count);
+    } catch (Exception e) {
+      String msg = "Unexpected exception: " + e;
+      LOG.info(msg, e);
+      fail(msg);
+    }
+  }
+
+  @Test
   public void testResultSetMetaData() throws SQLException {
     Statement stmt = con.createStatement();
 
@@ -1618,6 +1653,10 @@ public class TestJdbcDriver2 {
   // [url] [host] [port] [db]
   private static final String[][] URL_PROPERTIES = new String[][] {
     // binary mode
+    // For embedded mode, the JDBC uri is of the form:
+    // jdbc:hive2:///dbName;sess_var_list?hive_conf_list#hive_var_list
+    // and does not contain host:port string.
+    // As a result port is parsed to '-1' per the Java URI conventions
     {"jdbc:hive2://", "", "", "default"},
     {"jdbc:hive2://localhost:10001/default", "localhost", "10001", "default"},
     {"jdbc:hive2://localhost/notdefault", "localhost", "10000", "notdefault"},
@@ -1643,28 +1682,24 @@ public class TestJdbcDriver2 {
   }
 
   private static final String[][] HTTP_URL_PROPERTIES = new String[][] {
-    {"jdbc:hive2://server:10002/db;" +
-        "user=foo;password=bar?" +
-        "hive.server2.transport.mode=http;" +
-        "hive.server2.thrift.http.path=hs2", "server", "10002", "db", "http", "hs2"},
-        {"jdbc:hive2://server:10000/testdb;" +
-            "user=foo;password=bar?" +
-            "hive.server2.transport.mode=binary;" +
-            "hive.server2.thrift.http.path=", "server", "10000", "testdb", "binary", ""},
-  };
-
-  @Test
-  public void testParseUrlHttpMode() throws SQLException {
-    new HiveDriver();
-    for (String[] testValues : HTTP_URL_PROPERTIES) {
-      JdbcConnectionParams params = Utils.parseURL(testValues[0]);
-      assertEquals(params.getHost(), testValues[1]);
-      assertEquals(params.getPort(), Integer.parseInt(testValues[2]));
-      assertEquals(params.getDbName(), testValues[3]);
-      assertEquals(params.getHiveConfs().get("hive.server2.transport.mode"), testValues[4]);
-      assertEquals(params.getHiveConfs().get("hive.server2.thrift.http.path"), testValues[5]);
-    }
+    {"jdbc:hive2://server:10002/db;user=foo;password=bar;transportMode=http;httpPath=hs2",
+        "server", "10002", "db", "http", "hs2"},
+    {"jdbc:hive2://server:10000/testdb;user=foo;password=bar;transportMode=binary;httpPath=",
+        "server", "10000", "testdb", "binary", ""},
+  };
+
+  @Test
+  public void testParseUrlHttpMode() throws SQLException, JdbcUriParseException,
+      ZooKeeperHiveClientException {
+    new HiveDriver();
+    for (String[] testValues : HTTP_URL_PROPERTIES) {
+      JdbcConnectionParams params = Utils.parseURL(testValues[0]);
+      assertEquals(params.getHost(), testValues[1]);
+      assertEquals(params.getPort(), Integer.parseInt(testValues[2]));
+      assertEquals(params.getDbName(), testValues[3]);
+      assertEquals(params.getSessionVars().get("transportMode"), testValues[4]);
+      assertEquals(params.getSessionVars().get("httpPath"), testValues[5]);
+    }
+  }
 
   private static void assertDpi(DriverPropertyInfo dpi, String name,
       String value) {
@@ -2126,4 +2161,82 @@ public class TestJdbcDriver2 {
     }
     stmt.close();
   }
+
+  /**
+   * Test getting query log method in Jdbc
+   * @throws Exception
+   */
+  @Test
+  public void testGetQueryLog() throws Exception {
+    // Prepare
+    String[] expectedLogs = {
+        "Parsing command",
+        "Parse Completed",
+        "Starting Semantic Analysis",
+        "Semantic Analysis Completed",
+        "Starting command"
+    };
+    String sql = "select count(*) from " + tableName;
+
+    // Verify the fetched log (from the beginning of log file)
+    HiveStatement stmt = (HiveStatement)con.createStatement();
+    assertNotNull("Statement is null", stmt);
+    stmt.executeQuery(sql);
+    List<String> logs = stmt.getQueryLog(false, 10000);
+    stmt.close();
+    verifyFetchedLog(logs, expectedLogs);
+
+    // Verify the fetched log (incrementally)
+    final HiveStatement statement = (HiveStatement)con.createStatement();
+    assertNotNull("Statement is null", statement);
+    statement.setFetchSize(10000);
+    final List<String> incrementalLogs = new ArrayList<String>();
+
+    Runnable logThread = new Runnable() {
+      @Override
+      public void run() {
+        while (statement.hasMoreLogs()) {
+          try {
+            incrementalLogs.addAll(statement.getQueryLog());
+            Thread.sleep(500);
+          } catch (SQLException e) {
+            LOG.error("Failed getQueryLog. Error message: " + e.getMessage());
+            fail("error in getting log thread");
+          } catch (InterruptedException e) {
+            LOG.error("Getting log thread is interrupted. Error message: " + e.getMessage());
+            fail("error in getting log thread");
+          }
+        }
+      }
+    };
+
+    Thread thread = new Thread(logThread);
+    thread.setDaemon(true);
+    thread.start();
+    statement.executeQuery(sql);
+    thread.interrupt();
+    thread.join(10000);
+    // fetch remaining logs
+    List<String> remainingLogs;
+    do {
+      remainingLogs = statement.getQueryLog();
+      incrementalLogs.addAll(remainingLogs);
+    } while (remainingLogs.size() > 0);
+    statement.close();
+
+    verifyFetchedLog(incrementalLogs, expectedLogs);
+  }
+
+  private void verifyFetchedLog(List<String> logs, String[] expectedLogs) {
+    StringBuilder stringBuilder = new StringBuilder();
+
+    for (String log : logs) {
+      stringBuilder.append(log);
+    }
+
+    String accumulatedLogs = stringBuilder.toString();
+    for (String expectedLog : expectedLogs) {
+      assertTrue(accumulatedLogs.contains(expectedLog));
+    }
+  }
 }

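Outside the test, the new log API is simple to drive from client code. A minimal sketch, assuming an enclosing method that throws Exception and a hypothetical table t:

    final HiveStatement stmt = (HiveStatement) con.createStatement();
    Thread query = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          stmt.executeQuery("select count(*) from t");  // t is hypothetical
        } catch (SQLException e) {
          e.printStackTrace();
        }
      }
    });
    query.start();
    // Drain the operation log while the query runs.
    while (stmt.hasMoreLogs()) {
      for (String line : stmt.getQueryLog()) {
        System.out.println(line);
      }
      Thread.sleep(500);
    }
    query.join();
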
Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java Tue Oct 14 19:06:45 2014
@@ -388,7 +388,7 @@ public class TestJdbcWithMiniHS2 {
   }
 
   /**
-   * Tests the creation of the root hdfs scratch dir, which should be writable by all (777).
+   * Tests the creation of the root hdfs scratch dir, which should be writable by all.
    *
    * @throws Exception
    */
@@ -410,7 +410,7 @@ public class TestJdbcWithMiniHS2 {
     hs2Conn = getConnection(miniHS2.getJdbcURL(), userName, "password");
     // FS
     FileSystem fs = miniHS2.getLocalFS();
-    FsPermission expectedFSPermission = new FsPermission("777");
+    FsPermission expectedFSPermission = new FsPermission((short)00733);
     // Verify scratch dir paths and permission
     // HDFS scratch dir
     scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR));

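The expected permission switches from the symbolic string "777" to an octal literal. Decoding it:

    // 00733 octal = rwx for the owner, -wx for group and other: everyone may
    // write into the scratch dir, but only the owner can list or read it.
    FsPermission expected = new FsPermission((short) 00733);
    System.out.println(expected);  // prints rwx-wx-wx
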
Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java Tue Oct 14 19:06:45 2014
@@ -120,9 +120,10 @@ public class TestHS2AuthzContext {
     HiveAuthzContext context = contextCapturer.getValue();
 
     assertEquals("Command ", ctxCmd, context.getCommandString());
-    assertTrue("ip address pattern check", context.getIpAddress().contains("."));
+    assertTrue("ip address pattern check", context.getIpAddress().matches("[.:a-fA-F0-9]+"));
     // ip address size check - crude, but better than just non-empty
     assertTrue("ip address size check", context.getIpAddress().length() > 7);
   }
 
   private Connection getConnection(String userName) throws SQLException {

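The old assertion (IP address contains a '.') broke for IPv6; the new character class accepts both literal forms. For instance:

    String pattern = "[.:a-fA-F0-9]+";
    assert "127.0.0.1".matches(pattern);        // IPv4
    assert "0:0:0:0:0:0:0:1".matches(pattern);  // IPv6 loopback
    assert !"not-an-address".matches(pattern);  // letters outside a-f rejected
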
Modified: hive/branches/llap/itests/src/test/resources/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/src/test/resources/testconfiguration.properties?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/src/test/resources/testconfiguration.properties (original)
+++ hive/branches/llap/itests/src/test/resources/testconfiguration.properties Tue Oct 14 19:06:45 2014
@@ -55,6 +55,7 @@ minitez.query.files.shared=alter_merge_2
   bucket2.q,\
   bucket3.q,\
   bucket4.q,\
+  cbo_correctness.q,\
   correlationoptimizer1.q,\
   count.q,\
   create_merge_compressed.q,\
@@ -74,6 +75,7 @@ minitez.query.files.shared=alter_merge_2
   disable_merge_for_bucketing.q,\
   dynpart_sort_opt_vectorization.q,\
   dynpart_sort_optimization.q,\
+  dynpart_sort_optimization2.q,\
   enforce_order.q,\
   filter_join_breaktask.q,\
   filter_join_breaktask2.q,\
@@ -116,6 +118,7 @@ minitez.query.files.shared=alter_merge_2
   orc_merge7.q,\
   orc_merge_incompat1.q,\
   orc_merge_incompat2.q,\
+  orc_vectorization_ppd.q,\
   parallel.q,\
   ptf.q,\
   sample1.q,\
@@ -152,23 +155,74 @@ minitez.query.files.shared=alter_merge_2
   update_where_non_partitioned.q,\
   update_where_partitioned.q,\
   update_two_cols.q,\
+  vector_between_in.q,\
   vector_cast_constant.q,\
+  vector_char_4.q,\
+  vector_char_simple.q,\
+  vector_count_distinct.q,\
   vector_data_types.q,\
   vector_decimal_aggregate.q,\
+  vector_distinct_2.q,\
+  vector_elt.q,\
+  vector_groupby_3.q,\
   vector_left_outer_join.q,\
+  vector_mapjoin_reduce.q,\
+  vector_non_string_partition.q,\
+  vector_orderby_5.q,\
+  vector_partitioned_date_time.q,\
   vector_string_concat.q,\
+  vector_varchar_4.q,\
+  vector_varchar_simple.q,\
+  vectorization_0.q,\
+  vectorization_1.q,\
+  vectorization_10.q,\
+  vectorization_11.q,\
   vectorization_12.q,\
   vectorization_13.q,\
   vectorization_14.q,\
   vectorization_15.q,\
+  vectorization_16.q,\
+  vectorization_2.q,\
+  vectorization_3.q,\
+  vectorization_4.q,\
+  vectorization_5.q,\
+  vectorization_6.q,\
   vectorization_9.q,\
+  vectorization_decimal_date.q,\
+  vectorization_div0.q,\
+  vectorization_nested_udf.q,\
+  vectorization_not.q,\
+  vectorization_part.q,\
   vectorization_part_project.q,\
+  vectorization_pushdown.q,\
   vectorization_short_regress.q,\
+  vectorized_bucketmapjoin1.q,\
+  vectorized_case.q,\
+  vectorized_context.q,\
   vectorized_mapjoin.q,\
+  vectorized_math_funcs.q,\
   vectorized_nested_mapjoin.q,\
+  vectorized_parquet.q,\
   vectorized_ptf.q,\
+  vectorized_rcfile_columnar.q,\
   vectorized_shufflejoin.q,\
-  vectorized_timestamp_funcs.q
+  vectorized_string_funcs.q,\
+  vectorized_timestamp_funcs.q,\
+  auto_sortmerge_join_1.q,\
+  auto_sortmerge_join_10.q,\
+  auto_sortmerge_join_11.q,\
+  auto_sortmerge_join_12.q,\
+  auto_sortmerge_join_13.q,\
+  auto_sortmerge_join_14.q,\
+  auto_sortmerge_join_15.q,\
+  auto_sortmerge_join_16.q,\
+  auto_sortmerge_join_2.q,\
+  auto_sortmerge_join_3.q,\
+  auto_sortmerge_join_4.q,\
+  auto_sortmerge_join_5.q,\
+  auto_sortmerge_join_7.q,\
+  auto_sortmerge_join_8.q,\
+  auto_sortmerge_join_9.q
 
 minitez.query.files=bucket_map_join_tez1.q,\
   bucket_map_join_tez2.q,\
@@ -185,7 +239,11 @@ minitez.query.files=bucket_map_join_tez1
   tez_joins_explain.q,\
   tez_schema_evolution.q,\
   tez_union.q,\
-  tez_union_decimal.q
+  tez_union_decimal.q,\
+  tez_union_group_by.q,\
+  tez_smb_main.q,\
+  tez_smb_1.q,\
+  vectorized_dynamic_partition_pruning.q
 
 beeline.positive.exclude=add_part_exist.q,\
   alter1.q,\
@@ -341,6 +399,7 @@ beeline.positive.exclude=add_part_exist.
 
 minimr.query.negative.files=cluster_tasklog_retrieval.q,\
   file_with_header_footer_negative.q,\
+  local_mapred_error_cache.q,\
   mapreduce_stack_trace.q,\
   mapreduce_stack_trace_hadoop20.q,\
   mapreduce_stack_trace_turnoff.q,\

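These lists are ordinary java.util.Properties values; the trailing backslashes are line continuations that the loader folds into one comma-separated string. A hypothetical sketch of consuming one list:

    Properties props = new Properties();
    props.load(new FileInputStream(
        "itests/src/test/resources/testconfiguration.properties"));
    // Continuation lines are folded, so the value splits cleanly on commas.
    String[] qFiles = props.getProperty("minitez.query.files.shared").split(",");
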
Modified: hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java Tue Oct 14 19:06:45 2014
@@ -103,6 +103,7 @@ public class QTestUtil {
 
   public static final String UTF_8 = "UTF-8";
   private static final Log LOG = LogFactory.getLog("QTestUtil");
+  private static final String QTEST_LEAVE_FILES = "QTEST_LEAVE_FILES";
   private final String defaultInitScript = "q_test_init.sql";
   private final String defaultCleanupScript = "q_test_cleanup.sql";
 
@@ -537,6 +538,9 @@ public class QTestUtil {
    * Clear out any side effects of running tests
    */
   public void clearTestSideEffects() throws Exception {
+    if (System.getenv(QTEST_LEAVE_FILES) != null) {
+      return;
+    }
     // Delete any tables other than the source tables
     // and any databases other than the default database.
     for (String dbName : db.getAllDatabases()) {
@@ -598,6 +602,9 @@ public class QTestUtil {
     if(!isSessionStateStarted) {
       startSessionState();
     }
+    if (System.getenv(QTEST_LEAVE_FILES) != null) {
+      return;
+    }
 
     SessionState.get().getConf().setBoolean("hive.test.shutdown.phase", true);
 
@@ -683,7 +690,10 @@ public class QTestUtil {
     // conf.logVars(System.out);
     // System.out.flush();
 
+    String execEngine = conf.get("hive.execution.engine");
+    conf.set("hive.execution.engine", "mr");
     SessionState.start(conf);
+    conf.set("hive.execution.engine", execEngine);
     db = Hive.get(conf);
     fs = FileSystem.get(conf);
     drv = new Driver(conf);
@@ -764,6 +774,8 @@ public class QTestUtil {
     HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
         "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
 
+    String execEngine = conf.get("hive.execution.engine");
+    conf.set("hive.execution.engine", "mr");
     CliSessionState ss = new CliSessionState(conf);
     assert ss != null;
     ss.in = System.in;
@@ -781,6 +793,7 @@ public class QTestUtil {
 
     isSessionStateStarted = true;
 
+    conf.set("hive.execution.engine", execEngine);
     return ss;
   }
 

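The save/set/restore dance around SessionState.start pins session bootstrap to the MR engine while preserving whatever engine the test configured. As a sketch, the same idiom hardened with try/finally (the commit restores unconditionally, which is equivalent when start() succeeds):

    String execEngine = conf.get("hive.execution.engine");  // e.g. "tez"
    conf.set("hive.execution.engine", "mr");
    try {
      SessionState.start(conf);  // bootstrap always runs on MR
    } finally {
      conf.set("hive.execution.engine", execEngine);  // restore configured engine
    }
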
Modified: hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java (original)
+++ hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java Tue Oct 14 19:06:45 2014
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
@@ -38,8 +39,9 @@ public class SQLStdHiveAuthorizationVali
 
   public SQLStdHiveAuthorizationValidatorForTest(HiveMetastoreClientFactory metastoreClientFactory,
       HiveConf conf, HiveAuthenticationProvider authenticator,
-      SQLStdHiveAccessControllerWrapper privController) {
-    super(metastoreClientFactory, conf, authenticator, privController);
+      SQLStdHiveAccessControllerWrapper privController, HiveAuthzSessionContext ctx)
+      throws HiveAuthzPluginException {
+    super(metastoreClientFactory, conf, authenticator, privController, ctx);
   }
 
   @Override

Modified: hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactoryForTest.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactoryForTest.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactoryForTest.java (original)
+++ hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactoryForTest.java Tue Oct 14 19:06:45 2014
@@ -37,7 +37,7 @@ public class SQLStdHiveAuthorizerFactory
     return new HiveAuthorizerImpl(
         privilegeManager,
         new SQLStdHiveAuthorizationValidatorForTest(metastoreClientFactory, conf, authenticator,
-            privilegeManager)
+            privilegeManager, ctx)
         );
   }
 }

Modified: hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java (original)
+++ hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java Tue Oct 14 19:06:45 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -32,6 +33,8 @@ import org.apache.hadoop.io.Text;
  * UDAFTestMax.
  *
  */
+@Description(name = "test_max",
+value = "_FUNC_(col) - UDF to report Max Value")
 public class UDAFTestMax extends UDAF {
 
   /**

Modified: hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java (original)
+++ hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java Tue Oct 14 19:06:45 2014
@@ -27,6 +27,7 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -34,6 +35,8 @@ import org.apache.hadoop.io.Text;
 /**
  * A UDF for testing, which does key/value lookup from a file
  */
+@Description(name = "lookup",
+value = "_FUNC_(col) - UDF for key/value lookup from a file")
 public class UDFFileLookup extends UDF {
   static Log LOG = LogFactory.getLog(UDFFileLookup.class);
 

Modified: hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java (original)
+++ hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java Tue Oct 14 19:06:45 2014
@@ -18,11 +18,14 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * A UDF for testing, which throws RuntimeException if the expression evaluates to false.
  */
+@Description(name = "test_error",
+value = "_FUNC_(col) - UDF throws RuntimeException if expression evaluates to false")
 public class UDFTestErrorOnFalse extends UDF {
 
   public int evaluate(Boolean b) {

Modified: hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java (original)
+++ hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java Tue Oct 14 19:06:45 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -25,6 +26,8 @@ import org.apache.hadoop.io.Text;
 /**
  * A UDF for testing, which evaluates the length of a string.
  */
+@Description(name = "testlength",
+value = "_FUNC_(col) - UDF evaluates the length of the string")
 public class UDFTestLength extends UDF {
 
   IntWritable result = new IntWritable();

Modified: hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java (original)
+++ hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java Tue Oct 14 19:06:45 2014
@@ -18,12 +18,15 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * A UDF for testing, which evaluates the length of a string. This UDF uses Java
  * Primitive classes for parameters.
  */
+@Description(name = "testlength2",
+value = "_FUNC_(col) - UDF evaluates the length of the string and returns value as Java Integer")
 public class UDFTestLength2 extends UDF {
 
   public Integer evaluate(String s) {

Modified: hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java (original)
+++ hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java Tue Oct 14 19:06:45 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -26,7 +27,8 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.Reporter;
 
+@Description(name = "counter",
+value = "_FUNC_(col) - UDF to report MR counter values")
 public class DummyContextUDF extends GenericUDF {
 
   private MapredContext context;

Modified: hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java (original)
+++ hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java Tue Oct 14 19:06:45 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -27,6 +28,8 @@ import org.apache.hadoop.hive.serde2.obj
 /**
  * A test GenericUDF to return native Java's boolean type
  */
+@Description(name = "test_udf_get_java_boolean",
+value = "_FUNC_(str) - GenericUDF to return native Java's boolean type")
 public class GenericUDFTestGetJavaBoolean extends GenericUDF {
   ObjectInspector[] argumentOIs;
 

Modified: hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java (original)
+++ hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java Tue Oct 14 19:06:45 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -27,6 +28,8 @@ import org.apache.hadoop.hive.serde2.obj
 /**
  * A test GenericUDF to return native Java's string type
  */
+@Description(name = "test_udf_get_java_string",
+value = "_FUNC_(str) - GenericUDF to return native Java's string type")
 public class GenericUDFTestGetJavaString extends GenericUDF {
   ObjectInspector[] argumentOIs;
 

Modified: hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java (original)
+++ hive/branches/llap/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java Tue Oct 14 19:06:45 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.udf.ge
 import java.util.HashSet;
 import java.util.Set;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
@@ -34,6 +35,8 @@ import org.apache.hadoop.io.Text;
 /**
  * Mimics oracle's function translate(str1, str2, str3).
  */
+@Description(name = "test_translate",
+value = "_FUNC_(str1, str2, str3) - Mimics oracle's function translate(str1, str2, str3)")
 public class GenericUDFTestTranslate extends GenericUDF {
   private transient ObjectInspector[] argumentOIs;
 

Modified: hive/branches/llap/jdbc/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/jdbc/pom.xml?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/jdbc/pom.xml (original)
+++ hive/branches/llap/jdbc/pom.xml Tue Oct 14 19:06:45 2014
@@ -41,8 +41,14 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-metastore</artifactId>
+      <artifactId>hive-service</artifactId>
       <version>${project.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+            <artifactId>hive-exec</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
@@ -51,12 +57,12 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-service</artifactId>
+      <artifactId>hive-metastore</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-exec</artifactId>
+      <artifactId>hive-shims</artifactId>
       <version>${project.version}</version>
     </dependency>
     <!-- inter-project -->
@@ -80,6 +86,17 @@
       <artifactId>libthrift</artifactId>
       <version>${libthrift.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+      <version>${zookeeper.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.jboss.netty</groupId>
+            <artifactId>netty</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
   </dependencies>
 
   <profiles>
@@ -124,8 +141,39 @@
               <minimizeJar>true</minimizeJar>
               <shadedArtifactAttached>true</shadedArtifactAttached>
               <shadedClassifierName>${hive.jdbc.driver.classifier}</shadedClassifierName>
+              <filters>
+                <filter>
+                  <artifact>org.apache.hive.shims:hive-shims-common</artifact>
+                  <includes>
+                    <include>org/apache/hadoop/hive/shims/*</include>
+                    <include>org/apache/hadoop/hive/thrift/*</include>
+                  </includes>
+                </filter>
+                <filter>
+                  <artifact>org.apache.hive.shims:hive-shims-common-secure</artifact>
+                  <includes>
+                    <include>org/apache/hadoop/hive/thrift/*</include>
+                    <include>org/apache/hadoop/hive/thrift/client/*</include>
+                  </includes>
+                </filter>
+                <filter>
+                  <artifact>org.apache.hive.shims:hive-shims-0.23</artifact>
+                  <includes>
+                    <include>org/apache/hadoop/hive/thrift/*</include>
+                  </includes>
+                </filter>
+                <filter>
+                  <artifact>*:*</artifact>
+                  <excludes>
+                    <exclude>META-INF/*.SF</exclude>
+                    <exclude>META-INF/*.DSA</exclude>
+                    <exclude>META-INF/*.RSA</exclude>
+                  </excludes>
+                </filter>
+              </filters>
               <artifactSet>
                 <excludes>
+                  <exclude>org.apache.commons:commons-compress</exclude>
                   <exclude>org.apache.hadoop:*</exclude>
                   <exclude>org.apache.hive:hive-ant</exclude>
                   <exclude>org.apache.ant:*</exclude>
@@ -139,13 +187,16 @@
                   <exclude>org.tukaani:*</exclude>
                   <exclude>org.iq80.snappy:*</exclude>
                   <exclude>org.apache.velocity:*</exclude>
+                  <exclude>net.sf.jpam:*</exclude>
+                  <exclude>org.apache.avro:*</exclude>
+                  <exclude>net.sf.opencsv:*</exclude>
+                  <exclude>org.antlr:*</exclude>
                 </excludes>
               </artifactSet>
             </configuration>
           </execution>
         </executions>
       </plugin>
     </plugins>
   </build>
-
 </project>

Modified: hive/branches/llap/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java (original)
+++ hive/branches/llap/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java Tue Oct 14 19:06:45 2014
@@ -102,8 +102,9 @@ public class HiveDriver implements Drive
     return Pattern.matches(URL_PREFIX + ".*", url);
   }
 
+  @Override
   public Connection connect(String url, Properties info) throws SQLException {
-    return new HiveConnection(url, info);
+    return acceptsURL(url) ? new HiveConnection(url, info) : null;
   }
 
   /**

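With the @Override and the acceptsURL guard, connect() now follows the java.sql.Driver contract: a driver must return null, rather than throw, for a URL it does not handle, so DriverManager can try the next registered driver. For example:

    HiveDriver driver = new HiveDriver();
    // Not a Hive JDBC URL: the driver declines by returning null instead of
    // failing inside new HiveConnection(...).
    Connection conn = driver.connect("jdbc:postgresql://localhost/db", new Properties());
    assert conn == null;
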
Modified: hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java (original)
+++ hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java Tue Oct 14 19:06:45 2014
@@ -58,6 +58,7 @@ public abstract class HiveBaseResultSet 
   protected boolean wasNull = false;
   protected Object[] row;
   protected List<String> columnNames;
+  protected List<String> normalizedColumnNames;
   protected List<String> columnTypes;
   protected List<JdbcColumnAttributes> columnAttributes;
 
@@ -84,7 +85,7 @@ public abstract class HiveBaseResultSet 
   }
 
   public int findColumn(String columnName) throws SQLException {
-    int columnIndex = columnNames.indexOf(columnName);
+    int columnIndex = normalizedColumnNames.indexOf(columnName.toLowerCase());
     if (columnIndex==-1) {
       throw new SQLException();
     } else {

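findColumn now matches case-insensitively against a lowercased copy of the column names. The population site is outside this hunk; presumably it is kept in step with columnNames along these lines:

    normalizedColumnNames = new ArrayList<String>();
    for (String name : columnNames) {
      normalizedColumnNames.add(name.toLowerCase());
    }
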
Modified: hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (original)
+++ hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java Tue Oct 14 19:06:45 2014
@@ -53,6 +53,7 @@ import javax.security.sasl.SaslException
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
 import org.apache.hive.service.auth.HiveAuthFactory;
 import org.apache.hive.service.auth.KerberosSaslHelper;
 import org.apache.hive.service.auth.PlainSaslHelper;
@@ -86,37 +87,20 @@ import org.apache.thrift.transport.TTran
  */
 public class HiveConnection implements java.sql.Connection {
   public static final Log LOG = LogFactory.getLog(HiveConnection.class.getName());
-  private static final String HIVE_AUTH_TYPE= "auth";
-  private static final String HIVE_AUTH_QOP = "sasl.qop";
-  private static final String HIVE_AUTH_SIMPLE = "noSasl";
-  private static final String HIVE_AUTH_TOKEN = "delegationToken";
-  private static final String HIVE_AUTH_USER = "user";
-  private static final String HIVE_AUTH_PRINCIPAL = "principal";
-  private static final String HIVE_AUTH_PASSWD = "password";
-  private static final String HIVE_AUTH_KERBEROS_AUTH_TYPE = "kerberosAuthType";
-  private static final String HIVE_AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT = "fromSubject";
-  private static final String HIVE_ANONYMOUS_USER = "anonymous";
-  private static final String HIVE_ANONYMOUS_PASSWD = "anonymous";
-  private static final String HIVE_USE_SSL = "ssl";
-  private static final String HIVE_SSL_TRUST_STORE = "sslTrustStore";
-  private static final String HIVE_SSL_TRUST_STORE_PASSWORD = "trustStorePassword";
-  private static final String HIVE_SERVER2_TRANSPORT_MODE = "hive.server2.transport.mode";
-  private static final String HIVE_SERVER2_THRIFT_HTTP_PATH = "hive.server2.thrift.http.path";
   private static final String HIVE_VAR_PREFIX = "hivevar:";
   private static final String HIVE_CONF_PREFIX = "hiveconf:";
-  // Currently supports JKS keystore format
-  // See HIVE-6286 (Add support for PKCS12 keystore format)
-  private static final String HIVE_SSL_TRUST_STORE_TYPE = "JKS";
-
-  private final String jdbcURI;
-  private final String host;
-  private final int port;
+
+  private String jdbcUriString;
+  private String host;
+  private int port;
   private final Map<String, String> sessConfMap;
   private final Map<String, String> hiveConfMap;
   private final Map<String, String> hiveVarMap;
+  private JdbcConnectionParams connParams;
   private final boolean isEmbeddedMode;
   private TTransport transport;
-  private TCLIService.Iface client;   // todo should be replaced by CliServiceClient
+  // TODO should be replaced by CliServiceClient
+  private TCLIService.Iface client;
   private boolean isClosed = true;
   private SQLWarning warningChain = null;
   private TSessionHandle sessHandle = null;
@@ -126,14 +110,12 @@ public class HiveConnection implements j
 
   public HiveConnection(String uri, Properties info) throws SQLException {
     setupLoginTimeout();
-    jdbcURI = uri;
-    // parse the connection uri
-    Utils.JdbcConnectionParams connParams;
     try {
       connParams = Utils.parseURL(uri);
-    } catch (IllegalArgumentException e) {
+    } catch (ZooKeeperHiveClientException e) {
       throw new SQLException(e);
     }
+    jdbcUriString = connParams.getJdbcUriString();
     // extract parsed connection parameters:
     // JDBC URL: jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
     // each list: <key1>=<val1>;<key2>=<val2> and so on
@@ -164,14 +146,14 @@ public class HiveConnection implements j
     } else {
       // extract user/password from JDBC connection properties if its not supplied in the
       // connection URL
-      if (info.containsKey(HIVE_AUTH_USER)) {
-        sessConfMap.put(HIVE_AUTH_USER, info.getProperty(HIVE_AUTH_USER));
-        if (info.containsKey(HIVE_AUTH_PASSWD)) {
-          sessConfMap.put(HIVE_AUTH_PASSWD, info.getProperty(HIVE_AUTH_PASSWD));
+      if (info.containsKey(JdbcConnectionParams.AUTH_USER)) {
+        sessConfMap.put(JdbcConnectionParams.AUTH_USER, info.getProperty(JdbcConnectionParams.AUTH_USER));
+        if (info.containsKey(JdbcConnectionParams.AUTH_PASSWD)) {
+          sessConfMap.put(JdbcConnectionParams.AUTH_PASSWD, info.getProperty(JdbcConnectionParams.AUTH_PASSWD));
         }
       }
-      if (info.containsKey(HIVE_AUTH_TYPE)) {
-        sessConfMap.put(HIVE_AUTH_TYPE, info.getProperty(HIVE_AUTH_TYPE));
+      if (info.containsKey(JdbcConnectionParams.AUTH_TYPE)) {
+        sessConfMap.put(JdbcConnectionParams.AUTH_TYPE, info.getProperty(JdbcConnectionParams.AUTH_TYPE));
       }
       // open the client transport
       openTransport();
@@ -189,19 +171,44 @@ public class HiveConnection implements j
     supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V7);
 
     // open client session
-    openSession(connParams);
+    openSession();
   }
 
   private void openTransport() throws SQLException {
-    // TODO: Refactor transport creation to a factory, it's getting uber messy here
-    transport = isHttpTransportMode() ? createHttpTransport() : createBinaryTransport();
-    try {
-      if (!transport.isOpen()) {
-        transport.open();
+    while (true) {
+      try {
+        transport = isHttpTransportMode() ? createHttpTransport() : createBinaryTransport();
+        if (!transport.isOpen()) {
+          LOG.info("Will try to open client transport with JDBC Uri: " + jdbcUriString);
+          transport.open();
+        }
+        break;
+      } catch (TTransportException e) {
+        LOG.info("Could not open client transport with JDBC Uri: " + jdbcUriString);
+        // We'll retry till we exhaust all HiveServer2 uris from ZooKeeper
+        if ((sessConfMap.get(JdbcConnectionParams.SERVICE_DISCOVERY_MODE) != null)
+            && (JdbcConnectionParams.SERVICE_DISCOVERY_MODE_ZOOKEEPER.equalsIgnoreCase(sessConfMap
+                .get(JdbcConnectionParams.SERVICE_DISCOVERY_MODE)))) {
+          try {
+            // Update jdbcUriString, host & port variables in connParams
+            // Throw an exception if all HiveServer2 uris have been exhausted,
+            // or if we're unable to connect to ZooKeeper.
+            Utils.updateConnParamsFromZooKeeper(connParams);
+          } catch (ZooKeeperHiveClientException ze) {
+            throw new SQLException(
+                "Could not open client transport for any of the Server URI's in ZooKeeper: "
+                    + ze.getMessage(), " 08S01", ze);
+          }
+          // Update with new values
+          jdbcUriString = connParams.getJdbcUriString();
+          host = connParams.getHost();
+          port = connParams.getPort();
+          LOG.info("Will retry opening client transport");
+        } else {
+          throw new SQLException("Could not open client transport with JDBC Uri: " + jdbcUriString
+              + ": " + e.getMessage(), " 08S01", e);
+        }
       }
-    } catch (TTransportException e) {
-      throw new SQLException("Could not open connection to "
-          + jdbcURI + ": " + e.getMessage(), " 08S01", e);
     }
   }
 
@@ -211,37 +218,36 @@ public class HiveConnection implements j
     String schemeName = useSsl ? "https" : "http";
     // http path should begin with "/"
     String httpPath;
-    httpPath = hiveConfMap.get(HIVE_SERVER2_THRIFT_HTTP_PATH);
-    if(httpPath == null) {
+    httpPath = sessConfMap.get(JdbcConnectionParams.HTTP_PATH);
+    if (httpPath == null) {
       httpPath = "/";
-    }
-    else if(!httpPath.startsWith("/")) {
+    } else if (!httpPath.startsWith("/")) {
       httpPath = "/" + httpPath;
     }
-    return schemeName +  "://" + host + ":" + port + httpPath;
+    return schemeName + "://" + host + ":" + port + httpPath;
   }
 
-  private TTransport createHttpTransport() throws SQLException {
+  private TTransport createHttpTransport() throws SQLException, TTransportException {
     DefaultHttpClient httpClient;
-
     boolean useSsl = isSslConnection();
-
     // Create an http client from the configs
-    try {
-      httpClient = getHttpClient(useSsl);
-    } catch (Exception e) {
-      String msg =  "Could not create http connection to " +
-          jdbcURI + ". " + e.getMessage();
-      throw new SQLException(msg, " 08S01", e);
-    }
-
+    httpClient = getHttpClient(useSsl);
     try {
       transport = new THttpClient(getServerHttpUrl(useSsl), httpClient);
+      // We'll call an open/close here to send a test HTTP message to the server. Any
+      // TTransportException caused by trying to connect to an unavailable peer is thrown here.
+      // We bubble it up the call hierarchy so that a retry can happen in openTransport
+      // when dynamic service discovery is configured.
+      TCLIService.Iface client = new TCLIService.Client(new TBinaryProtocol(transport));
+      TOpenSessionResp openResp = client.OpenSession(new TOpenSessionReq());
+      if (openResp != null) {
+        client.CloseSession(new TCloseSessionReq(openResp.getSessionHandle()));
+      }
     }
-    catch (TTransportException e) {
+    catch (TException e) {
       String msg =  "Could not create http connection to " +
-          jdbcURI + ". " + e.getMessage();
-      throw new SQLException(msg, " 08S01", e);
+          jdbcUriString + ". " + e.getMessage();
+      throw new TTransportException(msg, e);
     }
     return transport;
   }
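
The OpenSession/CloseSession round trip above is only a connectivity probe; the real
session is opened later by openSession(). For reference, a sketch of a URL that
exercises this HTTP path, assuming TRANSPORT_MODE and HTTP_PATH resolve to
"transportMode" and "httpPath" (note these are now read from sessConfMap, i.e. they
are session-level URL parameters rather than hive conf entries):

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class HttpModeSketch {
      public static void main(String[] args) throws Exception {
        // Placeholder host/port; the scheme, host, port and path feed
        // getServerHttpUrl(useSsl) above to form http://host:port/cliservice.
        String url = "jdbc:hive2://localhost:10001/default;"
            + "transportMode=http;httpPath=cliservice";
        Connection conn = DriverManager.getConnection(url, "hiveuser", "");
        conn.close();
      }
    }
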
@@ -263,7 +269,7 @@ public class HiveConnection implements j
        * for sending to the server before every request.
        */
       requestInterceptor = new HttpKerberosRequestInterceptor(
-          sessConfMap.get(HIVE_AUTH_PRINCIPAL), host, getServerHttpUrl(false));
+          sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL), host, getServerHttpUrl(false));
     }
     else {
       /**
@@ -273,11 +279,23 @@ public class HiveConnection implements j
       requestInterceptor = new HttpBasicAuthInterceptor(getUserName(), getPassword());
       // Configure httpClient for SSL
       if (useSsl) {
-        String sslTrustStorePath = sessConfMap.get(HIVE_SSL_TRUST_STORE);
+        String sslTrustStorePath = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE);
         String sslTrustStorePassword = sessConfMap.get(
-            HIVE_SSL_TRUST_STORE_PASSWORD);
+            JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);
         KeyStore sslTrustStore;
         SSLSocketFactory socketFactory;
+        /*
+         * The code within the try block throws:
+         * 1. SSLInitializationException
+         * 2. KeyStoreException
+         * 3. IOException
+         * 4. NoSuchAlgorithmException
+         * 5. CertificateException
+         * 6. KeyManagementException
+         * 7. UnrecoverableKeyException
+         * We don't want the client to retry on any of these, hence we catch all
+         * and throw a SQLException.
+         */
         try {
           if (sslTrustStorePath == null || sslTrustStorePath.isEmpty()) {
             // Create a default socket factory based on standard JSSE trust material
@@ -285,7 +303,7 @@ public class HiveConnection implements j
           }
           else {
             // Pick trust store config from the given path
-            sslTrustStore = KeyStore.getInstance(HIVE_SSL_TRUST_STORE_TYPE);
+            sslTrustStore = KeyStore.getInstance(JdbcConnectionParams.SSL_TRUST_STORE_TYPE);
             sslTrustStore.load(new FileInputStream(sslTrustStorePath),
                 sslTrustStorePassword.toCharArray());
             socketFactory = new SSLSocketFactory(sslTrustStore);
@@ -296,7 +314,7 @@ public class HiveConnection implements j
         }
         catch (Exception e) {
           String msg =  "Could not create an https connection to " +
-              jdbcURI + ". " + e.getMessage();
+              jdbcUriString + ". " + e.getMessage();
           throw new SQLException(msg, " 08S01", e);
         }
       }
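
For reference, a sketch of a URL that takes the truststore branch above. The path
and password are placeholders, and the parameter names assume USE_SSL,
SSL_TRUST_STORE and SSL_TRUST_STORE_PASSWORD resolve to "ssl", "sslTrustStore" and
"trustStorePassword":

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class SslTrustStoreSketch {
      public static void main(String[] args) throws Exception {
        String url = "jdbc:hive2://localhost:10000/default;ssl=true;"
            + "sslTrustStore=/path/to/truststore.jks;trustStorePassword=changeit";
        Connection conn = DriverManager.getConnection(url, "hiveuser", "hivepass");
        conn.close();
      }
    }
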
@@ -316,29 +334,32 @@ public class HiveConnection implements j
    *   - Raw (non-SASL) socket
    *
    *   Kerberos and Delegation token supports SASL QOP configurations
+   * @throws SQLException
+   * @throws TTransportException
    */
-  private TTransport createBinaryTransport() throws SQLException {
+  private TTransport createBinaryTransport() throws SQLException, TTransportException {
     try {
       // handle secure connection if specified
-      if (!HIVE_AUTH_SIMPLE.equals(sessConfMap.get(HIVE_AUTH_TYPE))) {
+      if (!JdbcConnectionParams.AUTH_SIMPLE.equals(sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))) {
         // If Kerberos
         Map<String, String> saslProps = new HashMap<String, String>();
         SaslQOP saslQOP = SaslQOP.AUTH;
-        if (sessConfMap.containsKey(HIVE_AUTH_PRINCIPAL)) {
-          if (sessConfMap.containsKey(HIVE_AUTH_QOP)) {
+        if (sessConfMap.containsKey(JdbcConnectionParams.AUTH_PRINCIPAL)) {
+          if (sessConfMap.containsKey(JdbcConnectionParams.AUTH_QOP)) {
             try {
-              saslQOP = SaslQOP.fromString(sessConfMap.get(HIVE_AUTH_QOP));
+              saslQOP = SaslQOP.fromString(sessConfMap.get(JdbcConnectionParams.AUTH_QOP));
             } catch (IllegalArgumentException e) {
-              throw new SQLException("Invalid " + HIVE_AUTH_QOP +
+              throw new SQLException("Invalid " + JdbcConnectionParams.AUTH_QOP +
                   " parameter. " + e.getMessage(), "42000", e);
             }
           }
           saslProps.put(Sasl.QOP, saslQOP.toString());
           saslProps.put(Sasl.SERVER_AUTH, "true");
-          boolean assumeSubject = HIVE_AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(sessConfMap.get(HIVE_AUTH_KERBEROS_AUTH_TYPE));
+          boolean assumeSubject = JdbcConnectionParams.AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(sessConfMap
+              .get(JdbcConnectionParams.AUTH_KERBEROS_AUTH_TYPE));
           transport = KerberosSaslHelper.getKerberosTransport(
-              sessConfMap.get(HIVE_AUTH_PRINCIPAL), host,
-              HiveAuthFactory.getSocketTransport(host, port, loginTimeout), saslProps, assumeSubject);
+              sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL), host,
+              HiveAuthFactory.getSocketTransport(host, port, loginTimeout), saslProps,
+              assumeSubject);
         } else {
           // If there's a delegation token available then use token based connection
           String tokenStr = getClientDelegationToken(sessConfMap);
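
For reference, a sketch of a URL that drives the Kerberos branch above, including a
SASL QOP override. The principal and realm are placeholders; the parameter names
assume AUTH_PRINCIPAL and AUTH_QOP resolve to "principal" and "sasl.qop", with QOP
values "auth", "auth-int" or "auth-conf":

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class KerberosQopSketch {
      public static void main(String[] args) throws Exception {
        // Requires a Kerberos TGT in the caller's ticket cache (e.g. via kinit).
        String url = "jdbc:hive2://localhost:10000/default;"
            + "principal=hive/_HOST@EXAMPLE.COM;sasl.qop=auth-conf";
        Connection conn = DriverManager.getConnection(url);
        conn.close();
      }
    }
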
@@ -349,10 +370,15 @@ public class HiveConnection implements j
             // we are using PLAIN Sasl connection with user/password
             String userName = getUserName();
             String passwd = getPassword();
+            // Note: Thrift returns an SSL socket that is already bound to the specified host:port,
+            // so a later call to open() on it is a no-op.
+            // Hence, any TTransportException from connecting to the peer is thrown here.
+            // We bubble it up the call hierarchy so that a retry can happen in openTransport
+            // when dynamic service discovery is configured.
             if (isSslConnection()) {
               // get SSL socket
-              String sslTrustStore = sessConfMap.get(HIVE_SSL_TRUST_STORE);
-              String sslTrustStorePassword = sessConfMap.get(HIVE_SSL_TRUST_STORE_PASSWORD);
+              String sslTrustStore = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE);
+              String sslTrustStorePassword = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);
               if (sslTrustStore == null || sslTrustStore.isEmpty()) {
                 transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout);
               } else {
@@ -373,10 +399,7 @@ public class HiveConnection implements j
       }
     } catch (SaslException e) {
       throw new SQLException("Could not create secure connection to "
-          + jdbcURI + ": " + e.getMessage(), " 08S01", e);
-    } catch (TTransportException e) {
-      throw new SQLException("Could not create connection to "
-          + jdbcURI + ": " + e.getMessage(), " 08S01", e);
+          + jdbcUriString + ": " + e.getMessage(), " 08S01", e);
     }
     return transport;
   }
@@ -385,7 +408,7 @@ public class HiveConnection implements j
   private String getClientDelegationToken(Map<String, String> jdbcConnConf)
       throws SQLException {
     String tokenStr = null;
-    if (HIVE_AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(HIVE_AUTH_TYPE))) {
+    if (JdbcConnectionParams.AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(JdbcConnectionParams.AUTH_TYPE))) {
       // check delegation token in job conf if any
       try {
         tokenStr = ShimLoader.getHadoopShims().
@@ -397,7 +420,7 @@ public class HiveConnection implements j
     return tokenStr;
   }
 
-  private void openSession(Utils.JdbcConnectionParams connParams) throws SQLException {
+  private void openSession() throws SQLException {
     TOpenSessionReq openReq = new TOpenSessionReq();
 
     Map<String, String> openConf = new HashMap<String, String>();
@@ -433,7 +456,7 @@ public class HiveConnection implements j
     } catch (TException e) {
       LOG.error("Error opening session", e);
       throw new SQLException("Could not establish connection to "
-          + jdbcURI + ": " + e.getMessage(), " 08S01", e);
+          + jdbcUriString + ": " + e.getMessage(), " 08S01", e);
     }
     isClosed = false;
   }
@@ -442,27 +465,27 @@ public class HiveConnection implements j
    * @return username from sessConfMap
    */
   private String getUserName() {
-    return getSessionValue(HIVE_AUTH_USER, HIVE_ANONYMOUS_USER);
+    return getSessionValue(JdbcConnectionParams.AUTH_USER, JdbcConnectionParams.ANONYMOUS_USER);
   }
 
   /**
    * @return password from sessConfMap
    */
   private String getPassword() {
-    return getSessionValue(HIVE_AUTH_PASSWD, HIVE_ANONYMOUS_PASSWD);
+    return getSessionValue(JdbcConnectionParams.AUTH_PASSWD, JdbcConnectionParams.ANONYMOUS_PASSWD);
   }
 
   private boolean isSslConnection() {
-    return "true".equalsIgnoreCase(sessConfMap.get(HIVE_USE_SSL));
+    return "true".equalsIgnoreCase(sessConfMap.get(JdbcConnectionParams.USE_SSL));
   }
 
   private boolean isKerberosAuthMode() {
-    return !HIVE_AUTH_SIMPLE.equals(sessConfMap.get(HIVE_AUTH_TYPE))
-        && sessConfMap.containsKey(HIVE_AUTH_PRINCIPAL);
+    return !JdbcConnectionParams.AUTH_SIMPLE.equals(sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))
+        && sessConfMap.containsKey(JdbcConnectionParams.AUTH_PRINCIPAL);
   }
 
   private boolean isHttpTransportMode() {
-    String transportMode = hiveConfMap.get(HIVE_SERVER2_TRANSPORT_MODE);
+    String transportMode = sessConfMap.get(JdbcConnectionParams.TRANSPORT_MODE);
     if(transportMode != null && (transportMode.equalsIgnoreCase("http"))) {
       return true;
     }

Modified: hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java (original)
+++ hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java Tue Oct 14 19:06:45 2014
@@ -230,7 +230,12 @@ public class HiveDriver implements Drive
       throw new SQLException("Invalid connection url: " + url);
     }
 
-    JdbcConnectionParams params = Utils.parseURL(url);
+    JdbcConnectionParams params = null;
+    try {
+      params = Utils.parseURL(url);
+    } catch (ZooKeeperHiveClientException e) {
+      throw new SQLException(e);
+    }
     String host = params.getHost();
     if (host == null){
       host = "";
@@ -239,7 +244,7 @@ public class HiveDriver implements Drive
     if(host.equals("")){
       port = "";
     }
-    else if(port.equals("0")){
+    else if(port.equals("0") || port.equals("-1")){
       port = Utils.DEFAULT_PORT;
     }
     String db = params.getDbName();
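
The new "-1" case is presumably what java.net.URI reports when the authority carries
no explicit port, so a URL like jdbc:hive2://host/db now falls back to the default
port as well. A quick, self-contained check of that behavior:

    import java.net.URI;

    public class PortProbe {
      public static void main(String[] args) {
        // URI.getPort() returns -1 when the authority omits an explicit port.
        System.out.println(URI.create("hive2://localhost/default").getPort());       // -1
        System.out.println(URI.create("hive2://localhost:10000/default").getPort()); // 10000
      }
    }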

Modified: hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveMetaDataResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveMetaDataResultSet.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveMetaDataResultSet.java (original)
+++ hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveMetaDataResultSet.java Tue Oct 14 19:06:45 2014
@@ -36,8 +36,13 @@ public abstract class HiveMetaDataResult
     }
     if (columnNames!=null) {
       this.columnNames = new ArrayList<String>(columnNames);
+      this.normalizedColumnNames = new ArrayList<String>();
+      for (String colName : columnNames) {
+        this.normalizedColumnNames.add(colName.toLowerCase());
+      }
     } else {
       this.columnNames =  new ArrayList<String>();
+      this.normalizedColumnNames = new ArrayList<String>();
     }
     if (columnTypes!=null) {
       this.columnTypes = new ArrayList<String>(columnTypes);
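
Lower-casing the names once at construction lets column lookups be case-insensitive
without re-normalizing the whole list on every call. A sketch of the lookup this
enables (the class and method names here are illustrative, not the actual
HiveBaseResultSet code):

    import java.sql.SQLException;
    import java.util.ArrayList;
    import java.util.List;

    public class NormalizedLookupSketch {
      private final List<String> normalizedColumnNames = new ArrayList<String>();

      public NormalizedLookupSketch(List<String> columnNames) {
        for (String name : columnNames) {
          normalizedColumnNames.add(name.toLowerCase()); // normalize once, up front
        }
      }

      public int findColumnIndex(String columnName) throws SQLException {
        int i = normalizedColumnNames.indexOf(columnName.trim().toLowerCase());
        if (i == -1) {
          throw new SQLException("Could not find " + columnName + " in the result set");
        }
        return i + 1; // JDBC column indexes are 1-based
      }
    }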

Modified: hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java (original)
+++ hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java Tue Oct 14 19:06:45 2014
@@ -28,6 +28,7 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
+import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -75,6 +76,7 @@ public class HiveQueryResultSet extends 
   private boolean fetchFirst = false;
 
   private final TProtocolVersion protocol;
+  private ReentrantLock transportLock;
 
 
   public static class Builder {
@@ -98,6 +100,7 @@ public class HiveQueryResultSet extends 
     private int fetchSize = 50;
     private boolean emptyResultSet = false;
     private boolean isScrollable = false;
+    private ReentrantLock transportLock = null;
 
     public Builder(Statement statement) throws SQLException {
       this.statement = statement;
@@ -166,6 +169,11 @@ public class HiveQueryResultSet extends 
       return this;
     }
 
+    public Builder setTransportLock(ReentrantLock transportLock) {
+      this.transportLock = transportLock;
+      return this;
+    }
+
     public HiveQueryResultSet build() throws SQLException {
       return new HiveQueryResultSet(this);
     }
@@ -181,7 +189,9 @@ public class HiveQueryResultSet extends 
     this.stmtHandle = builder.stmtHandle;
     this.sessHandle = builder.sessHandle;
     this.fetchSize = builder.fetchSize;
+    this.transportLock = builder.transportLock;
     columnNames = new ArrayList<String>();
+    normalizedColumnNames = new ArrayList<String>();
     columnTypes = new ArrayList<String>();
     columnAttributes = new ArrayList<JdbcColumnAttributes>();
     if (builder.retrieveSchema) {
@@ -239,7 +249,17 @@ public class HiveQueryResultSet extends 
     try {
       TGetResultSetMetadataReq metadataReq = new TGetResultSetMetadataReq(stmtHandle);
       // TODO need session handle
-      TGetResultSetMetadataResp  metadataResp = client.GetResultSetMetadata(metadataReq);
+      TGetResultSetMetadataResp metadataResp;
+      if (transportLock == null) {
+        metadataResp = client.GetResultSetMetadata(metadataReq);
+      } else {
+        transportLock.lock();
+        try {
+          metadataResp = client.GetResultSetMetadata(metadataReq);
+        } finally {
+          transportLock.unlock();
+        }
+      }
       Utils.verifySuccess(metadataResp.getStatus());
 
       StringBuilder namesSb = new StringBuilder();
@@ -260,6 +280,7 @@ public class HiveQueryResultSet extends 
         }
         String columnName = columns.get(pos).getColumnName();
         columnNames.add(columnName);
+        normalizedColumnNames.add(columnName.toLowerCase());
         TPrimitiveTypeEntry primitiveTypeEntry =
             columns.get(pos).getTypeDesc().getTypes().get(0).getPrimitiveEntry();
         String columnTypeName = TYPE_NAMES.get(primitiveTypeEntry.getType());
@@ -284,6 +305,10 @@ public class HiveQueryResultSet extends 
     columnNames.addAll(colNames);
     columnTypes.addAll(colTypes);
     columnAttributes.addAll(colAttributes);
+
+    for (String colName : colNames) {
+      normalizedColumnNames.add(colName.toLowerCase());
+    }
   }
 
   @Override
@@ -326,7 +351,17 @@ public class HiveQueryResultSet extends 
       if (fetchedRows == null || !fetchedRowsItr.hasNext()) {
         TFetchResultsReq fetchReq = new TFetchResultsReq(stmtHandle,
             orientation, fetchSize);
-        TFetchResultsResp fetchResp = client.FetchResults(fetchReq);
+        TFetchResultsResp fetchResp;
+        if (transportLock == null) {
+          fetchResp = client.FetchResults(fetchReq);
+        } else {
+          transportLock.lock();
+          try {
+            fetchResp = client.FetchResults(fetchReq);
+          } finally {
+            transportLock.unlock();
+          }
+        }
         Utils.verifySuccessWithInfo(fetchResp.getStatus());
 
         TRowSet results = fetchResp.getResults();
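
A Thrift transport is not safe for concurrent use, and with getQueryLog() (added to
HiveStatement below) issuing FetchResults calls from a second thread over the same
connection, every RPC through the shared client has to be serialized. The null check
keeps result sets built without a lock working unchanged. A condensed sketch of the
guard pattern repeated above (the helper itself is illustrative, not part of this
patch):

    import java.util.concurrent.Callable;
    import java.util.concurrent.locks.ReentrantLock;

    public class LockedRpcSketch {
      static <T> T callLocked(ReentrantLock lock, Callable<T> rpc) throws Exception {
        if (lock == null) {
          return rpc.call(); // transport is not shared; no locking needed
        }
        lock.lock();
        try {
          return rpc.call();
        } finally {
          lock.unlock(); // release even if the RPC throws
        }
      }
    }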

Modified: hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java (original)
+++ hive/branches/llap/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java Tue Oct 14 19:06:45 2014
@@ -23,10 +23,14 @@ import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.SQLFeatureNotSupportedException;
 import java.sql.SQLWarning;
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.locks.ReentrantLock;
 
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.RowSetFactory;
 import org.apache.hive.service.cli.thrift.TCLIService;
 import org.apache.hive.service.cli.thrift.TCancelOperationReq;
 import org.apache.hive.service.cli.thrift.TCancelOperationResp;
@@ -38,6 +42,9 @@ import org.apache.hive.service.cli.thrif
 import org.apache.hive.service.cli.thrift.TGetOperationStatusResp;
 import org.apache.hive.service.cli.thrift.TOperationHandle;
 import org.apache.hive.service.cli.thrift.TSessionHandle;
+import org.apache.hive.service.cli.thrift.TFetchResultsReq;
+import org.apache.hive.service.cli.thrift.TFetchResultsResp;
+import org.apache.hive.service.cli.thrift.TFetchOrientation;
 
 /**
  * HiveStatement.
@@ -77,6 +84,27 @@ public class HiveStatement implements ja
    */
   private boolean isClosed = false;
 
+  /**
+   * Keep state so we can fail certain calls made after cancel().
+   */
+  private boolean isCancelled = false;
+
+  /**
+   * Keep this state so we can know whether the query in this statement has been closed.
+   */
+  private boolean isQueryClosed = false;
+
+  /**
+   * Keep this state so we can know whether the query logs are being generated in HS2.
+   */
+  private boolean isLogBeingGenerated = true;
+
+  /**
+   * Keep this state so we can know whether the statement was submitted to HS2 and started
+   * executing successfully.
+   */
+  private boolean isExecuteStatementFailed = false;
+
   // A fair reentrant lock
   private ReentrantLock transportLock = new ReentrantLock(true);
 
@@ -113,6 +141,9 @@ public class HiveStatement implements ja
   @Override
   public void cancel() throws SQLException {
     checkConnection("cancel");
+    if (isCancelled) {
+      return;
+    }
 
     transportLock.lock();
     try {
@@ -128,6 +159,7 @@ public class HiveStatement implements ja
     } finally {
       transportLock.unlock();
     }
+    isCancelled = true;
   }
 
   /*
@@ -167,6 +199,8 @@ public class HiveStatement implements ja
     } finally {
       transportLock.unlock();
     }
+    isQueryClosed = true;
+    isExecuteStatementFailed = false;
     stmtHandle = null;
   }
 
@@ -202,6 +236,7 @@ public class HiveStatement implements ja
     checkConnection("execute");
 
     closeClientOperation();
+    initFlags();
 
     TExecuteStatementReq execReq = new TExecuteStatementReq(sessHandle, sql);
     /**
@@ -218,9 +253,12 @@ public class HiveStatement implements ja
       TExecuteStatementResp execResp = client.ExecuteStatement(execReq);
       Utils.verifySuccessWithInfo(execResp.getStatus());
       stmtHandle = execResp.getOperationHandle();
+      isExecuteStatementFailed = false;
     } catch (SQLException eS) {
+      isExecuteStatementFailed = true;
       throw eS;
     } catch (Exception ex) {
+      isExecuteStatementFailed = true;
       throw new SQLException(ex.toString(), "08S01", ex);
     } finally {
       transportLock.unlock();
@@ -266,11 +304,14 @@ public class HiveStatement implements ja
           }
         }
       } catch (SQLException e) {
+        isLogBeingGenerated = false;
         throw e;
       } catch (Exception e) {
+        isLogBeingGenerated = false;
         throw new SQLException(e.toString(), "08S01", e);
       }
     }
+    isLogBeingGenerated = false;
 
     // The query should be completed by now
     if (!stmtHandle.isHasResultSet()) {
@@ -278,7 +319,7 @@ public class HiveStatement implements ja
     }
     resultSet =  new HiveQueryResultSet.Builder(this).setClient(client).setSessionHandle(sessHandle)
         .setStmtHandle(stmtHandle).setMaxRows(maxRows).setFetchSize(fetchSize)
-        .setScrollable(isScrollableResultset)
+        .setScrollable(isScrollableResultset).setTransportLock(transportLock)
         .build();
     return true;
   }
@@ -289,6 +330,13 @@ public class HiveStatement implements ja
     }
   }
 
+  private void initFlags() {
+    isCancelled = false;
+    isQueryClosed = false;
+    isLogBeingGenerated = true;
+    isExecuteStatementFailed = false;
+  }
+
   /*
    * (non-Javadoc)
    *
@@ -713,4 +761,93 @@ public class HiveStatement implements ja
     throw new SQLException("Cannot unwrap to " + iface);
   }
 
+  /**
+   * Check whether query execution might be producing more logs to be fetched.
+   * This method is a public API for usage outside of Hive, although it is not part of the
+   * interface java.sql.Statement.
+   * @return true if query execution might be producing more logs. It does not indicate whether
+   *         the last log lines have been fetched by getQueryLog.
+   */
+  public boolean hasMoreLogs() {
+    return isLogBeingGenerated;
+  }
+
+  /**
+   * Get the execution logs of the given SQL statement.
+   * This method is a public API for usage outside of Hive, although it is not part of the
+   * interface java.sql.Statement.
+   * This method gets the incremental logs during SQL execution, and uses the fetchSize held by
+   * the HiveStatement object.
+   * @return a list of logs. It can be empty if there are no new logs to be retrieved at that time.
+   * @throws SQLException
+   * @throws ClosedOrCancelledStatementException if statement has been cancelled or closed
+   */
+  public List<String> getQueryLog() throws SQLException, ClosedOrCancelledStatementException {
+    return getQueryLog(true, fetchSize);
+  }
+
+  /**
+   * Get the execution logs of the given SQL statement.
+   * This method is a public API for usage outside of Hive, although it is not part of the
+   * interface java.sql.Statement.
+   * @param incremental if true, fetch the logs incrementally from the last fetch position;
+   *                    if false, fetch them from the beginning.
+   * @param fetchSize the number of lines to fetch
+   * @return a list of logs. It can be empty if there are no new logs to be retrieved at that time.
+   * @throws SQLException
+   * @throws ClosedOrCancelledStatementException if statement has been cancelled or closed
+   */
+  public List<String> getQueryLog(boolean incremental, int fetchSize)
+      throws SQLException, ClosedOrCancelledStatementException {
+    checkConnection("getQueryLog");
+    if (isCancelled) {
+      throw new ClosedOrCancelledStatementException("Method getQueryLog() failed. The " +
+          "statement has been closed or cancelled.");
+    }
+
+    List<String> logs = new ArrayList<String>();
+    TFetchResultsResp tFetchResultsResp = null;
+    transportLock.lock();
+    try {
+      if (stmtHandle != null) {
+        TFetchResultsReq tFetchResultsReq = new TFetchResultsReq(stmtHandle,
+            getFetchOrientation(incremental), fetchSize);
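+        // A fetchType of 1 asks HiveServer2 for the operation log; 0 would fetch query results.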
+        tFetchResultsReq.setFetchType((short)1);
+        tFetchResultsResp = client.FetchResults(tFetchResultsReq);
+        Utils.verifySuccessWithInfo(tFetchResultsResp.getStatus());
+      } else {
+        if (isQueryClosed) {
+          throw new ClosedOrCancelledStatementException("Method getQueryLog() failed. The " +
+              "statement has been closed or cancelled.");
+        }
+        if (isExecuteStatementFailed) {
+          throw new SQLException("Method getQueryLog() failed. The stmtHandle in " +
+              "HiveStatement is null and the statement execution may have failed.");
+        } else {
+          return logs;
+        }
+      }
+    } catch (SQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new SQLException("Error when getting query log: " + e, e);
+    } finally {
+      transportLock.unlock();
+    }
+
+    RowSet rowSet = RowSetFactory.create(tFetchResultsResp.getResults(),
+        connection.getProtocol());
+    for (Object[] row : rowSet) {
+      logs.add((String)row[0]);
+    }
+    return logs;
+  }
+
+  private TFetchOrientation getFetchOrientation(boolean incremental) {
+    if (incremental) {
+      return TFetchOrientation.FETCH_NEXT;
+    } else {
+      return TFetchOrientation.FETCH_FIRST;
+    }
+  }
 }
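
For reference, a sketch of how the two new public methods are meant to be consumed:
execute on one thread and stream the operation log from another (safe because of the
transportLock above). This is a fragment that assumes an open java.sql.Connection
named conn; the poll interval and query are placeholders:

    final HiveStatement stmt = (HiveStatement) conn.createStatement();
    Thread logPoller = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          while (stmt.hasMoreLogs()) {
            for (String line : stmt.getQueryLog()) {
              System.out.println(line);
            }
            Thread.sleep(500L); // placeholder poll interval
          }
        } catch (Exception e) {
          // Statement was closed or cancelled; stop polling.
        }
      }
    });
    logPoller.start();
    stmt.execute("select count(*) from src"); // placeholder query
    logPoller.join();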