Posted to commits@hive.apache.org by we...@apache.org on 2017/05/08 20:43:24 UTC

[34/51] [partial] hive git commit: Revert "HIVE-14671 : merge master into hive-14535 (Wei Zheng)"

http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java
new file mode 100644
index 0000000..2ae9cc0
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java
@@ -0,0 +1,792 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.junit.Assert;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * This tests the flag 'hive.warehouse.subdir.inherit.perms'.
+ */
+public abstract class FolderPermissionBase {
+  protected static HiveConf conf;
+  protected static Driver driver;
+  protected static String dataFileDir;
+  protected static Path dataFilePath;
+  protected static FileSystem fs;
+
+  protected static Path warehouseDir;
+  protected static Path baseDfsDir;
+
+  protected static final PathFilter hiddenFileFilter = new PathFilter(){
+    public boolean accept(Path p){
+      String name = p.getName();
+      return !name.startsWith("_") && !name.startsWith(".");
+    }
+  };
+
+
+  public abstract void setPermission(String locn, int permIndex) throws Exception;
+
+  public abstract void verifyPermission(String locn, int permIndex) throws Exception;
+
+
+  public void setPermission(String locn) throws Exception {
+    setPermission(locn, 0);
+  }
+
+  public void verifyPermission(String locn) throws Exception {
+    verifyPermission(locn, 0);
+  }
+
+
+  public static void baseSetup() throws Exception {
+    MiniDFSShim dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null);
+    fs = dfs.getFileSystem();
+    baseDfsDir =  new Path(new Path(fs.getUri()), "/base");
+    fs.mkdirs(baseDfsDir);
+    warehouseDir = new Path(baseDfsDir, "warehouse");
+    fs.mkdirs(warehouseDir);
+    conf.setVar(ConfVars.METASTOREWAREHOUSE, warehouseDir.toString());
+
+    // Assuming the tests are run on either the C: or D: drive on Windows.
+    dataFileDir = conf.get("test.data.files").replace('\\', '/')
+        .replace("c:", "").replace("C:", "").replace("D:", "").replace("d:", "");
+    dataFilePath = new Path(dataFileDir, "kv1.txt");
+
+    // Set up scratch directory
+    Path scratchDir = new Path(baseDfsDir, "scratchdir");
+    conf.setVar(HiveConf.ConfVars.SCRATCHDIR, scratchDir.toString());
+
+    //set hive conf vars
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS, true);
+    conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
+    int port = MetaStoreUtils.findFreePort();
+    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
+
+    SessionState.start(new CliSessionState(conf));
+    driver = new Driver(conf);
+    setupDataTable();
+  }
+
+
+  private static void setupDataTable() throws Exception {
+    CommandProcessorResponse ret = driver.run("DROP TABLE IF EXISTS mysrc");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    ret = driver.run("CREATE TABLE mysrc (key STRING, value STRING) PARTITIONED BY (part1 string, part2 string) STORED AS TEXTFILE");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    ret = driver.run("LOAD DATA LOCAL INPATH '" + dataFilePath + "' INTO TABLE mysrc PARTITION (part1='1',part2='1')");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    ret = driver.run("LOAD DATA LOCAL INPATH '" + dataFilePath + "' INTO TABLE mysrc PARTITION (part1='2',part2='2')");
+    Assert.assertEquals(0,ret.getResponseCode());
+  }
+
+  @Before
+  public void setupBeforeTest() throws Exception {
+    driver.run("USE default");
+  }
+
+  @Test
+  public void testCreateDb() throws Exception {
+    //see if the db inherits permissions from the warehouse directory.
+    String testDb = "mydb";
+    String tableName = "createtable";
+
+    setPermission(warehouseDir.toString());
+    verifyPermission(warehouseDir.toString());
+
+    CommandProcessorResponse ret = driver.run("CREATE DATABASE " + testDb);
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + testDb + ".db");
+    verifyPermission(warehouseDir + "/" + testDb + ".db");
+
+    ret = driver.run("USE " + testDb);
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    ret = driver.run("CREATE TABLE " + tableName + " (key string, value string)");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    verifyPermission(warehouseDir + "/" + testDb + ".db/" + tableName);
+
+    ret = driver.run("insert into table " + tableName + " select key,value from default.mysrc");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + testDb + ".db/" + tableName);
+    verifyPermission(warehouseDir + "/" + testDb + ".db/" + tableName);
+
+    Assert.assertTrue(listStatus(warehouseDir + "/" + testDb + ".db/" + tableName).size() > 0);
+    for (String child : listStatus(warehouseDir + "/" + testDb + ".db/" + tableName)) {
+      verifyPermission(child);
+    }
+
+    ret = driver.run("USE default");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    //cleanup after the test.
+    fs.delete(warehouseDir, true);
+    fs.mkdirs(warehouseDir);
+    Assert.assertEquals(listStatus(warehouseDir.toString()).size(), 0);
+    setupDataTable();
+  }
+
+  @Test
+  public void testCreateTable() throws Exception {
+    String testDb = "mydb2";
+    String tableName = "createtable";
+    CommandProcessorResponse ret = driver.run("CREATE DATABASE " + testDb);
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + testDb + ".db");
+    setPermission(warehouseDir + "/" + testDb + ".db");
+    verifyPermission(warehouseDir + "/" + testDb + ".db");
+
+    ret = driver.run("USE " + testDb);
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    ret = driver.run("CREATE TABLE " + tableName + " (key string, value string)");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    verifyPermission(warehouseDir + "/" + testDb + ".db/" + tableName);
+
+    ret = driver.run("insert into table " + tableName + " select key,value from default.mysrc");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + testDb + ".db/" + tableName);
+    verifyPermission(warehouseDir + "/" + testDb + ".db/" + tableName);
+
+    Assert.assertTrue(listStatus(warehouseDir + "/" + testDb + ".db/" + tableName).size() > 0);
+    for (String child : listStatus(warehouseDir + "/" + testDb + ".db/" + tableName)) {
+      verifyPermission(child);
+    }
+
+    ret = driver.run("USE default");
+    Assert.assertEquals(0,ret.getResponseCode());
+  }
+
+
+  @Test
+  public void testInsertNonPartTable() throws Exception {
+    //case 1 is non-partitioned table.
+    String tableName = "nonpart";
+
+    CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key string, value string)");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    String tableLoc = warehouseDir + "/" + tableName;
+    assertExistence(warehouseDir + "/" + tableName);
+
+    //case1A: insert into non-partitioned table.
+    setPermission(warehouseDir + "/" + tableName);
+    ret = driver.run("insert into table " + tableName + " select key,value from mysrc");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    verifyPermission(warehouseDir + "/" + tableName);
+    Assert.assertTrue(listStatus(tableLoc).size() > 0);
+    for (String child : listStatus(tableLoc)) {
+      verifyPermission(child);
+    }
+
+    //case1B: insert overwrite non-partitioned-table
+    setPermission(warehouseDir + "/" + tableName, 1);
+    ret = driver.run("insert overwrite table " + tableName + " select key,value from mysrc");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    verifyPermission(warehouseDir + "/" + tableName, 1);
+    Assert.assertTrue(listStatus(tableLoc).size() > 0);
+    for (String child : listStatus(tableLoc)) {
+      verifyPermission(child, 1);
+    }
+  }
+
+  @Test
+  public void testInsertStaticSinglePartition() throws Exception {
+    String tableName = "singlestaticpart";
+    CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 string)");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + tableName);
+    setPermission(warehouseDir + "/" + tableName);
+
+    //insert into test
+    ret = driver.run("insert into table " + tableName + " partition(part1='1') select key,value from mysrc where part1='1' and part2='1'");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    verifyPermission(warehouseDir + "/" + tableName);
+    verifyPermission(warehouseDir + "/" + tableName + "/part1=1");
+
+    Assert.assertTrue(listStatus(warehouseDir + "/" + tableName + "/part1=1").size() > 0);
+    for (String child : listStatus(warehouseDir + "/" + tableName + "/part1=1")) {
+      verifyPermission(child);
+    }
+
+    //insert overwrite test
+    setPermission(warehouseDir + "/" + tableName, 1);
+    setPermission(warehouseDir + "/" + tableName + "/part1=1", 1);
+    ret = driver.run("insert overwrite table " + tableName + " partition(part1='1') select key,value from mysrc where part1='1' and part2='1'");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    verifyPermission(warehouseDir + "/" + tableName, 1);
+    verifyPermission(warehouseDir + "/" + tableName + "/part1=1", 1);
+
+    Assert.assertTrue(listStatus(warehouseDir + "/" + tableName + "/part1=1").size() > 0);
+    for (String child : listStatus(warehouseDir + "/" + tableName + "/part1=1")) {
+      verifyPermission(child, 1);
+    }
+  }
+
+  @Test
+  public void testInsertStaticDualPartition() throws Exception {
+    String tableName = "dualstaticpart";
+    CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 string, part2 string)");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + tableName);
+    setPermission(warehouseDir + "/" + tableName);
+
+    //insert into test
+    ret = driver.run("insert into table " + tableName + " partition(part1='1', part2='1') select key,value from mysrc where part1='1' and part2='1'");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    verifyPermission(warehouseDir + "/" + tableName);
+    verifyPermission(warehouseDir + "/" + tableName + "/part1=1");
+    verifyPermission(warehouseDir + "/" + tableName + "/part1=1/part2=1");
+
+    Assert.assertTrue(listStatus(warehouseDir + "/" + tableName + "/part1=1/part2=1").size() > 0);
+    for (String child : listStatus(warehouseDir + "/" + tableName + "/part1=1/part2=1")) {
+      verifyPermission(child);
+    }
+
+    //insert overwrite test
+    setPermission(warehouseDir + "/" + tableName, 1);
+    setPermission(warehouseDir + "/" + tableName + "/part1=1", 1);
+    setPermission(warehouseDir + "/" + tableName + "/part1=1/part2=1", 1);
+
+    ret = driver.run("insert overwrite table " + tableName + " partition(part1='1', part2='1') select key,value from mysrc where part1='1' and part2='1'");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    verifyPermission(warehouseDir + "/" + tableName, 1);
+    verifyPermission(warehouseDir + "/" + tableName + "/part1=1", 1);
+    verifyPermission(warehouseDir + "/" + tableName + "/part1=1/part2=1", 1);
+
+    Assert.assertTrue(listStatus(warehouseDir + "/" + tableName + "/part1=1/part2=1").size() > 0);
+    for (String child : listStatus(warehouseDir + "/" + tableName + "/part1=1/part2=1")) {
+      verifyPermission(child, 1);
+    }
+  }
+
+  @Test
+  public void testInsertDualDynamicPartitions() throws Exception {
+    String tableName = "dualdynamicpart";
+
+    CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 string, part2 string)");
+    Assert.assertEquals(0, ret.getResponseCode());
+    assertExistence(warehouseDir + "/" + tableName);
+
+    //Insert into test, with permission set 0.
+    setPermission(warehouseDir + "/" + tableName, 0);
+    ret = driver.run("insert into table " + tableName + " partition (part1,part2) select key,value,part1,part2 from mysrc");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    verifyDualPartitionTable(warehouseDir + "/" + tableName, 0);
+
+    //Insert overwrite test, with permission set 1.  We need to reset existing partitions to 1 since the
+    //permissions should be inherited from the existing partitions.
+    setDualPartitionTable(warehouseDir + "/" + tableName, 1);
+    ret = driver.run("insert overwrite table " + tableName + " partition (part1,part2) select key,value,part1,part2 from mysrc");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    verifyDualPartitionTable(warehouseDir + "/" + tableName, 1);
+  }
+
+  @Test
+  public void testInsertSingleDynamicPartition() throws Exception {
+    String tableName = "singledynamicpart";
+
+    CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 string)");
+    Assert.assertEquals(0,ret.getResponseCode());
+    String tableLoc = warehouseDir + "/" + tableName;
+    assertExistence(tableLoc);
+
+    //Insert into test, with permission set 0.
+    setPermission(tableLoc, 0);
+    ret = driver.run("insert into table " + tableName + " partition (part1) select key,value,part1 from mysrc");
+    Assert.assertEquals(0,ret.getResponseCode());
+    verifySinglePartition(tableLoc, 0);
+
+    //Insert overwrite test, with permission set 1. We need to reset existing partitions to 1 since the
+    //permissions should be inherited from the existing partitions.
+    setSinglePartition(tableLoc, 1);
+    ret = driver.run("insert overwrite table " + tableName + " partition (part1) select key,value,part1 from mysrc");
+    Assert.assertEquals(0,ret.getResponseCode());
+    verifySinglePartition(tableLoc, 1);
+
+    //delete and re-insert using insert overwrite.  There are different code paths for insert vs. insert overwrite on new tables.
+    ret = driver.run("DROP TABLE " + tableName);
+    Assert.assertEquals(0, ret.getResponseCode());
+    ret = driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 string)");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + tableName);
+    setPermission(warehouseDir + "/" + tableName);
+
+    ret = driver.run("insert overwrite table " + tableName + " partition (part1) select key,value,part1 from mysrc");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    verifySinglePartition(tableLoc, 0);
+  }
+
+  @Test
+  public void testPartition() throws Exception {
+    String tableName = "alterpart";
+    CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 int, part2 int, part3 int)");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + tableName);
+    setPermission(warehouseDir + "/" + tableName);
+
+    ret = driver.run("insert into table " + tableName + " partition(part1='1',part2='1',part3='1') select key,value from mysrc");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + tableName);
+    setPermission(warehouseDir + "/" + tableName, 1);
+
+    //alter partition
+    ret = driver.run("alter table " + tableName + " partition (part1='1',part2='1',part3='1') rename to partition (part1='2',part2='2',part3='2')");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    verifyPermission(warehouseDir + "/" + tableName + "/part1=2", 1);
+    verifyPermission(warehouseDir + "/" + tableName + "/part1=2/part2=2", 1);
+    verifyPermission(warehouseDir + "/" + tableName + "/part1=2/part2=2/part3=2", 1);
+
+    Assert.assertTrue(listStatus(warehouseDir + "/" + tableName + "/part1=2/part2=2/part3=2").size() > 0);
+    for (String child : listStatus(warehouseDir + "/" + tableName + "/part1=2/part2=2/part3=2")) {
+      verifyPermission(child, 1);
+    }
+
+    String tableName2 = "alterpart2";
+    ret = driver.run("CREATE TABLE " + tableName2 + " (key string, value string) partitioned by (part1 int, part2 int, part3 int)");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + tableName2);
+    setPermission(warehouseDir + "/" + tableName2);
+    ret = driver.run("alter table " + tableName2 + " exchange partition (part1='2',part2='2',part3='2') with table " + tableName);
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    //alter exchange cannot change the base table's permissions;
+    //it only controls the final partition folder's permissions
+    verifyPermission(warehouseDir + "/" + tableName2 + "/part1=2", 0);
+    verifyPermission(warehouseDir + "/" + tableName2 + "/part1=2/part2=2", 0);
+    verifyPermission(warehouseDir + "/" + tableName2 + "/part1=2/part2=2/part3=2", 1);
+  }
+
+  @Test
+  public void testExternalTable() throws Exception {
+    String tableName = "externaltable";
+
+    String myLocation = warehouseDir + "/myfolder";
+    FileSystem fs = FileSystem.get(new URI(myLocation), conf);
+    fs.mkdirs(new Path(myLocation));
+    setPermission(myLocation);
+
+    CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key string, value string) LOCATION '" + myLocation + "'");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    ret = driver.run("insert into table " + tableName + " select key,value from mysrc");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    Assert.assertTrue(listStatus(myLocation).size() > 0);
+    for (String child : listStatus(myLocation)) {
+      verifyPermission(child);
+    }
+  }
+
+  @Test
+  public void testLoadLocal() throws Exception {
+    //case 1 is non-partitioned table.
+    String tableName = "loadlocal";
+
+    CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key string, value string)");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    String tableLoc = warehouseDir + "/" + tableName;
+    assertExistence(warehouseDir + "/" + tableName);
+
+    //case1A: load data local into non-partitioned table.
+    setPermission(warehouseDir + "/" + tableName);
+
+    ret = driver.run("load data local inpath '" + dataFilePath + "' into table " + tableName);
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    Assert.assertTrue(listStatus(tableLoc).size() > 0);
+    for (String child : listStatus(tableLoc)) {
+      verifyPermission(child);
+    }
+
+    //case1B: load data local overwrite into non-partitioned table
+    setPermission(warehouseDir + "/" + tableName, 1);
+    for (String child : listStatus(tableLoc)) {
+      setPermission(child, 1);
+    }
+    ret = driver.run("load data local inpath '" + dataFilePath + "' overwrite into table " + tableName);
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    Assert.assertTrue(listStatus(tableLoc).size() > 0);
+    for (String child : listStatus(tableLoc)) {
+      verifyPermission(child, 1);
+    }
+
+    //case 2 is partitioned table.
+    tableName = "loadlocalpartition";
+
+    ret = driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 int, part2 int)");
+    Assert.assertEquals(0,ret.getResponseCode());
+    tableLoc = warehouseDir + "/" + tableName;
+    assertExistence(tableLoc);
+
+    //case 2A: load data local into partitioned table.
+    setPermission(tableLoc);
+    ret = driver.run("LOAD DATA LOCAL INPATH '" + dataFilePath + "' INTO TABLE " + tableName + " PARTITION (part1='1',part2='1')");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    String partLoc = warehouseDir + "/" + tableName + "/part1=1/part2=1";
+    Assert.assertTrue(listStatus(partLoc).size() > 0);
+    for (String child : listStatus(partLoc)) {
+      verifyPermission(child);
+    }
+
+    //case 2B: load data local overwrite into partitioned table; set the test table/partition folder hierarchy to permission set 1.
+    //local load overwrite only overwrites the existing partition content, not the permissions
+    setPermission(tableLoc, 1);
+    setPermission(partLoc, 1);
+    for (String child : listStatus(partLoc)) {
+      setPermission(child, 1);
+    }
+    ret = driver.run("LOAD DATA LOCAL INPATH '" + dataFilePath + "' OVERWRITE INTO TABLE " + tableName + " PARTITION (part1='1',part2='1')");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    Assert.assertTrue(listStatus(tableLoc).size() > 0);
+    for (String child : listStatus(partLoc)) {
+      verifyPermission(child, 1);
+    }
+  }
+
+  @Test
+  public void testLoad() throws Exception {
+    String tableName = "load";
+    String location = "/hdfsPath";
+    fs.copyFromLocalFile(dataFilePath, new Path(location));
+
+    //case 1: load data
+    CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key string, value string)");
+    Assert.assertEquals(0,ret.getResponseCode());
+    String tableLoc = warehouseDir + "/" + tableName;
+    assertExistence(warehouseDir + "/" + tableName);
+
+    //case1A: load data into non-partitioned table.
+    setPermission(warehouseDir + "/" + tableName);
+
+    ret = driver.run("load data inpath '" + location + "' into table " + tableName);
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    Assert.assertTrue(listStatus(tableLoc).size() > 0);
+    for (String child : listStatus(tableLoc)) {
+      verifyPermission(child);
+    }
+
+    //case1B: load data overwrite into non-partitioned table
+    setPermission(warehouseDir + "/" + tableName, 1);
+    for (String child : listStatus(tableLoc)) {
+      setPermission(child, 1);
+    }
+
+    fs.copyFromLocalFile(dataFilePath, new Path(location));
+    ret = driver.run("load data inpath '" + location + "' overwrite into table " + tableName);
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    Assert.assertTrue(listStatus(tableLoc).size() > 0);
+    for (String child : listStatus(tableLoc)) {
+      verifyPermission(child, 1);
+    }
+
+    //case 2 is partitioned table.
+    tableName = "loadpartition";
+
+    ret = driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned by (part1 int, part2 int)");
+    Assert.assertEquals(0,ret.getResponseCode());
+    tableLoc = warehouseDir + "/" + tableName;
+    assertExistence(tableLoc);
+
+    //case 2A: load data into partitioned table.
+    setPermission(tableLoc);
+    fs.copyFromLocalFile(dataFilePath, new Path(location));
+    ret = driver.run("LOAD DATA INPATH '" + location + "' INTO TABLE " + tableName + " PARTITION (part1='1',part2='1')");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    String partLoc = warehouseDir + "/" + tableName + "/part1=1/part2=1";
+    Assert.assertTrue(listStatus(partLoc).size() > 0);
+    for (String child : listStatus(partLoc)) {
+      verifyPermission(child);
+    }
+
+    //case 2B: load data overwrite into partitioned table; set the test table/partition folder hierarchy to permission set 1.
+    //load overwrite only overwrites the existing partition content, not the permissions
+    setPermission(tableLoc, 1);
+    setPermission(partLoc, 1);
+    Assert.assertTrue(listStatus(partLoc).size() > 0);
+    for (String child : listStatus(partLoc)) {
+      setPermission(child, 1);
+    }
+
+    fs.copyFromLocalFile(dataFilePath, new Path(location));
+    ret = driver.run("LOAD DATA INPATH '" + location + "' OVERWRITE INTO TABLE " + tableName + " PARTITION (part1='1',part2='1')");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    Assert.assertTrue(listStatus(tableLoc).size() > 0);
+    for (String child : listStatus(partLoc)) {
+      verifyPermission(child, 1);
+    }
+  }
+
+  @Test
+  public void testCtas() throws Exception {
+    String testDb = "ctasdb";
+    String tableName = "createtable";
+    CommandProcessorResponse ret = driver.run("CREATE DATABASE " + testDb);
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + testDb + ".db");
+    setPermission(warehouseDir + "/" + testDb + ".db");
+    verifyPermission(warehouseDir + "/" + testDb + ".db");
+
+    ret = driver.run("USE " + testDb);
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    ret = driver.run("create table " + tableName + " as select key,value from default.mysrc");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + testDb + ".db/" + tableName);
+    verifyPermission(warehouseDir + "/" + testDb + ".db/" + tableName);
+
+    Assert.assertTrue(listStatus(warehouseDir + "/" + testDb + ".db/" + tableName).size() > 0);
+    for (String child : listStatus(warehouseDir + "/" + testDb + ".db/" + tableName)) {
+      verifyPermission(child);
+    }
+
+    ret = driver.run("USE default");
+    Assert.assertEquals(0,ret.getResponseCode());
+  }
+
+  @Test
+  public void testExim() throws Exception {
+
+    //export the table to external file.
+    String myLocation = warehouseDir + "/exim";
+    FileSystem fs = FileSystem.get(new URI(myLocation), conf);
+    fs.mkdirs(new Path(myLocation));
+    setPermission(myLocation);
+    myLocation = myLocation + "/temp";
+
+    CommandProcessorResponse ret = driver.run("export table mysrc to '" + myLocation + "'");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    //check if exported data has inherited the permissions.
+    assertExistence(myLocation);
+    verifyPermission(myLocation);
+
+    assertExistence(myLocation + "/part1=1/part2=1");
+    verifyPermission(myLocation + "/part1=1/part2=1");
+    Assert.assertTrue(listStatus(myLocation + "/part1=1/part2=1").size() > 0);
+    for (String child : listStatus(myLocation + "/part1=1/part2=1")) {
+      verifyPermission(child);
+    }
+
+    assertExistence(myLocation + "/part1=2/part2=2");
+    verifyPermission(myLocation + "/part1=2/part2=2");
+    Assert.assertTrue(listStatus(myLocation + "/part1=2/part2=2").size() > 0);
+    for (String child : listStatus(myLocation + "/part1=2/part2=2")) {
+      verifyPermission(child);
+    }
+
+    //import the table back into another database
+    String testDb = "eximdb";
+    ret = driver.run("CREATE DATABASE " + testDb);
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    //use another permission for this import location, to verify that it is really set (permIndex=1)
+    assertExistence(warehouseDir + "/" + testDb + ".db");
+    setPermission(warehouseDir + "/" + testDb + ".db", 1);
+
+    ret = driver.run("USE " + testDb);
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    ret = driver.run("import from '" + myLocation + "'");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    //check the permissions of the imported table; they should be inherited from the new db, not the export
+    assertExistence(warehouseDir + "/" + testDb + ".db/mysrc");
+    verifyPermission(warehouseDir + "/" + testDb + ".db/mysrc", 1);
+
+    myLocation = warehouseDir + "/" + testDb + ".db/mysrc";
+    assertExistence(myLocation);
+    verifyPermission(myLocation, 1);
+
+    assertExistence(myLocation + "/part1=1/part2=1");
+    verifyPermission(myLocation + "/part1=1/part2=1", 1);
+    Assert.assertTrue(listStatus(myLocation + "/part1=1/part2=1").size() > 0);
+    for (String child : listStatus(myLocation + "/part1=1/part2=1")) {
+      verifyPermission(child, 1);
+    }
+
+    assertExistence(myLocation + "/part1=2/part2=2");
+    verifyPermission(myLocation + "/part1=2/part2=2", 1);
+    Assert.assertTrue(listStatus(myLocation + "/part1=2/part2=2").size() > 0);
+    for (String child : listStatus(myLocation + "/part1=2/part2=2")) {
+      verifyPermission(child, 1);
+    }
+  }
+
+  /**
+   * Tests that the permissions on the table do not change after truncation
+   * @throws Exception
+   */
+  @Test
+  public void testTruncateTable() throws Exception {
+    String tableName = "truncatetable";
+    String partition = warehouseDir + "/" + tableName + "/part1=1";
+
+    CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key STRING, value STRING) PARTITIONED BY (part1 INT)");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    setPermission(warehouseDir + "/" + tableName);
+
+    ret = driver.run("insert into table " + tableName + " partition(part1='1') select key,value from mysrc where part1='1' and part2='1'");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + tableName);
+
+    verifyPermission(warehouseDir + "/" + tableName);
+    verifyPermission(partition);
+
+    ret = driver.run("TRUNCATE TABLE " + tableName);
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + tableName);
+    verifyPermission(warehouseDir + "/" + tableName);
+
+    ret = driver.run("insert into table " + tableName + " partition(part1='1') select key,value from mysrc where part1='1' and part2='1'");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    verifyPermission(warehouseDir + "/" + tableName);
+
+    assertExistence(partition);
+    verifyPermission(partition);
+
+    // Also test the partition folder when the partition is truncated
+    ret = driver.run("TRUNCATE TABLE " + tableName + " partition(part1='1')");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    assertExistence(partition);
+    verifyPermission(partition);
+  }
+
+  private void setSinglePartition(String tableLoc, int index) throws Exception {
+    setPermission(tableLoc + "/part1=1", index);
+    setPermission(tableLoc + "/part1=2", index);
+  }
+
+  private void verifySinglePartition(String tableLoc, int index) throws Exception {
+    verifyPermission(tableLoc + "/part1=1", index);
+    verifyPermission(tableLoc + "/part1=2", index);
+
+    Assert.assertTrue(listStatus(tableLoc + "/part1=1").size() > 0);
+    for (String child : listStatus(tableLoc + "/part1=1")) {
+      verifyPermission(child, index);
+    }
+
+    Assert.assertTrue(listStatus(tableLoc + "/part1=2").size() > 0);
+    for (String child : listStatus(tableLoc + "/part1=2")) {
+      verifyPermission(child, index);
+    }
+  }
+
+  private void setDualPartitionTable(String baseTablePath, int index) throws Exception {
+    setPermission(baseTablePath, index);
+    setPermission(baseTablePath + "/part1=1", index);
+    setPermission(baseTablePath + "/part1=1/part2=1", index);
+
+    setPermission(baseTablePath + "/part1=2", index);
+    setPermission(baseTablePath + "/part1=2/part2=2", index);
+  }
+
+  private void verifyDualPartitionTable(String baseTablePath, int index) throws Exception {
+    verifyPermission(baseTablePath, index);
+    verifyPermission(baseTablePath + "/part1=1", index);
+    verifyPermission(baseTablePath + "/part1=1/part2=1", index);
+
+    verifyPermission(baseTablePath + "/part1=2", index);
+    verifyPermission(baseTablePath + "/part1=2/part2=2", index);
+
+    Assert.assertTrue(listStatus(baseTablePath + "/part1=1/part2=1").size() > 0);
+    for (String child : listStatus(baseTablePath + "/part1=1/part2=1")) {
+      verifyPermission(child, index);
+    }
+
+    Assert.assertTrue(listStatus(baseTablePath + "/part1=2/part2=2").size() > 0);
+    for (String child : listStatus(baseTablePath + "/part1=2/part2=2")) {
+      verifyPermission(child, index);
+    }
+  }
+
+  private void assertExistence(String locn) throws Exception {
+    Assert.assertTrue(fs.exists(new Path(locn)));
+  }
+
+  private List<String> listStatus(String locn) throws Exception {
+    List<String> results = new ArrayList<String>();
+    FileStatus[] listStatus = fs.listStatus(new Path(locn), hiddenFileFilter);
+    for (FileStatus status : listStatus) {
+      results.add(status.getPath().toString());
+    }
+    return results;
+  }
+}
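
For context, here is a minimal sketch (not part of this commit; the class and path names are illustrative) of the behavior FolderPermissionBase exercises. With hive.warehouse.subdir.inherit.perms=true, Hive explicitly chmods each directory it creates under the warehouse to match its parent, since a plain mkdirs() applies the default umask rather than inheriting the parent's mode:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class InheritPermsSketch {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration()); // any HDFS/MiniDFS instance
    Path parent = new Path("/tmp/warehouse-sketch");     // illustrative path
    Path child = new Path(parent, "mytable");
    fs.mkdirs(parent);
    fs.setPermission(parent, FsPermission.createImmutable((short) 0777));
    fs.mkdirs(child);
    // mkdirs() alone does not inherit the parent's mode; Hive performs the
    // equivalent of this explicit chmod when inherit.perms is on:
    fs.setPermission(child, fs.getFileStatus(parent).getPermission());
    System.out.println(fs.getFileStatus(child).getPermission()
        .equals(fs.getFileStatus(parent).getPermission())); // true
  }
}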

http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestFolderPermissions.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestFolderPermissions.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestFolderPermissions.java
new file mode 100644
index 0000000..6cc2d18
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestFolderPermissions.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestFolderPermissions extends FolderPermissionBase {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    conf = new HiveConf(TestFolderPermissions.class);
+    conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");
+    baseSetup();
+  }
+
+  public FsPermission[] expected = new FsPermission[] {
+     FsPermission.createImmutable((short) 0777),
+     FsPermission.createImmutable((short) 0766)
+  };
+
+  @Override
+  public void setPermission(String locn, int permIndex) throws Exception {
+    fs.setPermission(new Path(locn), expected[permIndex]);
+  }
+
+  @Override
+  public void verifyPermission(String locn, int permIndex) throws Exception {
+    FsPermission actual =  fs.getFileStatus(new Path(locn)).getPermission();
+    Assert.assertEquals(expected[permIndex], actual);
+  }
+}
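
The expected[] array above builds permissions from octal short literals, while the storage-based authorization tests below pass ls-style strings. A small sketch (not part of this commit; the class name is illustrative) showing that the two notations produce the same FsPermission:

import org.apache.hadoop.fs.permission.FsPermission;

public class PermNotationSketch {
  public static void main(String[] args) {
    FsPermission fromOctal = FsPermission.createImmutable((short) 0777);
    // valueOf() parses an ls-style string, leading file-type character included.
    FsPermission fromSymbolic = FsPermission.valueOf("-rwxrwxrwx");
    System.out.println(fromOctal.equals(fromSymbolic));                       // true
    // 0766, the second expected[] entry, is rwxrw-rw- in symbolic form.
    System.out.println(FsPermission.valueOf("-rwxrw-rw-").toShort() == 0766); // true
  }
}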

http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
new file mode 100644
index 0000000..bb65ee7
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
@@ -0,0 +1,205 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
+import org.apache.hadoop.hive.shims.Utils;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Test cases focusing on drop table permission checks
+ */
+public class TestStorageBasedMetastoreAuthorizationDrops extends StorageBasedMetastoreTestBase {
+
+  protected static MiniDFSShim dfs = null;
+
+  @Override
+  protected HiveConf createHiveConf() throws Exception {
+    // Hadoop FS ACLs do not work with LocalFileSystem, so set up MiniDFS.
+    HiveConf conf = super.createHiveConf();
+
+    String currentUserName = Utils.getUGI().getShortUserName();
+    conf.set("hadoop.proxyuser." + currentUserName + ".groups", "*");
+    conf.set("hadoop.proxyuser." + currentUserName + ".hosts", "*");
+    dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null);
+    FileSystem fs = dfs.getFileSystem();
+
+    Path warehouseDir = new Path(new Path(fs.getUri()), "/warehouse");
+    fs.mkdirs(warehouseDir);
+    conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, warehouseDir.toString());
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS, true);
+
+    // Set up scratch directory
+    Path scratchDir = new Path(new Path(fs.getUri()), "/scratchdir");
+    conf.setVar(HiveConf.ConfVars.SCRATCHDIR, scratchDir.toString());
+
+    return conf;
+  }
+
+  @Override
+  public void tearDown() throws Exception {
+    super.tearDown();
+
+    if (dfs != null) {
+      dfs.shutdown();
+      dfs = null;
+    }
+  }
+
+  @Test
+  public void testDropDatabase() throws Exception {
+    dropDatabaseByOtherUser("-rwxrwxrwx", 0);
+    dropDatabaseByOtherUser("-rwxrwxrwt", 1);
+  }
+
+  /**
+   * Creates a db and tries to drop it as the 'other' user
+   * @param perm - permission for warehouse dir
+   * @param expectedRet - expected return code for drop by other user
+   * @throws Exception
+   */
+  public void dropDatabaseByOtherUser(String perm, int expectedRet) throws Exception {
+    String dbName = getTestDbName();
+    setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), perm);
+
+    CommandProcessorResponse resp = driver.run("create database " + dbName);
+    Assert.assertEquals(0, resp.getResponseCode());
+    Database db = msc.getDatabase(dbName);
+    validateCreateDb(db, dbName);
+
+    InjectableDummyAuthenticator.injectMode(true);
+
+
+    resp = driver.run("drop database " + dbName);
+    Assert.assertEquals(expectedRet, resp.getResponseCode());
+
+  }
+
+  @Test
+  public void testDropTable() throws Exception {
+    dropTableByOtherUser("-rwxrwxrwx", 0);
+    dropTableByOtherUser("-rwxrwxrwt", 1);
+  }
+
+  /**
+   * @param perm dir permission for database dir
+   * @param expectedRet expected return code on drop table
+   * @throws Exception
+   */
+  public void dropTableByOtherUser(String perm, int expectedRet) throws Exception {
+    String dbName = getTestDbName();
+    String tblName = getTestTableName();
+    setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
+
+    CommandProcessorResponse resp = driver.run("create database " + dbName);
+    Assert.assertEquals(0, resp.getResponseCode());
+    Database db = msc.getDatabase(dbName);
+    validateCreateDb(db, dbName);
+
+    setPermissions(db.getLocationUri(), perm);
+
+    String dbDotTable = dbName + "." + tblName;
+    resp = driver.run("create table " + dbDotTable + "(i int)");
+    Assert.assertEquals(0, resp.getResponseCode());
+
+
+    InjectableDummyAuthenticator.injectMode(true);
+    resp = driver.run("drop table " + dbDotTable);
+    Assert.assertEquals(expectedRet, resp.getResponseCode());
+  }
+
+  /**
+   * Drop view should not be blocked by SBA, since a view has no location to drop.
+   * @throws Exception
+   */
+  @Test
+  public void testDropView() throws Exception {
+    String dbName = getTestDbName();
+    String tblName = getTestTableName();
+    String viewName = "view" + tblName;
+    setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
+
+    CommandProcessorResponse resp = driver.run("create database " + dbName);
+    Assert.assertEquals(0, resp.getResponseCode());
+    Database db = msc.getDatabase(dbName);
+    validateCreateDb(db, dbName);
+
+    setPermissions(db.getLocationUri(), "-rwxrwxrwt");
+
+    String dbDotTable = dbName + "." + tblName;
+    resp = driver.run("create table " + dbDotTable + "(i int)");
+    Assert.assertEquals(0, resp.getResponseCode());
+
+    String dbDotView = dbName + "." + viewName;
+    resp = driver.run("create view " + dbDotView + " as select * from " +  dbDotTable);
+    Assert.assertEquals(0, resp.getResponseCode());
+
+    resp = driver.run("drop view " + dbDotView);
+    Assert.assertEquals(0, resp.getResponseCode());
+
+    resp = driver.run("drop table " + dbDotTable);
+    Assert.assertEquals(0, resp.getResponseCode());
+  }
+
+  @Test
+  public void testDropPartition() throws Exception {
+    dropPartitionByOtherUser("-rwxrwxrwx", 0);
+    dropPartitionByOtherUser("-rwxrwxrwt", 1);
+  }
+
+  /**
+   * @param perm permissions for table dir
+   * @param expectedRet expected return code
+   * @throws Exception
+   */
+  public void dropPartitionByOtherUser(String perm, int expectedRet) throws Exception {
+    String dbName = getTestDbName();
+    String tblName = getTestTableName();
+    setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
+
+    CommandProcessorResponse resp = driver.run("create database " + dbName);
+    Assert.assertEquals(0, resp.getResponseCode());
+    Database db = msc.getDatabase(dbName);
+    validateCreateDb(db, dbName);
+    setPermissions(db.getLocationUri(), "-rwxrwxrwx");
+
+    String dbDotTable = dbName + "." + tblName;
+    resp = driver.run("create table " + dbDotTable + "(i int) partitioned by (b string)");
+    Assert.assertEquals(0, resp.getResponseCode());
+    Table tab = msc.getTable(dbName, tblName);
+    setPermissions(tab.getSd().getLocation(), perm);
+
+    resp = driver.run("alter table " + dbDotTable + " add partition (b='2011')");
+    Assert.assertEquals(0, resp.getResponseCode());
+
+    InjectableDummyAuthenticator.injectMode(true);
+    resp = driver.run("alter table " + dbDotTable + " drop partition (b='2011')");
+    Assert.assertEquals(expectedRet, resp.getResponseCode());
+  }
+
+}
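
The two permission strings driving these drop tests differ only in their last character: the trailing 't' in "-rwxrwxrwt" sets the sticky bit, under which only the owner of an entry may delete it, so the drop attempted by the injected 'other' user is rejected. A sketch (not part of this commit; the class name is illustrative) of how that bit reads out of FsPermission:

import org.apache.hadoop.fs.permission.FsPermission;

public class StickyBitSketch {
  public static void main(String[] args) {
    // World-writable, no sticky bit: the 'other' user may delete children,
    // so the drop succeeds (expectedRet 0).
    System.out.println(FsPermission.valueOf("-rwxrwxrwx").getStickyBit()); // false
    // Sticky bit set: deletion is restricted to the owner,
    // so the same drop fails (expectedRet 1).
    System.out.println(FsPermission.valueOf("-rwxrwxrwt").getStickyBit()); // true
  }
}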

http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationReads.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationReads.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationReads.java
new file mode 100644
index 0000000..ea631d2
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationReads.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Test cases focusing on read table permission checks
+ */
+public class TestStorageBasedMetastoreAuthorizationReads extends StorageBasedMetastoreTestBase {
+
+  @Test
+  public void testReadTableSuccess() throws Exception {
+    readTableByOtherUser("-rwxrwxrwx", true);
+  }
+
+  @Test
+  public void testReadTableSuccessWithReadOnly() throws Exception {
+    readTableByOtherUser("-r--r--r--", true);
+  }
+
+  @Test
+  public void testReadTableFailure() throws Exception {
+    readTableByOtherUser("-rwxrwx---", false);
+  }
+
+  /**
+   * @param perm dir permission for table dir
+   * @param isSuccess whether the command is expected to succeed
+   * @throws Exception
+   */
+  private void readTableByOtherUser(String perm, boolean isSuccess) throws Exception {
+    String dbName = getTestDbName();
+    String tblName = getTestTableName();
+    setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
+
+    CommandProcessorResponse resp = driver.run("create database " + dbName);
+    Assert.assertEquals(0, resp.getResponseCode());
+    Database db = msc.getDatabase(dbName);
+    validateCreateDb(db, dbName);
+
+    setPermissions(db.getLocationUri(), "-rwxrwxrwx");
+
+    String dbDotTable = dbName + "." + tblName;
+    resp = driver.run("create table " + dbDotTable + "(i int) partitioned by (`date` string)");
+    Assert.assertEquals(0, resp.getResponseCode());
+    Table tab = msc.getTable(dbName, tblName);
+    setPermissions(tab.getSd().getLocation(), perm);
+
+    InjectableDummyAuthenticator.injectMode(true);
+
+    testCmd(driver, "DESCRIBE  " + dbDotTable, isSuccess);
+    testCmd(driver, "DESCRIBE EXTENDED  " + dbDotTable, isSuccess);
+    testCmd(driver, "SHOW PARTITIONS  " + dbDotTable, isSuccess);
+    testCmd(driver, "SHOW COLUMNS IN " + tblName + " IN " + dbName, isSuccess);
+    testCmd(driver, "use " + dbName, true);
+    testCmd(driver, "SHOW TABLE EXTENDED LIKE " + tblName, isSuccess);
+
+  }
+
+  @Test
+  public void testReadDbSuccess() throws Exception {
+    readDbByOtherUser("-rwxrwxrwx", true);
+  }
+
+  @Test
+  public void testReadDbFailure() throws Exception {
+    readDbByOtherUser("-rwxrwx---", false);
+  }
+
+
+  /**
+   * @param perm dir permission for database dir
+   * @param isSuccess whether the command is expected to succeed
+   * @throws Exception
+   */
+  private void readDbByOtherUser(String perm, boolean isSuccess) throws Exception {
+    String dbName = getTestDbName();
+    setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), perm);
+
+    CommandProcessorResponse resp = driver.run("create database " + dbName);
+    Assert.assertEquals(0, resp.getResponseCode());
+    Database db = msc.getDatabase(dbName);
+    validateCreateDb(db, dbName);
+    setPermissions(db.getLocationUri(), perm);
+
+    InjectableDummyAuthenticator.injectMode(true);
+
+    testCmd(driver, "DESCRIBE DATABASE " + dbName, isSuccess);
+    testCmd(driver, "DESCRIBE DATABASE EXTENDED " + dbName, isSuccess);
+    testCmd(driver, "SHOW TABLES IN " + dbName, isSuccess);
+    driver.run("use " + dbName);
+    testCmd(driver, "SHOW TABLES ", isSuccess);
+
+  }
+
+  private void testCmd(Driver driver, String cmd, boolean isSuccess)
+      throws CommandNeedRetryException {
+    CommandProcessorResponse resp = driver.run(cmd);
+    Assert.assertEquals(isSuccess, resp.getResponseCode() == 0);
+  }
+
+
+}
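
The read tests above hinge on the 'other' class of the directory's mode bits. A simplified sketch (not part of this commit; the real storage-based check also considers ownership and group membership) of the decision those permission strings encode for a different-user read:

import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

public class ReadCheckSketch {
  static boolean otherCanRead(String lsPerm) {
    // For a user who is neither the owner nor a group member, the 'other'
    // bits decide; READ is what the metadata commands above require.
    return FsPermission.valueOf(lsPerm).getOtherAction().implies(FsAction.READ);
  }

  public static void main(String[] args) {
    System.out.println(otherCanRead("-rwxrwxrwx")); // true  -> commands succeed
    System.out.println(otherCanRead("-r--r--r--")); // true  -> read-only succeeds
    System.out.println(otherCanRead("-rwxrwx---")); // false -> commands fail
  }
}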

http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
index e0c05bd..66ed8ca 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
@@ -659,17 +659,17 @@ public class TestCompactor {
       Path resultFile = null;
       for (int i = 0; i < names.length; i++) {
         names[i] = stat[i].getPath().getName();
-        if (names[i].equals("delta_0000003_0000006")) {
+        if (names[i].equals("delta_0000001_0000004")) {
           resultFile = stat[i].getPath();
         }
       }
       Arrays.sort(names);
-      String[] expected = new String[]{"delta_0000003_0000004",
-          "delta_0000003_0000006", "delta_0000005_0000006", "delta_0000007_0000008"};
+      String[] expected = new String[]{"delta_0000001_0000002",
+          "delta_0000001_0000004", "delta_0000003_0000004", "delta_0000005_0000006"};
       if (!Arrays.deepEquals(expected, names)) {
         Assert.fail("Expected: " + Arrays.toString(expected) + ", found: " + Arrays.toString(names));
       }
-      checkExpectedTxnsPresent(null, new Path[]{resultFile},columnNamesProperty, columnTypesProperty,  0, 3L, 6L);
+      checkExpectedTxnsPresent(null, new Path[]{resultFile},columnNamesProperty, columnTypesProperty,  0, 1L, 4L);
 
     } finally {
       connection.close();
@@ -718,11 +718,11 @@ public class TestCompactor {
       FileStatus[] stat =
           fs.listStatus(new Path(table.getSd().getLocation()), AcidUtils.baseFileFilter);
       if (1 != stat.length) {
-        Assert.fail("Expecting 1 file \"base_0000006\" and found " + stat.length + " files " + Arrays.toString(stat));
+        Assert.fail("Expecting 1 file \"base_0000004\" and found " + stat.length + " files " + Arrays.toString(stat));
       }
       String name = stat[0].getPath().getName();
-      Assert.assertEquals(name, "base_0000006");
-      checkExpectedTxnsPresent(stat[0].getPath(), null, columnNamesProperty, columnTypesProperty, 0, 3L, 6L);
+      Assert.assertEquals(name, "base_0000004");
+      checkExpectedTxnsPresent(stat[0].getPath(), null, columnNamesProperty, columnTypesProperty, 0, 1L, 4L);
     } finally {
       connection.close();
     }
@@ -778,17 +778,17 @@ public class TestCompactor {
       Path resultDelta = null;
       for (int i = 0; i < names.length; i++) {
         names[i] = stat[i].getPath().getName();
-        if (names[i].equals("delta_0000003_0000006")) {
+        if (names[i].equals("delta_0000001_0000004")) {
           resultDelta = stat[i].getPath();
         }
       }
       Arrays.sort(names);
-      String[] expected = new String[]{"delta_0000003_0000004",
-          "delta_0000003_0000006", "delta_0000005_0000006"};
+      String[] expected = new String[]{"delta_0000001_0000002",
+          "delta_0000001_0000004", "delta_0000003_0000004"};
       if (!Arrays.deepEquals(expected, names)) {
         Assert.fail("Expected: " + Arrays.toString(expected) + ", found: " + Arrays.toString(names));
       }
-      checkExpectedTxnsPresent(null, new Path[]{resultDelta}, columnNamesProperty, columnTypesProperty, 0, 3L, 6L);
+      checkExpectedTxnsPresent(null, new Path[]{resultDelta}, columnNamesProperty, columnTypesProperty, 0, 1L, 4L);
     } finally {
       connection.close();
     }
@@ -844,13 +844,13 @@ public class TestCompactor {
         Assert.fail("majorCompactAfterAbort FileStatus[] stat " + Arrays.toString(stat));
       }
       if (1 != stat.length) {
-        Assert.fail("Expecting 1 file \"base_0000006\" and found " + stat.length + " files " + Arrays.toString(stat));
+        Assert.fail("Expecting 1 file \"base_0000004\" and found " + stat.length + " files " + Arrays.toString(stat));
       }
       String name = stat[0].getPath().getName();
-      if (!name.equals("base_0000006")) {
-        Assert.fail("majorCompactAfterAbort name " + name + " not equals to base_0000006");
+      if (!name.equals("base_0000004")) {
+        Assert.fail("majorCompactAfterAbort name " + name + " not equals to base_0000004");
       }
-      checkExpectedTxnsPresent(stat[0].getPath(), null, columnNamesProperty, columnTypesProperty, 0, 3L, 6L);
+      checkExpectedTxnsPresent(stat[0].getPath(), null, columnNamesProperty, columnTypesProperty, 0, 1L, 4L);
     } finally {
       connection.close();
     }
@@ -899,11 +899,11 @@ public class TestCompactor {
       FileStatus[] stat =
           fs.listStatus(new Path(table.getSd().getLocation()), AcidUtils.baseFileFilter);
       if (1 != stat.length) {
-        Assert.fail("Expecting 1 file \"base_0000006\" and found " + stat.length + " files " + Arrays.toString(stat));
+        Assert.fail("Expecting 1 file \"base_0000004\" and found " + stat.length + " files " + Arrays.toString(stat));
       }
       String name = stat[0].getPath().getName();
-      Assert.assertEquals(name, "base_0000006");
-      checkExpectedTxnsPresent(stat[0].getPath(), null, columnNamesProperty, columnTypesProperty, 0, 3L, 6L);
+      Assert.assertEquals(name, "base_0000004");
+      checkExpectedTxnsPresent(stat[0].getPath(), null, columnNamesProperty, columnTypesProperty, 0, 1L, 4L);
     } finally {
       connection.close();
     }
@@ -923,18 +923,18 @@ public class TestCompactor {
         " STORED AS ORC  TBLPROPERTIES ('transactional'='true',"
         + "'transactional_properties'='default')", driver);
 
-    // Insert some data -> this will generate only insert deltas and no delete deltas: delta_3_3
+    // Insert some data -> this will generate only insert deltas and no delete deltas: delta_1_1
     executeStatementOnDriver("INSERT INTO " + tblName +"(a,b) VALUES(1, 'foo')", driver);
 
-    // Insert some data -> this will again generate only insert deltas and no delete deltas: delta_4_4
+    // Insert some data -> this will again generate only insert deltas and no delete deltas: delta_2_2
     executeStatementOnDriver("INSERT INTO " + tblName +"(a,b) VALUES(2, 'bar')", driver);
 
-    // Delete some data -> this will generate only delete deltas and no insert deltas: delete_delta_5_5
+    // Delete some data -> this will generate only delete deltas and no insert deltas: delete_delta_3_3
     executeStatementOnDriver("DELETE FROM " + tblName +" WHERE a = 2", driver);
 
     // Now, compact -> Compaction produces a single range for both delta and delete delta
-    // That is, both deltas and delete_deltas would be compacted into delta_3_5 and delete_delta_3_5,
-    // even though there are only two insert deltas (delta_3_3, delta_4_4) and one delete delta (delete_delta_5_5).
+    // That is, both deltas and delete_deltas would be compacted into delta_1_3 and delete_delta_1_3,
+    // even though there are only two insert deltas (delta_1_1, delta_2_2) and one delete delta (delete_delta_3_3).
     TxnStore txnHandler = TxnUtils.getTxnStore(conf);
     txnHandler.compact(new CompactionRequest(dbName, tblName, CompactionType.MINOR));
     Worker t = new Worker();
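
The renumbering in this hunk follows the ACID directory-naming scheme the comments describe: each committed write creates delta_<min>_<max> (insert data) or delete_delta_<min>_<max> (delete events) with zero-padded transaction ids, single-transaction writes additionally carry a statement-id suffix such as _0000, and minor compaction merges a contiguous range into one directory spanning the lowest and highest ids. A minimal sketch of that naming pattern, assuming the 7-digit padding visible in the expected names above (formatDelta is a hypothetical helper, not Hive's actual naming API):

    // Hypothetical helper mirroring the delta-name pattern asserted in this
    // test; Hive's real naming logic lives elsewhere (e.g. AcidUtils).
    static String formatDelta(String prefix, long minTxn, long maxTxn) {
      return String.format("%s_%07d_%07d", prefix, minTxn, maxTxn);
    }

    // Three single-transaction writes, then a minor compaction of txns 1..3:
    //   formatDelta("delta", 1, 1)        -> delta_0000001_0000001
    //   formatDelta("delta", 2, 2)        -> delta_0000002_0000002
    //   formatDelta("delete_delta", 3, 3) -> delete_delta_0000003_0000003
    //   formatDelta("delta", 1, 3)        -> delta_0000001_0000003  (compacted)
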
@@ -957,16 +957,16 @@ public class TestCompactor {
     Path minorCompactedDelta = null;
     for (int i = 0; i < deltas.length; i++) {
       deltas[i] = stat[i].getPath().getName();
-      if (deltas[i].equals("delta_0000003_0000005")) {
+      if (deltas[i].equals("delta_0000001_0000003")) {
         minorCompactedDelta = stat[i].getPath();
       }
     }
     Arrays.sort(deltas);
-    String[] expectedDeltas = new String[]{"delta_0000003_0000003_0000", "delta_0000003_0000005", "delta_0000004_0000004_0000"};
+    String[] expectedDeltas = new String[]{"delta_0000001_0000001_0000", "delta_0000001_0000003", "delta_0000002_0000002_0000"};
     if (!Arrays.deepEquals(expectedDeltas, deltas)) {
       Assert.fail("Expected: " + Arrays.toString(expectedDeltas) + ", found: " + Arrays.toString(deltas));
     }
-    checkExpectedTxnsPresent(null, new Path[]{minorCompactedDelta}, columnNamesProperty, columnTypesProperty, 0, 3L, 4L);
+    checkExpectedTxnsPresent(null, new Path[]{minorCompactedDelta}, columnNamesProperty, columnTypesProperty, 0, 1L, 2L);
 
     // Verify that we have got correct set of delete_deltas.
     FileStatus[] deleteDeltaStat =
@@ -975,16 +975,16 @@ public class TestCompactor {
     Path minorCompactedDeleteDelta = null;
     for (int i = 0; i < deleteDeltas.length; i++) {
       deleteDeltas[i] = deleteDeltaStat[i].getPath().getName();
-      if (deleteDeltas[i].equals("delete_delta_0000003_0000005")) {
+      if (deleteDeltas[i].equals("delete_delta_0000001_0000003")) {
         minorCompactedDeleteDelta = deleteDeltaStat[i].getPath();
       }
     }
     Arrays.sort(deleteDeltas);
-    String[] expectedDeleteDeltas = new String[]{"delete_delta_0000003_0000005", "delete_delta_0000005_0000005_0000"};
+    String[] expectedDeleteDeltas = new String[]{"delete_delta_0000001_0000003", "delete_delta_0000003_0000003_0000"};
     if (!Arrays.deepEquals(expectedDeleteDeltas, deleteDeltas)) {
       Assert.fail("Expected: " + Arrays.toString(expectedDeleteDeltas) + ", found: " + Arrays.toString(deleteDeltas));
     }
-    checkExpectedTxnsPresent(null, new Path[]{minorCompactedDeleteDelta}, columnNamesProperty, columnTypesProperty, 0, 4L, 4L);
+    checkExpectedTxnsPresent(null, new Path[]{minorCompactedDeleteDelta}, columnNamesProperty, columnTypesProperty, 0, 2L, 2L);
   }
 
   @Test
@@ -1034,16 +1034,16 @@ public class TestCompactor {
     Path minorCompactedDelta = null;
     for (int i = 0; i < deltas.length; i++) {
       deltas[i] = stat[i].getPath().getName();
-      if (deltas[i].equals("delta_0000003_0000004")) {
+      if (deltas[i].equals("delta_0000001_0000002")) {
         minorCompactedDelta = stat[i].getPath();
       }
     }
     Arrays.sort(deltas);
-    String[] expectedDeltas = new String[]{"delta_0000003_0000003_0000", "delta_0000003_0000004", "delta_0000004_0000004_0000"};
+    String[] expectedDeltas = new String[]{"delta_0000001_0000001_0000", "delta_0000001_0000002", "delta_0000002_0000002_0000"};
     if (!Arrays.deepEquals(expectedDeltas, deltas)) {
       Assert.fail("Expected: " + Arrays.toString(expectedDeltas) + ", found: " + Arrays.toString(deltas));
     }
-    checkExpectedTxnsPresent(null, new Path[]{minorCompactedDelta}, columnNamesProperty, columnTypesProperty, 0, 3L, 4L);
+    checkExpectedTxnsPresent(null, new Path[]{minorCompactedDelta}, columnNamesProperty, columnTypesProperty, 0, 1L, 2L);
 
     // Verify that we have got correct set of delete_deltas.
     FileStatus[] deleteDeltaStat =
@@ -1052,12 +1052,12 @@ public class TestCompactor {
     Path minorCompactedDeleteDelta = null;
     for (int i = 0; i < deleteDeltas.length; i++) {
       deleteDeltas[i] = deleteDeltaStat[i].getPath().getName();
-      if (deleteDeltas[i].equals("delete_delta_0000003_0000004")) {
+      if (deleteDeltas[i].equals("delete_delta_0000001_0000002")) {
         minorCompactedDeleteDelta = deleteDeltaStat[i].getPath();
       }
     }
     Arrays.sort(deleteDeltas);
-    String[] expectedDeleteDeltas = new String[]{"delete_delta_0000003_0000004"};
+    String[] expectedDeleteDeltas = new String[]{"delete_delta_0000001_0000002"};
     if (!Arrays.deepEquals(expectedDeleteDeltas, deleteDeltas)) {
       Assert.fail("Expected: " + Arrays.toString(expectedDeleteDeltas) + ", found: " + Arrays.toString(deleteDeltas));
     }
@@ -1111,17 +1111,17 @@ public class TestCompactor {
       Path resultFile = null;
       for (int i = 0; i < names.length; i++) {
         names[i] = stat[i].getPath().getName();
-        if (names[i].equals("delta_0000003_0000006")) {
+        if (names[i].equals("delta_0000001_0000004")) {
           resultFile = stat[i].getPath();
         }
       }
       Arrays.sort(names);
-      String[] expected = new String[]{"delta_0000003_0000004",
-          "delta_0000003_0000006", "delta_0000005_0000006", "delta_0000007_0000008"};
+      String[] expected = new String[]{"delta_0000001_0000002",
+          "delta_0000001_0000004", "delta_0000003_0000004", "delta_0000005_0000006"};
       if (!Arrays.deepEquals(expected, names)) {
         Assert.fail("Expected: " + Arrays.toString(expected) + ", found: " + Arrays.toString(names));
       }
-      checkExpectedTxnsPresent(null, new Path[]{resultFile},columnNamesProperty, columnTypesProperty,  0, 3L, 6L);
+      checkExpectedTxnsPresent(null, new Path[]{resultFile},columnNamesProperty, columnTypesProperty,  0, 1L, 4L);
 
       // Verify that we have got correct set of delete_deltas also
       FileStatus[] deleteDeltaStat =
@@ -1130,12 +1130,12 @@ public class TestCompactor {
       Path minorCompactedDeleteDelta = null;
       for (int i = 0; i < deleteDeltas.length; i++) {
         deleteDeltas[i] = deleteDeltaStat[i].getPath().getName();
-        if (deleteDeltas[i].equals("delete_delta_0000003_0000006")) {
+        if (deleteDeltas[i].equals("delete_delta_0000001_0000004")) {
           minorCompactedDeleteDelta = deleteDeltaStat[i].getPath();
         }
       }
       Arrays.sort(deleteDeltas);
-      String[] expectedDeleteDeltas = new String[]{"delete_delta_0000003_0000006"};
+      String[] expectedDeleteDeltas = new String[]{"delete_delta_0000001_0000004"};
       if (!Arrays.deepEquals(expectedDeleteDeltas, deleteDeltas)) {
         Assert.fail("Expected: " + Arrays.toString(expectedDeleteDeltas) + ", found: " + Arrays.toString(deleteDeltas));
       }
@@ -1332,16 +1332,6 @@ public class TestCompactor {
       public boolean isValidBase(long txnid) {
         return true;
       }
-
-      @Override
-      public boolean isTxnAborted(long txnid) {
-        return true;
-      }
-
-      @Override
-      public RangeResponse isTxnRangeAborted(long minTxnId, long maxTxnId) {
-        return RangeResponse.ALL;
-      }
     };
 
     OrcInputFormat aif = new OrcInputFormat();

http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
index 75f46ec..650c4b7 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
@@ -1069,17 +1069,4 @@ public class TestBeeLineWithArgs {
       this.shouldMatch = shouldMatch;
     }
   }
-
-  /**
-   * Test that Beeline can handle \\ characters within a string literal. Either at the beginning, middle, or end of the
-   * literal.
-   */
-  @Test
-  public void testBackslashInLiteral() throws Throwable {
-    String SCRIPT_TEXT = "select 'hello\\\\', '\\\\hello', 'hel\\\\lo', '\\\\' as literal;";
-    final String EXPECTED_PATTERN = "hello\\\\\t\\\\hello\thel\\\\lo\t\\\\";
-    List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
-    argList.add("--outputformat=tsv2");
-    testScriptFile(SCRIPT_TEXT, argList, EXPECTED_PATTERN, true);
-  }
 }
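
For context on the test removed above: it exercised two stacked escape layers. Java source escaping turns the four characters \\\\ into the two characters \\ in the SQL text sent to the server, and HiveQL string-literal unescaping then reduces \\ to a single backslash in the query result. A small sketch of the two layers (values shown in comments):

    // Layer 1: Java escaping. The SQL text Hive receives is: select 'hello\\'
    String script = "select 'hello\\\\' as literal;";
    // Layer 2: HiveQL literal unescaping. 'hello\\' evaluates to the
    // six-character value hello\ which is what the tsv2 output contained.
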

http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java
index 604c234..22630b9 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java
@@ -36,7 +36,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaException;
 import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfo;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hive.beeline.HiveSchemaHelper.NestedScriptParser;
 import org.apache.hive.beeline.HiveSchemaHelper.PostgresCommandParser;
 
@@ -58,10 +57,6 @@ public class TestSchemaTool extends TestCase {
     hiveConf = new HiveConf(this.getClass());
     schemaTool = new HiveSchemaTool(
         System.getProperty("test.tmp.dir", "target/tmp"), hiveConf, "derby");
-    schemaTool.setUserName(
-        schemaTool.getHiveConf().get(HiveConf.ConfVars.METASTORE_CONNECTION_USER_NAME.varname));
-    schemaTool.setPassWord(ShimLoader.getHadoopShims().getPassword(schemaTool.getHiveConf(),
-          HiveConf.ConfVars.METASTOREPWD.varname));
     System.setProperty("beeLine.system.exit", "true");
     errStream = System.err;
     outStream = System.out;
@@ -125,8 +120,8 @@ public class TestSchemaTool extends TestCase {
     boolean isValid = (boolean)schemaTool.validateSchemaTables(conn);
     assertTrue(isValid);
 
-    // upgrade from 2.0.0 schema and re-validate
-    schemaTool.doUpgrade("2.0.0");
+    // upgrade to 2.2.0 schema and re-validate
+    schemaTool.doUpgrade("2.2.0");
     isValid = (boolean)schemaTool.validateSchemaTables(conn);
     assertTrue(isValid);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index 6e9223a..4a82aa5 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -30,7 +30,6 @@ import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.processors.DfsProcessor;
 import org.apache.hive.common.util.HiveVersionInfo;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
-import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.operation.ClassicTableTypeMapping;
 import org.apache.hive.service.cli.operation.ClassicTableTypeMapping.ClassicTableTypes;
 import org.apache.hive.service.cli.operation.HiveTableTypeMapping;
@@ -578,7 +577,7 @@ public class TestJdbcDriver2 {
 
   @Test
   public void testSetOnConnection() throws Exception {
-    Connection connection = getConnection(testDbName + "?conf1=conf2;conf3=conf4#var1=var2;var3=var4");
+    Connection connection = getConnection("test?conf1=conf2;conf3=conf4#var1=var2;var3=var4");
     try {
       verifyConfValue(connection, "conf1", "conf2");
       verifyConfValue(connection, "conf3", "conf4");
@@ -1084,45 +1083,6 @@ public class TestJdbcDriver2 {
   }
 
   @Test
-  public void testShowTablesInDb() throws SQLException {
-    Statement stmt = con.createStatement();
-    assertNotNull("Statement is null", stmt);
-
-    String tableNameInDbUnique = tableName + "_unique";
-    // create a table with a unique name in testDb
-    stmt.execute("drop table if exists " + testDbName + "." + tableNameInDbUnique);
-    stmt.execute("create table " + testDbName + "." + tableNameInDbUnique
-        + " (under_col int comment 'the under column', value string) comment '" + tableComment
-        + "'");
-
-    ResultSet res = stmt.executeQuery("show tables in " + testDbName);
-
-    boolean testTableExists = false;
-    while (res.next()) {
-      assertNotNull("table name is null in result set", res.getString(1));
-      if (tableNameInDbUnique.equalsIgnoreCase(res.getString(1))) {
-        testTableExists = true;
-      }
-    }
-    assertTrue("table name " + tableNameInDbUnique
-        + " not found in SHOW TABLES result set", testTableExists);
-    stmt.execute("drop table if exists " + testDbName + "." + tableNameInDbUnique);
-    stmt.close();
-  }
-
-  @Test
-  public void testInvalidShowTables() throws SQLException {
-    Statement stmt = con.createStatement();
-    assertNotNull("Statement is null", stmt);
-
-    //show tables <dbname> is in invalid show tables syntax. Hive does not return
-    //any tables in this case
-    ResultSet res = stmt.executeQuery("show tables " + testDbName);
-    assertFalse(res.next());
-    stmt.close();
-  }
-
-  @Test
   public void testMetaDataGetTables() throws SQLException {
     getTablesTest(ImmutableSet.of(ClassicTableTypes.TABLE.toString()),
         ClassicTableTypes.VIEW.toString());
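
The two SHOW TABLES tests removed above documented a syntax distinction: SHOW TABLES IN <db> lists the tables of the named database, while SHOW TABLES <db> (without IN) is, per the removed test's own comment, invalid qualifier syntax for which Hive returns no rows rather than an error. A condensed sketch of the distinction (the database name is a placeholder):

    Statement stmt = con.createStatement();
    // Lists all tables in the named database.
    ResultSet listed = stmt.executeQuery("show tables in testdb");
    // Executes without error but, per the removed test, yields no rows.
    ResultSet empty = stmt.executeQuery("show tables testdb");
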
@@ -2923,10 +2883,4 @@ public class TestJdbcDriver2 {
     assertEquals(rowCount, dataFileRowCount);
     stmt.execute("drop table " + tblName);
   }
-
-  // Test that opening a JDBC connection to a non-existent database throws a HiveSQLException
-  @Test(expected = HiveSQLException.class)
-  public void testConnectInvalidDatabase() throws SQLException {
-    DriverManager.getConnection("jdbc:hive2:///databasedoesnotexist", "", "");
-  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
index fc2cb08..afe23f8 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
@@ -112,14 +112,6 @@ public class TestJdbcWithMiniHS2 {
     stmt.execute("drop database if exists " + testDbName + " cascade");
     stmt.execute("create database " + testDbName);
     stmt.close();
-
-    try {
-      openTestConnections();
-    } catch (Exception e) {
-      System.out.println("Unable to open default connections to MiniHS2: " + e);
-      throw e;
-    }
-
     // tables in test db
     createTestTables(conTestDb, testDbName);
   }
@@ -191,7 +183,6 @@ public class TestJdbcWithMiniHS2 {
     HiveConf conf = new HiveConf();
     startMiniHS2(conf);
     openDefaultConnections();
-    openTestConnections();
   }
 
   private static void startMiniHS2(HiveConf conf) throws Exception {
@@ -217,9 +208,6 @@ public class TestJdbcWithMiniHS2 {
 
   private static void openDefaultConnections() throws Exception {
     conDefault = getConnection();
-  }
-
-  private static void openTestConnections() throws Exception {
     conTestDb = getConnection(testDbName);
   }
 
@@ -978,38 +966,6 @@ public class TestJdbcWithMiniHS2 {
   }
 
   /**
-   * Test for jdbc driver retry on NoHttpResponseException
-   * @throws Exception
-   */
-  @Test
-  public void testHttpRetryOnServerIdleTimeout() throws Exception {
-    // Stop HiveServer2
-    stopMiniHS2();
-    HiveConf conf = new HiveConf();
-    conf.set("hive.server2.transport.mode", "http");
-    // Set server's idle timeout to a very low value
-    conf.set("hive.server2.thrift.http.max.idle.time", "5");
-    startMiniHS2(conf);
-    String userName = System.getProperty("user.name");
-    Connection conn = getConnection(miniHS2.getJdbcURL(testDbName), userName, "password");
-    Statement stmt = conn.createStatement();
-    stmt.execute("select from_unixtime(unix_timestamp())");
-    // Sleep for longer than server's idletimeout and execute a query
-    TimeUnit.SECONDS.sleep(10);
-    try {
-      stmt.execute("select from_unixtime(unix_timestamp())");
-    } catch (Exception e) {
-      fail("Not expecting exception: " + e);
-    } finally {
-      if (conn != null) {
-        conn.close();
-      }
-    }
-    // Restore original state
-    restoreMiniHS2AndConnections();
-  }
-
-  /**
    * Tests that DataNucleus' NucleusContext.classLoaderResolverMap clears cached class objects
    * (& hence doesn't leak classloaders) on closing any session
    *

http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
index d227275..0a53259 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
@@ -17,12 +17,12 @@
  */
 
 package org.apache.hive.jdbc;
-
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.File;
+import java.net.URLEncoder;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
@@ -35,10 +35,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-
 import org.apache.hive.jdbc.miniHS2.MiniHS2;
-import org.hadoop.hive.jdbc.SSLTestUtils;
-
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -47,13 +44,10 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-
 public class TestSSL {
-
   private static final Logger LOG = LoggerFactory.getLogger(TestSSL.class);
   private static final String LOCALHOST_KEY_STORE_NAME = "keystore.jks";
   private static final String EXAMPLEDOTCOM_KEY_STORE_NAME = "keystore_exampledotcom.jks";
@@ -61,12 +55,19 @@ public class TestSSL {
   private static final String KEY_STORE_TRUST_STORE_PASSWORD = "HiveJdbc";
   private static final String JAVA_TRUST_STORE_PROP = "javax.net.ssl.trustStore";
   private static final String JAVA_TRUST_STORE_PASS_PROP = "javax.net.ssl.trustStorePassword";
+  private static final String HS2_BINARY_MODE = "binary";
+  private static final String HS2_HTTP_MODE = "http";
+  private static final String HS2_HTTP_ENDPOINT = "cliservice";
+  private static final String HS2_BINARY_AUTH_MODE = "NONE";
 
   private MiniHS2 miniHS2 = null;
   private static HiveConf conf = new HiveConf();
   private Connection hs2Conn = null;
-  private String dataFileDir = SSLTestUtils.getDataFileDir();
+  private String dataFileDir = conf.get("test.data.files");
   private Map<String, String> confOverlay;
+  private final String SSL_CONN_PARAMS = "ssl=true;sslTrustStore="
+      + URLEncoder.encode(dataFileDir + File.separator + TRUST_STORE_NAME) + ";trustStorePassword="
+      + KEY_STORE_TRUST_STORE_PASSWORD;
 
   @BeforeClass
   public static void beforeTest() throws Exception {
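
The SSL_CONN_PARAMS constant restored above is the client half of the SSL setup: it appends ssl=true, a truststore path, and the truststore password to the JDBC URL, URL-encoding the path because it is embedded in URL parameters. A sketch of the resulting URL shape with a placeholder truststore path (the test derives the real path from test.data.files, and MiniHS2 supplies the actual host and port):

    // Placeholder path; URL-encoded because it rides inside URL parameters.
    // The one-argument encode() form mirrors the test's own usage.
    String trustStore = java.net.URLEncoder.encode("/data/files/truststore.jks");
    String url = "jdbc:hive2://localhost:10000/default"
        + ";ssl=true;sslTrustStore=" + trustStore
        + ";trustStorePassword=HiveJdbc";
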
@@ -82,6 +83,10 @@ public class TestSSL {
   @Before
   public void setUp() throws Exception {
     DriverManager.setLoginTimeout(0);
+    if (!System.getProperty("test.data.files", "").isEmpty()) {
+      dataFileDir = System.getProperty("test.data.files");
+    }
+    dataFileDir = dataFileDir.replace('\\', '/').replace("c:", "");
     miniHS2 = new MiniHS2.Builder().withConf(conf).cleanupLocalDirOnStartup(false).build();
     confOverlay = new HashMap<String, String>();
   }
@@ -122,9 +127,9 @@ public class TestSSL {
     // we depend on linux openssl exit codes
     Assume.assumeTrue(System.getProperty("os.name").toLowerCase().contains("linux"));
 
-    SSLTestUtils.setSslConfOverlay(confOverlay);
+    setSslConfOverlay(confOverlay);
     // Test in binary mode
-    SSLTestUtils.setBinaryConfOverlay(confOverlay);
+    setBinaryConfOverlay(confOverlay);
     // Start HS2 with SSL
     miniHS2.start(confOverlay);
 
@@ -141,7 +146,7 @@ public class TestSSL {
     miniHS2.stop();
 
     // Test in http mode
-    SSLTestUtils.setHttpConfOverlay(confOverlay);
+    setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     // make SSL connection
     try {
@@ -170,13 +175,13 @@ public class TestSSL {
    */
   @Test
   public void testInvalidConfig() throws Exception {
-    SSLTestUtils.clearSslConfOverlay(confOverlay);
+    clearSslConfOverlay(confOverlay);
     // Test in binary mode
-    SSLTestUtils.setBinaryConfOverlay(confOverlay);
+    setBinaryConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     DriverManager.setLoginTimeout(4);
     try {
-      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
+      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
           System.getProperty("user.name"), "bar");
       fail("SSL connection should fail with NON-SSL server");
     } catch (SQLException e) {
@@ -199,10 +204,10 @@ public class TestSSL {
     // Test in http mode with ssl properties specified in url
     System.clearProperty(JAVA_TRUST_STORE_PROP);
     System.clearProperty(JAVA_TRUST_STORE_PASS_PROP);
-    SSLTestUtils.setHttpConfOverlay(confOverlay);
+    setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     try {
-      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
+      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
           System.getProperty("user.name"), "bar");
       fail("SSL connection should fail with NON-SSL server");
     } catch (SQLException e) {
@@ -218,9 +223,9 @@ public class TestSSL {
    */
   @Test
   public void testConnectionMismatch() throws Exception {
-    SSLTestUtils.setSslConfOverlay(confOverlay);
+    setSslConfOverlay(confOverlay);
     // Test in binary mode
-    SSLTestUtils.setBinaryConfOverlay(confOverlay);
+    setBinaryConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     // Start HS2 with SSL
     try {
@@ -242,7 +247,7 @@ public class TestSSL {
     miniHS2.stop();
 
     // Test in http mode
-    SSLTestUtils.setHttpConfOverlay(confOverlay);
+    setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     try {
       hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", ";ssl=false"),
@@ -261,23 +266,23 @@ public class TestSSL {
    */
   @Test
   public void testSSLConnectionWithURL() throws Exception {
-    SSLTestUtils.setSslConfOverlay(confOverlay);
+    setSslConfOverlay(confOverlay);
     // Test in binary mode
-    SSLTestUtils.setBinaryConfOverlay(confOverlay);
+    setBinaryConfOverlay(confOverlay);
     // Start HS2 with SSL
     miniHS2.start(confOverlay);
 
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
         System.getProperty("user.name"), "bar");
     hs2Conn.close();
     miniHS2.stop();
 
     // Test in http mode
-    SSLTestUtils.setHttpConfOverlay(confOverlay);
+    setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
         System.getProperty("user.name"), "bar");
     hs2Conn.close();
   }
@@ -288,9 +293,9 @@ public class TestSSL {
    */
   @Test
   public void testSSLConnectionWithProperty() throws Exception {
-    SSLTestUtils.setSslConfOverlay(confOverlay);
+    setSslConfOverlay(confOverlay);
     // Test in binary mode
-    SSLTestUtils.setBinaryConfOverlay(confOverlay);
+    setBinaryConfOverlay(confOverlay);
     // Start HS2 with SSL
     miniHS2.start(confOverlay);
 
@@ -303,10 +308,10 @@ public class TestSSL {
     miniHS2.stop();
 
     // Test in http mode
-    SSLTestUtils.setHttpConfOverlay(confOverlay);
+    setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default",SSLTestUtils.SSL_CONN_PARAMS),
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
         System.getProperty("user.name"), "bar");
     hs2Conn.close();
   }
@@ -317,9 +322,9 @@ public class TestSSL {
    */
   @Test
   public void testSSLFetch() throws Exception {
-    SSLTestUtils.setSslConfOverlay(confOverlay);
+    setSslConfOverlay(confOverlay);
     // Test in binary mode
-    SSLTestUtils.setBinaryConfOverlay(confOverlay);
+    setBinaryConfOverlay(confOverlay);
     // Start HS2 with SSL
     miniHS2.start(confOverlay);
 
@@ -327,11 +332,11 @@ public class TestSSL {
     Path dataFilePath = new Path(dataFileDir, "kv1.txt");
 
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
         System.getProperty("user.name"), "bar");
 
     // Set up test data
-    SSLTestUtils.setupTestTableWithData(tableName, dataFilePath, hs2Conn);
+    setupTestTableWithData(tableName, dataFilePath, hs2Conn);
 
     Statement stmt = hs2Conn.createStatement();
     ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName);
@@ -352,20 +357,20 @@ public class TestSSL {
    */
   @Test
   public void testSSLFetchHttp() throws Exception {
-    SSLTestUtils.setSslConfOverlay(confOverlay);
+    setSslConfOverlay(confOverlay);
     // Test in http mode
-    SSLTestUtils.setHttpConfOverlay(confOverlay);
+    setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
 
     String tableName = "sslTab";
     Path dataFilePath = new Path(dataFileDir, "kv1.txt");
 
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
         System.getProperty("user.name"), "bar");
 
     // Set up test data
-    SSLTestUtils.setupTestTableWithData(tableName, dataFilePath, hs2Conn);
+    setupTestTableWithData(tableName, dataFilePath, hs2Conn);
     Statement stmt = hs2Conn.createStatement();
     ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName);
     int rowCount = 0;
@@ -388,16 +393,16 @@ public class TestSSL {
   @Test
   public void testConnectionWrongCertCN() throws Exception {
     // This call sets the default ssl params including the correct keystore in the server config
-    SSLTestUtils.setSslConfOverlay(confOverlay);
+    setSslConfOverlay(confOverlay);
     // Replace default keystore with keystore for www.example.com
     confOverlay.put(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname, dataFileDir + File.separator
         + EXAMPLEDOTCOM_KEY_STORE_NAME);
     // Binary (TCP) mode
-    SSLTestUtils.setBinaryConfOverlay(confOverlay);
+    setBinaryConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     try {
       hs2Conn =
-          DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
+          DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
               System.getProperty("user.name"), "bar");
       fail("SSL connection, with the server providing wrong certifcate (with CN www.example.com, "
           + "instead of localhost), should fail");
@@ -410,11 +415,11 @@ public class TestSSL {
     miniHS2.stop();
 
     // Http mode
-    SSLTestUtils.setHttpConfOverlay(confOverlay);
+    setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     try {
       hs2Conn =
-          DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
+          DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
               System.getProperty("user.name"), "bar");
       fail("SSL connection, with the server providing wrong certifcate (with CN www.example.com, "
           + "instead of localhost), should fail");
@@ -434,10 +439,10 @@ public class TestSSL {
    */
   @Test
   public void testMetastoreWithSSL() throws Exception {
-    SSLTestUtils.setMetastoreSslConf(conf);
-    SSLTestUtils.setSslConfOverlay(confOverlay);
+    setMetastoreSslConf(conf);
+    setSslConfOverlay(confOverlay);
     // Test in http mode
-    SSLTestUtils.setHttpConfOverlay(confOverlay);
+    setHttpConfOverlay(confOverlay);
     miniHS2 = new MiniHS2.Builder().withRemoteMetastore().withConf(conf).cleanupLocalDirOnStartup(false).build();
     miniHS2.start(confOverlay);
 
@@ -445,11 +450,11 @@ public class TestSSL {
     Path dataFilePath = new Path(dataFileDir, "kv1.txt");
 
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
         System.getProperty("user.name"), "bar");
 
     // Set up test data
-    SSLTestUtils.setupTestTableWithData(tableName, dataFilePath, hs2Conn);
+    setupTestTableWithData(tableName, dataFilePath, hs2Conn);
     Statement stmt = hs2Conn.createStatement();
     ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName);
     int rowCount = 0;
@@ -469,7 +474,7 @@ public class TestSSL {
    */
   @Test
   public void testMetastoreConnectionWrongCertCN() throws Exception {
-    SSLTestUtils.setMetastoreSslConf(conf);
+    setMetastoreSslConf(conf);
     conf.setVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH,
         dataFileDir + File.separator +  EXAMPLEDOTCOM_KEY_STORE_NAME);
     miniHS2 = new MiniHS2.Builder().withRemoteMetastore().withConf(conf).cleanupLocalDirOnStartup(false).build();
@@ -481,4 +486,55 @@ public class TestSSL {
 
     miniHS2.stop();
   }
+
+  private void setupTestTableWithData(String tableName, Path dataFilePath,
+      Connection hs2Conn) throws Exception {
+    Statement stmt = hs2Conn.createStatement();
+    stmt.execute("set hive.support.concurrency = false");
+
+    stmt.execute("drop table if exists " + tableName);
+    stmt.execute("create table " + tableName
+        + " (under_col int comment 'the under column', value string)");
+
+    // load data
+    stmt.execute("load data local inpath '"
+        + dataFilePath.toString() + "' into table " + tableName);
+    stmt.close();
+  }
+
+  private void setSslConfOverlay(Map<String, String> confOverlay) {
+    confOverlay.put(ConfVars.HIVE_SERVER2_USE_SSL.varname, "true");
+    confOverlay.put(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname,
+        dataFileDir + File.separator +  LOCALHOST_KEY_STORE_NAME);
+    confOverlay.put(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname,
+        KEY_STORE_TRUST_STORE_PASSWORD);
+  }
+
+  private void setMetastoreSslConf(HiveConf conf) {
+    conf.setBoolVar(ConfVars.HIVE_METASTORE_USE_SSL, true);
+    conf.setVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH,
+        dataFileDir + File.separator +  LOCALHOST_KEY_STORE_NAME);
+    conf.setVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD,
+        KEY_STORE_TRUST_STORE_PASSWORD);
+    conf.setVar(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH,
+        dataFileDir + File.separator +  TRUST_STORE_NAME);
+    conf.setVar(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PASSWORD,
+        KEY_STORE_TRUST_STORE_PASSWORD);
+  }
+
+  private void clearSslConfOverlay(Map<String, String> confOverlay) {
+    confOverlay.put(ConfVars.HIVE_SERVER2_USE_SSL.varname, "false");
+  }
+
+  private void setHttpConfOverlay(Map<String, String> confOverlay) {
+    confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, HS2_HTTP_MODE);
+    confOverlay.put(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname, HS2_HTTP_ENDPOINT);
+    confOverlay.put(ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "true");
+  }
+
+  private void setBinaryConfOverlay(Map<String, String> confOverlay) {
+    confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, HS2_BINARY_MODE);
+    confOverlay.put(ConfVars.HIVE_SERVER2_AUTHENTICATION.varname,  HS2_BINARY_AUTH_MODE);
+    confOverlay.put(ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "true");
+  }
 }
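
Taken together, the helpers restored in this hunk give every TestSSL case the same shape: populate confOverlay, start MiniHS2 with it, connect over the SSL URL params, and assert. A condensed sketch of that flow using the methods defined above (assertions elided):

    setSslConfOverlay(confOverlay);     // ssl=true + localhost keystore
    setBinaryConfOverlay(confOverlay);  // or setHttpConfOverlay(confOverlay)
    miniHS2.start(confOverlay);
    hs2Conn = DriverManager.getConnection(
        miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");
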