Posted to commits@hive.apache.org by px...@apache.org on 2015/08/11 23:11:59 UTC

[1/7] hive git commit: HIVE-8448 : Union All might not work due to the type conversion issue (Yongzhi Chen via Szehon)

Repository: hive
Updated Branches:
  refs/heads/branch-1.0 7f29ee466 -> 84af92e65


HIVE-8448 : Union All might not work due to the type conversion issue (Yongzhi Chen via Szehon)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1632393 13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/27a3fc20
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/27a3fc20
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/27a3fc20

Branch: refs/heads/branch-1.0
Commit: 27a3fc203e9a03e395ecc7e2d6d500027b854760
Parents: 7f29ee4
Author: Szehon Ho <sz...@apache.org>
Authored: Thu Oct 16 17:33:38 2014 +0000
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Tue Aug 11 13:52:15 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/exec/UnionOperator.java      |  2 +-
 .../hive/ql/udf/generic/GenericUDFUtils.java    | 28 +++++++++-
 .../queries/clientpositive/union_date_trim.q    |  7 +++
 .../clientpositive/union_date_trim.q.out        | 54 ++++++++++++++++++++
 4 files changed, 89 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/27a3fc20/ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java
index 59c07c3..0f761e8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java
@@ -80,7 +80,7 @@ public class UnionOperator extends Operator<UnionDesc> implements Serializable {
     for (int p = 0; p < parents; p++) {
       assert (parentFields[p].size() == columns);
       for (int c = 0; c < columns; c++) {
-        if (!columnTypeResolvers[c].update(parentFields[p].get(c)
+        if (!columnTypeResolvers[c].updateForUnionAll(parentFields[p].get(c)
             .getFieldObjectInspector())) {
           // checked in SemanticAnalyzer. Should not happen
           throw new HiveException("Incompatible types for union operator");

http://git-wip-us.apache.org/repos/asf/hive/blob/27a3fc20/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
index 1f70c55..833452d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
@@ -100,6 +100,26 @@ public final class GenericUDFUtils {
      * @return false if there is a type mismatch
      */
     public boolean update(ObjectInspector oi) throws UDFArgumentTypeException {
+      return update(oi, false);
+    }
+
+    /**
+     * Update returnObjectInspector and valueInspectorsAreTheSame based on the
+     * ObjectInspector seen for UnionAll.
+     *
+     * @return false if there is a type mismatch
+     */
+    public boolean updateForUnionAll(ObjectInspector oi) throws UDFArgumentTypeException {
+      return update(oi, true);
+    }
+
+    /**
+     * Update returnObjectInspector and valueInspectorsAreTheSame based on the
+     * ObjectInspector seen.
+     *
+     * @return false if there is a type mismatch
+     */
+    private boolean update(ObjectInspector oi, boolean isUnionAll) throws UDFArgumentTypeException {
       if (oi instanceof VoidObjectInspector) {
         return true;
       }
@@ -137,8 +157,14 @@ public final class GenericUDFUtils {
 
       // Types are different, we need to check whether we can convert them to
       // a common base class or not.
-      TypeInfo commonTypeInfo = FunctionRegistry.getCommonClass(oiTypeInfo,
+      TypeInfo commonTypeInfo = null;
+      if (isUnionAll) {
+        commonTypeInfo = FunctionRegistry.getCommonClassForUnionAll(oiTypeInfo,
+          rTypeInfo);
+      } else {
+        commonTypeInfo = FunctionRegistry.getCommonClass(oiTypeInfo,
           rTypeInfo);
+      }
       if (commonTypeInfo == null) {
         return false;
       }
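
The dispatch above keeps every existing caller of update() on the strict implicit-conversion rules while letting UnionOperator opt in to the looser union-all rules via updateForUnionAll(). Below is a self-contained sketch of that pattern; the two-type lattice is illustrative only and stands in for FunctionRegistry.getCommonClass / getCommonClassForUnionAll, whose real conversion matrix is not reproduced here.

import java.util.Arrays;
import java.util.List;

// Toy resolver mirroring the update()/updateForUnionAll() dispatch above.
// The two-type lattice is illustrative; Hive's real rules live in
// FunctionRegistry.getCommonClass and getCommonClassForUnionAll.
public class ResolverSketch {
  enum TypeInfo { DATE, STRING }

  // Strict rules: no common type for DATE vs STRING.
  static TypeInfo commonClass(TypeInfo a, TypeInfo b) {
    return a == b ? a : null;
  }

  // Union-all rules: fall back to STRING when the types differ.
  static TypeInfo commonClassForUnionAll(TypeInfo a, TypeInfo b) {
    return a == b ? a : TypeInfo.STRING;
  }

  static class TypeResolver {
    TypeInfo returnType;

    boolean update(TypeInfo oi) { return update(oi, false); }

    boolean updateForUnionAll(TypeInfo oi) { return update(oi, true); }

    private boolean update(TypeInfo oi, boolean isUnionAll) {
      if (returnType == null || returnType == oi) {
        returnType = oi;
        return true;
      }
      TypeInfo common = isUnionAll
          ? commonClassForUnionAll(oi, returnType)
          : commonClass(oi, returnType);
      if (common == null) {
        return false; // caller throws "Incompatible types for union operator"
      }
      returnType = common;
      return true;
    }
  }

  public static void main(String[] args) {
    List<TypeInfo> parents = Arrays.asList(TypeInfo.DATE, TypeInfo.DATE, TypeInfo.STRING);

    TypeResolver strict = new TypeResolver();
    boolean ok = true;
    for (TypeInfo t : parents) {
      ok &= strict.update(t);
    }
    System.out.println("strict update: " + (ok ? strict.returnType : "incompatible"));

    TypeResolver unionAll = new TypeResolver();
    ok = true;
    for (TypeInfo t : parents) {
      ok &= unionAll.updateForUnionAll(t);
    }
    System.out.println("union-all update: " + (ok ? unionAll.returnType : "incompatible"));
  }
}

Running it prints "strict update: incompatible" and "union-all update: STRING", mirroring the before/after behavior of the query in union_date_trim.q below.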

http://git-wip-us.apache.org/repos/asf/hive/blob/27a3fc20/ql/src/test/queries/clientpositive/union_date_trim.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/union_date_trim.q b/ql/src/test/queries/clientpositive/union_date_trim.q
new file mode 100644
index 0000000..6842e56
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/union_date_trim.q
@@ -0,0 +1,7 @@
+drop table if exists testDate;
+create table testDate(id int, dt date);
+insert into table testDate select 1, '2014-04-07' from src where key=100 limit 1;
+insert into table testDate select 2, '2014-04-08' from src where key=100 limit 1;
+insert into table testDate select 3, '2014-04-09' from src where key=100 limit 1;
+--- without the fix, the following query will throw HiveException: Incompatible types for union operator
+insert into table testDate select id, tm from (select id, dt as tm from testDate where id = 1 union all select id, dt as tm from testDate where id = 2 union all select id, trim(Cast (dt as string)) as tm from testDate where id = 3 ) a;
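
In this repro, the first two branches produce a DATE column while trim(cast(dt as string)) makes the third branch STRING; before the patch the strict resolver found no common type and UnionOperator threw. Assuming the union-all rules settle on string here (the successful insert back into the date column in the q.out below is consistent with that), a toy model of the converted union output:

import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Toy model of what the fixed plan produces: once STRING is chosen as the
// common column type, DATE values from the first two branches are converted
// to strings, so every row of the union shares one object inspector.
public class UnionConvertSketch {
  public static void main(String[] args) {
    List<Object> branch1 = Arrays.<Object>asList(LocalDate.parse("2014-04-07")); // dt as DATE
    List<Object> branch2 = Arrays.<Object>asList(LocalDate.parse("2014-04-08")); // dt as DATE
    List<Object> branch3 = Arrays.<Object>asList("2014-04-09"); // trim(cast(dt as string))

    List<String> unioned = new ArrayList<>();
    for (List<Object> branch : Arrays.asList(branch1, branch2, branch3)) {
      for (Object v : branch) {
        unioned.add(String.valueOf(v)); // the convert-if-necessary step
      }
    }
    System.out.println(unioned); // [2014-04-07, 2014-04-08, 2014-04-09]
  }
}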

http://git-wip-us.apache.org/repos/asf/hive/blob/27a3fc20/ql/src/test/results/clientpositive/union_date_trim.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union_date_trim.q.out b/ql/src/test/results/clientpositive/union_date_trim.q.out
new file mode 100644
index 0000000..e0682e6
--- /dev/null
+++ b/ql/src/test/results/clientpositive/union_date_trim.q.out
@@ -0,0 +1,54 @@
+PREHOOK: query: drop table if exists testDate
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists testDate
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table testDate(id int, dt date)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@testDate
+POSTHOOK: query: create table testDate(id int, dt date)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@testDate
+PREHOOK: query: insert into table testDate select 1, '2014-04-07' from src where key=100 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@testdate
+POSTHOOK: query: insert into table testDate select 1, '2014-04-07' from src where key=100 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@testdate
+POSTHOOK: Lineage: testdate.dt EXPRESSION []
+POSTHOOK: Lineage: testdate.id SIMPLE []
+PREHOOK: query: insert into table testDate select 2, '2014-04-08' from src where key=100 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@testdate
+POSTHOOK: query: insert into table testDate select 2, '2014-04-08' from src where key=100 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@testdate
+POSTHOOK: Lineage: testdate.dt EXPRESSION []
+POSTHOOK: Lineage: testdate.id SIMPLE []
+PREHOOK: query: insert into table testDate select 3, '2014-04-09' from src where key=100 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@testdate
+POSTHOOK: query: insert into table testDate select 3, '2014-04-09' from src where key=100 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@testdate
+POSTHOOK: Lineage: testdate.dt EXPRESSION []
+POSTHOOK: Lineage: testdate.id SIMPLE []
+PREHOOK: query: --- without the fix, the following query will throw HiveException: Incompatible types for union operator
+insert into table testDate select id, tm from (select id, dt as tm from testDate where id = 1 union all select id, dt as tm from testDate where id = 2 union all select id, trim(Cast (dt as string)) as tm from testDate where id = 3 ) a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@testdate
+PREHOOK: Output: default@testdate
+POSTHOOK: query: --- without the fix, the following query will throw HiveException: Incompatible types for union operator
+insert into table testDate select id, tm from (select id, dt as tm from testDate where id = 1 union all select id, dt as tm from testDate where id = 2 union all select id, trim(Cast (dt as string)) as tm from testDate where id = 3 ) a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@testdate
+POSTHOOK: Output: default@testdate
+POSTHOOK: Lineage: testdate.dt EXPRESSION []
+POSTHOOK: Lineage: testdate.id EXPRESSION [(testdate)testdate.FieldSchema(name:id, type:int, comment:null), (testdate)testdate.FieldSchema(name:id, type:int, comment:null), (testdate)testdate.FieldSchema(name:id, type:int, comment:null), ]


[4/7] hive git commit: HIVE-8863 : Cannot drop table with uppercase name after compute statistics for columns (Chaoyu Tang via Ashutosh Chauhan)

Posted by px...@apache.org.
HIVE-8863 : Cannot drop table with uppercase name after compute statistics for columns (Chaoyu Tang via Ashutosh Chauhan)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1640943 13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fba31e76
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fba31e76
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fba31e76

Branch: refs/heads/branch-1.0
Commit: fba31e76d3d32678258d62e49b80f8eaaab94216
Parents: dbdea20
Author: Ashutosh Chauhan <ha...@apache.org>
Authored: Fri Nov 21 15:42:35 2014 +0000
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Tue Aug 11 13:56:03 2015 -0700

----------------------------------------------------------------------
 .../hive/common/util/HiveStringUtils.java       |   4 +
 .../hadoop/hive/metastore/ObjectStore.java      | 208 ++++----
 .../clientpositive/drop_partition_with_stats.q  |  68 +++
 .../clientpositive/drop_table_with_stats.q      |  43 ++
 .../drop_partition_with_stats.q.out             | 496 +++++++++++++++++++
 .../clientpositive/drop_table_with_stats.q.out  | 236 +++++++++
 6 files changed, 956 insertions(+), 99 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/fba31e76/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
index 2705f1e..78cd983 100644
--- a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
+++ b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
@@ -882,4 +882,8 @@ public class HiveStringUtils {
     }
     return len;
   }
+
+  public static String normalizeIdentifier(String identifier) {
+    return identifier.trim().toLowerCase();
+  }
 }
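
The helper simply trims and lowercases, giving every metastore call site one canonical spelling for identifiers. A standalone copy with a quick demonstration (the sample names are illustrative):

// Standalone copy of the helper above, showing that differently-cased,
// padded identifiers all normalize to the same key.
public class NormalizeSketch {
  static String normalizeIdentifier(String identifier) {
    return identifier.trim().toLowerCase();
  }

  public static void main(String[] args) {
    System.out.println(normalizeIdentifier("  TestTable1 "));  // testtable1
    System.out.println(normalizeIdentifier("TESTTABLE2"));     // testtable2
    System.out.println(normalizeIdentifier("TestTable1")
        .equals(normalizeIdentifier("testtable1")));           // true
  }
}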

http://git-wip-us.apache.org/repos/asf/hive/blob/fba31e76/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 135cadf..4f2106e 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -139,6 +139,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.thrift.TException;
 import org.datanucleus.store.rdbms.exceptions.MissingTableException;
 
@@ -503,7 +504,7 @@ public class ObjectStore implements RawStore, Configurable {
     boolean commited = false;
     try {
       openTransaction();
-      name = name.toLowerCase().trim();
+      name = HiveStringUtils.normalizeIdentifier(name);
       Query query = pm.newQuery(MDatabase.class, "name == dbname");
       query.declareParameters("java.lang.String dbname");
       query.setUnique(true);
@@ -615,7 +616,7 @@ public class ObjectStore implements RawStore, Configurable {
   public boolean dropDatabase(String dbname) throws NoSuchObjectException, MetaException {
     boolean success = false;
     LOG.info("Dropping database " + dbname + " along with all tables");
-    dbname = dbname.toLowerCase();
+    dbname = HiveStringUtils.normalizeIdentifier(dbname);
     try {
       openTransaction();
 
@@ -912,7 +913,7 @@ public class ObjectStore implements RawStore, Configurable {
     List<String> tbls = null;
     try {
       openTransaction();
-      dbName = dbName.toLowerCase().trim();
+      dbName = HiveStringUtils.normalizeIdentifier(dbName);
       // Take the pattern and split it on the | to get all the composing
       // patterns
       String[] subpatterns = pattern.trim().split("\\|");
@@ -958,8 +959,8 @@ public class ObjectStore implements RawStore, Configurable {
     boolean commited = false;
     try {
       openTransaction();
-      db = db.toLowerCase().trim();
-      table = table.toLowerCase().trim();
+      db = HiveStringUtils.normalizeIdentifier(db);
+      table = HiveStringUtils.normalizeIdentifier(table);
       Query query = pm.newQuery(MTable.class, "tableName == table && database.name == db");
       query.declareParameters("java.lang.String table, java.lang.String db");
       query.setUnique(true);
@@ -982,7 +983,7 @@ public class ObjectStore implements RawStore, Configurable {
     try {
       openTransaction();
 
-      db = db.toLowerCase().trim();
+      db = HiveStringUtils.normalizeIdentifier(db);
       Query dbExistsQuery = pm.newQuery(MDatabase.class, "name == db");
       dbExistsQuery.declareParameters("java.lang.String db");
       dbExistsQuery.setUnique(true);
@@ -994,7 +995,7 @@ public class ObjectStore implements RawStore, Configurable {
 
       List<String> lowered_tbl_names = new ArrayList<String>();
       for (String t : tbl_names) {
-        lowered_tbl_names.add(t.toLowerCase().trim());
+        lowered_tbl_names.add(HiveStringUtils.normalizeIdentifier(t));
       }
       Query query = pm.newQuery(MTable.class);
       query.setFilter("database.name == db && tbl_names.contains(tableName)");
@@ -1075,7 +1076,7 @@ public class ObjectStore implements RawStore, Configurable {
     }
 
     // A new table is always created with a new column descriptor
-    return new MTable(tbl.getTableName().toLowerCase(), mdb,
+    return new MTable(HiveStringUtils.normalizeIdentifier(tbl.getTableName()), mdb,
         convertToMStorageDescriptor(tbl.getSd()), tbl.getOwner(), tbl
             .getCreateTime(), tbl.getLastAccessTime(), tbl.getRetention(),
         convertToMFieldSchemas(tbl.getPartitionKeys()), tbl.getParameters(),
@@ -1088,7 +1089,7 @@ public class ObjectStore implements RawStore, Configurable {
     if (keys != null) {
       mkeys = new ArrayList<MFieldSchema>(keys.size());
       for (FieldSchema part : keys) {
-        mkeys.add(new MFieldSchema(part.getName().toLowerCase(),
+        mkeys.add(new MFieldSchema(HiveStringUtils.normalizeIdentifier(part.getName()),
             part.getType(), part.getComment()));
       }
     }
@@ -1112,7 +1113,7 @@ public class ObjectStore implements RawStore, Configurable {
     if (keys != null) {
       mkeys = new ArrayList<MOrder>(keys.size());
       for (Order part : keys) {
-        mkeys.add(new MOrder(part.getCol().toLowerCase(), part.getOrder()));
+        mkeys.add(new MOrder(HiveStringUtils.normalizeIdentifier(part.getCol()), part.getOrder()));
       }
     }
     return mkeys;
@@ -1487,8 +1488,8 @@ public class ObjectStore implements RawStore, Configurable {
     boolean commited = false;
     try {
       openTransaction();
-      dbName = dbName.toLowerCase().trim();
-      tableName = tableName.toLowerCase().trim();
+      dbName = HiveStringUtils.normalizeIdentifier(dbName);
+      tableName = HiveStringUtils.normalizeIdentifier(tableName);
       MTable mtbl = getMTable(dbName, tableName);
       if (mtbl == null) {
         commited = commitTransaction();
@@ -1821,8 +1822,8 @@ public class ObjectStore implements RawStore, Configurable {
 
   private List<String> getPartitionNamesNoTxn(String dbName, String tableName, short max) {
     List<String> pns = new ArrayList<String>();
-    dbName = dbName.toLowerCase().trim();
-    tableName = tableName.toLowerCase().trim();
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
     Query q = pm.newQuery(
         "select partitionName from org.apache.hadoop.hive.metastore.model.MPartition "
         + "where table.database.name == t1 && table.tableName == t2 "
@@ -1858,8 +1859,8 @@ public class ObjectStore implements RawStore, Configurable {
   private Collection getPartitionPsQueryResults(String dbName, String tableName,
       List<String> part_vals, short max_parts, String resultsCol)
       throws MetaException, NoSuchObjectException {
-    dbName = dbName.toLowerCase().trim();
-    tableName = tableName.toLowerCase().trim();
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
     Table table = getTable(dbName, tableName);
 
     if (table == null) {
@@ -1968,8 +1969,8 @@ public class ObjectStore implements RawStore, Configurable {
     try {
       openTransaction();
       LOG.debug("Executing listMPartitions");
-      dbName = dbName.toLowerCase().trim();
-      tableName = tableName.toLowerCase().trim();
+      dbName = HiveStringUtils.normalizeIdentifier(dbName);
+      tableName = HiveStringUtils.normalizeIdentifier(tableName);
       Query query = pm.newQuery(MPartition.class,
           "table.tableName == t1 && table.database.name == t2");
       query.declareParameters("java.lang.String t1, java.lang.String t2");
@@ -2290,8 +2291,8 @@ public class ObjectStore implements RawStore, Configurable {
     query.setFilter(sb.toString());
 
     LOG.debug(" JDOQL filter is " + sb.toString());
-    params.put("t1", tblName.trim().toLowerCase());
-    params.put("t2", dbName.trim().toLowerCase());
+    params.put("t1", HiveStringUtils.normalizeIdentifier(tblName));
+    params.put("t2", HiveStringUtils.normalizeIdentifier(dbName));
 
     query.declareParameters(makeParameterDeclarationString(params));
     return new ObjectPair<Query, Map<String,String>>(query, params);
@@ -2317,9 +2318,9 @@ public class ObjectStore implements RawStore, Configurable {
         throws MetaException {
       assert allowSql || allowJdo;
       this.allowJdo = allowJdo;
-      this.dbName = dbName.toLowerCase();
+      this.dbName = HiveStringUtils.normalizeIdentifier(dbName);
       if (tblName != null){
-        this.tblName = tblName.toLowerCase();
+        this.tblName = HiveStringUtils.normalizeIdentifier(tblName);
       } else {
         // tblName can be null in cases of Helper being used at a higher
        // abstraction level, such as with databases
@@ -2620,7 +2621,7 @@ public class ObjectStore implements RawStore, Configurable {
     try {
       openTransaction();
       LOG.debug("Executing listTableNamesByFilter");
-      dbName = dbName.toLowerCase().trim();
+      dbName = HiveStringUtils.normalizeIdentifier(dbName);
       Map<String, Object> params = new HashMap<String, Object>();
       String queryFilterString = makeQueryFilterString(dbName, null, filter, params);
       Query query = pm.newQuery(MTable.class);
@@ -2665,8 +2666,8 @@ public class ObjectStore implements RawStore, Configurable {
     try {
       openTransaction();
       LOG.debug("Executing listMPartitionNamesByFilter");
-      dbName = dbName.toLowerCase();
-      tableName = tableName.toLowerCase();
+      dbName = HiveStringUtils.normalizeIdentifier(dbName);
+      tableName = HiveStringUtils.normalizeIdentifier(tableName);
 
       MTable mtable = getMTable(dbName, tableName);
       if( mtable == null ) {
@@ -2717,8 +2718,8 @@ public class ObjectStore implements RawStore, Configurable {
     boolean success = false;
     try {
       openTransaction();
-      name = name.toLowerCase();
-      dbname = dbname.toLowerCase();
+      name = HiveStringUtils.normalizeIdentifier(name);
+      dbname = HiveStringUtils.normalizeIdentifier(dbname);
       MTable newt = convertToMTable(newTable);
       if (newt == null) {
         throw new InvalidObjectException("new table is invalid");
@@ -2731,7 +2732,7 @@ public class ObjectStore implements RawStore, Configurable {
 
      // For now only alter name, owner, parameters, cols, bucketcols are allowed
       oldt.setDatabase(newt.getDatabase());
-      oldt.setTableName(newt.getTableName().toLowerCase());
+      oldt.setTableName(HiveStringUtils.normalizeIdentifier(newt.getTableName()));
       oldt.setParameters(newt.getParameters());
       oldt.setOwner(newt.getOwner());
       // Fully copy over the contents of the new SD into the old SD,
@@ -2759,9 +2760,9 @@ public class ObjectStore implements RawStore, Configurable {
     boolean success = false;
     try {
       openTransaction();
-      name = name.toLowerCase();
-      baseTblName = baseTblName.toLowerCase();
-      dbname = dbname.toLowerCase();
+      name = HiveStringUtils.normalizeIdentifier(name);
+      baseTblName = HiveStringUtils.normalizeIdentifier(baseTblName);
+      dbname = HiveStringUtils.normalizeIdentifier(dbname);
       MIndex newi = convertToMIndex(newIndex);
       if (newi == null) {
         throw new InvalidObjectException("new index is invalid");
@@ -2786,8 +2787,8 @@ public class ObjectStore implements RawStore, Configurable {
 
   private void alterPartitionNoTxn(String dbname, String name, List<String> part_vals,
       Partition newPart) throws InvalidObjectException, MetaException {
-    name = name.toLowerCase();
-    dbname = dbname.toLowerCase();
+    name = HiveStringUtils.normalizeIdentifier(name);
+    dbname = HiveStringUtils.normalizeIdentifier(dbname);
     MPartition oldp = getMPartition(dbname, name, part_vals);
     MPartition newp = convertToMPart(newPart, false);
     if (oldp == null || newp == null) {
@@ -3020,7 +3021,7 @@ public class ObjectStore implements RawStore, Configurable {
           "Underlying index table does not exist for the given index.");
     }
 
-    return new MIndex(index.getIndexName().toLowerCase(), origTable, index.getCreateTime(),
+    return new MIndex(HiveStringUtils.normalizeIdentifier(index.getIndexName()), origTable, index.getCreateTime(),
         index.getLastAccessTime(), index.getParameters(), indexTable, msd,
         index.getIndexHandlerClass(), index.isDeferredRebuild());
   }
@@ -3049,8 +3050,8 @@ public class ObjectStore implements RawStore, Configurable {
     boolean commited = false;
     try {
       openTransaction();
-      dbName = dbName.toLowerCase().trim();
-      originalTblName = originalTblName.toLowerCase().trim();
+      dbName = HiveStringUtils.normalizeIdentifier(dbName);
+      originalTblName = HiveStringUtils.normalizeIdentifier(originalTblName);
       MTable mtbl = getMTable(dbName, originalTblName);
       if (mtbl == null) {
         commited = commitTransaction();
@@ -3061,7 +3062,8 @@ public class ObjectStore implements RawStore, Configurable {
         "origTable.tableName == t1 && origTable.database.name == t2 && indexName == t3");
       query.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3");
       query.setUnique(true);
-      midx = (MIndex) query.execute(originalTblName, dbName, indexName.toLowerCase());
+      midx = (MIndex) query.execute(originalTblName, dbName,
+          HiveStringUtils.normalizeIdentifier(indexName));
       pm.retrieve(midx);
       commited = commitTransaction();
     } finally {
@@ -3128,8 +3130,8 @@ public class ObjectStore implements RawStore, Configurable {
     try {
       openTransaction();
       LOG.debug("Executing listMIndexes");
-      dbName = dbName.toLowerCase().trim();
-      origTableName = origTableName.toLowerCase().trim();
+      dbName = HiveStringUtils.normalizeIdentifier(dbName);
+      origTableName = HiveStringUtils.normalizeIdentifier(origTableName);
       Query query = pm.newQuery(MIndex.class,
           "origTable.tableName == t1 && origTable.database.name == t2");
       query.declareParameters("java.lang.String t1, java.lang.String t2");
@@ -3154,8 +3156,8 @@ public class ObjectStore implements RawStore, Configurable {
     try {
       openTransaction();
       LOG.debug("Executing listIndexNames");
-      dbName = dbName.toLowerCase().trim();
-      origTableName = origTableName.toLowerCase().trim();
+      dbName = HiveStringUtils.normalizeIdentifier(dbName);
+      origTableName = HiveStringUtils.normalizeIdentifier(origTableName);
       Query q = pm.newQuery(
           "select indexName from org.apache.hadoop.hive.metastore.model.MIndex "
           + "where origTable.database.name == t1 && origTable.tableName == t2 "
@@ -3576,7 +3578,7 @@ public class ObjectStore implements RawStore, Configurable {
   public List<PrivilegeGrantInfo> getDBPrivilege(String dbName,
       String principalName, PrincipalType principalType)
       throws InvalidObjectException, MetaException {
-    dbName = dbName.toLowerCase().trim();
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     if (principalName != null) {
       List<MDBPrivilege> userNameDbPriv = this.listPrincipalDBGrants(
@@ -3602,7 +3604,7 @@ public class ObjectStore implements RawStore, Configurable {
       String userName, List<String> groupNames) throws InvalidObjectException,
       MetaException {
     boolean commited = false;
-    dbName = dbName.toLowerCase().trim();
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
     try {
@@ -3645,8 +3647,8 @@ public class ObjectStore implements RawStore, Configurable {
       List<String> groupNames) throws InvalidObjectException, MetaException {
     boolean commited = false;
     PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     try {
       openTransaction();
@@ -3688,8 +3690,8 @@ public class ObjectStore implements RawStore, Configurable {
       throws InvalidObjectException, MetaException {
     boolean commited = false;
     PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     try {
       openTransaction();
@@ -3730,9 +3732,9 @@ public class ObjectStore implements RawStore, Configurable {
       String tableName, String partitionName, String columnName,
       String userName, List<String> groupNames) throws InvalidObjectException,
       MetaException {
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
-    columnName = columnName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
+    columnName = HiveStringUtils.normalizeIdentifier(columnName);
 
     boolean commited = false;
     PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
@@ -3774,8 +3776,8 @@ public class ObjectStore implements RawStore, Configurable {
       String tableName, String partName, String principalName,
       PrincipalType principalType) {
 
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     if (principalName != null) {
       List<MPartitionPrivilege> userNameTabPartPriv = this
@@ -3803,8 +3805,8 @@ public class ObjectStore implements RawStore, Configurable {
 
   private List<PrivilegeGrantInfo> getTablePrivilege(String dbName,
       String tableName, String principalName, PrincipalType principalType) {
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     if (principalName != null) {
       List<MTablePrivilege> userNameTabPartPriv = this
@@ -3829,9 +3831,9 @@ public class ObjectStore implements RawStore, Configurable {
       String tableName, String columnName, String partitionName,
       String principalName, PrincipalType principalType) {
 
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
-    columnName = columnName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
+    columnName = HiveStringUtils.normalizeIdentifier(columnName);
 
     if (partitionName == null) {
       List<MTableColumnPrivilege> userNameColumnPriv = this
@@ -4415,7 +4417,7 @@ public class ObjectStore implements RawStore, Configurable {
       PrincipalType principalType, String dbName) {
     boolean success = false;
     List<MDBPrivilege> mSecurityDBList = null;
-    dbName = dbName.toLowerCase().trim();
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     try {
       openTransaction();
@@ -4499,11 +4501,11 @@ public class ObjectStore implements RawStore, Configurable {
   public List<MTablePrivilege> listAllTableGrants(String dbName,
       String tableName) {
     boolean success = false;
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
     List<MTablePrivilege> mSecurityTabList = null;
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
     try {
       openTransaction();
       LOG.debug("Executing listAllTableGrants");
@@ -4530,8 +4532,8 @@ public class ObjectStore implements RawStore, Configurable {
   @SuppressWarnings("unchecked")
   public List<MPartitionPrivilege> listTableAllPartitionGrants(String dbName,
       String tableName) {
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
     boolean success = false;
     List<MPartitionPrivilege> mSecurityTabPartList = null;
     try {
@@ -4562,8 +4564,8 @@ public class ObjectStore implements RawStore, Configurable {
       String tableName) {
     boolean success = false;
     List<MTableColumnPrivilege> mTblColPrivilegeList = null;
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     try {
       openTransaction();
@@ -4589,8 +4591,8 @@ public class ObjectStore implements RawStore, Configurable {
   public List<MPartitionColumnPrivilege> listTableAllPartitionColumnGrants(String dbName,
       String tableName) {
     boolean success = false;
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     List<MPartitionColumnPrivilege> mSecurityColList = null;
     try {
@@ -4617,8 +4619,8 @@ public class ObjectStore implements RawStore, Configurable {
   public List<MPartitionColumnPrivilege> listPartitionAllColumnGrants(String dbName,
       String tableName, List<String> partNames) {
     boolean success = false;
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     List<MPartitionColumnPrivilege> mSecurityColList = null;
     try {
@@ -4649,7 +4651,7 @@ public class ObjectStore implements RawStore, Configurable {
 
   @SuppressWarnings("unchecked")
   private List<MDBPrivilege> listDatabaseGrants(String dbName) {
-    dbName = dbName.toLowerCase().trim();
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     boolean success = false;
     try {
@@ -4675,8 +4677,8 @@ public class ObjectStore implements RawStore, Configurable {
   @SuppressWarnings("unchecked")
   private List<MPartitionPrivilege> listPartitionGrants(String dbName, String tableName,
       List<String> partNames) {
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     boolean success = false;
     List<MPartitionPrivilege> mSecurityTabPartList = null;
@@ -4719,8 +4721,8 @@ public class ObjectStore implements RawStore, Configurable {
     String queryStr = tbCol + " == t1 && " + dbCol + " == t2";
     String paramStr = "java.lang.String t1, java.lang.String t2";
     Object[] params = new Object[2 + partNames.size()];
-    params[0] = tableName;
-    params[1] = dbName;
+    params[0] = HiveStringUtils.normalizeIdentifier(tableName);
+    params[1] = HiveStringUtils.normalizeIdentifier(dbName);
     int index = 0;
     for (String partName : partNames) {
       params[index + 2] = partName;
@@ -4739,8 +4741,8 @@ public class ObjectStore implements RawStore, Configurable {
   public List<MTablePrivilege> listAllTableGrants(
       String principalName, PrincipalType principalType, String dbName,
       String tableName) {
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     boolean success = false;
     List<MTablePrivilege> mSecurityTabPartList = null;
@@ -4773,8 +4775,8 @@ public class ObjectStore implements RawStore, Configurable {
       String principalName, PrincipalType principalType, String dbName,
       String tableName, String partName) {
     boolean success = false;
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
 
     List<MPartitionPrivilege> mSecurityTabPartList = null;
     try {
@@ -4808,9 +4810,9 @@ public class ObjectStore implements RawStore, Configurable {
       String principalName, PrincipalType principalType, String dbName,
       String tableName, String columnName) {
     boolean success = false;
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
-    columnName = columnName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
+    columnName = HiveStringUtils.normalizeIdentifier(columnName);
     List<MTableColumnPrivilege> mSecurityColList = null;
     try {
       openTransaction();
@@ -4842,9 +4844,9 @@ public class ObjectStore implements RawStore, Configurable {
       String principalName, PrincipalType principalType, String dbName,
       String tableName, String partitionName, String columnName) {
     boolean success = false;
-    tableName = tableName.toLowerCase().trim();
-    dbName = dbName.toLowerCase().trim();
-    columnName = columnName.toLowerCase().trim();
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
+    columnName = HiveStringUtils.normalizeIdentifier(columnName);
 
     List<MPartitionColumnPrivilege> mSecurityColList = null;
     try {
@@ -5995,7 +5997,8 @@ public class ObjectStore implements RawStore, Configurable {
   protected ColumnStatistics getTableColumnStatisticsInternal(
       String dbName, String tableName, final List<String> colNames, boolean allowSql,
       boolean allowJdo) throws MetaException, NoSuchObjectException {
-    return new GetStatHelper(dbName.toLowerCase(), tableName.toLowerCase(), allowSql, allowJdo) {
+    return new GetStatHelper(HiveStringUtils.normalizeIdentifier(dbName),
+        HiveStringUtils.normalizeIdentifier(tableName), allowSql, allowJdo) {
       @Override
       protected ColumnStatistics getSqlResult(GetHelper<ColumnStatistics> ctx) throws MetaException {
         return directSql.getTableStats(dbName, tblName, colNames);
@@ -6212,7 +6215,9 @@ public class ObjectStore implements RawStore, Configurable {
       if (colName != null) {
         query.setUnique(true);
         mStatsObj = (MPartitionColumnStatistics)query.executeWithArray(partName.trim(),
-                                                dbName.trim(), tableName.trim(), colName.trim());
+            HiveStringUtils.normalizeIdentifier(dbName),
+            HiveStringUtils.normalizeIdentifier(tableName),
+            HiveStringUtils.normalizeIdentifier(colName));
         pm.retrieve(mStatsObj);
 
         if (mStatsObj != null) {
@@ -6223,7 +6228,8 @@ public class ObjectStore implements RawStore, Configurable {
         }
       } else {
         mStatsObjColl= (List<MPartitionColumnStatistics>)query.execute(partName.trim(),
-                                  dbName.trim(), tableName.trim());
+            HiveStringUtils.normalizeIdentifier(dbName),
+            HiveStringUtils.normalizeIdentifier(tableName));
         pm.retrieveAll(mStatsObjColl);
 
         if (mStatsObjColl != null) {
@@ -6289,8 +6295,10 @@ public class ObjectStore implements RawStore, Configurable {
 
       if (colName != null) {
         query.setUnique(true);
-        mStatsObj = (MTableColumnStatistics)query.execute(tableName.trim(),
-                                                    dbName.trim(), colName.trim());
+        mStatsObj = (MTableColumnStatistics)query.execute(
+            HiveStringUtils.normalizeIdentifier(tableName),
+            HiveStringUtils.normalizeIdentifier(dbName),
+            HiveStringUtils.normalizeIdentifier(colName));
         pm.retrieve(mStatsObj);
 
         if (mStatsObj != null) {
@@ -6300,7 +6308,9 @@ public class ObjectStore implements RawStore, Configurable {
               + tableName + " col=" + colName);
         }
       } else {
-        mStatsObjColl= (List<MTableColumnStatistics>)query.execute(tableName.trim(), dbName.trim());
+        mStatsObjColl= (List<MTableColumnStatistics>)query.execute(
+            HiveStringUtils.normalizeIdentifier(tableName),
+            HiveStringUtils.normalizeIdentifier(dbName));
         pm.retrieveAll(mStatsObjColl);
 
         if (mStatsObjColl != null) {
@@ -6679,8 +6689,8 @@ public class ObjectStore implements RawStore, Configurable {
     boolean success = false;
     try {
       openTransaction();
-      dbName = dbName.toLowerCase().trim();
-      tableName = tableName.toLowerCase().trim();
+      dbName = HiveStringUtils.normalizeIdentifier(dbName);
+      tableName = HiveStringUtils.normalizeIdentifier(tableName);
 
       // TODO: this could also be passed from upper layer; or this method should filter the list.
       MTable mtbl = getMTable(dbName, tableName);
@@ -6812,8 +6822,8 @@ public class ObjectStore implements RawStore, Configurable {
     boolean success = false;
     try {
       openTransaction();
-      funcName = funcName.toLowerCase();
-      dbName = dbName.toLowerCase();
+      funcName = HiveStringUtils.normalizeIdentifier(funcName);
+      dbName = HiveStringUtils.normalizeIdentifier(dbName);
       MFunction newf = convertToMFunction(newFunction);
       if (newf == null) {
         throw new InvalidObjectException("new function is invalid");
@@ -6825,7 +6835,7 @@ public class ObjectStore implements RawStore, Configurable {
       }
 
       // For now only alter name, owner, class name, type
-      oldf.setFunctionName(newf.getFunctionName().toLowerCase());
+      oldf.setFunctionName(HiveStringUtils.normalizeIdentifier(newf.getFunctionName()));
       oldf.setDatabase(newf.getDatabase());
       oldf.setOwnerName(newf.getOwnerName());
       oldf.setOwnerType(newf.getOwnerType());
@@ -6866,8 +6876,8 @@ public class ObjectStore implements RawStore, Configurable {
     boolean commited = false;
     try {
       openTransaction();
-      db = db.toLowerCase().trim();
-      function = function.toLowerCase().trim();
+      db = HiveStringUtils.normalizeIdentifier(db);
+      function = HiveStringUtils.normalizeIdentifier(function);
       Query query = pm.newQuery(MFunction.class, "functionName == function && database.name == db");
       query.declareParameters("java.lang.String function, java.lang.String db");
       query.setUnique(true);
@@ -6905,7 +6915,7 @@ public class ObjectStore implements RawStore, Configurable {
     List<String> funcs = null;
     try {
       openTransaction();
-      dbName = dbName.toLowerCase().trim();
+      dbName = HiveStringUtils.normalizeIdentifier(dbName);
       // Take the pattern and split it on the | to get all the composing
       // patterns
       String[] subpatterns = pattern.trim().split("\\|");
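
The underlying bug was that ObjectStore mixed normalized and raw identifiers; for example, the old params[0] = tableName assignment above skipped lowercasing, so rows keyed under one casing could be missed by a lookup using another, and dropping a table created with an uppercase name failed after column stats were computed. A toy model of that mismatch, reusing the normalizeIdentifier helper (the map is a stand-in for the stats tables, and the key format is hypothetical):

import java.util.HashMap;
import java.util.Map;

// Toy model of the HIVE-8863 mismatch: stats written under a normalized key
// are invisible to a lookup that forgets to normalize.
public class CaseMismatchSketch {
  static String normalizeIdentifier(String id) {
    return id.trim().toLowerCase();
  }

  public static void main(String[] args) {
    Map<String, String> colStats = new HashMap<>();
    // Writer path normalizes the table name before persisting stats.
    colStats.put(normalizeIdentifier("TestTable1") + "/key", "numDVs=309");

    // Buggy reader path uses the raw name, so the drop cannot find the row.
    System.out.println(colStats.get("TestTable1/key"));                            // null
    // Fixed reader path normalizes through the same helper.
    System.out.println(colStats.get(normalizeIdentifier("TestTable1") + "/key"));  // numDVs=309
  }
}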

http://git-wip-us.apache.org/repos/asf/hive/blob/fba31e76/ql/src/test/queries/clientpositive/drop_partition_with_stats.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/drop_partition_with_stats.q b/ql/src/test/queries/clientpositive/drop_partition_with_stats.q
new file mode 100644
index 0000000..40b43c2
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/drop_partition_with_stats.q
@@ -0,0 +1,68 @@
+-- This test verifies that a table partition can be dropped after column stats have been computed
+-- The column stats for a partitioned table go to PART_COL_STATS
+CREATE DATABASE IF NOT EXISTS partstatsdb1;
+USE partstatsdb1;
+CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable PARTITION (part1='p11', Part2='P12');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable PARTITION (part1='p21', Part2='P22');
+ANALYZE TABLE testtable COMPUTE STATISTICS FOR COLUMNS key;
+ANALYZE TABLE testtable PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key;
+
+
+CREATE TABLE IF NOT EXISTS TestTable1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p11', Part2='P11');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p11', Part2='P12');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p21', Part2='P22');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p31', Part2='P32');
+ANALYZE TABLE TestTable1 COMPUTE STATISTICS FOR COLUMNS key;
+ANALYZE TABLE TestTable1 PARTITION (part1='p11') COMPUTE STATISTICS FOR COLUMNS key;
+ANALYZE TABLE TestTable1 PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key;
+
+CREATE TABLE IF NOT EXISTS TESTTABLE2 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2 PARTITION (part1='p11', Part2='P12');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2 PARTITION (part1='p21', Part2='P22');
+ANALYZE TABLE TESTTABLE2 COMPUTE STATISTICS FOR COLUMNS key;
+ANALYZE TABLE TESTTABLE2 PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key;
+
+ALTER TABLE partstatsdb1.testtable DROP PARTITION (part1='p11', Part2='P12');
+ALTER TABLE partstatsdb1.TestTable1 DROP PARTITION (part1='p11', Part2='P12');
+ALTER TABLE partstatsdb1.TESTTABLE2 DROP PARTITION (part1='p11', Part2='P12');
+
+DROP TABLE partstatsdb1.testtable;
+DROP TABLE partstatsdb1.TestTable1;
+DROP TABLE partstatsdb1.TESTTABLE2;
+DROP DATABASE partstatsdb1;
+
+CREATE DATABASE IF NOT EXISTS PARTSTATSDB2;
+USE PARTSTATSDB2;
+CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable PARTITION (part1='p11', Part2='P12');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable PARTITION (part1='p21', Part2='P22');
+ANALYZE TABLE testtable COMPUTE STATISTICS FOR COLUMNS key;
+ANALYZE TABLE testtable PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key;
+
+
+CREATE TABLE IF NOT EXISTS TestTable1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p11', Part2='P11');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p11', Part2='P12');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p21', Part2='P22');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p31', Part2='P32');
+ANALYZE TABLE TestTable1 COMPUTE STATISTICS FOR COLUMNS key;
+ANALYZE TABLE TestTable1 PARTITION (part1='p11') COMPUTE STATISTICS FOR COLUMNS key;
+ANALYZE TABLE TestTable1 PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key;
+
+CREATE TABLE IF NOT EXISTS TESTTABLE2 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2 PARTITION (part1='p11', Part2='P12');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2 PARTITION (part1='p21', Part2='P22');
+ANALYZE TABLE TESTTABLE2 COMPUTE STATISTICS FOR COLUMNS key;
+ANALYZE TABLE TESTTABLE2 PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key;
+
+ALTER TABLE PARTSTATSDB2.testtable DROP PARTITION (part1='p11', Part2='P12');
+ALTER TABLE PARTSTATSDB2.TestTable1 DROP PARTITION (part1='p11', Part2='P12');
+ALTER TABLE PARTSTATSDB2.TESTTABLE2 DROP PARTITION (part1='p11', Part2='P12');
+
+DROP TABLE PARTSTATSDB2.testtable;
+DROP TABLE PARTSTATSDB2.TestTable1;
+DROP TABLE PARTSTATSDB2.TESTTABLE2;
+DROP DATABASE PARTSTATSDB2;
+

http://git-wip-us.apache.org/repos/asf/hive/blob/fba31e76/ql/src/test/queries/clientpositive/drop_table_with_stats.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/drop_table_with_stats.q b/ql/src/test/queries/clientpositive/drop_table_with_stats.q
new file mode 100644
index 0000000..b655b53
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/drop_table_with_stats.q
@@ -0,0 +1,43 @@
+-- This test verifies that a table can be dropped after column stats have been computed
+-- The column stats for an unpartitioned table go to TAB_COL_STATS
+CREATE DATABASE IF NOT EXISTS tblstatsdb1;
+USE tblstatsdb1;
+CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable;
+ANALYZE TABLE testtable COMPUTE STATISTICS FOR COLUMNS key;
+
+CREATE TABLE IF NOT EXISTS TestTable1 (key STRING, value STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1;
+ANALYZE TABLE TestTable1 COMPUTE STATISTICS FOR COLUMNS key;
+
+CREATE TABLE IF NOT EXISTS TESTTABLE2 (key STRING, value STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2;
+ANALYZE TABLE TESTTABLE2 COMPUTE STATISTICS FOR COLUMNS key;
+
+DROP TABLE tblstatsdb1.testtable;
+DROP TABLE tblstatsdb1.TestTable1;
+DROP TABLE tblstatsdb1.TESTTABLE2;
+DROP DATABASE tblstatsdb1;
+
+CREATE DATABASE IF NOT EXISTS TBLSTATSDB2;
+USE TBLSTATSDB2;
+CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable;
+ANALYZE TABLE testtable COMPUTE STATISTICS FOR COLUMNS key;
+
+
+CREATE TABLE IF NOT EXISTS TestTable1 (key STRING, value STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1;
+ANALYZE TABLE TestTable1 COMPUTE STATISTICS FOR COLUMNS key;
+
+
+CREATE TABLE IF NOT EXISTS TESTTABLE2 (key STRING, value STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2;
+ANALYZE TABLE TESTTABLE2 COMPUTE STATISTICS FOR COLUMNS key;
+
+
+DROP TABLE TBLSTATSDB2.testtable;
+DROP TABLE TBLSTATSDB2.TestTable1;
+DROP TABLE TBLSTATSDB2.TESTTABLE2;
+DROP DATABASE TBLSTATSDB2;
+

http://git-wip-us.apache.org/repos/asf/hive/blob/fba31e76/ql/src/test/results/clientpositive/drop_partition_with_stats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/drop_partition_with_stats.q.out b/ql/src/test/results/clientpositive/drop_partition_with_stats.q.out
new file mode 100644
index 0000000..e27e557
--- /dev/null
+++ b/ql/src/test/results/clientpositive/drop_partition_with_stats.q.out
@@ -0,0 +1,496 @@
+PREHOOK: query: -- This test verifies that a table partition can be dropped after column stats have been computed
+-- The column stats for a partitioned table go to PART_COL_STATS
+CREATE DATABASE IF NOT EXISTS partstatsdb1
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:partstatsdb1
+POSTHOOK: query: -- This test verifies that a table partition can be dropped after column stats have been computed
+-- The column stats for a partitioned table go to PART_COL_STATS
+CREATE DATABASE IF NOT EXISTS partstatsdb1
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:partstatsdb1
+PREHOOK: query: USE partstatsdb1
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:partstatsdb1
+POSTHOOK: query: USE partstatsdb1
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:partstatsdb1
+PREHOOK: query: CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:partstatsdb1
+PREHOOK: Output: partstatsdb1@testtable
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:partstatsdb1
+POSTHOOK: Output: partstatsdb1@testtable
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable PARTITION (part1='p11', Part2='P12')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb1@testtable
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable PARTITION (part1='p11', Part2='P12')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb1@testtable
+POSTHOOK: Output: partstatsdb1@testtable@part1=p11/part2=P12
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable PARTITION (part1='p21', Part2='P22')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb1@testtable
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable PARTITION (part1='p21', Part2='P22')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb1@testtable
+POSTHOOK: Output: partstatsdb1@testtable@part1=p21/part2=P22
+PREHOOK: query: ANALYZE TABLE testtable COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb1@testtable
+PREHOOK: Input: partstatsdb1@testtable@part1=p11/part2=P12
+PREHOOK: Input: partstatsdb1@testtable@part1=p21/part2=P22
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE testtable COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb1@testtable
+POSTHOOK: Input: partstatsdb1@testtable@part1=p11/part2=P12
+POSTHOOK: Input: partstatsdb1@testtable@part1=p21/part2=P22
+#### A masked pattern was here ####
+PREHOOK: query: ANALYZE TABLE testtable PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb1@testtable
+PREHOOK: Input: partstatsdb1@testtable@part1=p11/part2=P12
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE testtable PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb1@testtable
+POSTHOOK: Input: partstatsdb1@testtable@part1=p11/part2=P12
+#### A masked pattern was here ####
+PREHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:partstatsdb1
+PREHOOK: Output: partstatsdb1@TestTable1
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:partstatsdb1
+POSTHOOK: Output: partstatsdb1@TestTable1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p11', Part2='P11')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb1@testtable1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p11', Part2='P11')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb1@testtable1
+POSTHOOK: Output: partstatsdb1@testtable1@part1=p11/part2=P11
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p11', Part2='P12')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb1@testtable1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p11', Part2='P12')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb1@testtable1
+POSTHOOK: Output: partstatsdb1@testtable1@part1=p11/part2=P12
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p21', Part2='P22')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb1@testtable1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p21', Part2='P22')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb1@testtable1
+POSTHOOK: Output: partstatsdb1@testtable1@part1=p21/part2=P22
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p31', Part2='P32')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb1@testtable1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p31', Part2='P32')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb1@testtable1
+POSTHOOK: Output: partstatsdb1@testtable1@part1=p31/part2=P32
+PREHOOK: query: ANALYZE TABLE TestTable1 COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb1@testtable1
+PREHOOK: Input: partstatsdb1@testtable1@part1=p11/part2=P11
+PREHOOK: Input: partstatsdb1@testtable1@part1=p11/part2=P12
+PREHOOK: Input: partstatsdb1@testtable1@part1=p21/part2=P22
+PREHOOK: Input: partstatsdb1@testtable1@part1=p31/part2=P32
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TestTable1 COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb1@testtable1
+POSTHOOK: Input: partstatsdb1@testtable1@part1=p11/part2=P11
+POSTHOOK: Input: partstatsdb1@testtable1@part1=p11/part2=P12
+POSTHOOK: Input: partstatsdb1@testtable1@part1=p21/part2=P22
+POSTHOOK: Input: partstatsdb1@testtable1@part1=p31/part2=P32
+#### A masked pattern was here ####
+PREHOOK: query: ANALYZE TABLE TestTable1 PARTITION (part1='p11') COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb1@testtable1
+PREHOOK: Input: partstatsdb1@testtable1@part1=p11/part2=P11
+PREHOOK: Input: partstatsdb1@testtable1@part1=p11/part2=P12
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TestTable1 PARTITION (part1='p11') COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb1@testtable1
+POSTHOOK: Input: partstatsdb1@testtable1@part1=p11/part2=P11
+POSTHOOK: Input: partstatsdb1@testtable1@part1=p11/part2=P12
+#### A masked pattern was here ####
+PREHOOK: query: ANALYZE TABLE TestTable1 PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb1@testtable1
+PREHOOK: Input: partstatsdb1@testtable1@part1=p11/part2=P12
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TestTable1 PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb1@testtable1
+POSTHOOK: Input: partstatsdb1@testtable1@part1=p11/part2=P12
+#### A masked pattern was here ####
+PREHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:partstatsdb1
+PREHOOK: Output: partstatsdb1@TESTTABLE2
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:partstatsdb1
+POSTHOOK: Output: partstatsdb1@TESTTABLE2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2 PARTITION (part1='p11', Part2='P12')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb1@testtable2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2 PARTITION (part1='p11', Part2='P12')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb1@testtable2
+POSTHOOK: Output: partstatsdb1@testtable2@part1=p11/part2=P12
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2 PARTITION (part1='p21', Part2='P22')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb1@testtable2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2 PARTITION (part1='p21', Part2='P22')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb1@testtable2
+POSTHOOK: Output: partstatsdb1@testtable2@part1=p21/part2=P22
+PREHOOK: query: ANALYZE TABLE TESTTABLE2 COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb1@testtable2
+PREHOOK: Input: partstatsdb1@testtable2@part1=p11/part2=P12
+PREHOOK: Input: partstatsdb1@testtable2@part1=p21/part2=P22
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TESTTABLE2 COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb1@testtable2
+POSTHOOK: Input: partstatsdb1@testtable2@part1=p11/part2=P12
+POSTHOOK: Input: partstatsdb1@testtable2@part1=p21/part2=P22
+#### A masked pattern was here ####
+PREHOOK: query: ANALYZE TABLE TESTTABLE2 PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb1@testtable2
+PREHOOK: Input: partstatsdb1@testtable2@part1=p11/part2=P12
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TESTTABLE2 PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb1@testtable2
+POSTHOOK: Input: partstatsdb1@testtable2@part1=p11/part2=P12
+#### A masked pattern was here ####
+PREHOOK: query: ALTER TABLE partstatsdb1.testtable DROP PARTITION (part1='p11', Part2='P12')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: partstatsdb1@testtable
+PREHOOK: Output: partstatsdb1@testtable@part1=p11/part2=P12
+POSTHOOK: query: ALTER TABLE partstatsdb1.testtable DROP PARTITION (part1='p11', Part2='P12')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: partstatsdb1@testtable
+POSTHOOK: Output: partstatsdb1@testtable@part1=p11/part2=P12
+PREHOOK: query: ALTER TABLE partstatsdb1.TestTable1 DROP PARTITION (part1='p11', Part2='P12')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: partstatsdb1@testtable1
+PREHOOK: Output: partstatsdb1@testtable1@part1=p11/part2=P12
+POSTHOOK: query: ALTER TABLE partstatsdb1.TestTable1 DROP PARTITION (part1='p11', Part2='P12')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: partstatsdb1@testtable1
+POSTHOOK: Output: partstatsdb1@testtable1@part1=p11/part2=P12
+PREHOOK: query: ALTER TABLE partstatsdb1.TESTTABLE2 DROP PARTITION (part1='p11', Part2='P12')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: partstatsdb1@testtable2
+PREHOOK: Output: partstatsdb1@testtable2@part1=p11/part2=P12
+POSTHOOK: query: ALTER TABLE partstatsdb1.TESTTABLE2 DROP PARTITION (part1='p11', Part2='P12')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: partstatsdb1@testtable2
+POSTHOOK: Output: partstatsdb1@testtable2@part1=p11/part2=P12
+PREHOOK: query: DROP TABLE partstatsdb1.testtable
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: partstatsdb1@testtable
+PREHOOK: Output: partstatsdb1@testtable
+POSTHOOK: query: DROP TABLE partstatsdb1.testtable
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: partstatsdb1@testtable
+POSTHOOK: Output: partstatsdb1@testtable
+PREHOOK: query: DROP TABLE partstatsdb1.TestTable1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: partstatsdb1@testtable1
+PREHOOK: Output: partstatsdb1@testtable1
+POSTHOOK: query: DROP TABLE partstatsdb1.TestTable1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: partstatsdb1@testtable1
+POSTHOOK: Output: partstatsdb1@testtable1
+PREHOOK: query: DROP TABLE partstatsdb1.TESTTABLE2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: partstatsdb1@testtable2
+PREHOOK: Output: partstatsdb1@testtable2
+POSTHOOK: query: DROP TABLE partstatsdb1.TESTTABLE2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: partstatsdb1@testtable2
+POSTHOOK: Output: partstatsdb1@testtable2
+PREHOOK: query: DROP DATABASE partstatsdb1
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:partstatsdb1
+PREHOOK: Output: database:partstatsdb1
+POSTHOOK: query: DROP DATABASE partstatsdb1
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:partstatsdb1
+POSTHOOK: Output: database:partstatsdb1
+PREHOOK: query: CREATE DATABASE IF NOT EXISTS PARTSTATSDB2
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:PARTSTATSDB2
+POSTHOOK: query: CREATE DATABASE IF NOT EXISTS PARTSTATSDB2
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:PARTSTATSDB2
+PREHOOK: query: USE PARTSTATSDB2
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:partstatsdb2
+POSTHOOK: query: USE PARTSTATSDB2
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:partstatsdb2
+PREHOOK: query: CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: PARTSTATSDB2@testtable
+PREHOOK: Output: database:partstatsdb2
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: PARTSTATSDB2@testtable
+POSTHOOK: Output: database:partstatsdb2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable PARTITION (part1='p11', Part2='P12')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb2@testtable
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable PARTITION (part1='p11', Part2='P12')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb2@testtable
+POSTHOOK: Output: partstatsdb2@testtable@part1=p11/part2=P12
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable PARTITION (part1='p21', Part2='P22')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb2@testtable
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable PARTITION (part1='p21', Part2='P22')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb2@testtable
+POSTHOOK: Output: partstatsdb2@testtable@part1=p21/part2=P22
+PREHOOK: query: ANALYZE TABLE testtable COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb2@testtable
+PREHOOK: Input: partstatsdb2@testtable@part1=p11/part2=P12
+PREHOOK: Input: partstatsdb2@testtable@part1=p21/part2=P22
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE testtable COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb2@testtable
+POSTHOOK: Input: partstatsdb2@testtable@part1=p11/part2=P12
+POSTHOOK: Input: partstatsdb2@testtable@part1=p21/part2=P22
+#### A masked pattern was here ####
+PREHOOK: query: ANALYZE TABLE testtable PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb2@testtable
+PREHOOK: Input: partstatsdb2@testtable@part1=p11/part2=P12
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE testtable PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb2@testtable
+POSTHOOK: Input: partstatsdb2@testtable@part1=p11/part2=P12
+#### A masked pattern was here ####
+PREHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: PARTSTATSDB2@TestTable1
+PREHOOK: Output: database:partstatsdb2
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: PARTSTATSDB2@TestTable1
+POSTHOOK: Output: database:partstatsdb2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p11', Part2='P11')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb2@testtable1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p11', Part2='P11')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb2@testtable1
+POSTHOOK: Output: partstatsdb2@testtable1@part1=p11/part2=P11
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p11', Part2='P12')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb2@testtable1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p11', Part2='P12')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb2@testtable1
+POSTHOOK: Output: partstatsdb2@testtable1@part1=p11/part2=P12
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p21', Part2='P22')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb2@testtable1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p21', Part2='P22')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb2@testtable1
+POSTHOOK: Output: partstatsdb2@testtable1@part1=p21/part2=P22
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p31', Part2='P32')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb2@testtable1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1 PARTITION (part1='p31', Part2='P32')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb2@testtable1
+POSTHOOK: Output: partstatsdb2@testtable1@part1=p31/part2=P32
+PREHOOK: query: ANALYZE TABLE TestTable1 COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb2@testtable1
+PREHOOK: Input: partstatsdb2@testtable1@part1=p11/part2=P11
+PREHOOK: Input: partstatsdb2@testtable1@part1=p11/part2=P12
+PREHOOK: Input: partstatsdb2@testtable1@part1=p21/part2=P22
+PREHOOK: Input: partstatsdb2@testtable1@part1=p31/part2=P32
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TestTable1 COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb2@testtable1
+POSTHOOK: Input: partstatsdb2@testtable1@part1=p11/part2=P11
+POSTHOOK: Input: partstatsdb2@testtable1@part1=p11/part2=P12
+POSTHOOK: Input: partstatsdb2@testtable1@part1=p21/part2=P22
+POSTHOOK: Input: partstatsdb2@testtable1@part1=p31/part2=P32
+#### A masked pattern was here ####
+PREHOOK: query: ANALYZE TABLE TestTable1 PARTITION (part1='p11') COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb2@testtable1
+PREHOOK: Input: partstatsdb2@testtable1@part1=p11/part2=P11
+PREHOOK: Input: partstatsdb2@testtable1@part1=p11/part2=P12
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TestTable1 PARTITION (part1='p11') COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb2@testtable1
+POSTHOOK: Input: partstatsdb2@testtable1@part1=p11/part2=P11
+POSTHOOK: Input: partstatsdb2@testtable1@part1=p11/part2=P12
+#### A masked pattern was here ####
+PREHOOK: query: ANALYZE TABLE TestTable1 PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb2@testtable1
+PREHOOK: Input: partstatsdb2@testtable1@part1=p11/part2=P12
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TestTable1 PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb2@testtable1
+POSTHOOK: Input: partstatsdb2@testtable1@part1=p11/part2=P12
+#### A masked pattern was here ####
+PREHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: PARTSTATSDB2@TESTTABLE2
+PREHOOK: Output: database:partstatsdb2
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: PARTSTATSDB2@TESTTABLE2
+POSTHOOK: Output: database:partstatsdb2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2 PARTITION (part1='p11', Part2='P12')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb2@testtable2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2 PARTITION (part1='p11', Part2='P12')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb2@testtable2
+POSTHOOK: Output: partstatsdb2@testtable2@part1=p11/part2=P12
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2 PARTITION (part1='p21', Part2='P22')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: partstatsdb2@testtable2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2 PARTITION (part1='p21', Part2='P22')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: partstatsdb2@testtable2
+POSTHOOK: Output: partstatsdb2@testtable2@part1=p21/part2=P22
+PREHOOK: query: ANALYZE TABLE TESTTABLE2 COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb2@testtable2
+PREHOOK: Input: partstatsdb2@testtable2@part1=p11/part2=P12
+PREHOOK: Input: partstatsdb2@testtable2@part1=p21/part2=P22
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TESTTABLE2 COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb2@testtable2
+POSTHOOK: Input: partstatsdb2@testtable2@part1=p11/part2=P12
+POSTHOOK: Input: partstatsdb2@testtable2@part1=p21/part2=P22
+#### A masked pattern was here ####
+PREHOOK: query: ANALYZE TABLE TESTTABLE2 PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: partstatsdb2@testtable2
+PREHOOK: Input: partstatsdb2@testtable2@part1=p11/part2=P12
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TESTTABLE2 PARTITION (part1='p11', Part2='P12') COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: partstatsdb2@testtable2
+POSTHOOK: Input: partstatsdb2@testtable2@part1=p11/part2=P12
+#### A masked pattern was here ####
+PREHOOK: query: ALTER TABLE PARTSTATSDB2.testtable DROP PARTITION (part1='p11', Part2='P12')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: partstatsdb2@testtable
+PREHOOK: Output: partstatsdb2@testtable@part1=p11/part2=P12
+POSTHOOK: query: ALTER TABLE PARTSTATSDB2.testtable DROP PARTITION (part1='p11', Part2='P12')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: partstatsdb2@testtable
+POSTHOOK: Output: partstatsdb2@testtable@part1=p11/part2=P12
+PREHOOK: query: ALTER TABLE PARTSTATSDB2.TestTable1 DROP PARTITION (part1='p11', Part2='P12')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: partstatsdb2@testtable1
+PREHOOK: Output: partstatsdb2@testtable1@part1=p11/part2=P12
+POSTHOOK: query: ALTER TABLE PARTSTATSDB2.TestTable1 DROP PARTITION (part1='p11', Part2='P12')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: partstatsdb2@testtable1
+POSTHOOK: Output: partstatsdb2@testtable1@part1=p11/part2=P12
+PREHOOK: query: ALTER TABLE PARTSTATSDB2.TESTTABLE2 DROP PARTITION (part1='p11', Part2='P12')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: partstatsdb2@testtable2
+PREHOOK: Output: partstatsdb2@testtable2@part1=p11/part2=P12
+POSTHOOK: query: ALTER TABLE PARTSTATSDB2.TESTTABLE2 DROP PARTITION (part1='p11', Part2='P12')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: partstatsdb2@testtable2
+POSTHOOK: Output: partstatsdb2@testtable2@part1=p11/part2=P12
+PREHOOK: query: DROP TABLE PARTSTATSDB2.testtable
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: partstatsdb2@testtable
+PREHOOK: Output: partstatsdb2@testtable
+POSTHOOK: query: DROP TABLE PARTSTATSDB2.testtable
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: partstatsdb2@testtable
+POSTHOOK: Output: partstatsdb2@testtable
+PREHOOK: query: DROP TABLE PARTSTATSDB2.TestTable1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: partstatsdb2@testtable1
+PREHOOK: Output: partstatsdb2@testtable1
+POSTHOOK: query: DROP TABLE PARTSTATSDB2.TestTable1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: partstatsdb2@testtable1
+POSTHOOK: Output: partstatsdb2@testtable1
+PREHOOK: query: DROP TABLE PARTSTATSDB2.TESTTABLE2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: partstatsdb2@testtable2
+PREHOOK: Output: partstatsdb2@testtable2
+POSTHOOK: query: DROP TABLE PARTSTATSDB2.TESTTABLE2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: partstatsdb2@testtable2
+POSTHOOK: Output: partstatsdb2@testtable2
+PREHOOK: query: DROP DATABASE PARTSTATSDB2
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:partstatsdb2
+PREHOOK: Output: database:partstatsdb2
+POSTHOOK: query: DROP DATABASE PARTSTATSDB2
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:partstatsdb2
+POSTHOOK: Output: database:partstatsdb2

http://git-wip-us.apache.org/repos/asf/hive/blob/fba31e76/ql/src/test/results/clientpositive/drop_table_with_stats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/drop_table_with_stats.q.out b/ql/src/test/results/clientpositive/drop_table_with_stats.q.out
new file mode 100644
index 0000000..fbc3ab9
--- /dev/null
+++ b/ql/src/test/results/clientpositive/drop_table_with_stats.q.out
@@ -0,0 +1,236 @@
+PREHOOK: query: -- This test verifies that a table could be dropped with columns stats computed
+-- The column stats for table without partition will go to TAB_COL_STATS
+CREATE DATABASE IF NOT EXISTS tblstatsdb1
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:tblstatsdb1
+POSTHOOK: query: -- This test verifies that a table could be dropped with columns stats computed
+-- The column stats for table without partition will go to TAB_COL_STATS
+CREATE DATABASE IF NOT EXISTS tblstatsdb1
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:tblstatsdb1
+PREHOOK: query: USE tblstatsdb1
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:tblstatsdb1
+POSTHOOK: query: USE tblstatsdb1
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:tblstatsdb1
+PREHOOK: query: CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:tblstatsdb1
+PREHOOK: Output: tblstatsdb1@testtable
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:tblstatsdb1
+POSTHOOK: Output: tblstatsdb1@testtable
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: tblstatsdb1@testtable
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: tblstatsdb1@testtable
+PREHOOK: query: ANALYZE TABLE testtable COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: tblstatsdb1@testtable
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE testtable COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: tblstatsdb1@testtable
+#### A masked pattern was here ####
+PREHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1 (key STRING, value STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:tblstatsdb1
+PREHOOK: Output: tblstatsdb1@TestTable1
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1 (key STRING, value STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:tblstatsdb1
+POSTHOOK: Output: tblstatsdb1@TestTable1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: tblstatsdb1@testtable1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: tblstatsdb1@testtable1
+PREHOOK: query: ANALYZE TABLE TestTable1 COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: tblstatsdb1@testtable1
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TestTable1 COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: tblstatsdb1@testtable1
+#### A masked pattern was here ####
+PREHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2 (key STRING, value STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:tblstatsdb1
+PREHOOK: Output: tblstatsdb1@TESTTABLE2
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2 (key STRING, value STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:tblstatsdb1
+POSTHOOK: Output: tblstatsdb1@TESTTABLE2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: tblstatsdb1@testtable2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: tblstatsdb1@testtable2
+PREHOOK: query: ANALYZE TABLE TESTTABLE2 COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: tblstatsdb1@testtable2
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TESTTABLE2 COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: tblstatsdb1@testtable2
+#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE tblstatsdb1.testtable
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: tblstatsdb1@testtable
+PREHOOK: Output: tblstatsdb1@testtable
+POSTHOOK: query: DROP TABLE tblstatsdb1.testtable
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: tblstatsdb1@testtable
+POSTHOOK: Output: tblstatsdb1@testtable
+PREHOOK: query: DROP TABLE tblstatsdb1.TestTable1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: tblstatsdb1@testtable1
+PREHOOK: Output: tblstatsdb1@testtable1
+POSTHOOK: query: DROP TABLE tblstatsdb1.TestTable1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: tblstatsdb1@testtable1
+POSTHOOK: Output: tblstatsdb1@testtable1
+PREHOOK: query: DROP TABLE tblstatsdb1.TESTTABLE2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: tblstatsdb1@testtable2
+PREHOOK: Output: tblstatsdb1@testtable2
+POSTHOOK: query: DROP TABLE tblstatsdb1.TESTTABLE2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: tblstatsdb1@testtable2
+POSTHOOK: Output: tblstatsdb1@testtable2
+PREHOOK: query: DROP DATABASE tblstatsdb1
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:tblstatsdb1
+PREHOOK: Output: database:tblstatsdb1
+POSTHOOK: query: DROP DATABASE tblstatsdb1
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:tblstatsdb1
+POSTHOOK: Output: database:tblstatsdb1
+PREHOOK: query: CREATE DATABASE IF NOT EXISTS TBLSTATSDB2
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:TBLSTATSDB2
+POSTHOOK: query: CREATE DATABASE IF NOT EXISTS TBLSTATSDB2
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:TBLSTATSDB2
+PREHOOK: query: USE TBLSTATSDB2
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:tblstatsdb2
+POSTHOOK: query: USE TBLSTATSDB2
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:tblstatsdb2
+PREHOOK: query: CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: TBLSTATSDB2@testtable
+PREHOOK: Output: database:tblstatsdb2
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: TBLSTATSDB2@testtable
+POSTHOOK: Output: database:tblstatsdb2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: tblstatsdb2@testtable
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: tblstatsdb2@testtable
+PREHOOK: query: ANALYZE TABLE testtable COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: tblstatsdb2@testtable
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE testtable COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: tblstatsdb2@testtable
+#### A masked pattern was here ####
+PREHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1 (key STRING, value STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: TBLSTATSDB2@TestTable1
+PREHOOK: Output: database:tblstatsdb2
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1 (key STRING, value STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: TBLSTATSDB2@TestTable1
+POSTHOOK: Output: database:tblstatsdb2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: tblstatsdb2@testtable1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: tblstatsdb2@testtable1
+PREHOOK: query: ANALYZE TABLE TestTable1 COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: tblstatsdb2@testtable1
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TestTable1 COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: tblstatsdb2@testtable1
+#### A masked pattern was here ####
+PREHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2 (key STRING, value STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: TBLSTATSDB2@TESTTABLE2
+PREHOOK: Output: database:tblstatsdb2
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2 (key STRING, value STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: TBLSTATSDB2@TESTTABLE2
+POSTHOOK: Output: database:tblstatsdb2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: tblstatsdb2@testtable2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: tblstatsdb2@testtable2
+PREHOOK: query: ANALYZE TABLE TESTTABLE2 COMPUTE STATISTICS FOR COLUMNS key
+PREHOOK: type: QUERY
+PREHOOK: Input: tblstatsdb2@testtable2
+#### A masked pattern was here ####
+POSTHOOK: query: ANALYZE TABLE TESTTABLE2 COMPUTE STATISTICS FOR COLUMNS key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: tblstatsdb2@testtable2
+#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE TBLSTATSDB2.testtable
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: tblstatsdb2@testtable
+PREHOOK: Output: tblstatsdb2@testtable
+POSTHOOK: query: DROP TABLE TBLSTATSDB2.testtable
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: tblstatsdb2@testtable
+POSTHOOK: Output: tblstatsdb2@testtable
+PREHOOK: query: DROP TABLE TBLSTATSDB2.TestTable1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: tblstatsdb2@testtable1
+PREHOOK: Output: tblstatsdb2@testtable1
+POSTHOOK: query: DROP TABLE TBLSTATSDB2.TestTable1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: tblstatsdb2@testtable1
+POSTHOOK: Output: tblstatsdb2@testtable1
+PREHOOK: query: DROP TABLE TBLSTATSDB2.TESTTABLE2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: tblstatsdb2@testtable2
+PREHOOK: Output: tblstatsdb2@testtable2
+POSTHOOK: query: DROP TABLE TBLSTATSDB2.TESTTABLE2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: tblstatsdb2@testtable2
+POSTHOOK: Output: tblstatsdb2@testtable2
+PREHOOK: query: DROP DATABASE TBLSTATSDB2
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:tblstatsdb2
+PREHOOK: Output: database:tblstatsdb2
+POSTHOOK: query: DROP DATABASE TBLSTATSDB2
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:tblstatsdb2
+POSTHOOK: Output: database:tblstatsdb2


[7/7] hive git commit: HIVE-9177: Fix child operator references after NonBlockingOpDeDupProc (II) (Szehon via Xuefu) merged from trunk, r1646994

Posted by px...@apache.org.
HIVE-9177: Fix child operator references after NonBlockingOpDeDupProc (II) (Szehon via Xuefu)
merged from trunk, r1646994

git-svn-id: https://svn.apache.org/repos/asf/hive/branches/spark@1646995 13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/84af92e6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/84af92e6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/84af92e6

Branch: refs/heads/branch-1.0
Commit: 84af92e6520783d32abe1f5c6c263179748332ae
Parents: 329a336
Author: Xuefu Zhang <xu...@apache.org>
Authored: Sat Dec 20 14:50:56 2014 +0000
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Tue Aug 11 13:59:06 2015 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java  | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/84af92e6/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
index 5e0959a..5291851 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
@@ -186,7 +186,9 @@ public class NonBlockingOpDeDupProc implements Transform {
    * @param pSEL parent operator
    */
   private void fixContextReferences(SelectOperator cSEL, SelectOperator pSEL) {
-    Collection<QBJoinTree> qbJoinTrees = pctx.getJoinContext().values();
+    Collection<QBJoinTree> qbJoinTrees = new ArrayList<QBJoinTree>();
+    qbJoinTrees.addAll(pctx.getJoinContext().values());
+    qbJoinTrees.addAll(pctx.getMapJoinContext().values());
     for (QBJoinTree qbJoinTree : qbJoinTrees) {
       Map<String, Operator<? extends OperatorDesc>> aliasToOpInfo = qbJoinTree.getAliasToOpInfo();
       for (Map.Entry<String, Operator<? extends OperatorDesc>> entry : aliasToOpInfo.entrySet()) {
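
For illustration, a minimal self-contained sketch of the pattern this follow-up applies: copying the values of both lookup maps into one working list so a single loop visits every join tree, map-join trees included. Plain java.util types stand in for Hive's ParseContext and QBJoinTree here; those stand-ins are assumptions, not Hive's API.

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.HashMap;
    import java.util.Map;

    public class CombinedContextScan {
      public static void main(String[] args) {
        // Stand-ins for pctx.getJoinContext() and pctx.getMapJoinContext().
        Map<String, String> joinContext = new HashMap<>();
        Map<String, String> mapJoinContext = new HashMap<>();
        joinContext.put("j1", "treeA");
        mapJoinContext.put("mj1", "treeB");

        // As in the patch: merge both value sets before iterating, so no
        // context is skipped when references are fixed up.
        Collection<String> trees = new ArrayList<>();
        trees.addAll(joinContext.values());
        trees.addAll(mapJoinContext.values());

        for (String tree : trees) {
          System.out.println("visiting " + tree);
        }
      }
    }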


[6/7] hive git commit: HIVE-9113 : Explain on query failed with NPE (Navis reviewed by Szehon Ho)

Posted by px...@apache.org.
HIVE-9113 : Explain on query failed with NPE (Navis reviewed by Szehon Ho)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1646390 13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/329a3368
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/329a3368
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/329a3368

Branch: refs/heads/branch-1.0
Commit: 329a33689a0bf1d96092cf0b600617d2a51099b5
Parents: 2b9414b
Author: Navis Ryu <na...@apache.org>
Authored: Thu Dec 18 06:41:09 2014 +0000
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Tue Aug 11 13:58:16 2015 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/ql/parse/QBSubQuery.java | 30 ++++++++++++++------
 .../hadoop/hive/ql/parse/SubQueryUtils.java     | 15 +++++-----
 .../clientnegative/subquery_missing_from.q      |  1 +
 .../clientnegative/subquery_missing_from.q.out  |  3 ++
 4 files changed, 32 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/329a3368/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java
index 3c7b707..1b6b33b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java
@@ -382,7 +382,7 @@ public class QBSubQuery implements ISubQueryJoinInfo {
     /*
      * row resolver of the SubQuery.
      * Set by the SemanticAnalyzer after the Plan for the SubQuery is genned.
-     * This is neede in case the SubQuery select list contains a TOK_ALLCOLREF
+     * This is needed in case the SubQuery select list contains a TOK_ALLCOLREF
      */
     RowResolver sqRR;
 
@@ -513,7 +513,10 @@ public class QBSubQuery implements ISubQueryJoinInfo {
       String outerQueryAlias,
       Set<String> outerQryAliases) throws SemanticException {
 
-    ASTNode selectClause = (ASTNode) subQueryAST.getChild(1).getChild(1);
+    ASTNode fromClause = getChildFromSubqueryAST("From", HiveParser.TOK_FROM);
+    ASTNode insertClause = getChildFromSubqueryAST("Insert", HiveParser.TOK_INSERT);
+
+    ASTNode selectClause = (ASTNode) insertClause.getChild(1);
 
     int selectExprStart = 0;
     if ( selectClause.getChild(0).getType() == HiveParser.TOK_HINTLIST ) {
@@ -537,7 +540,7 @@ public class QBSubQuery implements ISubQueryJoinInfo {
      * Restriction 17.s :: SubQuery cannot use the same table alias as one used in
      * the Outer Query.
      */
-    List<String> sqAliases = SubQueryUtils.getTableAliasesInSubQuery(this);
+    List<String> sqAliases = SubQueryUtils.getTableAliasesInSubQuery(fromClause);
     String sharedAlias = null;
     for(String s : sqAliases ) {
       if ( outerQryAliases.contains(s) ) {
@@ -545,7 +548,7 @@ public class QBSubQuery implements ISubQueryJoinInfo {
       }
     }
     if ( sharedAlias != null) {
-      ASTNode whereClause = SubQueryUtils.subQueryWhere(subQueryAST);
+      ASTNode whereClause = SubQueryUtils.subQueryWhere(insertClause);
 
       if ( whereClause != null ) {
         ASTNode u = SubQueryUtils.hasUnQualifiedColumnReferences(whereClause);
@@ -581,7 +584,7 @@ public class QBSubQuery implements ISubQueryJoinInfo {
       containsAggregationExprs = containsAggregationExprs | ( r == 1 );
     }
 
-    rewrite(outerQueryRR, forHavingClause, outerQueryAlias);
+    rewrite(outerQueryRR, forHavingClause, outerQueryAlias, insertClause, selectClause);
 
     SubQueryUtils.setOriginDeep(subQueryAST, originalSQASTOrigin);
 
@@ -631,6 +634,16 @@ public class QBSubQuery implements ISubQueryJoinInfo {
 
   }
 
+  private ASTNode getChildFromSubqueryAST(String errorMsg, int type) throws SemanticException {
+    ASTNode childAST = (ASTNode) subQueryAST.getFirstChildWithType(type);
+    if (childAST == null && errorMsg != null) {
+      subQueryAST.setOrigin(originalSQASTOrigin);
+      throw new SemanticException(ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(
+          subQueryAST, errorMsg + " clause is missing in SubQuery."));
+    }
+    return childAST;
+  }
+
   private void setJoinType() {
     if ( operator.getType() == SubQueryType.NOT_IN ||
         operator.getType() == SubQueryType.NOT_EXISTS ) {
@@ -744,7 +757,7 @@ public class QBSubQuery implements ISubQueryJoinInfo {
    *         R2.x = min(R1.y)
    *      Where R1 is an outer table reference, and R2 is a SubQuery table reference.
    *   b. When hoisting the correlation predicate to a join predicate, we need to
-   *      rewrite it to be in the form the Join code allows: so the predicte needs
+   *      rewrite it to be in the form the Join code allows: so the predicate needs
    *      to contain a qualified column references.
    *      We handle this by generating a new name for the aggregation expression,
    *      like R1._gby_sq_col_1 and adding this mapping to the Outer Query's
@@ -753,9 +766,8 @@ public class QBSubQuery implements ISubQueryJoinInfo {
    */
   private void rewrite(RowResolver parentQueryRR,
       boolean forHavingClause,
-      String outerQueryAlias) throws SemanticException {
-    ASTNode selectClause = (ASTNode) subQueryAST.getChild(1).getChild(1);
-    ASTNode whereClause = SubQueryUtils.subQueryWhere(subQueryAST);
+      String outerQueryAlias, ASTNode insertClause, ASTNode selectClause) throws SemanticException {
+    ASTNode whereClause = SubQueryUtils.subQueryWhere(insertClause);
 
     if ( whereClause == null ) {
       return;

http://git-wip-us.apache.org/repos/asf/hive/blob/329a3368/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
index 089ad78..57868b7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
@@ -43,7 +43,7 @@ public class SubQueryUtils {
   }
 
   /*
-   * Remove the SubQuery from the Where CLause Tree.
+   * Remove the SubQuery from the Where Clause Tree.
    * return the remaining WhereClause.
    */
   static ASTNode rewriteParentQueryWhere(ASTNode whereCond, ASTNode subQuery)
@@ -271,10 +271,9 @@ public class SubQueryUtils {
     return r;
   }
 
-  static List<String> getTableAliasesInSubQuery(QBSubQuery sq) {
+  static List<String> getTableAliasesInSubQuery(ASTNode fromClause) {
     List<String> aliases = new ArrayList<String>();
-    ASTNode joinAST = (ASTNode) sq.getSubQueryAST().getChild(0);
-    getTableAliasesInSubQuery((ASTNode) joinAST.getChild(0), aliases);
+    getTableAliasesInSubQuery((ASTNode) fromClause.getChild(0), aliases);
     return aliases;
   }
 
@@ -318,10 +317,10 @@ public class SubQueryUtils {
     return null;
   }
   
-  static ASTNode subQueryWhere(ASTNode subQueryAST) {
-    if ( subQueryAST.getChild(1).getChildCount() > 2 &&
-        subQueryAST.getChild(1).getChild(2).getType() == HiveParser.TOK_WHERE ) {
-      return (ASTNode) subQueryAST.getChild(1).getChild(2);
+  static ASTNode subQueryWhere(ASTNode insertClause) {
+    if (insertClause.getChildCount() > 2 &&
+        insertClause.getChild(2).getType() == HiveParser.TOK_WHERE ) {
+      return (ASTNode) insertClause.getChild(2);
     }
     return null;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/329a3368/ql/src/test/queries/clientnegative/subquery_missing_from.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/subquery_missing_from.q b/ql/src/test/queries/clientnegative/subquery_missing_from.q
new file mode 100644
index 0000000..3b49ac6
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/subquery_missing_from.q
@@ -0,0 +1 @@
+select * from src where src.key in (select key);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/329a3368/ql/src/test/results/clientnegative/subquery_missing_from.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/subquery_missing_from.q.out b/ql/src/test/results/clientnegative/subquery_missing_from.q.out
new file mode 100644
index 0000000..eaf7735
--- /dev/null
+++ b/ql/src/test/results/clientnegative/subquery_missing_from.q.out
@@ -0,0 +1,3 @@
+FAILED: SemanticException Line 0:-1 Invalid SubQuery expression 'key' in definition of SubQuery sq_1 [
+src.key in (select key)
+] used as sq_1 at Line 1:32: From clause is missing in SubQuery.
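
A hedged sketch of the defensive lookup the patch introduces: instead of indexing into fixed child positions (which NPEs when a subquery has no FROM clause), find the child by token type and fail with a descriptive message when it is absent. The Node class and token constants below are simplified stand-ins, not Hive's ASTNode API.

    import java.util.List;

    public class ChildByTypeLookup {
      static final int TOK_FROM = 1;
      static final int TOK_INSERT = 2;

      static class Node {
        final int type;
        final List<Node> children;
        Node(int type, List<Node> children) { this.type = type; this.children = children; }
        Node firstChildWithType(int t) {
          for (Node c : children) {
            if (c.type == t) { return c; }
          }
          return null;
        }
      }

      // Mirrors getChildFromSubqueryAST: a missing clause becomes a clear
      // error instead of a NullPointerException further downstream.
      static Node requireChild(Node subQuery, int type, String what) {
        Node child = subQuery.firstChildWithType(type);
        if (child == null) {
          throw new IllegalStateException(what + " clause is missing in SubQuery.");
        }
        return child;
      }

      public static void main(String[] args) {
        Node subQuery = new Node(0, List.of(new Node(TOK_INSERT, List.of())));
        requireChild(subQuery, TOK_INSERT, "Insert");  // found
        try {
          requireChild(subQuery, TOK_FROM, "From");    // absent, as in the .q test
        } catch (IllegalStateException e) {
          System.out.println(e.getMessage());
        }
      }
    }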


[5/7] hive git commit: HIVE-9060: Fix child operator references after NonBlockingOpDeDupProc (Szehon via Xuefu) merged from trunk, r1644780

Posted by px...@apache.org.
HIVE-9060: Fix child operator references after NonBlockingOpDeDupProc (Szehon via Xuefu)
merged from trunk, r1644780

git-svn-id: https://svn.apache.org/repos/asf/hive/branches/spark@1644781 13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2b9414b4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2b9414b4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2b9414b4

Branch: refs/heads/branch-1.0
Commit: 2b9414b4729333eadc15b499474b7f5f6c8678f1
Parents: fba31e7
Author: Xuefu Zhang <xu...@apache.org>
Authored: Thu Dec 11 22:26:49 2014 +0000
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Tue Aug 11 13:57:31 2015 -0700

----------------------------------------------------------------------
 .../ql/optimizer/NonBlockingOpDeDupProc.java    | 24 +++++++++++++++++++-
 1 file changed, 23 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/2b9414b4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
index 63862b9..5e0959a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.optimizer;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
@@ -28,6 +29,8 @@ import java.util.Set;
 import java.util.Stack;
 
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.JoinOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.SelectOperator;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
@@ -39,11 +42,13 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.lib.Rule;
 import org.apache.hadoop.hive.ql.lib.RuleRegExp;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
+import org.apache.hadoop.hive.ql.parse.QBJoinTree;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 
 /**
  * merges SEL-SEL or FIL-FIL into single operator
@@ -129,8 +134,8 @@ public class NonBlockingOpDeDupProc implements Transform {
       pSEL.removeChildAndAdoptItsChildren(cSEL);
       cSEL.setParentOperators(null);
       cSEL.setChildOperators(null);
+      fixContextReferences(cSEL, pSEL);
       cSEL = null;
-
       return null;
     }
 
@@ -175,6 +180,23 @@ public class NonBlockingOpDeDupProc implements Transform {
     }
   }
 
+  /**
+   * Change existing references in the context to point from child to parent operator.
+   * @param cSEL child operator (to be removed, and merged into parent)
+   * @param pSEL parent operator
+   */
+  private void fixContextReferences(SelectOperator cSEL, SelectOperator pSEL) {
+    Collection<QBJoinTree> qbJoinTrees = pctx.getJoinContext().values();
+    for (QBJoinTree qbJoinTree : qbJoinTrees) {
+      Map<String, Operator<? extends OperatorDesc>> aliasToOpInfo = qbJoinTree.getAliasToOpInfo();
+      for (Map.Entry<String, Operator<? extends OperatorDesc>> entry : aliasToOpInfo.entrySet()) {
+        if (entry.getValue() == cSEL) {
+          aliasToOpInfo.put(entry.getKey(), pSEL);
+        }
+      }
+    }
+  }
+
   private class FilterDedup implements NodeProcessor {
     @Override
     public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
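
The core of fixContextReferences is an in-place re-point of map values; below is a self-contained sketch of that step, with plain Objects standing in for SelectOperators (an assumption made purely for illustration).

    import java.util.HashMap;
    import java.util.Map;

    public class RepointAliases {
      public static void main(String[] args) {
        Map<String, Object> aliasToOpInfo = new HashMap<>();
        Object childSel = new Object();   // operator about to be merged away
        Object parentSel = new Object();  // operator that absorbs it
        aliasToOpInfo.put("a", childSel);
        aliasToOpInfo.put("b", new Object());

        // Any alias still pointing at the removed child is redirected to the
        // surviving parent, so later lookups never see a dangling operator.
        for (Map.Entry<String, Object> e : aliasToOpInfo.entrySet()) {
          if (e.getValue() == childSel) {
            e.setValue(parentSel);
          }
        }
        System.out.println(aliasToOpInfo.get("a") == parentSel);  // true
      }
    }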


[3/7] hive git commit: HIVE-8594 : Wrong condition in SettableConfigUpdater#setHiveConfWhiteList() (Ted Yu via Szehon)

Posted by px...@apache.org.
HIVE-8594 : Wrong condition in SettableConfigUpdater#setHiveConfWhiteList() (Ted Yu via Szehon)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1640844 13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/dbdea20d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/dbdea20d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/dbdea20d

Branch: refs/heads/branch-1.0
Commit: dbdea20d22a9c64d450299406f74f13b8f0b7ac3
Parents: 6be7f51
Author: Szehon Ho <sz...@apache.org>
Authored: Fri Nov 21 00:53:34 2014 +0000
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Tue Aug 11 13:55:23 2015 -0700

----------------------------------------------------------------------
 .../ql/security/authorization/plugin/SettableConfigUpdater.java    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/dbdea20d/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/SettableConfigUpdater.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/SettableConfigUpdater.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/SettableConfigUpdater.java
index 89f155c..f12cd51 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/SettableConfigUpdater.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/SettableConfigUpdater.java
@@ -44,7 +44,7 @@ public class SettableConfigUpdater {
     String whiteListParamsStr = hiveConf
         .getVar(ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST);
 
-    if(whiteListParamsStr == null && whiteListParamsStr.trim().isEmpty()) {
+    if(whiteListParamsStr == null || whiteListParamsStr.trim().isEmpty()) {
       throw new HiveAuthzPluginException("Configuration parameter "
           + ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST.varname
           + " is not iniatialized.");


[2/7] hive git commit: HIVE-8627: Compute stats on a table from impala caused the table to be corrupted (Na via Xuefu)

Posted by px...@apache.org.
HIVE-8627: Compute stats on a table from impala caused the table to be corrupted (Na via Xuefu)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1635309 13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6be7f512
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6be7f512
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6be7f512

Branch: refs/heads/branch-1.0
Commit: 6be7f5127c3b0f5a25256456798c9d98ba2145f3
Parents: 27a3fc2
Author: Xuefu Zhang <xu...@apache.org>
Authored: Wed Oct 29 21:38:01 2014 +0000
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Tue Aug 11 13:53:47 2015 -0700

----------------------------------------------------------------------
 .../java/org/apache/hadoop/hive/metastore/StatObjectConverter.java  | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/6be7f512/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java b/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
index 5c3bce3..475883b 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
@@ -417,6 +417,7 @@ public class StatObjectConverter {
   public static void fillColumnStatisticsData(String colType, ColumnStatisticsData data,
       Object llow, Object lhigh, Object dlow, Object dhigh, Object declow, Object dechigh,
       Object nulls, Object dist, Object avglen, Object maxlen, Object trues, Object falses) throws MetaException {
+    colType = colType.toLowerCase();
     if (colType.equals("boolean")) {
       BooleanColumnStatsData boolStats = new BooleanColumnStatsData();
       boolStats.setNumFalses(MetaStoreDirectSql.extractSqlLong(falses));
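
A minimal sketch of the normalization being added: type names written by other engines (Impala, per the commit subject) may arrive in a different case, so the string is lower-cased once before the chain of equals() checks. The dispatch values below are illustrative, not the full converter.

    public class TypeNameDispatch {
      static String classify(String colType) {
        colType = colType.toLowerCase();  // the fix: normalize once, up front
        if (colType.equals("boolean")) {
          return "boolean stats";
        } else if (colType.equals("string") || colType.startsWith("varchar")) {
          return "string stats";
        }
        return "other";
      }

      public static void main(String[] args) {
        System.out.println(classify("BOOLEAN"));  // matches only with the fix
        System.out.println(classify("boolean"));  // matches either way
      }
    }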