Posted to commits@hive.apache.org by xu...@apache.org on 2015/10/28 13:10:54 UTC

[01/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Repository: hive
Updated Branches:
  refs/heads/spark 51f257af0 -> c9073aadc


http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-py/hive_service/ThriftHive.py
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-py/hive_service/ThriftHive.py b/service/src/gen/thrift/gen-py/hive_service/ThriftHive.py
index 177531d..978c2a3 100644
--- a/service/src/gen/thrift/gen-py/hive_service/ThriftHive.py
+++ b/service/src/gen/thrift/gen-py/hive_service/ThriftHive.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #
@@ -8,6 +8,7 @@
 
 from thrift.Thrift import TType, TMessageType, TException, TApplicationException
 import hive_metastore.ThriftHiveMetastore
+import logging
 from ttypes import *
 from thrift.Thrift import TProcessor
 from thrift.transport import TTransport
@@ -116,7 +117,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
       return result.success
     if result.ex is not None:
       raise result.ex
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchOne failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchOne failed: unknown result")
 
   def fetchN(self, numRows):
     """
@@ -149,7 +150,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
       return result.success
     if result.ex is not None:
       raise result.ex
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchN failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchN failed: unknown result")
 
   def fetchAll(self):
     self.send_fetchAll()
@@ -177,7 +178,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
       return result.success
     if result.ex is not None:
       raise result.ex
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchAll failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchAll failed: unknown result")
 
   def getSchema(self):
     self.send_getSchema()
@@ -205,7 +206,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
       return result.success
     if result.ex is not None:
       raise result.ex
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "getSchema failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getSchema failed: unknown result")
 
   def getThriftSchema(self):
     self.send_getThriftSchema()
@@ -233,7 +234,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
       return result.success
     if result.ex is not None:
       raise result.ex
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "getThriftSchema failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getThriftSchema failed: unknown result")
 
   def getClusterStatus(self):
     self.send_getClusterStatus()
@@ -261,7 +262,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
       return result.success
     if result.ex is not None:
       raise result.ex
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "getClusterStatus failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getClusterStatus failed: unknown result")
 
   def getQueryPlan(self):
     self.send_getQueryPlan()
@@ -289,7 +290,7 @@ class Client(hive_metastore.ThriftHiveMetastore.Client, Iface):
       return result.success
     if result.ex is not None:
       raise result.ex
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "getQueryPlan failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getQueryPlan failed: unknown result")
 
   def clean(self):
     self.send_clean()
@@ -351,9 +352,17 @@ class Processor(hive_metastore.ThriftHiveMetastore.Processor, Iface, TProcessor)
     result = execute_result()
     try:
       self._handler.execute(args.query)
-    except HiveServerException, ex:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except HiveServerException as ex:
+      msg_type = TMessageType.REPLY
       result.ex = ex
-    oprot.writeMessageBegin("execute", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("execute", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -365,9 +374,17 @@ class Processor(hive_metastore.ThriftHiveMetastore.Processor, Iface, TProcessor)
     result = fetchOne_result()
     try:
       result.success = self._handler.fetchOne()
-    except HiveServerException, ex:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except HiveServerException as ex:
+      msg_type = TMessageType.REPLY
       result.ex = ex
-    oprot.writeMessageBegin("fetchOne", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("fetchOne", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -379,9 +396,17 @@ class Processor(hive_metastore.ThriftHiveMetastore.Processor, Iface, TProcessor)
     result = fetchN_result()
     try:
       result.success = self._handler.fetchN(args.numRows)
-    except HiveServerException, ex:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except HiveServerException as ex:
+      msg_type = TMessageType.REPLY
       result.ex = ex
-    oprot.writeMessageBegin("fetchN", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("fetchN", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -393,9 +418,17 @@ class Processor(hive_metastore.ThriftHiveMetastore.Processor, Iface, TProcessor)
     result = fetchAll_result()
     try:
       result.success = self._handler.fetchAll()
-    except HiveServerException, ex:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except HiveServerException as ex:
+      msg_type = TMessageType.REPLY
       result.ex = ex
-    oprot.writeMessageBegin("fetchAll", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("fetchAll", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -407,9 +440,17 @@ class Processor(hive_metastore.ThriftHiveMetastore.Processor, Iface, TProcessor)
     result = getSchema_result()
     try:
       result.success = self._handler.getSchema()
-    except HiveServerException, ex:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except HiveServerException as ex:
+      msg_type = TMessageType.REPLY
       result.ex = ex
-    oprot.writeMessageBegin("getSchema", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("getSchema", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -421,9 +462,17 @@ class Processor(hive_metastore.ThriftHiveMetastore.Processor, Iface, TProcessor)
     result = getThriftSchema_result()
     try:
       result.success = self._handler.getThriftSchema()
-    except HiveServerException, ex:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except HiveServerException as ex:
+      msg_type = TMessageType.REPLY
       result.ex = ex
-    oprot.writeMessageBegin("getThriftSchema", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("getThriftSchema", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -435,9 +484,17 @@ class Processor(hive_metastore.ThriftHiveMetastore.Processor, Iface, TProcessor)
     result = getClusterStatus_result()
     try:
       result.success = self._handler.getClusterStatus()
-    except HiveServerException, ex:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except HiveServerException as ex:
+      msg_type = TMessageType.REPLY
       result.ex = ex
-    oprot.writeMessageBegin("getClusterStatus", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("getClusterStatus", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -449,9 +506,17 @@ class Processor(hive_metastore.ThriftHiveMetastore.Processor, Iface, TProcessor)
     result = getQueryPlan_result()
     try:
       result.success = self._handler.getQueryPlan()
-    except HiveServerException, ex:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except HiveServerException as ex:
+      msg_type = TMessageType.REPLY
       result.ex = ex
-    oprot.writeMessageBegin("getQueryPlan", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("getQueryPlan", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -461,8 +526,16 @@ class Processor(hive_metastore.ThriftHiveMetastore.Processor, Iface, TProcessor)
     args.read(iprot)
     iprot.readMessageEnd()
     result = clean_result()
-    self._handler.clean()
-    oprot.writeMessageBegin("clean", TMessageType.REPLY, seqid)
+    try:
+      self._handler.clean()
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("clean", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -495,7 +568,7 @@ class execute_args:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.query = iprot.readString();
+          self.query = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -674,7 +747,7 @@ class fetchOne_result:
         break
       if fid == 0:
         if ftype == TType.STRING:
-          self.success = iprot.readString();
+          self.success = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 1:
@@ -750,7 +823,7 @@ class fetchN_args:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.numRows = iprot.readI32();
+          self.numRows = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -820,7 +893,7 @@ class fetchN_result:
           self.success = []
           (_etype3, _size0) = iprot.readListBegin()
           for _i4 in xrange(_size0):
-            _elem5 = iprot.readString();
+            _elem5 = iprot.readString()
             self.success.append(_elem5)
           iprot.readListEnd()
         else:
@@ -952,7 +1025,7 @@ class fetchAll_result:
           self.success = []
           (_etype10, _size7) = iprot.readListBegin()
           for _i11 in xrange(_size7):
-            _elem12 = iprot.readString();
+            _elem12 = iprot.readString()
             self.success.append(_elem12)
           iprot.readListEnd()
         else:

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-py/hive_service/constants.py
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-py/hive_service/constants.py b/service/src/gen/thrift/gen-py/hive_service/constants.py
index 99717a9..4a6492b 100644
--- a/service/src/gen/thrift/gen-py/hive_service/constants.py
+++ b/service/src/gen/thrift/gen-py/hive_service/constants.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-py/hive_service/ttypes.py
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-py/hive_service/ttypes.py b/service/src/gen/thrift/gen-py/hive_service/ttypes.py
index 29eb916..e19fe2a 100644
--- a/service/src/gen/thrift/gen-py/hive_service/ttypes.py
+++ b/service/src/gen/thrift/gen-py/hive_service/ttypes.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #
@@ -75,32 +75,32 @@ class HiveClusterStatus:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.taskTrackers = iprot.readI32();
+          self.taskTrackers = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.mapTasks = iprot.readI32();
+          self.mapTasks = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I32:
-          self.reduceTasks = iprot.readI32();
+          self.reduceTasks = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I32:
-          self.maxMapTasks = iprot.readI32();
+          self.maxMapTasks = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.I32:
-          self.maxReduceTasks = iprot.readI32();
+          self.maxReduceTasks = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.I32:
-          self.state = iprot.readI32();
+          self.state = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -196,17 +196,17 @@ class HiveServerException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.errorCode = iprot.readI32();
+          self.errorCode = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.SQLState = iprot.readString();
+          self.SQLState = iprot.readString()
         else:
           iprot.skip(ftype)
       else:

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-rb/hive_service_constants.rb
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-rb/hive_service_constants.rb b/service/src/gen/thrift/gen-rb/hive_service_constants.rb
index 73f4d7b..ed7d2d7 100644
--- a/service/src/gen/thrift/gen-rb/hive_service_constants.rb
+++ b/service/src/gen/thrift/gen-rb/hive_service_constants.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-rb/hive_service_types.rb
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-rb/hive_service_types.rb b/service/src/gen/thrift/gen-rb/hive_service_types.rb
index f97a6fb..9191767 100644
--- a/service/src/gen/thrift/gen-rb/hive_service_types.rb
+++ b/service/src/gen/thrift/gen-rb/hive_service_types.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-rb/t_c_l_i_service.rb
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-rb/t_c_l_i_service.rb b/service/src/gen/thrift/gen-rb/t_c_l_i_service.rb
index f754d7d..7d7f7a7 100644
--- a/service/src/gen/thrift/gen-rb/t_c_l_i_service.rb
+++ b/service/src/gen/thrift/gen-rb/t_c_l_i_service.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb b/service/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
index 3f72769..25adbb4 100644
--- a/service/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
+++ b/service/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb b/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
index bfb2b69..f004ec4 100644
--- a/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
+++ b/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-rb/thrift_hive.rb
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-rb/thrift_hive.rb b/service/src/gen/thrift/gen-rb/thrift_hive.rb
index 6859ceb..2b57cd9 100644
--- a/service/src/gen/thrift/gen-rb/thrift_hive.rb
+++ b/service/src/gen/thrift/gen-rb/thrift_hive.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #
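
For readers skimming the regenerated Python stubs above: beyond the 0.9.3 version stamp and the dropped trailing semicolons, the substantive change is the error handling in each Processor.process_* method. The condensed sketch below shows that pattern in isolation; process_call, handler_fn and result are placeholder names introduced here purely for illustration (the generated code inlines this per RPC method and also has a branch that stores declared exceptions such as HiveServerException on the result), while the imported thrift classes are exactly the ones the diff itself uses.

    # Sketch of the per-method error handling emitted by Thrift 0.9.3.
    import logging

    from thrift.Thrift import TApplicationException, TMessageType
    from thrift.transport import TTransport

    def process_call(name, handler_fn, result, oprot, seqid):
        try:
            result.success = handler_fn()
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            # Transport failures and interpreter shutdown still propagate;
            # they are not turned into protocol-level replies.
            raise
        except Exception as ex:
            # Anything else is logged and reported to the client as an
            # internal error instead of escaping from the server thread.
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR,
                                           'Internal error')
        oprot.writeMessageBegin(name, msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

The same wrapping is now applied to process_clean(), whose handler call previously ran outside any try block.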


[47/55] [abbrv] hive git commit: HIVE-12250 Zookeeper connection leaks in Hive's HBaseHandler (Naveen, reviewed by Aihua and Szehon)

Posted by xu...@apache.org.
HIVE-12250 Zookeeper connection leaks in Hive's HBaseHandler (Naveen, reviewed by Aihua and Szehon)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d5e8544e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d5e8544e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d5e8544e

Branch: refs/heads/spark
Commit: d5e8544e7106ba0879b176c3524e369833bd844b
Parents: 601a481
Author: Szehon Ho <sz...@cloudera.com>
Authored: Tue Oct 27 11:09:07 2015 -0700
Committer: Szehon Ho <sz...@cloudera.com>
Committed: Tue Oct 27 11:09:07 2015 -0700

----------------------------------------------------------------------
 .../hive/hbase/HiveHBaseTableInputFormat.java   | 105 ++++++++++---------
 .../hive/hbase/HiveHBaseTableOutputFormat.java  |   9 ++
 2 files changed, 64 insertions(+), 50 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/d5e8544e/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
index 8e72759..5f4a1e4 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
@@ -107,6 +107,7 @@ public class HiveHBaseTableInputFormat extends TableInputFormatBase
     try {
       recordReader.initialize(tableSplit, tac);
     } catch (InterruptedException e) {
+      closeTable(); // Free up the HTable connections
       throw new IOException("Failed to initialize RecordReader", e);
     }
 
@@ -445,65 +446,69 @@ public class HiveHBaseTableInputFormat extends TableInputFormatBase
     String hbaseColumnsMapping = jobConf.get(HBaseSerDe.HBASE_COLUMNS_MAPPING);
     boolean doColumnRegexMatching = jobConf.getBoolean(HBaseSerDe.HBASE_COLUMNS_REGEX_MATCHING, true);
 
-    if (hbaseColumnsMapping == null) {
-      throw new IOException(HBaseSerDe.HBASE_COLUMNS_MAPPING + " required for HBase Table.");
-    }
-
-    ColumnMappings columnMappings = null;
     try {
-      columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping, doColumnRegexMatching);
-    } catch (SerDeException e) {
-      throw new IOException(e);
-    }
+      if (hbaseColumnsMapping == null) {
+        throw new IOException(HBaseSerDe.HBASE_COLUMNS_MAPPING + " required for HBase Table.");
+      }
 
-    int iKey = columnMappings.getKeyIndex();
-    int iTimestamp = columnMappings.getTimestampIndex();
-    ColumnMapping keyMapping = columnMappings.getKeyMapping();
-
-    // Take filter pushdown into account while calculating splits; this
-    // allows us to prune off regions immediately.  Note that although
-    // the Javadoc for the superclass getSplits says that it returns one
-    // split per region, the implementation actually takes the scan
-    // definition into account and excludes regions which don't satisfy
-    // the start/stop row conditions (HBASE-1829).
-    Scan scan = createFilterScan(jobConf, iKey, iTimestamp,
-        HiveHBaseInputFormatUtil.getStorageFormatOfKey(keyMapping.mappingSpec,
-            jobConf.get(HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE, "string")));
-
-    // The list of families that have been added to the scan
-    List<String> addedFamilies = new ArrayList<String>();
-
-    // REVIEW:  are we supposed to be applying the getReadColumnIDs
-    // same as in getRecordReader?
-    for (ColumnMapping colMap : columnMappings) {
-      if (colMap.hbaseRowKey || colMap.hbaseTimestamp) {
-        continue;
+      ColumnMappings columnMappings = null;
+      try {
+        columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping, doColumnRegexMatching);
+      } catch (SerDeException e) {
+        throw new IOException(e);
       }
 
-      if (colMap.qualifierName == null) {
-        scan.addFamily(colMap.familyNameBytes);
-        addedFamilies.add(colMap.familyName);
-      } else {
-        if(!addedFamilies.contains(colMap.familyName)){
-          // add the column only if the family has not already been added
-          scan.addColumn(colMap.familyNameBytes, colMap.qualifierNameBytes);
+      int iKey = columnMappings.getKeyIndex();
+      int iTimestamp = columnMappings.getTimestampIndex();
+      ColumnMapping keyMapping = columnMappings.getKeyMapping();
+
+      // Take filter pushdown into account while calculating splits; this
+      // allows us to prune off regions immediately.  Note that although
+      // the Javadoc for the superclass getSplits says that it returns one
+      // split per region, the implementation actually takes the scan
+      // definition into account and excludes regions which don't satisfy
+      // the start/stop row conditions (HBASE-1829).
+      Scan scan = createFilterScan(jobConf, iKey, iTimestamp,
+          HiveHBaseInputFormatUtil.getStorageFormatOfKey(keyMapping.mappingSpec,
+              jobConf.get(HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE, "string")));
+
+      // The list of families that have been added to the scan
+      List<String> addedFamilies = new ArrayList<String>();
+
+      // REVIEW:  are we supposed to be applying the getReadColumnIDs
+      // same as in getRecordReader?
+      for (ColumnMapping colMap : columnMappings) {
+        if (colMap.hbaseRowKey || colMap.hbaseTimestamp) {
+          continue;
+        }
+
+        if (colMap.qualifierName == null) {
+          scan.addFamily(colMap.familyNameBytes);
+          addedFamilies.add(colMap.familyName);
+        } else {
+          if(!addedFamilies.contains(colMap.familyName)){
+            // add the column only if the family has not already been added
+            scan.addColumn(colMap.familyNameBytes, colMap.qualifierNameBytes);
+          }
         }
       }
-    }
-    setScan(scan);
+      setScan(scan);
 
-    Job job = new Job(jobConf);
-    JobContext jobContext = ShimLoader.getHadoopShims().newJobContext(job);
-    Path [] tablePaths = FileInputFormat.getInputPaths(jobContext);
+      Job job = new Job(jobConf);
+      JobContext jobContext = ShimLoader.getHadoopShims().newJobContext(job);
+      Path [] tablePaths = FileInputFormat.getInputPaths(jobContext);
 
-    List<org.apache.hadoop.mapreduce.InputSplit> splits =
-      super.getSplits(jobContext);
-    InputSplit [] results = new InputSplit[splits.size()];
+      List<org.apache.hadoop.mapreduce.InputSplit> splits =
+        super.getSplits(jobContext);
+      InputSplit [] results = new InputSplit[splits.size()];
 
-    for (int i = 0; i < splits.size(); i++) {
-      results[i] = new HBaseSplit((TableSplit) splits.get(i), tablePaths[0]);
-    }
+      for (int i = 0; i < splits.size(); i++) {
+        results[i] = new HBaseSplit((TableSplit) splits.get(i), tablePaths[0]);
+      }
 
-    return results;
+      return results;
+    } finally {
+      closeTable();
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/d5e8544e/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
index 3100885..0715a51 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
@@ -145,5 +145,14 @@ public class HiveHBaseTableOutputFormat extends
       }
       m_table.put(put);
     }
+
+    @Override
+    protected void finalize() throws Throwable {
+      try {
+        m_table.close();
+      } finally {
+        super.finalize();
+      }
+    }
   }
 }
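
The fix above comes down to one discipline: whoever opens the HTable must close it on every exit path, including early failures, so the underlying ZooKeeper session is not left dangling. The short, self-contained Python sketch below shows the shape of that try/finally; FakeTable, scan and get_splits are hypothetical names used only for illustration and are not part of the Hive or HBase APIs.

    # Hypothetical illustration of the cleanup discipline applied by the patch:
    # the resource is released on every exit path, so an early failure can no
    # longer leak the underlying connection.
    class FakeTable(object):
        """Stand-in for an HTable-like client that holds a ZooKeeper session."""
        def __init__(self):
            self.closed = False

        def scan(self, bad=False):
            if bad:
                raise IOError("column mapping missing")
            return ["split-0", "split-1"]

        def close(self):
            self.closed = True

    def get_splits(table, bad=False):
        try:
            return table.scan(bad=bad)
        finally:
            table.close()  # runs whether scan() returned or raised

    if __name__ == "__main__":
        t = FakeTable()
        try:
            get_splits(t, bad=True)
        except IOError:
            pass
        assert t.closed  # released even though scan() failed

The finalize() override added to HiveHBaseTableOutputFormat serves as a last-resort backstop of the same kind for writers that are never explicitly closed.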


[06/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp b/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp
index 742cfdc..b852379 100644
--- a/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp
+++ b/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -280,11 +280,9 @@ void TTypeQualifierValue::__set_stringValue(const std::string& val) {
 __isset.stringValue = true;
 }
 
-const char* TTypeQualifierValue::ascii_fingerprint = "A7801670116150C65ACA43E6F679BA79";
-const uint8_t TTypeQualifierValue::binary_fingerprint[16] = {0xA7,0x80,0x16,0x70,0x11,0x61,0x50,0xC6,0x5A,0xCA,0x43,0xE6,0xF6,0x79,0xBA,0x79};
-
 uint32_t TTypeQualifierValue::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -333,7 +331,7 @@ uint32_t TTypeQualifierValue::read(::apache::thrift::protocol::TProtocol* iprot)
 
 uint32_t TTypeQualifierValue::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TTypeQualifierValue");
 
   if (this->__isset.i32Value) {
@@ -348,7 +346,6 @@ uint32_t TTypeQualifierValue::write(::apache::thrift::protocol::TProtocol* oprot
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -370,13 +367,12 @@ TTypeQualifierValue& TTypeQualifierValue::operator=(const TTypeQualifierValue& o
   __isset = other1.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TTypeQualifierValue& obj) {
-  using apache::thrift::to_string;
+void TTypeQualifierValue::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TTypeQualifierValue(";
-  out << "i32Value="; (obj.__isset.i32Value ? (out << to_string(obj.i32Value)) : (out << "<null>"));
-  out << ", " << "stringValue="; (obj.__isset.stringValue ? (out << to_string(obj.stringValue)) : (out << "<null>"));
+  out << "i32Value="; (__isset.i32Value ? (out << to_string(i32Value)) : (out << "<null>"));
+  out << ", " << "stringValue="; (__isset.stringValue ? (out << to_string(stringValue)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -388,11 +384,9 @@ void TTypeQualifiers::__set_qualifiers(const std::map<std::string, TTypeQualifie
   this->qualifiers = val;
 }
 
-const char* TTypeQualifiers::ascii_fingerprint = "6C72981CFA989214285648FA8C196C47";
-const uint8_t TTypeQualifiers::binary_fingerprint[16] = {0x6C,0x72,0x98,0x1C,0xFA,0x98,0x92,0x14,0x28,0x56,0x48,0xFA,0x8C,0x19,0x6C,0x47};
-
 uint32_t TTypeQualifiers::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -451,7 +445,7 @@ uint32_t TTypeQualifiers::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TTypeQualifiers::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TTypeQualifiers");
 
   xfer += oprot->writeFieldBegin("qualifiers", ::apache::thrift::protocol::T_MAP, 1);
@@ -469,7 +463,6 @@ uint32_t TTypeQualifiers::write(::apache::thrift::protocol::TProtocol* oprot) co
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -485,12 +478,11 @@ TTypeQualifiers& TTypeQualifiers::operator=(const TTypeQualifiers& other11) {
   qualifiers = other11.qualifiers;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TTypeQualifiers& obj) {
-  using apache::thrift::to_string;
+void TTypeQualifiers::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TTypeQualifiers(";
-  out << "qualifiers=" << to_string(obj.qualifiers);
+  out << "qualifiers=" << to_string(qualifiers);
   out << ")";
-  return out;
 }
 
 
@@ -507,11 +499,9 @@ void TPrimitiveTypeEntry::__set_typeQualifiers(const TTypeQualifiers& val) {
 __isset.typeQualifiers = true;
 }
 
-const char* TPrimitiveTypeEntry::ascii_fingerprint = "755674F6A5C8EB47868686AE386FBC1C";
-const uint8_t TPrimitiveTypeEntry::binary_fingerprint[16] = {0x75,0x56,0x74,0xF6,0xA5,0xC8,0xEB,0x47,0x86,0x86,0x86,0xAE,0x38,0x6F,0xBC,0x1C};
-
 uint32_t TPrimitiveTypeEntry::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -565,7 +555,7 @@ uint32_t TPrimitiveTypeEntry::read(::apache::thrift::protocol::TProtocol* iprot)
 
 uint32_t TPrimitiveTypeEntry::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TPrimitiveTypeEntry");
 
   xfer += oprot->writeFieldBegin("type", ::apache::thrift::protocol::T_I32, 1);
@@ -579,7 +569,6 @@ uint32_t TPrimitiveTypeEntry::write(::apache::thrift::protocol::TProtocol* oprot
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -601,13 +590,12 @@ TPrimitiveTypeEntry& TPrimitiveTypeEntry::operator=(const TPrimitiveTypeEntry& o
   __isset = other14.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TPrimitiveTypeEntry& obj) {
-  using apache::thrift::to_string;
+void TPrimitiveTypeEntry::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TPrimitiveTypeEntry(";
-  out << "type=" << to_string(obj.type);
-  out << ", " << "typeQualifiers="; (obj.__isset.typeQualifiers ? (out << to_string(obj.typeQualifiers)) : (out << "<null>"));
+  out << "type=" << to_string(type);
+  out << ", " << "typeQualifiers="; (__isset.typeQualifiers ? (out << to_string(typeQualifiers)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -619,11 +607,9 @@ void TArrayTypeEntry::__set_objectTypePtr(const TTypeEntryPtr val) {
   this->objectTypePtr = val;
 }
 
-const char* TArrayTypeEntry::ascii_fingerprint = "E86CACEB22240450EDCBEFC3A83970E4";
-const uint8_t TArrayTypeEntry::binary_fingerprint[16] = {0xE8,0x6C,0xAC,0xEB,0x22,0x24,0x04,0x50,0xED,0xCB,0xEF,0xC3,0xA8,0x39,0x70,0xE4};
-
 uint32_t TArrayTypeEntry::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -667,7 +653,7 @@ uint32_t TArrayTypeEntry::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TArrayTypeEntry::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TArrayTypeEntry");
 
   xfer += oprot->writeFieldBegin("objectTypePtr", ::apache::thrift::protocol::T_I32, 1);
@@ -676,7 +662,6 @@ uint32_t TArrayTypeEntry::write(::apache::thrift::protocol::TProtocol* oprot) co
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -692,12 +677,11 @@ TArrayTypeEntry& TArrayTypeEntry::operator=(const TArrayTypeEntry& other16) {
   objectTypePtr = other16.objectTypePtr;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TArrayTypeEntry& obj) {
-  using apache::thrift::to_string;
+void TArrayTypeEntry::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TArrayTypeEntry(";
-  out << "objectTypePtr=" << to_string(obj.objectTypePtr);
+  out << "objectTypePtr=" << to_string(objectTypePtr);
   out << ")";
-  return out;
 }
 
 
@@ -713,11 +697,9 @@ void TMapTypeEntry::__set_valueTypePtr(const TTypeEntryPtr val) {
   this->valueTypePtr = val;
 }
 
-const char* TMapTypeEntry::ascii_fingerprint = "989D1F1AE8D148D5E2119FFEC4BBBEE3";
-const uint8_t TMapTypeEntry::binary_fingerprint[16] = {0x98,0x9D,0x1F,0x1A,0xE8,0xD1,0x48,0xD5,0xE2,0x11,0x9F,0xFE,0xC4,0xBB,0xBE,0xE3};
-
 uint32_t TMapTypeEntry::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -772,7 +754,7 @@ uint32_t TMapTypeEntry::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TMapTypeEntry::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TMapTypeEntry");
 
   xfer += oprot->writeFieldBegin("keyTypePtr", ::apache::thrift::protocol::T_I32, 1);
@@ -785,7 +767,6 @@ uint32_t TMapTypeEntry::write(::apache::thrift::protocol::TProtocol* oprot) cons
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -804,13 +785,12 @@ TMapTypeEntry& TMapTypeEntry::operator=(const TMapTypeEntry& other18) {
   valueTypePtr = other18.valueTypePtr;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TMapTypeEntry& obj) {
-  using apache::thrift::to_string;
+void TMapTypeEntry::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TMapTypeEntry(";
-  out << "keyTypePtr=" << to_string(obj.keyTypePtr);
-  out << ", " << "valueTypePtr=" << to_string(obj.valueTypePtr);
+  out << "keyTypePtr=" << to_string(keyTypePtr);
+  out << ", " << "valueTypePtr=" << to_string(valueTypePtr);
   out << ")";
-  return out;
 }
 
 
@@ -822,11 +802,9 @@ void TStructTypeEntry::__set_nameToTypePtr(const std::map<std::string, TTypeEntr
   this->nameToTypePtr = val;
 }
 
-const char* TStructTypeEntry::ascii_fingerprint = "91F548CA159B4AB4291F5741AC161402";
-const uint8_t TStructTypeEntry::binary_fingerprint[16] = {0x91,0xF5,0x48,0xCA,0x15,0x9B,0x4A,0xB4,0x29,0x1F,0x57,0x41,0xAC,0x16,0x14,0x02};
-
 uint32_t TStructTypeEntry::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -885,7 +863,7 @@ uint32_t TStructTypeEntry::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TStructTypeEntry::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TStructTypeEntry");
 
   xfer += oprot->writeFieldBegin("nameToTypePtr", ::apache::thrift::protocol::T_MAP, 1);
@@ -903,7 +881,6 @@ uint32_t TStructTypeEntry::write(::apache::thrift::protocol::TProtocol* oprot) c
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -919,12 +896,11 @@ TStructTypeEntry& TStructTypeEntry::operator=(const TStructTypeEntry& other28) {
   nameToTypePtr = other28.nameToTypePtr;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TStructTypeEntry& obj) {
-  using apache::thrift::to_string;
+void TStructTypeEntry::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TStructTypeEntry(";
-  out << "nameToTypePtr=" << to_string(obj.nameToTypePtr);
+  out << "nameToTypePtr=" << to_string(nameToTypePtr);
   out << ")";
-  return out;
 }
 
 
@@ -936,11 +912,9 @@ void TUnionTypeEntry::__set_nameToTypePtr(const std::map<std::string, TTypeEntry
   this->nameToTypePtr = val;
 }
 
-const char* TUnionTypeEntry::ascii_fingerprint = "91F548CA159B4AB4291F5741AC161402";
-const uint8_t TUnionTypeEntry::binary_fingerprint[16] = {0x91,0xF5,0x48,0xCA,0x15,0x9B,0x4A,0xB4,0x29,0x1F,0x57,0x41,0xAC,0x16,0x14,0x02};
-
 uint32_t TUnionTypeEntry::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -999,7 +973,7 @@ uint32_t TUnionTypeEntry::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TUnionTypeEntry::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TUnionTypeEntry");
 
   xfer += oprot->writeFieldBegin("nameToTypePtr", ::apache::thrift::protocol::T_MAP, 1);
@@ -1017,7 +991,6 @@ uint32_t TUnionTypeEntry::write(::apache::thrift::protocol::TProtocol* oprot) co
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1033,12 +1006,11 @@ TUnionTypeEntry& TUnionTypeEntry::operator=(const TUnionTypeEntry& other38) {
   nameToTypePtr = other38.nameToTypePtr;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TUnionTypeEntry& obj) {
-  using apache::thrift::to_string;
+void TUnionTypeEntry::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TUnionTypeEntry(";
-  out << "nameToTypePtr=" << to_string(obj.nameToTypePtr);
+  out << "nameToTypePtr=" << to_string(nameToTypePtr);
   out << ")";
-  return out;
 }
 
 
@@ -1050,11 +1022,9 @@ void TUserDefinedTypeEntry::__set_typeClassName(const std::string& val) {
   this->typeClassName = val;
 }
 
-const char* TUserDefinedTypeEntry::ascii_fingerprint = "EFB929595D312AC8F305D5A794CFEDA1";
-const uint8_t TUserDefinedTypeEntry::binary_fingerprint[16] = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
 uint32_t TUserDefinedTypeEntry::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1098,7 +1068,7 @@ uint32_t TUserDefinedTypeEntry::read(::apache::thrift::protocol::TProtocol* ipro
 
 uint32_t TUserDefinedTypeEntry::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TUserDefinedTypeEntry");
 
   xfer += oprot->writeFieldBegin("typeClassName", ::apache::thrift::protocol::T_STRING, 1);
@@ -1107,7 +1077,6 @@ uint32_t TUserDefinedTypeEntry::write(::apache::thrift::protocol::TProtocol* opr
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1123,12 +1092,11 @@ TUserDefinedTypeEntry& TUserDefinedTypeEntry::operator=(const TUserDefinedTypeEn
   typeClassName = other40.typeClassName;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TUserDefinedTypeEntry& obj) {
-  using apache::thrift::to_string;
+void TUserDefinedTypeEntry::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TUserDefinedTypeEntry(";
-  out << "typeClassName=" << to_string(obj.typeClassName);
+  out << "typeClassName=" << to_string(typeClassName);
   out << ")";
-  return out;
 }
 
 
@@ -1160,11 +1128,9 @@ void TTypeEntry::__set_userDefinedTypeEntry(const TUserDefinedTypeEntry& val) {
   this->userDefinedTypeEntry = val;
 }
 
-const char* TTypeEntry::ascii_fingerprint = "2FE56D9097E325DAA7E933738C6D325F";
-const uint8_t TTypeEntry::binary_fingerprint[16] = {0x2F,0xE5,0x6D,0x90,0x97,0xE3,0x25,0xDA,0xA7,0xE9,0x33,0x73,0x8C,0x6D,0x32,0x5F};
-
 uint32_t TTypeEntry::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1245,7 +1211,7 @@ uint32_t TTypeEntry::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TTypeEntry::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TTypeEntry");
 
   xfer += oprot->writeFieldBegin("primitiveEntry", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -1274,7 +1240,6 @@ uint32_t TTypeEntry::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1308,17 +1273,16 @@ TTypeEntry& TTypeEntry::operator=(const TTypeEntry& other42) {
   __isset = other42.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TTypeEntry& obj) {
-  using apache::thrift::to_string;
+void TTypeEntry::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TTypeEntry(";
-  out << "primitiveEntry=" << to_string(obj.primitiveEntry);
-  out << ", " << "arrayEntry=" << to_string(obj.arrayEntry);
-  out << ", " << "mapEntry=" << to_string(obj.mapEntry);
-  out << ", " << "structEntry=" << to_string(obj.structEntry);
-  out << ", " << "unionEntry=" << to_string(obj.unionEntry);
-  out << ", " << "userDefinedTypeEntry=" << to_string(obj.userDefinedTypeEntry);
+  out << "primitiveEntry=" << to_string(primitiveEntry);
+  out << ", " << "arrayEntry=" << to_string(arrayEntry);
+  out << ", " << "mapEntry=" << to_string(mapEntry);
+  out << ", " << "structEntry=" << to_string(structEntry);
+  out << ", " << "unionEntry=" << to_string(unionEntry);
+  out << ", " << "userDefinedTypeEntry=" << to_string(userDefinedTypeEntry);
   out << ")";
-  return out;
 }
 
 
@@ -1330,11 +1294,9 @@ void TTypeDesc::__set_types(const std::vector<TTypeEntry> & val) {
   this->types = val;
 }
 
-const char* TTypeDesc::ascii_fingerprint = "90B3C5A0B73419A84E85E0E48C452AA5";
-const uint8_t TTypeDesc::binary_fingerprint[16] = {0x90,0xB3,0xC5,0xA0,0xB7,0x34,0x19,0xA8,0x4E,0x85,0xE0,0xE4,0x8C,0x45,0x2A,0xA5};
-
 uint32_t TTypeDesc::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1390,7 +1352,7 @@ uint32_t TTypeDesc::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TTypeDesc::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TTypeDesc");
 
   xfer += oprot->writeFieldBegin("types", ::apache::thrift::protocol::T_LIST, 1);
@@ -1407,7 +1369,6 @@ uint32_t TTypeDesc::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1423,12 +1384,11 @@ TTypeDesc& TTypeDesc::operator=(const TTypeDesc& other50) {
   types = other50.types;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TTypeDesc& obj) {
-  using apache::thrift::to_string;
+void TTypeDesc::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TTypeDesc(";
-  out << "types=" << to_string(obj.types);
+  out << "types=" << to_string(types);
   out << ")";
-  return out;
 }
 
 
@@ -1453,11 +1413,9 @@ void TColumnDesc::__set_comment(const std::string& val) {
 __isset.comment = true;
 }
 
-const char* TColumnDesc::ascii_fingerprint = "EABED9009D5FCABFCA65612069F2A849";
-const uint8_t TColumnDesc::binary_fingerprint[16] = {0xEA,0xBE,0xD9,0x00,0x9D,0x5F,0xCA,0xBF,0xCA,0x65,0x61,0x20,0x69,0xF2,0xA8,0x49};
-
 uint32_t TColumnDesc::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1531,7 +1489,7 @@ uint32_t TColumnDesc::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TColumnDesc::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TColumnDesc");
 
   xfer += oprot->writeFieldBegin("columnName", ::apache::thrift::protocol::T_STRING, 1);
@@ -1553,7 +1511,6 @@ uint32_t TColumnDesc::write(::apache::thrift::protocol::TProtocol* oprot) const
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1581,15 +1538,14 @@ TColumnDesc& TColumnDesc::operator=(const TColumnDesc& other52) {
   __isset = other52.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TColumnDesc& obj) {
-  using apache::thrift::to_string;
+void TColumnDesc::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TColumnDesc(";
-  out << "columnName=" << to_string(obj.columnName);
-  out << ", " << "typeDesc=" << to_string(obj.typeDesc);
-  out << ", " << "position=" << to_string(obj.position);
-  out << ", " << "comment="; (obj.__isset.comment ? (out << to_string(obj.comment)) : (out << "<null>"));
+  out << "columnName=" << to_string(columnName);
+  out << ", " << "typeDesc=" << to_string(typeDesc);
+  out << ", " << "position=" << to_string(position);
+  out << ", " << "comment="; (__isset.comment ? (out << to_string(comment)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -1601,11 +1557,9 @@ void TTableSchema::__set_columns(const std::vector<TColumnDesc> & val) {
   this->columns = val;
 }
 
-const char* TTableSchema::ascii_fingerprint = "7A1811E49313E5977107FC667B20E39D";
-const uint8_t TTableSchema::binary_fingerprint[16] = {0x7A,0x18,0x11,0xE4,0x93,0x13,0xE5,0x97,0x71,0x07,0xFC,0x66,0x7B,0x20,0xE3,0x9D};
-
 uint32_t TTableSchema::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1661,7 +1615,7 @@ uint32_t TTableSchema::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TTableSchema::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TTableSchema");
 
   xfer += oprot->writeFieldBegin("columns", ::apache::thrift::protocol::T_LIST, 1);
@@ -1678,7 +1632,6 @@ uint32_t TTableSchema::write(::apache::thrift::protocol::TProtocol* oprot) const
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1694,12 +1647,11 @@ TTableSchema& TTableSchema::operator=(const TTableSchema& other60) {
   columns = other60.columns;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TTableSchema& obj) {
-  using apache::thrift::to_string;
+void TTableSchema::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TTableSchema(";
-  out << "columns=" << to_string(obj.columns);
+  out << "columns=" << to_string(columns);
   out << ")";
-  return out;
 }
 
 
@@ -1712,11 +1664,9 @@ void TBoolValue::__set_value(const bool val) {
 __isset.value = true;
 }
 
-const char* TBoolValue::ascii_fingerprint = "BF054652DEF86253C2BEE7D947F167DD";
-const uint8_t TBoolValue::binary_fingerprint[16] = {0xBF,0x05,0x46,0x52,0xDE,0xF8,0x62,0x53,0xC2,0xBE,0xE7,0xD9,0x47,0xF1,0x67,0xDD};
-
 uint32_t TBoolValue::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1757,7 +1707,7 @@ uint32_t TBoolValue::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TBoolValue::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TBoolValue");
 
   if (this->__isset.value) {
@@ -1767,7 +1717,6 @@ uint32_t TBoolValue::write(::apache::thrift::protocol::TProtocol* oprot) const {
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1786,12 +1735,11 @@ TBoolValue& TBoolValue::operator=(const TBoolValue& other62) {
   __isset = other62.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TBoolValue& obj) {
-  using apache::thrift::to_string;
+void TBoolValue::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TBoolValue(";
-  out << "value="; (obj.__isset.value ? (out << to_string(obj.value)) : (out << "<null>"));
+  out << "value="; (__isset.value ? (out << to_string(value)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -1804,11 +1752,9 @@ void TByteValue::__set_value(const int8_t val) {
 __isset.value = true;
 }
 
-const char* TByteValue::ascii_fingerprint = "9C15298ACB5D04AEA9B52D5DDE6F9208";
-const uint8_t TByteValue::binary_fingerprint[16] = {0x9C,0x15,0x29,0x8A,0xCB,0x5D,0x04,0xAE,0xA9,0xB5,0x2D,0x5D,0xDE,0x6F,0x92,0x08};
-
 uint32_t TByteValue::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1849,7 +1795,7 @@ uint32_t TByteValue::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TByteValue::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TByteValue");
 
   if (this->__isset.value) {
@@ -1859,7 +1805,6 @@ uint32_t TByteValue::write(::apache::thrift::protocol::TProtocol* oprot) const {
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1878,12 +1823,11 @@ TByteValue& TByteValue::operator=(const TByteValue& other64) {
   __isset = other64.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TByteValue& obj) {
-  using apache::thrift::to_string;
+void TByteValue::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TByteValue(";
-  out << "value="; (obj.__isset.value ? (out << to_string(obj.value)) : (out << "<null>"));
+  out << "value="; (__isset.value ? (out << to_string(value)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -1896,11 +1840,9 @@ void TI16Value::__set_value(const int16_t val) {
 __isset.value = true;
 }
 
-const char* TI16Value::ascii_fingerprint = "5DAC9C51C7E1106BF936FC71860BE9D5";
-const uint8_t TI16Value::binary_fingerprint[16] = {0x5D,0xAC,0x9C,0x51,0xC7,0xE1,0x10,0x6B,0xF9,0x36,0xFC,0x71,0x86,0x0B,0xE9,0xD5};
-
 uint32_t TI16Value::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1941,7 +1883,7 @@ uint32_t TI16Value::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TI16Value::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TI16Value");
 
   if (this->__isset.value) {
@@ -1951,7 +1893,6 @@ uint32_t TI16Value::write(::apache::thrift::protocol::TProtocol* oprot) const {
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1970,12 +1911,11 @@ TI16Value& TI16Value::operator=(const TI16Value& other66) {
   __isset = other66.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TI16Value& obj) {
-  using apache::thrift::to_string;
+void TI16Value::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TI16Value(";
-  out << "value="; (obj.__isset.value ? (out << to_string(obj.value)) : (out << "<null>"));
+  out << "value="; (__isset.value ? (out << to_string(value)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -1988,11 +1928,9 @@ void TI32Value::__set_value(const int32_t val) {
 __isset.value = true;
 }
 
-const char* TI32Value::ascii_fingerprint = "E7A96B151330359E84C0A3AC91BCBACD";
-const uint8_t TI32Value::binary_fingerprint[16] = {0xE7,0xA9,0x6B,0x15,0x13,0x30,0x35,0x9E,0x84,0xC0,0xA3,0xAC,0x91,0xBC,0xBA,0xCD};
-
 uint32_t TI32Value::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2033,7 +1971,7 @@ uint32_t TI32Value::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TI32Value::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TI32Value");
 
   if (this->__isset.value) {
@@ -2043,7 +1981,6 @@ uint32_t TI32Value::write(::apache::thrift::protocol::TProtocol* oprot) const {
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2062,12 +1999,11 @@ TI32Value& TI32Value::operator=(const TI32Value& other68) {
   __isset = other68.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TI32Value& obj) {
-  using apache::thrift::to_string;
+void TI32Value::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TI32Value(";
-  out << "value="; (obj.__isset.value ? (out << to_string(obj.value)) : (out << "<null>"));
+  out << "value="; (__isset.value ? (out << to_string(value)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -2080,11 +2016,9 @@ void TI64Value::__set_value(const int64_t val) {
 __isset.value = true;
 }
 
-const char* TI64Value::ascii_fingerprint = "148F3AAAC1D9859963D5E800D187BF26";
-const uint8_t TI64Value::binary_fingerprint[16] = {0x14,0x8F,0x3A,0xAA,0xC1,0xD9,0x85,0x99,0x63,0xD5,0xE8,0x00,0xD1,0x87,0xBF,0x26};
-
 uint32_t TI64Value::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2125,7 +2059,7 @@ uint32_t TI64Value::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TI64Value::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TI64Value");
 
   if (this->__isset.value) {
@@ -2135,7 +2069,6 @@ uint32_t TI64Value::write(::apache::thrift::protocol::TProtocol* oprot) const {
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2154,12 +2087,11 @@ TI64Value& TI64Value::operator=(const TI64Value& other70) {
   __isset = other70.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TI64Value& obj) {
-  using apache::thrift::to_string;
+void TI64Value::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TI64Value(";
-  out << "value="; (obj.__isset.value ? (out << to_string(obj.value)) : (out << "<null>"));
+  out << "value="; (__isset.value ? (out << to_string(value)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -2172,11 +2104,9 @@ void TDoubleValue::__set_value(const double val) {
 __isset.value = true;
 }
 
-const char* TDoubleValue::ascii_fingerprint = "3586E570A474C4A8603B4FF74903B3A6";
-const uint8_t TDoubleValue::binary_fingerprint[16] = {0x35,0x86,0xE5,0x70,0xA4,0x74,0xC4,0xA8,0x60,0x3B,0x4F,0xF7,0x49,0x03,0xB3,0xA6};
-
 uint32_t TDoubleValue::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2217,7 +2147,7 @@ uint32_t TDoubleValue::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TDoubleValue::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TDoubleValue");
 
   if (this->__isset.value) {
@@ -2227,7 +2157,6 @@ uint32_t TDoubleValue::write(::apache::thrift::protocol::TProtocol* oprot) const
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2246,12 +2175,11 @@ TDoubleValue& TDoubleValue::operator=(const TDoubleValue& other72) {
   __isset = other72.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TDoubleValue& obj) {
-  using apache::thrift::to_string;
+void TDoubleValue::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TDoubleValue(";
-  out << "value="; (obj.__isset.value ? (out << to_string(obj.value)) : (out << "<null>"));
+  out << "value="; (__isset.value ? (out << to_string(value)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -2264,11 +2192,9 @@ void TStringValue::__set_value(const std::string& val) {
 __isset.value = true;
 }
 
-const char* TStringValue::ascii_fingerprint = "66E694018C17E5B65A59AE8F55CCA3CD";
-const uint8_t TStringValue::binary_fingerprint[16] = {0x66,0xE6,0x94,0x01,0x8C,0x17,0xE5,0xB6,0x5A,0x59,0xAE,0x8F,0x55,0xCC,0xA3,0xCD};
-
 uint32_t TStringValue::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2309,7 +2235,7 @@ uint32_t TStringValue::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TStringValue::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TStringValue");
 
   if (this->__isset.value) {
@@ -2319,7 +2245,6 @@ uint32_t TStringValue::write(::apache::thrift::protocol::TProtocol* oprot) const
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2338,12 +2263,11 @@ TStringValue& TStringValue::operator=(const TStringValue& other74) {
   __isset = other74.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TStringValue& obj) {
-  using apache::thrift::to_string;
+void TStringValue::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TStringValue(";
-  out << "value="; (obj.__isset.value ? (out << to_string(obj.value)) : (out << "<null>"));
+  out << "value="; (__isset.value ? (out << to_string(value)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -2379,11 +2303,9 @@ void TColumnValue::__set_stringVal(const TStringValue& val) {
   this->stringVal = val;
 }
 
-const char* TColumnValue::ascii_fingerprint = "C2DDD988447EA7999A8285AA38AAE9AD";
-const uint8_t TColumnValue::binary_fingerprint[16] = {0xC2,0xDD,0xD9,0x88,0x44,0x7E,0xA7,0x99,0x9A,0x82,0x85,0xAA,0x38,0xAA,0xE9,0xAD};
-
 uint32_t TColumnValue::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2472,7 +2394,7 @@ uint32_t TColumnValue::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TColumnValue::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TColumnValue");
 
   xfer += oprot->writeFieldBegin("boolVal", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -2505,7 +2427,6 @@ uint32_t TColumnValue::write(::apache::thrift::protocol::TProtocol* oprot) const
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2542,18 +2463,17 @@ TColumnValue& TColumnValue::operator=(const TColumnValue& other76) {
   __isset = other76.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TColumnValue& obj) {
-  using apache::thrift::to_string;
+void TColumnValue::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TColumnValue(";
-  out << "boolVal=" << to_string(obj.boolVal);
-  out << ", " << "byteVal=" << to_string(obj.byteVal);
-  out << ", " << "i16Val=" << to_string(obj.i16Val);
-  out << ", " << "i32Val=" << to_string(obj.i32Val);
-  out << ", " << "i64Val=" << to_string(obj.i64Val);
-  out << ", " << "doubleVal=" << to_string(obj.doubleVal);
-  out << ", " << "stringVal=" << to_string(obj.stringVal);
+  out << "boolVal=" << to_string(boolVal);
+  out << ", " << "byteVal=" << to_string(byteVal);
+  out << ", " << "i16Val=" << to_string(i16Val);
+  out << ", " << "i32Val=" << to_string(i32Val);
+  out << ", " << "i64Val=" << to_string(i64Val);
+  out << ", " << "doubleVal=" << to_string(doubleVal);
+  out << ", " << "stringVal=" << to_string(stringVal);
   out << ")";
-  return out;
 }
 
 
@@ -2565,11 +2485,9 @@ void TRow::__set_colVals(const std::vector<TColumnValue> & val) {
   this->colVals = val;
 }
 
-const char* TRow::ascii_fingerprint = "E73FD1FCA0CA58A669FC3E02FB68D534";
-const uint8_t TRow::binary_fingerprint[16] = {0xE7,0x3F,0xD1,0xFC,0xA0,0xCA,0x58,0xA6,0x69,0xFC,0x3E,0x02,0xFB,0x68,0xD5,0x34};
-
 uint32_t TRow::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2625,7 +2543,7 @@ uint32_t TRow::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TRow::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TRow");
 
   xfer += oprot->writeFieldBegin("colVals", ::apache::thrift::protocol::T_LIST, 1);
@@ -2642,7 +2560,6 @@ uint32_t TRow::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2658,12 +2575,11 @@ TRow& TRow::operator=(const TRow& other84) {
   colVals = other84.colVals;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TRow& obj) {
-  using apache::thrift::to_string;
+void TRow::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TRow(";
-  out << "colVals=" << to_string(obj.colVals);
+  out << "colVals=" << to_string(colVals);
   out << ")";
-  return out;
 }
 
 
@@ -2679,11 +2595,9 @@ void TBoolColumn::__set_nulls(const std::string& val) {
   this->nulls = val;
 }
 
-const char* TBoolColumn::ascii_fingerprint = "F9058324D96DB7F974D8ACDC01C54219";
-const uint8_t TBoolColumn::binary_fingerprint[16] = {0xF9,0x05,0x83,0x24,0xD9,0x6D,0xB7,0xF9,0x74,0xD8,0xAC,0xDC,0x01,0xC5,0x42,0x19};
-
 uint32_t TBoolColumn::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2750,7 +2664,7 @@ uint32_t TBoolColumn::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TBoolColumn::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TBoolColumn");
 
   xfer += oprot->writeFieldBegin("values", ::apache::thrift::protocol::T_LIST, 1);
@@ -2771,7 +2685,6 @@ uint32_t TBoolColumn::write(::apache::thrift::protocol::TProtocol* oprot) const
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2790,13 +2703,12 @@ TBoolColumn& TBoolColumn::operator=(const TBoolColumn& other92) {
   nulls = other92.nulls;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TBoolColumn& obj) {
-  using apache::thrift::to_string;
+void TBoolColumn::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TBoolColumn(";
-  out << "values=" << to_string(obj.values);
-  out << ", " << "nulls=" << to_string(obj.nulls);
+  out << "values=" << to_string(values);
+  out << ", " << "nulls=" << to_string(nulls);
   out << ")";
-  return out;
 }
 
 
@@ -2812,11 +2724,9 @@ void TByteColumn::__set_nulls(const std::string& val) {
   this->nulls = val;
 }
 
-const char* TByteColumn::ascii_fingerprint = "1CB300106BAA463A70BB2A2395900F48";
-const uint8_t TByteColumn::binary_fingerprint[16] = {0x1C,0xB3,0x00,0x10,0x6B,0xAA,0x46,0x3A,0x70,0xBB,0x2A,0x23,0x95,0x90,0x0F,0x48};
-
 uint32_t TByteColumn::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2883,7 +2793,7 @@ uint32_t TByteColumn::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TByteColumn::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TByteColumn");
 
   xfer += oprot->writeFieldBegin("values", ::apache::thrift::protocol::T_LIST, 1);
@@ -2904,7 +2814,6 @@ uint32_t TByteColumn::write(::apache::thrift::protocol::TProtocol* oprot) const
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2923,13 +2832,12 @@ TByteColumn& TByteColumn::operator=(const TByteColumn& other100) {
   nulls = other100.nulls;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TByteColumn& obj) {
-  using apache::thrift::to_string;
+void TByteColumn::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TByteColumn(";
-  out << "values=" << to_string(obj.values);
-  out << ", " << "nulls=" << to_string(obj.nulls);
+  out << "values=" << to_string(values);
+  out << ", " << "nulls=" << to_string(nulls);
   out << ")";
-  return out;
 }
 
 
@@ -2945,11 +2853,9 @@ void TI16Column::__set_nulls(const std::string& val) {
   this->nulls = val;
 }
 
-const char* TI16Column::ascii_fingerprint = "6574CDB1F121C8DB47FB257A3F104BDB";
-const uint8_t TI16Column::binary_fingerprint[16] = {0x65,0x74,0xCD,0xB1,0xF1,0x21,0xC8,0xDB,0x47,0xFB,0x25,0x7A,0x3F,0x10,0x4B,0xDB};
-
 uint32_t TI16Column::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3016,7 +2922,7 @@ uint32_t TI16Column::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TI16Column::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TI16Column");
 
   xfer += oprot->writeFieldBegin("values", ::apache::thrift::protocol::T_LIST, 1);
@@ -3037,7 +2943,6 @@ uint32_t TI16Column::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3056,13 +2961,12 @@ TI16Column& TI16Column::operator=(const TI16Column& other108) {
   nulls = other108.nulls;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TI16Column& obj) {
-  using apache::thrift::to_string;
+void TI16Column::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TI16Column(";
-  out << "values=" << to_string(obj.values);
-  out << ", " << "nulls=" << to_string(obj.nulls);
+  out << "values=" << to_string(values);
+  out << ", " << "nulls=" << to_string(nulls);
   out << ")";
-  return out;
 }
 
 
@@ -3078,11 +2982,9 @@ void TI32Column::__set_nulls(const std::string& val) {
   this->nulls = val;
 }
 
-const char* TI32Column::ascii_fingerprint = "CCCCE89C7E9DA10280F5663700677313";
-const uint8_t TI32Column::binary_fingerprint[16] = {0xCC,0xCC,0xE8,0x9C,0x7E,0x9D,0xA1,0x02,0x80,0xF5,0x66,0x37,0x00,0x67,0x73,0x13};
-
 uint32_t TI32Column::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3149,7 +3051,7 @@ uint32_t TI32Column::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TI32Column::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TI32Column");
 
   xfer += oprot->writeFieldBegin("values", ::apache::thrift::protocol::T_LIST, 1);
@@ -3170,7 +3072,6 @@ uint32_t TI32Column::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3189,13 +3090,12 @@ TI32Column& TI32Column::operator=(const TI32Column& other116) {
   nulls = other116.nulls;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TI32Column& obj) {
-  using apache::thrift::to_string;
+void TI32Column::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TI32Column(";
-  out << "values=" << to_string(obj.values);
-  out << ", " << "nulls=" << to_string(obj.nulls);
+  out << "values=" << to_string(values);
+  out << ", " << "nulls=" << to_string(nulls);
   out << ")";
-  return out;
 }
 
 
@@ -3211,11 +3111,9 @@ void TI64Column::__set_nulls(const std::string& val) {
   this->nulls = val;
 }
 
-const char* TI64Column::ascii_fingerprint = "925353917FC0AF87976A2338011F5A31";
-const uint8_t TI64Column::binary_fingerprint[16] = {0x92,0x53,0x53,0x91,0x7F,0xC0,0xAF,0x87,0x97,0x6A,0x23,0x38,0x01,0x1F,0x5A,0x31};
-
 uint32_t TI64Column::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3282,7 +3180,7 @@ uint32_t TI64Column::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TI64Column::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TI64Column");
 
   xfer += oprot->writeFieldBegin("values", ::apache::thrift::protocol::T_LIST, 1);
@@ -3303,7 +3201,6 @@ uint32_t TI64Column::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3322,13 +3219,12 @@ TI64Column& TI64Column::operator=(const TI64Column& other124) {
   nulls = other124.nulls;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TI64Column& obj) {
-  using apache::thrift::to_string;
+void TI64Column::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TI64Column(";
-  out << "values=" << to_string(obj.values);
-  out << ", " << "nulls=" << to_string(obj.nulls);
+  out << "values=" << to_string(values);
+  out << ", " << "nulls=" << to_string(nulls);
   out << ")";
-  return out;
 }
 
 
@@ -3344,11 +3240,9 @@ void TDoubleColumn::__set_nulls(const std::string& val) {
   this->nulls = val;
 }
 
-const char* TDoubleColumn::ascii_fingerprint = "8FF1C050A8D7FD247AEB23CD71539C09";
-const uint8_t TDoubleColumn::binary_fingerprint[16] = {0x8F,0xF1,0xC0,0x50,0xA8,0xD7,0xFD,0x24,0x7A,0xEB,0x23,0xCD,0x71,0x53,0x9C,0x09};
-
 uint32_t TDoubleColumn::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3415,7 +3309,7 @@ uint32_t TDoubleColumn::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TDoubleColumn::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TDoubleColumn");
 
   xfer += oprot->writeFieldBegin("values", ::apache::thrift::protocol::T_LIST, 1);
@@ -3436,7 +3330,6 @@ uint32_t TDoubleColumn::write(::apache::thrift::protocol::TProtocol* oprot) cons
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3455,13 +3348,12 @@ TDoubleColumn& TDoubleColumn::operator=(const TDoubleColumn& other132) {
   nulls = other132.nulls;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TDoubleColumn& obj) {
-  using apache::thrift::to_string;
+void TDoubleColumn::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TDoubleColumn(";
-  out << "values=" << to_string(obj.values);
-  out << ", " << "nulls=" << to_string(obj.nulls);
+  out << "values=" << to_string(values);
+  out << ", " << "nulls=" << to_string(nulls);
   out << ")";
-  return out;
 }
 
 
@@ -3477,11 +3369,9 @@ void TStringColumn::__set_nulls(const std::string& val) {
   this->nulls = val;
 }
 
-const char* TStringColumn::ascii_fingerprint = "BE556BF7091B2DABBA1863D5E458B15F";
-const uint8_t TStringColumn::binary_fingerprint[16] = {0xBE,0x55,0x6B,0xF7,0x09,0x1B,0x2D,0xAB,0xBA,0x18,0x63,0xD5,0xE4,0x58,0xB1,0x5F};
-
 uint32_t TStringColumn::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3548,7 +3438,7 @@ uint32_t TStringColumn::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TStringColumn::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TStringColumn");
 
   xfer += oprot->writeFieldBegin("values", ::apache::thrift::protocol::T_LIST, 1);
@@ -3569,7 +3459,6 @@ uint32_t TStringColumn::write(::apache::thrift::protocol::TProtocol* oprot) cons
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3588,13 +3477,12 @@ TStringColumn& TStringColumn::operator=(const TStringColumn& other140) {
   nulls = other140.nulls;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TStringColumn& obj) {
-  using apache::thrift::to_string;
+void TStringColumn::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TStringColumn(";
-  out << "values=" << to_string(obj.values);
-  out << ", " << "nulls=" << to_string(obj.nulls);
+  out << "values=" << to_string(values);
+  out << ", " << "nulls=" << to_string(nulls);
   out << ")";
-  return out;
 }
 
 
@@ -3610,11 +3498,9 @@ void TBinaryColumn::__set_nulls(const std::string& val) {
   this->nulls = val;
 }
 
-const char* TBinaryColumn::ascii_fingerprint = "BE556BF7091B2DABBA1863D5E458B15F";
-const uint8_t TBinaryColumn::binary_fingerprint[16] = {0xBE,0x55,0x6B,0xF7,0x09,0x1B,0x2D,0xAB,0xBA,0x18,0x63,0xD5,0xE4,0x58,0xB1,0x5F};
-
 uint32_t TBinaryColumn::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3681,7 +3567,7 @@ uint32_t TBinaryColumn::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TBinaryColumn::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TBinaryColumn");
 
   xfer += oprot->writeFieldBegin("values", ::apache::thrift::protocol::T_LIST, 1);
@@ -3702,7 +3588,6 @@ uint32_t TBinaryColumn::write(::apache::thrift::protocol::TProtocol* oprot) cons
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3721,13 +3606,12 @@ TBinaryColumn& TBinaryColumn::operator=(const TBinaryColumn& other148) {
   nulls = other148.nulls;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TBinaryColumn& obj) {
-  using apache::thrift::to_string;
+void TBinaryColumn::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TBinaryColumn(";
-  out << "values=" << to_string(obj.values);
-  out << ", " << "nulls=" << to_string(obj.nulls);
+  out << "values=" << to_string(values);
+  out << ", " << "nulls=" << to_string(nulls);
   out << ")";
-  return out;
 }
 
 
@@ -3767,11 +3651,9 @@ void TColumn::__set_binaryVal(const TBinaryColumn& val) {
   this->binaryVal = val;
 }
 
-const char* TColumn::ascii_fingerprint = "E6ADD10B4CDDE61A19E8878CC7039A17";
-const uint8_t TColumn::binary_fingerprint[16] = {0xE6,0xAD,0xD1,0x0B,0x4C,0xDD,0xE6,0x1A,0x19,0xE8,0x87,0x8C,0xC7,0x03,0x9A,0x17};
-
 uint32_t TColumn::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3868,7 +3750,7 @@ uint32_t TColumn::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TColumn::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TColumn");
 
   xfer += oprot->writeFieldBegin("boolVal", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -3905,7 +3787,6 @@ uint32_t TColumn::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3945,19 +3826,18 @@ TColumn& TColumn::operator=(const TColumn& other150) {
   __isset = other150.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TColumn& obj) {
-  using apache::thrift::to_string;
+void TColumn::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TColumn(";
-  out << "boolVal=" << to_string(obj.boolVal);
-  out << ", " << "byteVal=" << to_string(obj.byteVal);
-  out << ", " << "i16Val=" << to_string(obj.i16Val);
-  out << ", " << "i32Val=" << to_string(obj.i32Val);
-  out << ", " << "i64Val=" << to_string(obj.i64Val);
-  out << ", " << "doubleVal=" << to_string(obj.doubleVal);
-  out << ", " << "stringVal=" << to_string(obj.stringVal);
-  out << ", " << "binaryVal=" << to_string(obj.binaryVal);
+  out << "boolVal=" << to_string(boolVal);
+  out << ", " << "byteVal=" << to_string(byteVal);
+  out << ", " << "i16Val=" << to_string(i16Val);
+  out << ", " << "i32Val=" << to_string(i32Val);
+  out << ", " << "i64Val=" << to_string(i64Val);
+  out << ", " << "doubleVal=" << to_string(doubleVal);
+  out << ", " << "stringVal=" << to_string(stringVal);
+  out << ", " << "binaryVal=" << to_string(binaryVal);
   out << ")";
-  return out;
 }
 
 
@@ -3978,11 +3858,9 @@ void TRowSet::__set_columns(const std::vector<TColumn> & val) {
 __isset.columns = true;
 }
 
-const char* TRowSet::ascii_fingerprint = "46DA30A870489C7A58105AE0080DAEBF";
-const uint8_t TRowSet::binary_fingerprint[16] = {0x46,0xDA,0x30,0xA8,0x70,0x48,0x9C,0x7A,0x58,0x10,0x5A,0xE0,0x08,0x0D,0xAE,0xBF};
-
 uint32_t TRowSet::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4069,7 +3947,7 @@ uint32_t TRowSet::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TRowSet::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TRowSet");
 
   xfer += oprot->writeFieldBegin("startRowOffset", ::apache::thrift::protocol::T_I64, 1);
@@ -4103,7 +3981,6 @@ uint32_t TRowSet::write(::apache::thrift::protocol::TProtocol* oprot) const {
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4128,14 +4005,13 @@ TRowSet& TRowSet::operator=(const TRowSet& other164) {
   __isset = other164.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TRowSet& obj) {
-  using apache::thrift::to_string;
+void TRowSet::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TRowSet(";
-  out << "startRowOffset=" << to_string(obj.startRowOffset);
-  out << ", " << "rows=" << to_string(obj.rows);
-  out << ", " << "columns="; (obj.__isset.columns ? (out << to_string(obj.columns)) : (out << "<null>"));
+  out << "startRowOffset=" << to_string(startRowOffset);
+  out << ", " << "rows=" << to_string(rows);
+  out << ", " << "columns="; (__isset.columns ? (out << to_string(columns)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -4167,11 +4043,9 @@ void TStatus::__set_errorMessage(const std::string& val) {
 __isset.errorMessage = true;
 }
 
-const char* TStatus::ascii_fingerprint = "D5DEF49634A59C615C1B3A6F7D0DADB5";
-const uint8_t TStatus::binary_fingerprint[16] = {0xD5,0xDE,0xF4,0x96,0x34,0xA5,0x9C,0x61,0x5C,0x1B,0x3A,0x6F,0x7D,0x0D,0xAD,0xB5};
-
 uint32_t TStatus::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4261,7 +4135,7 @@ uint32_t TStatus::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TStatus::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TStatus");
 
   xfer += oprot->writeFieldBegin("statusCode", ::apache::thrift::protocol::T_I32, 1);
@@ -4298,7 +4172,6 @@ uint32_t TStatus::write(::apache::thrift::protocol::TProtocol* oprot) const {
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4329,16 +4202,15 @@ TStatus& TStatus::operator=(const TStatus& other173) {
   __isset = other173.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TStatus& obj) {
-  using apache::thrift::to_string;
+void TStatus::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TStatus(";
-  out << "statusCode=" << to_string(obj.statusCode);
-  out << ", " << "infoMessages="; (obj.__isset.infoMessages ? (out << to_string(obj.infoMessages)) : (out << "<null>"));
-  out << ", " << "sqlState="; (obj.__isset.sqlState ? (out << to_string(obj.sqlState)) : (out << "<null>"));
-  out << ", " << "errorCode="; (obj.__isset.errorCode ? (out << to_string(obj.errorCode)) : (out << "<null>"));
-  out << ", " << "errorMessage="; (obj.__isset.errorMessage ? (out << to_string(obj.errorMessage)) : (out << "<null>"));
+  out << "statusCode=" << to_string(statusCode);
+  out << ", " << "infoMessages="; (__isset.infoMessages ? (out << to_string(infoMessages)) : (out << "<null>"));
+  out << ", " << "sqlState="; (__isset.sqlState ? (out << to_string(sqlState)) : (out << "<null>"));
+  out << ", " << "errorCode="; (__isset.errorCode ? (out << to_string(errorCode)) : (out << "<null>"));
+  out << ", " << "errorMessage="; (__isset.errorMessage ? (out << to_string(errorMessage)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -4354,11 +4226,9 @@ void THandleIdentifier::__set_secret(const std::string& val) {
   this->secret = val;
 }
 
-const char* THandleIdentifier::ascii_fingerprint = "07A9615F837F7D0A952B595DD3020972";
-const uint8_t THandleIdentifier::binary_fingerprint[16] = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
 uint32_t THandleIdentifier::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4413,7 +4283,7 @@ uint32_t THandleIdentifier::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t THandleIdentifier::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("THandleIdentifier");
 
   xfer += oprot->writeFieldBegin("guid", ::apache::thrift::protocol::T_STRING, 1);
@@ -4426,7 +4296,6 @@ uint32_t THandleIdentifier::write(::apache::thrift::protocol::TProtocol* oprot)
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4445,13 +4314,12 @@ THandleIdentifier& THandleIdentifier::operator=(const THandleIdentifier& other17
   secret = other175.secret;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const THandleIdentifier& obj) {
-  using apache::thrift::to_string;
+void THandleIdentifier::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "THandleIdentifier(";
-  out << "guid=" << to_string(obj.guid);
-  out << ", " << "secret=" << to_string(obj.secret);
+  out << "guid=" << to_string(guid);
+  out << ", " << "secret=" << to_string(secret);
   out << ")";
-  return out;
 }
 
 
@@ -4463,11 +4331,9 @@ void TSessionHandle::__set_sessionId(const THandleIdentifier& val) {
   this->sessionId = val;
 }
 
-const char* TSessionHandle::ascii_fingerprint = "A756D3DBE614FB13F70BF7F7B6EB3D73";
-const uint8_t TSessionHandle::binary_fingerprint[16] = {0xA7,0x56,0xD3,0xDB,0xE6,0x14,0xFB,0x13,0xF7,0x0B,0xF7,0xF7,0xB6,0xEB,0x3D,0x73};
-
 uint32_t TSessionHandle::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4511,7 +4377,7 @@ uint32_t TSessionHandle::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TSessionHandle::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TSessionHandle");
 
   xfer += oprot->writeFieldBegin("sessionId", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -4520,7 +4386,6 @@ uint32_t TSessionHandle::write(::apache::thrift::protocol::TProtocol* oprot) con
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4536,12 +4401,11 @@ TSessionHandle& TSessionHandle::operator=(const TSessionHandle& other177) {
   sessionId = other177.sessionId;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TSessionHandle& obj) {
-  using apache::thrift::to_string;
+void TSessionHandle::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TSessionHandle(";
-  out << "sessionId=" << to_string(obj.sessionId);
+  out << "sessionId=" << to_string(sessionId);
   out << ")";
-  return out;
 }
 
 
@@ -4566,11 +4430,9 @@ void TOperationHandle::__set_modifiedRowCount(const double val) {
 __isset.modifiedRowCount = true;
 }
 
-const char* TOperationHandle::ascii_fingerprint = "29FD80F4F96804A30FCC59C23D2E5349";
-const uint8_t TOperationHandle::binary_fingerprint[16] = {0x29,0xFD,0x80,0xF4,0xF9,0x68,0x04,0xA3,0x0F,0xCC,0x59,0xC2,0x3D,0x2E,0x53,0x49};
-
 uint32_t TOperationHandle::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4646,7 +4508,7 @@ uint32_t TOperationHandle::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TOperationHandle::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TOperationHandle");
 
   xfer += oprot->writeFieldBegin("operationId", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -4668,7 +4530,6 @@ uint32_t TOperationHandle::write(::apache::thrift::protocol::TProtocol* oprot) c
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4696,15 +4557,14 @@ TOperationHandle& TOperationHandle::operator=(const TOperationHandle& other180)
   __isset = other180.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TOperationHandle& obj) {
-  using apache::thrift::to_string;
+void TOperationHandle::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TOperationHandle(";
-  out << "operationId=" << to_string(obj.operationId);
-  out << ", " << "operationType=" << to_string(obj.operationType);
-  out << ", " << "hasResultSet=" << to_string(obj.hasResultSet);
-  out << ", " << "modifiedRowCount="; (obj.__isset.modifiedRowCount ? (out << to_string(obj.modifiedRowCount)) : (out << "<null>"));
+  out << "operationId=" << to_string(operationId);
+  out << ", " << "operationType=" << to_string(operationType);
+  out << ", " << "hasResultSet=" << to_string(hasResultSet);
+  out << ", " << "modifiedRowCount="; (__isset.modifiedRowCount ? (out << to_string(modifiedRowCount)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -4731,11 +4591,9 @@ void TOpenSessionReq::__set_configuration(const std::map<std::string, std::strin
 __isset.configuration = true;
 }
 
-const char* TOpenSessionReq::ascii_fingerprint = "C8FD0F306A16C16BDA7B57F58BFAE5B2";
-const uint8_t TOpenSessionReq::binary_fingerprint[16] = {0xC8,0xFD,0x0F,0x30,0x6A,0x16,0xC1,0x6B,0xDA,0x7B,0x57,0xF5,0x8B,0xFA,0xE5,0xB2};
-
 uint32_t TOpenSessionReq::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4820,7 +4678,7 @@ uint32_t TOpenSessionReq::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TOpenSessionReq::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TOpenSessionReq");
 
   xfer += oprot->writeFieldBegin("client_protocol", ::apache::thrift::protocol::T_I32, 1);
@@ -4853,7 +4711,6 @@ uint32_t TOpenSessionReq::write(::apache::thrift::protocol::TProtocol* oprot) co
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4881,15 +4738,14 @@ TOpenSessionReq& TOpenSessionReq::operator=(const TOpenSessionReq& other191) {
   __isset = other191.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TOpenSessionReq& obj) {
-  using apache::thrift::to_string;
+void TOpenSessionReq::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TOpenSessionReq(";
-  out << "client_protocol=" << to_string(obj.client_protocol);
-  out << ", " << "username="; (obj.__isset.username ? (out << to_string(obj.username)) : (out << "<null>"));
-  out << ", " << "password="; (obj.__isset.password ? (out << to_string(obj.password)) : (out << "<null>"));
-  out << ", " << "configuration="; (obj.__isset.configuration ? (out << to_string(obj.configuration)) : (out << "<null>"));
+  out << "client_protocol=" << to_string(client_protocol);
+  out << ", " << "username="; (__isset.username ? (out << to_string(username)) : (out << "<null>"));
+  out << ", " << "password="; (__isset.password ? (out << to_string(password)) : (out << "<null>"));
+  out << ", " << "configuration="; (__isset.configuration ? (out << to_string(configuration)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -4915,11 +4771,9 @@ void TOpenSessionResp::__set_configuration(const std::map<std::string, std::stri
 __isset.configuration = true;
 }
 
-const char* TOpenSessionResp::ascii_fingerprint = "CFE7D7F4E9EC671F2518ED74FEE9F163";
-const uint8_t TOpenSessionResp::binary_fingerprint[16] = {0xCF,0xE7,0xD7,0xF4,0xE9,0xEC,0x67,0x1F,0x25,0x18,0xED,0x74,0xFE,0xE9,0xF1,0x63};
-
 uint32_t TOpenSessionResp::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5007,7 +4861,7 @@ uint32_t TOpenSessionResp::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TOpenSessionResp::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TOpenSessionResp");
 
   xfer += oprot->writeFieldBegin("status", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -5039,7 +4893,6 @@ uint32_t TOpenSessionResp::write(::apache::thrift::protocol::TProtocol* oprot) c
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5067,15 +4920,14 @@ TOpenSessionResp& TOpenSessionResp::operator=(const TOpenSessionResp& other202)
   __isset = other202.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TOpenSessionResp& obj) {
-  using apache::thrift::to_string;
+void TOpenSessionResp::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TOpenSessionResp(";
-  out << "status=" << to_string(obj.status);
-  out << ", " << "serverProtocolVersion=" << to_string(obj.serverProtocolVersion);
-  out << ", " << "sessionHandle="; (obj.__isset.sessionHandle ? (out << to_string(obj.sessionHandle)) : (out << "<null>"));
-  out << ", " << "configuration="; (obj.__isset.configuration ? (out << to_string(obj.configuration)) : (out << "<null>"));
+  out << "status=" << to_string(status);
+  out << ", " << "serverProtocolVersion=" << to_string(serverProtocolVersion);
+  out << ", " << "sessionHandle="; (__isset.sessionHandle ? (out << to_string(sessionHandle)) : (out << "<null>"));
+  out << ", " << "configuration="; (__isset.configuration ? (out << to_string(configuration)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -5087,11 +4939,9 @@ void TCloseSessionReq::__set_sessionHandle(const TSessionHandle& val) {
   this->sessionHandle = val;
 }
 
-const char* TCloseSessionReq::ascii_fingerprint = "82377107F8BD0526960537D5A112E6EF";
-const uint8_t TCloseSessionReq::binary_fingerprint[16] = {0x82,0x37,0x71,0x07,0xF8,0xBD,0x05,0x26,0x96,0x05,0x37,0xD5,0xA1,0x12,0xE6,0xEF};
-
 uint32_t TCloseSessionReq::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5135,7 +4985,7 @@ uint32_t TCloseSessionReq::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TCloseSessionReq::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCloseSessionReq");
 
   xfer += oprot->writeFieldBegin("sessionHandle", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -5144,7 +4994,6 @@ uint32_t TCloseSessionReq::write(::apache::thrift::protocol::TProtocol* oprot) c
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5160,12 +5009,11 @@ TCloseSessionReq& TCloseSessionReq::operator=(const TCloseSessionReq& other204)
   sessionHandle = other204.sessionHandle;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TCloseSessionReq& obj) {
-  using apache::thrift::to_string;
+void TCloseSessionReq::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TCloseSessionReq(";
-  out << "sessionHandle=" << to_string(obj.sessionHandle);
+  out << "sessionHandle=" << to_string(sessionHandle);
   out << ")";
-  return out;
 }
 
 
@@ -5177,11 +5025,9 @@ void TCloseSessionResp::__set_status(const TStatus& val) {
   this->status = val;
 }
 
-const char* TCloseSessionResp::ascii_fingerprint = "7142E89F09DC7C5F6FA916C7393F46C2";
-const uint8_t TCloseSessionResp::binary_fingerprint[16] = {0x71,0x42,0xE8,0x9F,0x09,0xDC,0x7C,0x5F,0x6F,0xA9,0x16,0xC7,0x39,0x3F,0x46,0xC2};
-
 uint32_t TCloseSessionResp::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5225,7 +5071,7 @@ uint32_t TCloseSessionResp::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TCloseSessionResp::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCloseSessionResp");
 
   xfer += oprot->writeFieldBegin("status", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -5234,7 +5080,6 @@ uint32_t TCloseSessionResp::write(::apache::thrift::protocol::TProtocol* oprot)
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5250,12 +5095,11 @@ TCloseSessionResp& TCloseSessionResp::operator=(const TCloseSessionResp& other20
   status = other206.status;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TCloseSessionResp& obj) {
-  using apache::thrift::to_string;
+void TCloseSessionResp::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TCloseSessionResp(";
-  out << "status=" << to_string(obj.status);
+  out << "status=" << to_string(status);
   out << ")";
-  return out;
 }
 
 
@@ -5287,11 +5131,9 @@ void TGetInfoValue::__set_lenValue(const int64_t val) {
   this->lenValue = val;
 }
 
-const char* TGetInfoValue::ascii_fingerprint = "057FED11279FD7248CFE73EE82ED579E";
-const uint8_t TGetInfoValue::binary_fingerprint[16] = {0x05,0x7F,0xED,0x11,0x27,0x9F,0xD7,0x24,0x8C,0xFE,0x73,0xEE,0x82,0xED,0x57,0x9E};
-
 uint32_t TGetInfoValue::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5372,7 +5214,7 @@ uint32_t TGetInfoValue::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TGetInfoValue::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TGetInfoValue");
 
   xfer += oprot->writeFieldBegin("stringValue", ::apache::thrift::protocol::T_STRING, 1);
@@ -5401,7 +5243,6 @@ uint32_t TGetInfoValue::write(::apache::thrift::protocol::TProtocol* oprot) cons
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5435,17 +5276,16 @@ TGetInfoValue& TGetInfoValue::operator=(const TGetInfoValue& other208) {
   __isset = other208.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TGetInfoValue& obj) {
-  using apache::thrift::to_string;
+void TGetInfoValue::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TGetInfoValue(";
-  out << "stringValue=" << to_string(obj.stringValue);
-  out << ", " << "smallIntValue=" << to_string(obj.smallIntValue);
-  out << ", " << "integerBitmask=" << to_string(obj.integerBitmask);
-  out << ", " << "integerFlag=" << to_string(obj.integerFlag);
-  out << ", " << "binaryValue=" << to_string(obj.binaryValue);
-  out << ", " << "lenValue=" << to_string(obj.lenValue);
+  out << "stringValue=" << to_string(stringValue);
+  out << ", " << "smallIntValue=" << to_string(smallIntValue);
+  out << ", " << "integerBitmask=" << to_string(integerBitmask);
+  out << ", " << "integerFlag=" << to_string(integerFlag);
+  out << ", " << "binaryValue=" << to_string(binaryValue);
+  out << ", " << "lenValue=" << to_string(lenValue);
   out << ")";
-  return out;
 }
 
 
@@ -5461,11 +5301,9 @@ void TGetInfoReq::__set_infoType(const TGetInfoType::type val) {
   this->infoType = val;
 }
 
-const char* TGetInfoReq::ascii_fingerprint = "95675B1A0BADE5F7EDE323809DB679B2";
-const uint8_t TGetInfoReq::binary_fingerprint[16] = {0x95,0x67,0x5B,0x1A,0x0B,0xAD,0xE5,0xF7,0xED,0xE3,0x23,0x80,0x9D,0xB6,0x79,0xB2};
-
 uint32_t TGetInfoReq::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5522,7 +5360,7 @@ uint32_t TGetInfoReq::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TGetInfoReq::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TGetInfoReq");
 
   xfer += oprot->writeFieldBegin("sessionHandle", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -5535,7 +5373,6 @@ uint32_t TGetInfoReq::write(::apache::thrift::protocol::TProtocol* oprot) const
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5554,13 +5391,12 @@ TGetInfoReq& TGetInfoReq::operator=(const TGetInfoReq& other211) {
   infoType = other211.infoType;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TGetInfoReq& obj) {
-  using apache::thrift::to_string;
+void TGetInfoReq::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TGetInfoReq(";
-  out << "sessionHandle=" << to_string(obj.sessionHandle);
-  out << ", " << "infoType=" << to_string(obj.infoType);
+  out << "sessionHandle=" << to_string(sessionHandle);
+  out << ", " << "infoType=" << to_string(infoType);
   out << ")";
-  return out;
 }
 
 
@@ -5576,11 +5412,9 @@ void TGetInfoResp::__set_infoValue(const TGetInfoValue& val) {
   this->infoValue = val;
 }
 
-const char* TGetInfoResp::ascii_fingerprint = "72AFA10A82728B51FDE91092012868DE";
-const uint8_t TGetInfoResp::binary_fingerprint[16] = {0x72,0xAF,0xA1,0x0A,0x82,0x72,0x8B,0x51,0xFD,0xE9,0x10,0x92,0x01,0x28,0x68,0xDE};
-
 uint32_t TGetInfoResp::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5635,7 +5469,7 @@ uint32_t TGetInfoResp::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TGetInfoResp::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TGetInfoResp");
 
   xfer += oprot->writeFieldBegin("status", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -5648,7 +5482,6 @@ uint32_t TGetInfoResp::write(::apache::thrift::protocol::TProtocol* oprot) const
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5667,13 +5500,12 @@ TGetInfoResp& TGetInfoResp::operator=(const TGetInfoResp& other213) {
   infoValue = other213.infoValue;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TGetInfoResp& obj) {
-  using apache::thrift::to_string;
+void TGetInfoResp::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TGetInfoResp(";
-  out << "status=" << to_string(obj.status);
-  out << ", " << "infoValue=" << to_string(obj.infoValue);
+  out << "status=" << to_string(status);
+  out << ", " << "infoValue=" << to_string(infoValue);
   out << ")";
-  return out;
 }
 
 
@@ -5699,11 +5531,9 @@ void TExecuteStatementReq::__set_runAsync(const bool val) {
 __isset.runAsync = true;
 }
 
-const char* TExecuteStatementReq::ascii_fingerprint = "FED75DB77E66D76EC1939A51FB0D96FA";
-const uint8_t TExecuteStatementReq::binary_fingerprint[16] = {0xFE,0xD7,0x5D,0xB7,0x7E,0x66,0xD7,0x6E,0xC1,0x93,0x9A,0x51,0xFB,0x0D,0x96,0xFA};
-
 uint32_t TExecuteStatementReq::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5789,7 +5619,7 @@ uint32_t TExecuteStatementReq::read(::apache::thrift::protocol::TProtocol* iprot
 
 uint32_t TExecuteStatementReq::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TExecuteStatementReq");
 
   xfer += oprot->writeFieldBegin("sessionHandle", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -5821,7 +5651,6 @@ uint32_t TExecuteStatementReq::write(::apache::thrift::protocol::TProtocol* opro
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5849,15 +5678,14 @@ TExecuteStatementReq& TExecuteStatementReq::operator=(const TExecuteStatementReq
   __isset = other223.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TExecuteStatementReq& obj) {
-  using apache::thrift::to_string;
+void TExecuteStatementReq::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TExecuteStatementReq(";
-  out << "sessionHandle=" << to_string(obj.sessionHandle);
-  out << ", " << "statement=" << to_string(obj.statement);
-  out << ", " << "confOverlay="; (obj.__isset.confOverlay ? (out << to_string(obj.confOverlay)) : (out << "<null>"));
-  out << ", " << "runAsync="; (obj.__isset.runAsync ? (out << to_string(obj.runAsync)) : (out << "<null>"));
+  out << "sessionHandle=" << to_string(sessionHandle);
+  out << ", " << "statement=" << to_string(statement);
+  out << ", " << "confOverlay="; (__isset.confOverlay ? (out << to_string(confOverlay)) : (out << "<null>"));
+  out << ", " << "runAsync="; (__isset.runAsync ? (out << to_string(runAsync)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -5874,11 +5702,9 @@ void TExecuteStatementResp::__set_operationHandle(const TOperationHandle& val) {
 __isset.operationHandle = true;
 }
 
-const char* TExecuteStatementResp::ascii_fingerprint = "02A075A0FF88D3A172916D8F23C7B286";
-const uint8_t TExecuteStatementResp::binary_fingerprint[16] = {0x02,0xA0,0x75,0xA0,0xFF,0x88,0xD3,0xA1,0x72,0x91,0x6D,0x8F,0x23,0xC7,0xB2,0x86};
-
 uint32_t TExecuteStatementResp::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5930,7 +5756,7 @@ uint32_t TExecuteStatementResp::read(::apache::thrift::protocol::TProtocol* ipro
 
 uint32_t TExecuteStatementResp::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TExecuteStatementResp");
 
   xfer += oprot->writeFieldBegin("status", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -5944,7 +5770,6 @@ uint32_t TExecuteStatementResp::write(::apache::thrift::protocol::TProtocol* opr
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5966,13 +5791,12 @@ TExecuteStatementResp& TExecuteStatementResp::operator=(const TExecuteStatementR
   __isset = other225.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TExecuteStatementResp& obj) {
-  using apache::thrift::to_string;
+void TExecuteStatementResp::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "TExecuteStatementResp(";
-  out << "status=" << to_string(obj.status);
-  out << ", " << "operationHandle="; (obj.__isset.operationHandle ? (out << to_string(obj.operationHandle)) : (out << "<null>"));
+  out << "status=" << to_string(status);
+  out << ", " << "operationHandle="; (__isset.operationHandle ? (out << to_string(operationHandle)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -5984,11 +5808,9 @@ void TGetTypeInfoReq::__set_sessionHandle(const TSessionHandle& val) {
   this->sessionHandle = val;
 }
 
-const char* TGetTypeInfoReq::ascii_fingerprint = "82377107F8BD0526960537D5A112E6EF";
-const uint8_t TGetTypeInfoReq::binary_fingerprint[16] = {0x82,0x37,0x71,0x07,0xF8,0xBD,0x05,0x26,0x96,0x05,0x37,0xD5,0xA1,0x12,0xE6,0xEF};
-
 uint32_t TGetTypeInfoReq::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6032,7 +5854,7 @@ uint32_t TGetTypeInfoReq::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t TGetTypeInfoReq::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TGetTypeInfoReq");
 
   xfer += oprot->writeFieldBegin("sessionHandle", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -6041,7 +5863,6 @@ uint32_t TGetTypeInfoReq::write(::apache::thrift::protocol::TProtocol* oprot) co
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6057,12 +5878,11 @@ TGetTypeInfoReq& TGetTypeInfoReq::operator=(const TGetTypeInfoReq& other227) {
   sessionHandle = other227.sessionHandle;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const TGetTypeInfoReq& obj) {
-  using apache::thrift::to_str

<TRUNCATED>

[27/55] [abbrv] hive git commit: HIVE-12059 : Clean up reference to deprecated constants in AvroSerdeUtils (Aaron Dossett via Ashutosh Chauhan)

Posted by xu...@apache.org.
HIVE-12059 : Clean up reference to deprecated constants in AvroSerdeUtils (Aaron Dossett via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan <ha...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/03c62d0d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/03c62d0d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/03c62d0d

Branch: refs/heads/spark
Commit: 03c62d0da2c4d71fbd144d2636807d38a008820c
Parents: e8f71f4
Author: Aaron Dossett <do...@apache.org>
Authored: Wed Oct 7 13:59:00 2015 -0800
Committer: Ashutosh Chauhan <ha...@apache.org>
Committed: Sat Oct 24 14:44:39 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hive/hbase/HBaseSerDeHelper.java     | 21 ++++++++-------
 .../hadoop/hive/hbase/HBaseSerDeParameters.java | 20 +++++++-------
 .../hbase/struct/AvroHBaseValueFactory.java     |  3 ++-
 .../hadoop/hive/hbase/TestHBaseSerDe.java       | 12 ++++-----
 .../ql/io/avro/AvroGenericRecordReader.java     |  5 ++--
 .../hadoop/hive/serde2/avro/TestAvroSerde.java  | 28 +++++++++-----------
 .../hive/serde2/avro/TestAvroSerdeUtils.java    | 18 ++++++-------
 7 files changed, 52 insertions(+), 55 deletions(-)
----------------------------------------------------------------------
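
The hunks below swap the deprecated string constants on AvroSerdeUtils for the
AvroTableProperties enum and its getPropName() accessor. A minimal sketch of the
new lookup pattern, assuming hive-serde is on the classpath; the class name, the
"cola.avro" family/qualifier prefix and the inline schema are illustrative only:

import java.util.Properties;

import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties;

public class AvroPropertyLookupSketch {
  public static void main(String[] args) {
    Properties tbl = new Properties();
    // Register a per-column-family schema literal under the enum-resolved key.
    tbl.setProperty("cola.avro." + AvroTableProperties.SCHEMA_LITERAL.getPropName(),
        "{\"type\":\"record\",\"name\":\"r\",\"fields\":[]}");

    // Before this patch: tbl.getProperty("cola.avro." + AvroSerdeUtils.SCHEMA_LITERAL)
    // After: resolve the key through the enum instead of the deprecated constant.
    String schemaLiteral =
        tbl.getProperty("cola.avro." + AvroTableProperties.SCHEMA_LITERAL.getPropName());
    String schemaUrl =
        tbl.getProperty("cola.avro." + AvroTableProperties.SCHEMA_URL.getPropName());

    System.out.println("literal=" + schemaLiteral + ", url=" + schemaUrl);
  }
}

The enum is expected to resolve to the same property names as the old constants, so
this reads as a source-level cleanup rather than a behavior change.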


http://git-wip-us.apache.org/repos/asf/hive/blob/03c62d0d/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java
index 3bcc5c0..20362e5 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.avro.AvroObjectInspectorGenerator;
-import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
+import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
@@ -215,16 +215,17 @@ public class HBaseSerDeHelper {
               // for avro type, the serialization class parameter is optional
               schemaLiteral =
                   tbl.getProperty(colMap.familyName + "." + colMap.qualifierPrefix + "."
-                      + AvroSerdeUtils.SCHEMA_LITERAL);
+                      + AvroTableProperties.SCHEMA_LITERAL.getPropName());
               schemaUrl =
                   tbl.getProperty(colMap.familyName + "." + colMap.qualifierPrefix + "."
-                      + AvroSerdeUtils.SCHEMA_URL);
+                      + AvroTableProperties.SCHEMA_URL.getPropName());
 
               if (schemaLiteral == null && schemaUrl == null) {
                 // either schema literal, schema url or serialization class must
                 // be provided
                 throw new SerDeException("For an avro schema, either "
-                    + AvroSerdeUtils.SCHEMA_LITERAL + ", " + AvroSerdeUtils.SCHEMA_URL + " or "
+                    + AvroTableProperties.SCHEMA_LITERAL.getPropName() + ", "
+                        + AvroTableProperties.SCHEMA_URL.getPropName() + " or "
                     + serdeConstants.SERIALIZATION_CLASS + " property must be set.");
               }
 
@@ -254,13 +255,13 @@ public class HBaseSerDeHelper {
             if (serType.equalsIgnoreCase(AVRO_SERIALIZATION_TYPE)) {
               // for avro type, the serialization class parameter is optional
               schemaLiteral =
-                  tbl.getProperty(colMap.familyName + "." + AvroSerdeUtils.SCHEMA_LITERAL);
-              schemaUrl = tbl.getProperty(colMap.familyName + "." + AvroSerdeUtils.SCHEMA_URL);
+                  tbl.getProperty(colMap.familyName + "." + AvroTableProperties.SCHEMA_LITERAL.getPropName());
+              schemaUrl = tbl.getProperty(colMap.familyName + "." + AvroTableProperties.SCHEMA_URL.getPropName());
 
               if (schemaLiteral == null && schemaUrl == null) {
                 // either schema literal or serialization class must be provided
                 throw new SerDeException("For an avro schema, either "
-                    + AvroSerdeUtils.SCHEMA_LITERAL + " property or "
+                    + AvroTableProperties.SCHEMA_LITERAL.getPropName() + " property or "
                     + serdeConstants.SERIALIZATION_CLASS + " property must be set.");
               }
 
@@ -315,16 +316,16 @@ public class HBaseSerDeHelper {
             // for avro type, the serialization class parameter is optional
             schemaLiteral =
                 tbl.getProperty(colMap.familyName + "." + qualifierName + "."
-                    + AvroSerdeUtils.SCHEMA_LITERAL);
+                    + AvroTableProperties.SCHEMA_LITERAL.getPropName());
             schemaUrl =
                 tbl.getProperty(colMap.familyName + "." + qualifierName + "."
-                    + AvroSerdeUtils.SCHEMA_URL);
+                    + AvroTableProperties.SCHEMA_URL.getPropName());
 
             if (schemaLiteral == null && schemaUrl == null) {
               // either schema literal, schema url or serialization class must
               // be provided
               throw new SerDeException("For an avro schema, either "
-                  + AvroSerdeUtils.SCHEMA_LITERAL + ", " + AvroSerdeUtils.SCHEMA_URL + " or "
+                  + AvroTableProperties.SCHEMA_LITERAL.getPropName() + ", " + AvroTableProperties.SCHEMA_URL.getPropName() + " or "
                   + serdeConstants.SERIALIZATION_CLASS + " property must be set.");
             }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/03c62d0d/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
index 43c1f0c..a11d3cd 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.hive.hbase.struct.HBaseValueFactory;
 import org.apache.hadoop.hive.hbase.struct.StructHBaseValueFactory;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
+import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties;
 import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -302,20 +302,20 @@ public class HBaseSerDeParameters {
 
         schemaLiteral =
             tbl.getProperty(colMap.familyName + "." + colMap.qualifierPrefix + "."
-                + AvroSerdeUtils.SCHEMA_LITERAL);
+                + AvroTableProperties.SCHEMA_LITERAL.getPropName());
 
         schemaUrl =
             tbl.getProperty(colMap.familyName + "." + colMap.qualifierPrefix + "."
-                + AvroSerdeUtils.SCHEMA_URL);
+                + AvroTableProperties.SCHEMA_URL.getPropName());
       } else {
         serType = tbl.getProperty(colMap.familyName + "." + HBaseSerDe.SERIALIZATION_TYPE);
 
         serClassName =
             tbl.getProperty(colMap.familyName + "." + serdeConstants.SERIALIZATION_CLASS);
 
-        schemaLiteral = tbl.getProperty(colMap.familyName + "." + AvroSerdeUtils.SCHEMA_LITERAL);
+        schemaLiteral = tbl.getProperty(colMap.familyName + "." + AvroTableProperties.SCHEMA_LITERAL.getPropName());
 
-        schemaUrl = tbl.getProperty(colMap.familyName + "." + AvroSerdeUtils.SCHEMA_URL);
+        schemaUrl = tbl.getProperty(colMap.familyName + "." + AvroTableProperties.SCHEMA_URL.getPropName());
       }
     } else if (!colMap.hbaseRowKey) {
       // not an hbase row key. This should either be a prefix or an individual qualifier
@@ -335,23 +335,23 @@ public class HBaseSerDeParameters {
 
       schemaLiteral =
           tbl.getProperty(colMap.familyName + "." + qualifierName + "."
-              + AvroSerdeUtils.SCHEMA_LITERAL);
+              + AvroTableProperties.SCHEMA_LITERAL.getPropName());
 
       schemaUrl =
-          tbl.getProperty(colMap.familyName + "." + qualifierName + "." + AvroSerdeUtils.SCHEMA_URL);
+          tbl.getProperty(colMap.familyName + "." + qualifierName + "." + AvroTableProperties.SCHEMA_URL.getPropName());
     }
 
     if (serType == null) {
       throw new IllegalArgumentException("serialization.type property is missing");
     }
 
-    String avroSchemaRetClass = tbl.getProperty(AvroSerdeUtils.SCHEMA_RETRIEVER);
+    String avroSchemaRetClass = tbl.getProperty(AvroTableProperties.SCHEMA_RETRIEVER.getPropName());
 
     if (schemaLiteral == null && serClassName == null && schemaUrl == null
         && avroSchemaRetClass == null) {
       throw new IllegalArgumentException("serialization.type was set to [" + serType
-          + "] but neither " + AvroSerdeUtils.SCHEMA_LITERAL + ", " + AvroSerdeUtils.SCHEMA_URL
-          + ", serialization.class or " + AvroSerdeUtils.SCHEMA_RETRIEVER + " property was set");
+          + "] but neither " + AvroTableProperties.SCHEMA_LITERAL.getPropName() + ", " + AvroTableProperties.SCHEMA_URL.getPropName()
+          + ", serialization.class or " + AvroTableProperties.SCHEMA_RETRIEVER.getPropName() + " property was set");
     }
 
     Class<?> deserializerClass = null;

http://git-wip-us.apache.org/repos/asf/hive/blob/03c62d0d/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java
index 3225e5c..514d5eb 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.avro.AvroLazyObjectInspector;
 import org.apache.hadoop.hive.serde2.avro.AvroSchemaRetriever;
 import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
+import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -57,7 +58,7 @@ public class AvroHBaseValueFactory extends DefaultHBaseValueFactory {
   public void init(HBaseSerDeParameters hbaseParams, Configuration conf, Properties properties)
       throws SerDeException {
     super.init(hbaseParams, conf, properties);
-    String avroSchemaRetClass = properties.getProperty(AvroSerdeUtils.SCHEMA_RETRIEVER);
+    String avroSchemaRetClass = properties.getProperty(AvroTableProperties.SCHEMA_RETRIEVER.getPropName());
 
     if (avroSchemaRetClass != null) {
       Class<?> avroSchemaRetrieverClass = null;

http://git-wip-us.apache.org/repos/asf/hive/blob/03c62d0d/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
index e28fc87..f244ed6 100644
--- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
+++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
@@ -56,7 +56,7 @@ import org.apache.hadoop.hive.hbase.avro.OfficePhone;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
-import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
+import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -1077,7 +1077,7 @@ public class TestHBaseSerDe extends TestCase {
   private Properties createPropertiesForHiveAvroSchemaInline() {
     Properties tbl = new Properties();
     tbl.setProperty("cola.avro.serialization.type", "avro");
-    tbl.setProperty("cola.avro." + AvroSerdeUtils.SCHEMA_LITERAL, RECORD_SCHEMA);
+    tbl.setProperty("cola.avro." + AvroTableProperties.SCHEMA_LITERAL.getPropName(), RECORD_SCHEMA);
     tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cola:avro");
     tbl.setProperty(HBaseSerDe.HBASE_AUTOGENERATE_STRUCT, "true");
 
@@ -1123,7 +1123,7 @@ public class TestHBaseSerDe extends TestCase {
   private Properties createPropertiesForHiveAvroForwardEvolvedSchema() {
     Properties tbl = new Properties();
     tbl.setProperty("cola.avro.serialization.type", "avro");
-    tbl.setProperty("cola.avro." + AvroSerdeUtils.SCHEMA_LITERAL, RECORD_SCHEMA_EVOLVED);
+    tbl.setProperty("cola.avro." + AvroTableProperties.SCHEMA_LITERAL.getPropName(), RECORD_SCHEMA_EVOLVED);
     tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cola:avro");
     tbl.setProperty(HBaseSerDe.HBASE_AUTOGENERATE_STRUCT, "true");
 
@@ -1168,7 +1168,7 @@ public class TestHBaseSerDe extends TestCase {
   private Properties createPropertiesForHiveAvroBackwardEvolvedSchema() {
     Properties tbl = new Properties();
     tbl.setProperty("cola.avro.serialization.type", "avro");
-    tbl.setProperty("cola.avro." + AvroSerdeUtils.SCHEMA_LITERAL, RECORD_SCHEMA);
+    tbl.setProperty("cola.avro." + AvroTableProperties.SCHEMA_LITERAL.getPropName(), RECORD_SCHEMA);
     tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cola:avro");
     tbl.setProperty(HBaseSerDe.HBASE_AUTOGENERATE_STRUCT, "true");
 
@@ -1283,7 +1283,7 @@ public class TestHBaseSerDe extends TestCase {
   private Properties createPropertiesForHiveAvroSchemaUrl(String schemaUrl) {
     Properties tbl = new Properties();
     tbl.setProperty("cola.avro.serialization.type", "avro");
-    tbl.setProperty("cola.avro." + AvroSerdeUtils.SCHEMA_URL, schemaUrl);
+    tbl.setProperty("cola.avro." + AvroTableProperties.SCHEMA_URL.getPropName(), schemaUrl);
     tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cola:avro");
     tbl.setProperty(HBaseSerDe.HBASE_AUTOGENERATE_STRUCT, "true");
 
@@ -1333,7 +1333,7 @@ public class TestHBaseSerDe extends TestCase {
   private Properties createPropertiesForHiveAvroExternalSchema() {
     Properties tbl = new Properties();
     tbl.setProperty("cola.avro.serialization.type", "avro");
-    tbl.setProperty(AvroSerdeUtils.SCHEMA_RETRIEVER,
+    tbl.setProperty(AvroTableProperties.SCHEMA_RETRIEVER.getPropName(),
         "org.apache.hadoop.hive.hbase.HBaseTestAvroSchemaRetriever");
     tbl.setProperty("cola.avro." + serdeConstants.SERIALIZATION_CLASS,
         "org.apache.hadoop.hive.hbase.avro.Employee");

http://git-wip-us.apache.org/repos/asf/hive/blob/03c62d0d/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java
index 8d58d74..89fac3f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable;
 import org.apache.hadoop.hive.serde2.avro.AvroSerdeException;
 import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
+import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.JobConf;
@@ -117,7 +118,7 @@ public class AvroGenericRecordReader implements
           }
 
           Properties props = pathsAndParts.getValue().getProperties();
-          if(props.containsKey(AvroSerdeUtils.SCHEMA_LITERAL) || props.containsKey(AvroSerdeUtils.SCHEMA_URL)) {
+          if(props.containsKey(AvroTableProperties.SCHEMA_LITERAL.getPropName()) || props.containsKey(AvroTableProperties.SCHEMA_URL.getPropName())) {
             return AvroSerdeUtils.determineSchemaOrThrowException(job, props);
           }
           else {
@@ -133,7 +134,7 @@ public class AvroGenericRecordReader implements
     // In "select * from table" situations (non-MR), we can add things to the job
     // It's safe to add this to the job since it's not *actually* a mapred job.
     // Here the global state is confined to just this process.
-    String s = job.get(AvroSerdeUtils.AVRO_SERDE_SCHEMA);
+    String s = job.get(AvroTableProperties.AVRO_SERDE_SCHEMA.getPropName());
     if(s != null) {
       LOG.info("Found the avro schema in the job: " + s);
       return AvroSerdeUtils.getSchemaFor(s);

http://git-wip-us.apache.org/repos/asf/hive/blob/03c62d0d/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerde.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerde.java b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerde.java
index 36dc484..008d9ec 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerde.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerde.java
@@ -21,20 +21,16 @@ import org.apache.avro.Schema;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.io.Writable;
 import org.junit.Test;
-import org.mockito.Mockito;
 
 import java.util.List;
 import java.util.Properties;
 
-import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AVRO_SERDE_SCHEMA;
-import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.SCHEMA_LITERAL;
+//import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AVRO_SERDE_SCHEMA;
+//import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.SCHEMA_LITERAL;
+import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 public class TestAvroSerde {
@@ -70,10 +66,10 @@ public class TestAvroSerde {
     // initialized.  Therefore we need to make sure we don't look for any
     // old schemas within it.
     Configuration conf = new Configuration();
-    conf.set(AVRO_SERDE_SCHEMA, originalSchema.toString(false));
+    conf.set(AvroTableProperties.AVRO_SERDE_SCHEMA.getPropName(), originalSchema.toString(false));
 
     Properties props = new Properties();
-    props.put(SCHEMA_LITERAL, newSchemaString);
+    props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), newSchemaString);
 
 
     AvroSerDe asd = new AvroSerDe();
@@ -81,7 +77,7 @@ public class TestAvroSerde {
 
     // Verify that the schema now within the configuration is the one passed
     // in via the properties
-    assertEquals(newSchema, AvroSerdeUtils.getSchemaFor(conf.get(AVRO_SERDE_SCHEMA)));
+    assertEquals(newSchema, AvroSerdeUtils.getSchemaFor(conf.get(AvroTableProperties.AVRO_SERDE_SCHEMA.getPropName())));
   }
 
   @Test
@@ -94,7 +90,7 @@ public class TestAvroSerde {
   @Test
   public void gibberishSchemaProvidedReturnsErrorSchema() {
     Properties props = new Properties();
-    props.put(AvroSerdeUtils.SCHEMA_LITERAL, "blahblahblah");
+    props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), "blahblahblah");
 
     verifyExpectedException(props);
   }
@@ -102,7 +98,7 @@ public class TestAvroSerde {
   @Test
   public void emptySchemaProvidedThrowsException() {
     Properties props = new Properties();
-    props.put(AvroSerdeUtils.SCHEMA_LITERAL, "");
+    props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), "");
 
     verifyExpectedException(props);
   }
@@ -110,7 +106,7 @@ public class TestAvroSerde {
   @Test
   public void badSchemaURLProvidedThrowsException() {
     Properties props = new Properties();
-    props.put(AvroSerdeUtils.SCHEMA_URL, "not://a/url");
+    props.put(AvroTableProperties.SCHEMA_URL.getPropName(), "not://a/url");
 
     verifyExpectedException(props);
   }
@@ -118,7 +114,7 @@ public class TestAvroSerde {
   @Test
   public void emptySchemaURLProvidedThrowsException() {
     Properties props = new Properties();
-    props.put(AvroSerdeUtils.SCHEMA_URL, "");
+    props.put(AvroTableProperties.SCHEMA_URL.getPropName(), "");
 
     verifyExpectedException(props);
   }
@@ -126,8 +122,8 @@ public class TestAvroSerde {
   @Test
   public void bothPropertiesSetToNoneThrowsException() {
     Properties props = new Properties();
-    props.put(AvroSerdeUtils.SCHEMA_URL, AvroSerdeUtils.SCHEMA_NONE);
-    props.put(AvroSerdeUtils.SCHEMA_LITERAL, AvroSerdeUtils.SCHEMA_NONE);
+    props.put(AvroTableProperties.SCHEMA_URL.getPropName(), AvroSerdeUtils.SCHEMA_NONE);
+    props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), AvroSerdeUtils.SCHEMA_NONE);
 
     verifyExpectedException(props);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/03c62d0d/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerdeUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerdeUtils.java b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerdeUtils.java
index e07d06b..0013b78 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerdeUtils.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerdeUtils.java
@@ -25,14 +25,12 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.Test;
 
 import java.io.IOException;
-import java.net.MalformedURLException;
 import java.net.URISyntaxException;
 import java.util.Properties;
 
 import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.EXCEPTION_MESSAGE;
-import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.SCHEMA_LITERAL;
 import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.SCHEMA_NONE;
-import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.SCHEMA_URL;
+import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties;
 import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.determineSchemaOrThrowException;
 import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.getOtherTypeFromNullableType;
 import static org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.isNullableType;
@@ -127,7 +125,7 @@ public class TestAvroSerdeUtils {
     String schema = TestAvroObjectInspectorGenerator.RECORD_SCHEMA;
     Configuration conf = new Configuration();
     Properties props = new Properties();
-    props.put(AvroSerdeUtils.SCHEMA_LITERAL, schema);
+    props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), schema);
     Schema expected = AvroSerdeUtils.getSchemaFor(schema);
     assertEquals(expected, AvroSerdeUtils.determineSchemaOrThrowException(conf, props));
   }
@@ -136,7 +134,7 @@ public class TestAvroSerdeUtils {
   public void detemineSchemaTriesToOpenUrl() throws AvroSerdeException, IOException {
     Configuration conf = new Configuration();
     Properties props = new Properties();
-    props.put(AvroSerdeUtils.SCHEMA_URL, "not:///a.real.url");
+    props.put(AvroTableProperties.SCHEMA_URL.getPropName(), "not:///a.real.url");
 
     try {
       AvroSerdeUtils.determineSchemaOrThrowException(conf, props);
@@ -152,8 +150,8 @@ public class TestAvroSerdeUtils {
     Properties props = new Properties();
 
     // Combo 1: Both set to none
-    props.put(SCHEMA_URL, SCHEMA_NONE);
-    props.put(SCHEMA_LITERAL, SCHEMA_NONE);
+    props.put(AvroTableProperties.SCHEMA_URL.getPropName(), SCHEMA_NONE);
+    props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), SCHEMA_NONE);
     try {
       determineSchemaOrThrowException(conf, props);
       fail("Should have thrown exception with none set for both url and literal");
@@ -162,7 +160,7 @@ public class TestAvroSerdeUtils {
     }
 
     // Combo 2: Literal set, url set to none
-    props.put(SCHEMA_LITERAL, TestAvroObjectInspectorGenerator.RECORD_SCHEMA);
+    props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), TestAvroObjectInspectorGenerator.RECORD_SCHEMA);
     Schema s;
     try {
       s = determineSchemaOrThrowException(conf, props);
@@ -173,8 +171,8 @@ public class TestAvroSerdeUtils {
     }
 
     // Combo 3: url set, literal set to none
-    props.put(SCHEMA_LITERAL, SCHEMA_NONE);
-    props.put(SCHEMA_URL, "not:///a.real.url");
+    props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(), SCHEMA_NONE);
+    props.put(AvroTableProperties.SCHEMA_URL.getPropName(), "not:///a.real.url");
     try {
       determineSchemaOrThrowException(conf, props);
       fail("Should have tried to open that bogus URL");
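
In the spirit of the updated TestAvroSerdeUtils cases above, a small sketch of
resolving a schema through the enum-resolved keys; note that SCHEMA_NONE itself
still lives on AvroSerdeUtils. Assumes the hive-serde and avro jars are on the
classpath; the class name and the inline record schema are placeholders:

import java.util.Properties;

import org.apache.avro.Schema;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties;

public class DetermineSchemaSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Properties props = new Properties();
    // Url set to none, literal set to a (trivial) record schema: the literal wins.
    props.put(AvroTableProperties.SCHEMA_URL.getPropName(), AvroSerdeUtils.SCHEMA_NONE);
    props.put(AvroTableProperties.SCHEMA_LITERAL.getPropName(),
        "{\"type\":\"record\",\"name\":\"r\",\"fields\":[]}");

    Schema s = AvroSerdeUtils.determineSchemaOrThrowException(conf, props);
    System.out.println(s.toString(false));
  }
}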


[15/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnRequest.java
index dcfbbe0..8e3a0ae 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class AbortTxnRequest implements org.apache.thrift.TBase<AbortTxnRequest, AbortTxnRequest._Fields>, java.io.Serializable, Cloneable, Comparable<AbortTxnRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AbortTxnRequest");
 
@@ -185,7 +185,7 @@ public class AbortTxnRequest implements org.apache.thrift.TBase<AbortTxnRequest,
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case TXNID:
-      return Long.valueOf(getTxnid());
+      return getTxnid();
 
     }
     throw new IllegalStateException();
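
The generated metastore beans in this part all change in the same two mechanical
ways, as the AbortTxnRequest hunks above show: the @Generated annotation drops its
date attribute, which keeps a regeneration from touching every file just to update
a date, and getFieldValue returns the primitive directly and lets the compiler
autobox it instead of calling Long.valueOf/Boolean.valueOf. A hand-written fragment
illustrating both, not actual compiler output; the class name is a placeholder:

// javax.annotation.Generated is the annotation the 0.9.x generated beans use
// (bundled with JDK 8; later JDKs need the javax.annotation-api jar).
import javax.annotation.Generated;

@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
public class GeneratedBeanFragment {
  private long txnid;

  public long getTxnid() {
    return txnid;
  }

  public Object getFieldValue() {
    // Thrift 0.9.2 emitted: return Long.valueOf(getTxnid());
    return getTxnid();   // 0.9.3 relies on autoboxing
  }

  public static void main(String[] args) {
    System.out.println(new GeneratedBeanFragment().getFieldValue());   // prints 0
  }
}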

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java
index ffef8a6..bb6e584 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class AddDynamicPartitions implements org.apache.thrift.TBase<AddDynamicPartitions, AddDynamicPartitions._Fields>, java.io.Serializable, Cloneable, Comparable<AddDynamicPartitions> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AddDynamicPartitions");
 
@@ -334,7 +334,7 @@ public class AddDynamicPartitions implements org.apache.thrift.TBase<AddDynamicP
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case TXNID:
-      return Long.valueOf(getTxnid());
+      return getTxnid();
 
     case DBNAME:
       return getDbname();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java
index 05f83d3..083d340 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class AddPartitionsRequest implements org.apache.thrift.TBase<AddPartitionsRequest, AddPartitionsRequest._Fields>, java.io.Serializable, Cloneable, Comparable<AddPartitionsRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AddPartitionsRequest");
 
@@ -390,10 +390,10 @@ public class AddPartitionsRequest implements org.apache.thrift.TBase<AddPartitio
       return getParts();
 
     case IF_NOT_EXISTS:
-      return Boolean.valueOf(isIfNotExists());
+      return isIfNotExists();
 
     case NEED_RESULT:
-      return Boolean.valueOf(isNeedResult());
+      return isNeedResult();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java
index b74b6d6..9004457 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class AddPartitionsResult implements org.apache.thrift.TBase<AddPartitionsResult, AddPartitionsResult._Fields>, java.io.Serializable, Cloneable, Comparable<AddPartitionsResult> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AddPartitionsResult");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AggrStats.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AggrStats.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AggrStats.java
index 92911e3..bfd1206 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AggrStats.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AggrStats.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class AggrStats implements org.apache.thrift.TBase<AggrStats, AggrStats._Fields>, java.io.Serializable, Cloneable, Comparable<AggrStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AggrStats");
 
@@ -252,7 +252,7 @@ public class AggrStats implements org.apache.thrift.TBase<AggrStats, AggrStats._
       return getColStats();
 
     case PARTS_FOUND:
-      return Long.valueOf(getPartsFound());
+      return getPartsFound();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AlreadyExistsException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AlreadyExistsException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AlreadyExistsException.java
index 8d596ca..4032f19 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AlreadyExistsException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AlreadyExistsException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class AlreadyExistsException extends TException implements org.apache.thrift.TBase<AlreadyExistsException, AlreadyExistsException._Fields>, java.io.Serializable, Cloneable, Comparable<AlreadyExistsException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AlreadyExistsException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/BinaryColumnStatsData.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/BinaryColumnStatsData.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/BinaryColumnStatsData.java
index a99ba16..84e393c 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/BinaryColumnStatsData.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/BinaryColumnStatsData.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class BinaryColumnStatsData implements org.apache.thrift.TBase<BinaryColumnStatsData, BinaryColumnStatsData._Fields>, java.io.Serializable, Cloneable, Comparable<BinaryColumnStatsData> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BinaryColumnStatsData");
 
@@ -273,13 +273,13 @@ public class BinaryColumnStatsData implements org.apache.thrift.TBase<BinaryColu
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case MAX_COL_LEN:
-      return Long.valueOf(getMaxColLen());
+      return getMaxColLen();
 
     case AVG_COL_LEN:
-      return Double.valueOf(getAvgColLen());
+      return getAvgColLen();
 
     case NUM_NULLS:
-      return Long.valueOf(getNumNulls());
+      return getNumNulls();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/BooleanColumnStatsData.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/BooleanColumnStatsData.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/BooleanColumnStatsData.java
index 3d4e264..6aa4668 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/BooleanColumnStatsData.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/BooleanColumnStatsData.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class BooleanColumnStatsData implements org.apache.thrift.TBase<BooleanColumnStatsData, BooleanColumnStatsData._Fields>, java.io.Serializable, Cloneable, Comparable<BooleanColumnStatsData> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BooleanColumnStatsData");
 
@@ -273,13 +273,13 @@ public class BooleanColumnStatsData implements org.apache.thrift.TBase<BooleanCo
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case NUM_TRUES:
-      return Long.valueOf(getNumTrues());
+      return getNumTrues();
 
     case NUM_FALSES:
-      return Long.valueOf(getNumFalses());
+      return getNumFalses();
 
     case NUM_NULLS:
-      return Long.valueOf(getNumNulls());
+      return getNumNulls();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CheckLockRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CheckLockRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CheckLockRequest.java
index b221de4..430be03 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CheckLockRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CheckLockRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class CheckLockRequest implements org.apache.thrift.TBase<CheckLockRequest, CheckLockRequest._Fields>, java.io.Serializable, Cloneable, Comparable<CheckLockRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("CheckLockRequest");
 
@@ -185,7 +185,7 @@ public class CheckLockRequest implements org.apache.thrift.TBase<CheckLockReques
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case LOCKID:
-      return Long.valueOf(getLockid());
+      return getLockid();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataRequest.java
index d847b90..657bb7b 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ClearFileMetadataRequest implements org.apache.thrift.TBase<ClearFileMetadataRequest, ClearFileMetadataRequest._Fields>, java.io.Serializable, Cloneable, Comparable<ClearFileMetadataRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ClearFileMetadataRequest");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataResult.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataResult.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataResult.java
index 6a68b32..8e46d83 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataResult.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataResult.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ClearFileMetadataResult implements org.apache.thrift.TBase<ClearFileMetadataResult, ClearFileMetadataResult._Fields>, java.io.Serializable, Cloneable, Comparable<ClearFileMetadataResult> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ClearFileMetadataResult");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatistics.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatistics.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatistics.java
index 68c9e2f..ba059fe 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatistics.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatistics.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ColumnStatistics implements org.apache.thrift.TBase<ColumnStatistics, ColumnStatistics._Fields>, java.io.Serializable, Cloneable, Comparable<ColumnStatistics> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ColumnStatistics");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsData.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsData.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsData.java
index 6558add..036d438 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsData.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsData.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsDesc.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsDesc.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsDesc.java
index 63727e9..daf3d10 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsDesc.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsDesc.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ColumnStatisticsDesc implements org.apache.thrift.TBase<ColumnStatisticsDesc, ColumnStatisticsDesc._Fields>, java.io.Serializable, Cloneable, Comparable<ColumnStatisticsDesc> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ColumnStatisticsDesc");
 
@@ -357,7 +357,7 @@ public class ColumnStatisticsDesc implements org.apache.thrift.TBase<ColumnStati
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case IS_TBL_LEVEL:
-      return Boolean.valueOf(isIsTblLevel());
+      return isIsTblLevel();
 
     case DB_NAME:
       return getDbName();
@@ -369,7 +369,7 @@ public class ColumnStatisticsDesc implements org.apache.thrift.TBase<ColumnStati
       return getPartName();
 
     case LAST_ANALYZED:
-      return Long.valueOf(getLastAnalyzed());
+      return getLastAnalyzed();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsObj.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsObj.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsObj.java
index 0999569..4191beb 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsObj.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsObj.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ColumnStatisticsObj implements org.apache.thrift.TBase<ColumnStatisticsObj, ColumnStatisticsObj._Fields>, java.io.Serializable, Cloneable, Comparable<ColumnStatisticsObj> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ColumnStatisticsObj");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CommitTxnRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CommitTxnRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CommitTxnRequest.java
index 2fa37b1..77554b7 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CommitTxnRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CommitTxnRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class CommitTxnRequest implements org.apache.thrift.TBase<CommitTxnRequest, CommitTxnRequest._Fields>, java.io.Serializable, Cloneable, Comparable<CommitTxnRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("CommitTxnRequest");
 
@@ -185,7 +185,7 @@ public class CommitTxnRequest implements org.apache.thrift.TBase<CommitTxnReques
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case TXNID:
-      return Long.valueOf(getTxnid());
+      return getTxnid();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionRequest.java
index 9fea802..e028ecb 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class CompactionRequest implements org.apache.thrift.TBase<CompactionRequest, CompactionRequest._Fields>, java.io.Serializable, Cloneable, Comparable<CompactionRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("CompactionRequest");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionType.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionType.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionType.java
index 09474ee..7450b27 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionType.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ConfigValSecurityException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ConfigValSecurityException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ConfigValSecurityException.java
index a54dd16..d454a96 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ConfigValSecurityException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ConfigValSecurityException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ConfigValSecurityException extends TException implements org.apache.thrift.TBase<ConfigValSecurityException, ConfigValSecurityException._Fields>, java.io.Serializable, Cloneable, Comparable<ConfigValSecurityException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ConfigValSecurityException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CurrentNotificationEventId.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CurrentNotificationEventId.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CurrentNotificationEventId.java
index ac22eb2..2565a08 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CurrentNotificationEventId.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CurrentNotificationEventId.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class CurrentNotificationEventId implements org.apache.thrift.TBase<CurrentNotificationEventId, CurrentNotificationEventId._Fields>, java.io.Serializable, Cloneable, Comparable<CurrentNotificationEventId> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("CurrentNotificationEventId");
 
@@ -185,7 +185,7 @@ public class CurrentNotificationEventId implements org.apache.thrift.TBase<Curre
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case EVENT_ID:
-      return Long.valueOf(getEventId());
+      return getEventId();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
index e1435cf..2769845 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Database implements org.apache.thrift.TBase<Database, Database._Fields>, java.io.Serializable, Cloneable, Comparable<Database> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Database");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Date.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Date.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Date.java
index 7d86cff..bf064e5 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Date.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Date.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Date implements org.apache.thrift.TBase<Date, Date._Fields>, java.io.Serializable, Cloneable, Comparable<Date> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Date");
 
@@ -185,7 +185,7 @@ public class Date implements org.apache.thrift.TBase<Date, Date._Fields>, java.i
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case DAYS_SINCE_EPOCH:
-      return Long.valueOf(getDaysSinceEpoch());
+      return getDaysSinceEpoch();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java
index 712d2e5..2ebb811 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class DateColumnStatsData implements org.apache.thrift.TBase<DateColumnStatsData, DateColumnStatsData._Fields>, java.io.Serializable, Cloneable, Comparable<DateColumnStatsData> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DateColumnStatsData");
 
@@ -320,10 +320,10 @@ public class DateColumnStatsData implements org.apache.thrift.TBase<DateColumnSt
       return getHighValue();
 
     case NUM_NULLS:
-      return Long.valueOf(getNumNulls());
+      return getNumNulls();
 
     case NUM_DVS:
-      return Long.valueOf(getNumDVs());
+      return getNumDVs();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Decimal.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Decimal.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Decimal.java
index fbf2bbc..5b23dba 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Decimal.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Decimal.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Decimal implements org.apache.thrift.TBase<Decimal, Decimal._Fields>, java.io.Serializable, Cloneable, Comparable<Decimal> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Decimal");
 
@@ -241,7 +241,7 @@ public class Decimal implements org.apache.thrift.TBase<Decimal, Decimal._Fields
       return getUnscaled();
 
     case SCALE:
-      return Short.valueOf(getScale());
+      return getScale();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DecimalColumnStatsData.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DecimalColumnStatsData.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DecimalColumnStatsData.java
index 204e57e..720176a 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DecimalColumnStatsData.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DecimalColumnStatsData.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class DecimalColumnStatsData implements org.apache.thrift.TBase<DecimalColumnStatsData, DecimalColumnStatsData._Fields>, java.io.Serializable, Cloneable, Comparable<DecimalColumnStatsData> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DecimalColumnStatsData");
 
@@ -320,10 +320,10 @@ public class DecimalColumnStatsData implements org.apache.thrift.TBase<DecimalCo
       return getHighValue();
 
     case NUM_NULLS:
-      return Long.valueOf(getNumNulls());
+      return getNumNulls();
 
     case NUM_DVS:
-      return Long.valueOf(getNumDVs());
+      return getNumDVs();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DoubleColumnStatsData.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DoubleColumnStatsData.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DoubleColumnStatsData.java
index 661efa2..5d48b5d 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DoubleColumnStatsData.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DoubleColumnStatsData.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class DoubleColumnStatsData implements org.apache.thrift.TBase<DoubleColumnStatsData, DoubleColumnStatsData._Fields>, java.io.Serializable, Cloneable, Comparable<DoubleColumnStatsData> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DoubleColumnStatsData");
 
@@ -312,16 +312,16 @@ public class DoubleColumnStatsData implements org.apache.thrift.TBase<DoubleColu
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case LOW_VALUE:
-      return Double.valueOf(getLowValue());
+      return getLowValue();
 
     case HIGH_VALUE:
-      return Double.valueOf(getHighValue());
+      return getHighValue();
 
     case NUM_NULLS:
-      return Long.valueOf(getNumNulls());
+      return getNumNulls();
 
     case NUM_DVS:
-      return Long.valueOf(getNumDVs());
+      return getNumDVs();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsExpr.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsExpr.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsExpr.java
index 9bb5030..ac97aa3 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsExpr.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsExpr.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class DropPartitionsExpr implements org.apache.thrift.TBase<DropPartitionsExpr, DropPartitionsExpr._Fields>, java.io.Serializable, Cloneable, Comparable<DropPartitionsExpr> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DropPartitionsExpr");
 
@@ -239,7 +239,7 @@ public class DropPartitionsExpr implements org.apache.thrift.TBase<DropPartition
       return getExpr();
 
     case PART_ARCHIVE_LEVEL:
-      return Integer.valueOf(getPartArchiveLevel());
+      return getPartArchiveLevel();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsRequest.java
index 96e20ec..2dbde0c 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class DropPartitionsRequest implements org.apache.thrift.TBase<DropPartitionsRequest, DropPartitionsRequest._Fields>, java.io.Serializable, Cloneable, Comparable<DropPartitionsRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DropPartitionsRequest");
 
@@ -493,19 +493,19 @@ public class DropPartitionsRequest implements org.apache.thrift.TBase<DropPartit
       return getParts();
 
     case DELETE_DATA:
-      return Boolean.valueOf(isDeleteData());
+      return isDeleteData();
 
     case IF_EXISTS:
-      return Boolean.valueOf(isIfExists());
+      return isIfExists();
 
     case IGNORE_PROTECTION:
-      return Boolean.valueOf(isIgnoreProtection());
+      return isIgnoreProtection();
 
     case ENVIRONMENT_CONTEXT:
       return getEnvironmentContext();
 
     case NEED_RESULT:
-      return Boolean.valueOf(isNeedResult());
+      return isNeedResult();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java
index 2ebf2a0..adef415 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class DropPartitionsResult implements org.apache.thrift.TBase<DropPartitionsResult, DropPartitionsResult._Fields>, java.io.Serializable, Cloneable, Comparable<DropPartitionsResult> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DropPartitionsResult");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EnvironmentContext.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EnvironmentContext.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EnvironmentContext.java
index 796e27c..de588e2 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EnvironmentContext.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EnvironmentContext.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class EnvironmentContext implements org.apache.thrift.TBase<EnvironmentContext, EnvironmentContext._Fields>, java.io.Serializable, Cloneable, Comparable<EnvironmentContext> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("EnvironmentContext");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EventRequestType.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EventRequestType.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EventRequestType.java
index 422b44f..4295046 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EventRequestType.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EventRequestType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java
index e113707..06a7b4d 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class FieldSchema implements org.apache.thrift.TBase<FieldSchema, FieldSchema._Fields>, java.io.Serializable, Cloneable, Comparable<FieldSchema> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("FieldSchema");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FileMetadataExprType.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FileMetadataExprType.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FileMetadataExprType.java
index 44dc6bd..4e393e2 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FileMetadataExprType.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FileMetadataExprType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequest.java
index 985648e..6b08234 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class FireEventRequest implements org.apache.thrift.TBase<FireEventRequest, FireEventRequest._Fields>, java.io.Serializable, Cloneable, Comparable<FireEventRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("FireEventRequest");
 
@@ -373,7 +373,7 @@ public class FireEventRequest implements org.apache.thrift.TBase<FireEventReques
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case SUCCESSFUL:
-      return Boolean.valueOf(isSuccessful());
+      return isSuccessful();
 
     case DATA:
       return getData();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequestData.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequestData.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequestData.java
index 2830496..db8dc8b 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequestData.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequestData.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventResponse.java
index 4674ed1..9a86ed5 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class FireEventResponse implements org.apache.thrift.TBase<FireEventResponse, FireEventResponse._Fields>, java.io.Serializable, Cloneable, Comparable<FireEventResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("FireEventResponse");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Function.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Function.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Function.java
index 0a35abb..5f8ce0d 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Function.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Function.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Function implements org.apache.thrift.TBase<Function, Function._Fields>, java.io.Serializable, Cloneable, Comparable<Function> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Function");
 
@@ -552,7 +552,7 @@ public class Function implements org.apache.thrift.TBase<Function, Function._Fie
       return getOwnerType();
 
     case CREATE_TIME:
-      return Integer.valueOf(getCreateTime());
+      return getCreateTime();
 
     case FUNCTION_TYPE:
       return getFunctionType();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FunctionType.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FunctionType.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FunctionType.java
index 6958566..1116f88 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FunctionType.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FunctionType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetAllFunctionsResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetAllFunctionsResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetAllFunctionsResponse.java
index 3025066..f88e279 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetAllFunctionsResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetAllFunctionsResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GetAllFunctionsResponse implements org.apache.thrift.TBase<GetAllFunctionsResponse, GetAllFunctionsResponse._Fields>, java.io.Serializable, Cloneable, Comparable<GetAllFunctionsResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetAllFunctionsResponse");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
index b997361..0236b4a 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<GetFileMetadataByExprRequest, GetFileMetadataByExprRequest._Fields>, java.io.Serializable, Cloneable, Comparable<GetFileMetadataByExprRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetFileMetadataByExprRequest");
 
@@ -357,7 +357,7 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
       return getExpr();
 
     case DO_GET_FOOTERS:
-      return Boolean.valueOf(isDoGetFooters());
+      return isDoGetFooters();
 
     case TYPE:
       return getType();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprResult.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprResult.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprResult.java
index badcad8..89eb819 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprResult.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprResult.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GetFileMetadataByExprResult implements org.apache.thrift.TBase<GetFileMetadataByExprResult, GetFileMetadataByExprResult._Fields>, java.io.Serializable, Cloneable, Comparable<GetFileMetadataByExprResult> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetFileMetadataByExprResult");
 
@@ -257,7 +257,7 @@ public class GetFileMetadataByExprResult implements org.apache.thrift.TBase<GetF
       return getMetadata();
 
     case IS_SUPPORTED:
-      return Boolean.valueOf(isIsSupported());
+      return isIsSupported();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataRequest.java
index acb2599..2408ad1 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GetFileMetadataRequest implements org.apache.thrift.TBase<GetFileMetadataRequest, GetFileMetadataRequest._Fields>, java.io.Serializable, Cloneable, Comparable<GetFileMetadataRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetFileMetadataRequest");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataResult.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataResult.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataResult.java
index ff1d1f7..8946635 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataResult.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataResult.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GetFileMetadataResult implements org.apache.thrift.TBase<GetFileMetadataResult, GetFileMetadataResult._Fields>, java.io.Serializable, Cloneable, Comparable<GetFileMetadataResult> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetFileMetadataResult");
 
@@ -246,7 +246,7 @@ public class GetFileMetadataResult implements org.apache.thrift.TBase<GetFileMet
       return getMetadata();
 
     case IS_SUPPORTED:
-      return Boolean.valueOf(isIsSupported());
+      return isIsSupported();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsInfoResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsInfoResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsInfoResponse.java
index 86469ab..629c042 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsInfoResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsInfoResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GetOpenTxnsInfoResponse implements org.apache.thrift.TBase<GetOpenTxnsInfoResponse, GetOpenTxnsInfoResponse._Fields>, java.io.Serializable, Cloneable, Comparable<GetOpenTxnsInfoResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetOpenTxnsInfoResponse");
 
@@ -249,7 +249,7 @@ public class GetOpenTxnsInfoResponse implements org.apache.thrift.TBase<GetOpenT
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case TXN_HIGH_WATER_MARK:
-      return Long.valueOf(getTxn_high_water_mark());
+      return getTxn_high_water_mark();
 
     case OPEN_TXNS:
       return getOpen_txns();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsResponse.java
index 5c7216d..9f57a4a 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GetOpenTxnsResponse implements org.apache.thrift.TBase<GetOpenTxnsResponse, GetOpenTxnsResponse._Fields>, java.io.Serializable, Cloneable, Comparable<GetOpenTxnsResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetOpenTxnsResponse");
 
@@ -246,7 +246,7 @@ public class GetOpenTxnsResponse implements org.apache.thrift.TBase<GetOpenTxnsR
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case TXN_HIGH_WATER_MARK:
-      return Long.valueOf(getTxn_high_water_mark());
+      return getTxn_high_water_mark();
 
     case OPEN_TXNS:
       return getOpen_txns();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleRequest.java
index eff62bd..342b268 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GetPrincipalsInRoleRequest implements org.apache.thrift.TBase<GetPrincipalsInRoleRequest, GetPrincipalsInRoleRequest._Fields>, java.io.Serializable, Cloneable, Comparable<GetPrincipalsInRoleRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetPrincipalsInRoleRequest");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleResponse.java
index e327d3b..02c652a 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GetPrincipalsInRoleResponse implements org.apache.thrift.TBase<GetPrincipalsInRoleResponse, GetPrincipalsInRoleResponse._Fields>, java.io.Serializable, Cloneable, Comparable<GetPrincipalsInRoleResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetPrincipalsInRoleResponse");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalRequest.java
index 2061570..ea667d3 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GetRoleGrantsForPrincipalRequest implements org.apache.thrift.TBase<GetRoleGrantsForPrincipalRequest, GetRoleGrantsForPrincipalRequest._Fields>, java.io.Serializable, Cloneable, Comparable<GetRoleGrantsForPrincipalRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetRoleGrantsForPrincipalRequest");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalResponse.java
index 7bbcd5d..c013e0e 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GetRoleGrantsForPrincipalResponse implements org.apache.thrift.TBase<GetRoleGrantsForPrincipalResponse, GetRoleGrantsForPrincipalResponse._Fields>, java.io.Serializable, Cloneable, Comparable<GetRoleGrantsForPrincipalResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetRoleGrantsForPrincipalResponse");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokePrivilegeRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokePrivilegeRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokePrivilegeRequest.java
index a42369f..014c857 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokePrivilegeRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokePrivilegeRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GrantRevokePrivilegeRequest implements org.apache.thrift.TBase<GrantRevokePrivilegeRequest, GrantRevokePrivilegeRequest._Fields>, java.io.Serializable, Cloneable, Comparable<GrantRevokePrivilegeRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GrantRevokePrivilegeRequest");
 
@@ -289,7 +289,7 @@ public class GrantRevokePrivilegeRequest implements org.apache.thrift.TBase<Gran
       return getPrivileges();
 
     case REVOKE_GRANT_OPTION:
-      return Boolean.valueOf(isRevokeGrantOption());
+      return isRevokeGrantOption();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokePrivilegeResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokePrivilegeResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokePrivilegeResponse.java
index 36fddba..ddbc4c3 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokePrivilegeResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokePrivilegeResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GrantRevokePrivilegeResponse implements org.apache.thrift.TBase<GrantRevokePrivilegeResponse, GrantRevokePrivilegeResponse._Fields>, java.io.Serializable, Cloneable, Comparable<GrantRevokePrivilegeResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GrantRevokePrivilegeResponse");
 
@@ -178,7 +178,7 @@ public class GrantRevokePrivilegeResponse implements org.apache.thrift.TBase<Gra
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case SUCCESS:
-      return Boolean.valueOf(isSuccess());
+      return isSuccess();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleRequest.java
index 8a2ff50..adbd345 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GrantRevokeRoleRequest implements org.apache.thrift.TBase<GrantRevokeRoleRequest, GrantRevokeRoleRequest._Fields>, java.io.Serializable, Cloneable, Comparable<GrantRevokeRoleRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GrantRevokeRoleRequest");
 
@@ -497,7 +497,7 @@ public class GrantRevokeRoleRequest implements org.apache.thrift.TBase<GrantRevo
       return getGrantorType();
 
     case GRANT_OPTION:
-      return Boolean.valueOf(isGrantOption());
+      return isGrantOption();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleResponse.java
index 41a5713..541ac21 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class GrantRevokeRoleResponse implements org.apache.thrift.TBase<GrantRevokeRoleResponse, GrantRevokeRoleResponse._Fields>, java.io.Serializable, Cloneable, Comparable<GrantRevokeRoleResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GrantRevokeRoleResponse");
 
@@ -178,7 +178,7 @@ public class GrantRevokeRoleResponse implements org.apache.thrift.TBase<GrantRev
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case SUCCESS:
-      return Boolean.valueOf(isSuccess());
+      return isSuccess();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeType.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeType.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeType.java
index f7b4587..ac65ec7 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeType.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatRequest.java
index 96cb736..0d36f22 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class HeartbeatRequest implements org.apache.thrift.TBase<HeartbeatRequest, HeartbeatRequest._Fields>, java.io.Serializable, Cloneable, Comparable<HeartbeatRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HeartbeatRequest");
 
@@ -219,10 +219,10 @@ public class HeartbeatRequest implements org.apache.thrift.TBase<HeartbeatReques
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case LOCKID:
-      return Long.valueOf(getLockid());
+      return getLockid();
 
     case TXNID:
-      return Long.valueOf(getTxnid());
+      return getTxnid();
 
     }
     throw new IllegalStateException();


[17/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
index a0bb4c0..5fd4a90 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -172,11 +172,9 @@ void Version::__set_comments(const std::string& val) {
   this->comments = val;
 }
 
-const char* Version::ascii_fingerprint = "07A9615F837F7D0A952B595DD3020972";
-const uint8_t Version::binary_fingerprint[16] = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
 uint32_t Version::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -225,7 +223,7 @@ uint32_t Version::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Version::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Version");
 
   xfer += oprot->writeFieldBegin("version", ::apache::thrift::protocol::T_STRING, 1);
@@ -238,7 +236,6 @@ uint32_t Version::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -260,13 +257,12 @@ Version& Version::operator=(const Version& other1) {
   __isset = other1.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Version& obj) {
-  using apache::thrift::to_string;
+void Version::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Version(";
-  out << "version=" << to_string(obj.version);
-  out << ", " << "comments=" << to_string(obj.comments);
+  out << "version=" << to_string(version);
+  out << ", " << "comments=" << to_string(comments);
   out << ")";
-  return out;
 }
 
 
@@ -286,11 +282,9 @@ void FieldSchema::__set_comment(const std::string& val) {
   this->comment = val;
 }
 
-const char* FieldSchema::ascii_fingerprint = "AB879940BD15B6B25691265F7384B271";
-const uint8_t FieldSchema::binary_fingerprint[16] = {0xAB,0x87,0x99,0x40,0xBD,0x15,0xB6,0xB2,0x56,0x91,0x26,0x5F,0x73,0x84,0xB2,0x71};
-
 uint32_t FieldSchema::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -347,7 +341,7 @@ uint32_t FieldSchema::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t FieldSchema::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("FieldSchema");
 
   xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 1);
@@ -364,7 +358,6 @@ uint32_t FieldSchema::write(::apache::thrift::protocol::TProtocol* oprot) const
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -389,14 +382,13 @@ FieldSchema& FieldSchema::operator=(const FieldSchema& other3) {
   __isset = other3.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const FieldSchema& obj) {
-  using apache::thrift::to_string;
+void FieldSchema::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "FieldSchema(";
-  out << "name=" << to_string(obj.name);
-  out << ", " << "type=" << to_string(obj.type);
-  out << ", " << "comment=" << to_string(obj.comment);
+  out << "name=" << to_string(name);
+  out << ", " << "type=" << to_string(type);
+  out << ", " << "comment=" << to_string(comment);
   out << ")";
-  return out;
 }
 
 
@@ -423,11 +415,9 @@ void Type::__set_fields(const std::vector<FieldSchema> & val) {
 __isset.fields = true;
 }
 
-const char* Type::ascii_fingerprint = "20DF02DE523C27F7066C7BD4D9120842";
-const uint8_t Type::binary_fingerprint[16] = {0x20,0xDF,0x02,0xDE,0x52,0x3C,0x27,0xF7,0x06,0x6C,0x7B,0xD4,0xD9,0x12,0x08,0x42};
-
 uint32_t Type::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -504,7 +494,7 @@ uint32_t Type::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Type::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Type");
 
   xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 1);
@@ -536,7 +526,6 @@ uint32_t Type::write(::apache::thrift::protocol::TProtocol* oprot) const {
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -564,15 +553,14 @@ Type& Type::operator=(const Type& other11) {
   __isset = other11.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Type& obj) {
-  using apache::thrift::to_string;
+void Type::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Type(";
-  out << "name=" << to_string(obj.name);
-  out << ", " << "type1="; (obj.__isset.type1 ? (out << to_string(obj.type1)) : (out << "<null>"));
-  out << ", " << "type2="; (obj.__isset.type2 ? (out << to_string(obj.type2)) : (out << "<null>"));
-  out << ", " << "fields="; (obj.__isset.fields ? (out << to_string(obj.fields)) : (out << "<null>"));
+  out << "name=" << to_string(name);
+  out << ", " << "type1="; (__isset.type1 ? (out << to_string(type1)) : (out << "<null>"));
+  out << ", " << "type2="; (__isset.type2 ? (out << to_string(type2)) : (out << "<null>"));
+  out << ", " << "fields="; (__isset.fields ? (out << to_string(fields)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -600,11 +588,9 @@ void HiveObjectRef::__set_columnName(const std::string& val) {
   this->columnName = val;
 }
 
-const char* HiveObjectRef::ascii_fingerprint = "205CD8311CF3AA9EC161BAEF8D7C933C";
-const uint8_t HiveObjectRef::binary_fingerprint[16] = {0x20,0x5C,0xD8,0x31,0x1C,0xF3,0xAA,0x9E,0xC1,0x61,0xBA,0xEF,0x8D,0x7C,0x93,0x3C};
-
 uint32_t HiveObjectRef::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -691,7 +677,7 @@ uint32_t HiveObjectRef::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t HiveObjectRef::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("HiveObjectRef");
 
   xfer += oprot->writeFieldBegin("objectType", ::apache::thrift::protocol::T_I32, 1);
@@ -724,7 +710,6 @@ uint32_t HiveObjectRef::write(::apache::thrift::protocol::TProtocol* oprot) cons
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -755,16 +740,15 @@ HiveObjectRef& HiveObjectRef::operator=(const HiveObjectRef& other20) {
   __isset = other20.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const HiveObjectRef& obj) {
-  using apache::thrift::to_string;
+void HiveObjectRef::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "HiveObjectRef(";
-  out << "objectType=" << to_string(obj.objectType);
-  out << ", " << "dbName=" << to_string(obj.dbName);
-  out << ", " << "objectName=" << to_string(obj.objectName);
-  out << ", " << "partValues=" << to_string(obj.partValues);
-  out << ", " << "columnName=" << to_string(obj.columnName);
+  out << "objectType=" << to_string(objectType);
+  out << ", " << "dbName=" << to_string(dbName);
+  out << ", " << "objectName=" << to_string(objectName);
+  out << ", " << "partValues=" << to_string(partValues);
+  out << ", " << "columnName=" << to_string(columnName);
   out << ")";
-  return out;
 }
 
 
@@ -792,11 +776,9 @@ void PrivilegeGrantInfo::__set_grantOption(const bool val) {
   this->grantOption = val;
 }
 
-const char* PrivilegeGrantInfo::ascii_fingerprint = "A58923AF7294BE492D6F90E07E8CEE1F";
-const uint8_t PrivilegeGrantInfo::binary_fingerprint[16] = {0xA5,0x89,0x23,0xAF,0x72,0x94,0xBE,0x49,0x2D,0x6F,0x90,0xE0,0x7E,0x8C,0xEE,0x1F};
-
 uint32_t PrivilegeGrantInfo::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -871,7 +853,7 @@ uint32_t PrivilegeGrantInfo::read(::apache::thrift::protocol::TProtocol* iprot)
 
 uint32_t PrivilegeGrantInfo::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("PrivilegeGrantInfo");
 
   xfer += oprot->writeFieldBegin("privilege", ::apache::thrift::protocol::T_STRING, 1);
@@ -896,7 +878,6 @@ uint32_t PrivilegeGrantInfo::write(::apache::thrift::protocol::TProtocol* oprot)
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -927,16 +908,15 @@ PrivilegeGrantInfo& PrivilegeGrantInfo::operator=(const PrivilegeGrantInfo& othe
   __isset = other23.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const PrivilegeGrantInfo& obj) {
-  using apache::thrift::to_string;
+void PrivilegeGrantInfo::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "PrivilegeGrantInfo(";
-  out << "privilege=" << to_string(obj.privilege);
-  out << ", " << "createTime=" << to_string(obj.createTime);
-  out << ", " << "grantor=" << to_string(obj.grantor);
-  out << ", " << "grantorType=" << to_string(obj.grantorType);
-  out << ", " << "grantOption=" << to_string(obj.grantOption);
+  out << "privilege=" << to_string(privilege);
+  out << ", " << "createTime=" << to_string(createTime);
+  out << ", " << "grantor=" << to_string(grantor);
+  out << ", " << "grantorType=" << to_string(grantorType);
+  out << ", " << "grantOption=" << to_string(grantOption);
   out << ")";
-  return out;
 }
 
 
@@ -960,11 +940,9 @@ void HiveObjectPrivilege::__set_grantInfo(const PrivilegeGrantInfo& val) {
   this->grantInfo = val;
 }
 
-const char* HiveObjectPrivilege::ascii_fingerprint = "83D71969B23BD853E29DBA9D43B29AF8";
-const uint8_t HiveObjectPrivilege::binary_fingerprint[16] = {0x83,0xD7,0x19,0x69,0xB2,0x3B,0xD8,0x53,0xE2,0x9D,0xBA,0x9D,0x43,0xB2,0x9A,0xF8};
-
 uint32_t HiveObjectPrivilege::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1031,7 +1009,7 @@ uint32_t HiveObjectPrivilege::read(::apache::thrift::protocol::TProtocol* iprot)
 
 uint32_t HiveObjectPrivilege::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("HiveObjectPrivilege");
 
   xfer += oprot->writeFieldBegin("hiveObject", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -1052,7 +1030,6 @@ uint32_t HiveObjectPrivilege::write(::apache::thrift::protocol::TProtocol* oprot
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1080,15 +1057,14 @@ HiveObjectPrivilege& HiveObjectPrivilege::operator=(const HiveObjectPrivilege& o
   __isset = other26.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const HiveObjectPrivilege& obj) {
-  using apache::thrift::to_string;
+void HiveObjectPrivilege::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "HiveObjectPrivilege(";
-  out << "hiveObject=" << to_string(obj.hiveObject);
-  out << ", " << "principalName=" << to_string(obj.principalName);
-  out << ", " << "principalType=" << to_string(obj.principalType);
-  out << ", " << "grantInfo=" << to_string(obj.grantInfo);
+  out << "hiveObject=" << to_string(hiveObject);
+  out << ", " << "principalName=" << to_string(principalName);
+  out << ", " << "principalType=" << to_string(principalType);
+  out << ", " << "grantInfo=" << to_string(grantInfo);
   out << ")";
-  return out;
 }
 
 
@@ -1100,11 +1076,9 @@ void PrivilegeBag::__set_privileges(const std::vector<HiveObjectPrivilege> & val
   this->privileges = val;
 }
 
-const char* PrivilegeBag::ascii_fingerprint = "BB89E4701B7B709B046A74C90B1147F2";
-const uint8_t PrivilegeBag::binary_fingerprint[16] = {0xBB,0x89,0xE4,0x70,0x1B,0x7B,0x70,0x9B,0x04,0x6A,0x74,0xC9,0x0B,0x11,0x47,0xF2};
-
 uint32_t PrivilegeBag::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1157,7 +1131,7 @@ uint32_t PrivilegeBag::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t PrivilegeBag::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("PrivilegeBag");
 
   xfer += oprot->writeFieldBegin("privileges", ::apache::thrift::protocol::T_LIST, 1);
@@ -1174,7 +1148,6 @@ uint32_t PrivilegeBag::write(::apache::thrift::protocol::TProtocol* oprot) const
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1193,12 +1166,11 @@ PrivilegeBag& PrivilegeBag::operator=(const PrivilegeBag& other34) {
   __isset = other34.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const PrivilegeBag& obj) {
-  using apache::thrift::to_string;
+void PrivilegeBag::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "PrivilegeBag(";
-  out << "privileges=" << to_string(obj.privileges);
+  out << "privileges=" << to_string(privileges);
   out << ")";
-  return out;
 }
 
 
@@ -1218,11 +1190,9 @@ void PrincipalPrivilegeSet::__set_rolePrivileges(const std::map<std::string, std
   this->rolePrivileges = val;
 }
 
-const char* PrincipalPrivilegeSet::ascii_fingerprint = "08F75D2533906EA87BE34EA640856683";
-const uint8_t PrincipalPrivilegeSet::binary_fingerprint[16] = {0x08,0xF7,0x5D,0x25,0x33,0x90,0x6E,0xA8,0x7B,0xE3,0x4E,0xA6,0x40,0x85,0x66,0x83};
-
 uint32_t PrincipalPrivilegeSet::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1360,7 +1330,7 @@ uint32_t PrincipalPrivilegeSet::read(::apache::thrift::protocol::TProtocol* ipro
 
 uint32_t PrincipalPrivilegeSet::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("PrincipalPrivilegeSet");
 
   xfer += oprot->writeFieldBegin("userPrivileges", ::apache::thrift::protocol::T_MAP, 1);
@@ -1428,7 +1398,6 @@ uint32_t PrincipalPrivilegeSet::write(::apache::thrift::protocol::TProtocol* opr
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1453,14 +1422,13 @@ PrincipalPrivilegeSet& PrincipalPrivilegeSet::operator=(const PrincipalPrivilege
   __isset = other78.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const PrincipalPrivilegeSet& obj) {
-  using apache::thrift::to_string;
+void PrincipalPrivilegeSet::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "PrincipalPrivilegeSet(";
-  out << "userPrivileges=" << to_string(obj.userPrivileges);
-  out << ", " << "groupPrivileges=" << to_string(obj.groupPrivileges);
-  out << ", " << "rolePrivileges=" << to_string(obj.rolePrivileges);
+  out << "userPrivileges=" << to_string(userPrivileges);
+  out << ", " << "groupPrivileges=" << to_string(groupPrivileges);
+  out << ", " << "rolePrivileges=" << to_string(rolePrivileges);
   out << ")";
-  return out;
 }
 
 
@@ -1481,11 +1449,9 @@ void GrantRevokePrivilegeRequest::__set_revokeGrantOption(const bool val) {
 __isset.revokeGrantOption = true;
 }
 
-const char* GrantRevokePrivilegeRequest::ascii_fingerprint = "DF474A3CB526AD40DC0F2C3702F7AA2C";
-const uint8_t GrantRevokePrivilegeRequest::binary_fingerprint[16] = {0xDF,0x47,0x4A,0x3C,0xB5,0x26,0xAD,0x40,0xDC,0x0F,0x2C,0x37,0x02,0xF7,0xAA,0x2C};
-
 uint32_t GrantRevokePrivilegeRequest::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1544,7 +1510,7 @@ uint32_t GrantRevokePrivilegeRequest::read(::apache::thrift::protocol::TProtocol
 
 uint32_t GrantRevokePrivilegeRequest::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("GrantRevokePrivilegeRequest");
 
   xfer += oprot->writeFieldBegin("requestType", ::apache::thrift::protocol::T_I32, 1);
@@ -1562,7 +1528,6 @@ uint32_t GrantRevokePrivilegeRequest::write(::apache::thrift::protocol::TProtoco
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1587,14 +1552,13 @@ GrantRevokePrivilegeRequest& GrantRevokePrivilegeRequest::operator=(const GrantR
   __isset = other81.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const GrantRevokePrivilegeRequest& obj) {
-  using apache::thrift::to_string;
+void GrantRevokePrivilegeRequest::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "GrantRevokePrivilegeRequest(";
-  out << "requestType=" << to_string(obj.requestType);
-  out << ", " << "privileges=" << to_string(obj.privileges);
-  out << ", " << "revokeGrantOption="; (obj.__isset.revokeGrantOption ? (out << to_string(obj.revokeGrantOption)) : (out << "<null>"));
+  out << "requestType=" << to_string(requestType);
+  out << ", " << "privileges=" << to_string(privileges);
+  out << ", " << "revokeGrantOption="; (__isset.revokeGrantOption ? (out << to_string(revokeGrantOption)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -1607,11 +1571,9 @@ void GrantRevokePrivilegeResponse::__set_success(const bool val) {
 __isset.success = true;
 }
 
-const char* GrantRevokePrivilegeResponse::ascii_fingerprint = "BF054652DEF86253C2BEE7D947F167DD";
-const uint8_t GrantRevokePrivilegeResponse::binary_fingerprint[16] = {0xBF,0x05,0x46,0x52,0xDE,0xF8,0x62,0x53,0xC2,0xBE,0xE7,0xD9,0x47,0xF1,0x67,0xDD};
-
 uint32_t GrantRevokePrivilegeResponse::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1652,7 +1614,7 @@ uint32_t GrantRevokePrivilegeResponse::read(::apache::thrift::protocol::TProtoco
 
 uint32_t GrantRevokePrivilegeResponse::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("GrantRevokePrivilegeResponse");
 
   if (this->__isset.success) {
@@ -1662,7 +1624,6 @@ uint32_t GrantRevokePrivilegeResponse::write(::apache::thrift::protocol::TProtoc
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1681,12 +1642,11 @@ GrantRevokePrivilegeResponse& GrantRevokePrivilegeResponse::operator=(const Gran
   __isset = other83.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const GrantRevokePrivilegeResponse& obj) {
-  using apache::thrift::to_string;
+void GrantRevokePrivilegeResponse::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "GrantRevokePrivilegeResponse(";
-  out << "success="; (obj.__isset.success ? (out << to_string(obj.success)) : (out << "<null>"));
+  out << "success="; (__isset.success ? (out << to_string(success)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -1706,11 +1666,9 @@ void Role::__set_ownerName(const std::string& val) {
   this->ownerName = val;
 }
 
-const char* Role::ascii_fingerprint = "70563A0628F75DF9555F4D24690B1E26";
-const uint8_t Role::binary_fingerprint[16] = {0x70,0x56,0x3A,0x06,0x28,0xF7,0x5D,0xF9,0x55,0x5F,0x4D,0x24,0x69,0x0B,0x1E,0x26};
-
 uint32_t Role::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1767,7 +1725,7 @@ uint32_t Role::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Role::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Role");
 
   xfer += oprot->writeFieldBegin("roleName", ::apache::thrift::protocol::T_STRING, 1);
@@ -1784,7 +1742,6 @@ uint32_t Role::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1809,14 +1766,13 @@ Role& Role::operator=(const Role& other85) {
   __isset = other85.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Role& obj) {
-  using apache::thrift::to_string;
+void Role::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Role(";
-  out << "roleName=" << to_string(obj.roleName);
-  out << ", " << "createTime=" << to_string(obj.createTime);
-  out << ", " << "ownerName=" << to_string(obj.ownerName);
+  out << "roleName=" << to_string(roleName);
+  out << ", " << "createTime=" << to_string(createTime);
+  out << ", " << "ownerName=" << to_string(ownerName);
   out << ")";
-  return out;
 }
 
 
@@ -1852,11 +1808,9 @@ void RolePrincipalGrant::__set_grantorPrincipalType(const PrincipalType::type va
   this->grantorPrincipalType = val;
 }
 
-const char* RolePrincipalGrant::ascii_fingerprint = "899BA3F6214DD1B79D27206BA857C772";
-const uint8_t RolePrincipalGrant::binary_fingerprint[16] = {0x89,0x9B,0xA3,0xF6,0x21,0x4D,0xD1,0xB7,0x9D,0x27,0x20,0x6B,0xA8,0x57,0xC7,0x72};
-
 uint32_t RolePrincipalGrant::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1949,7 +1903,7 @@ uint32_t RolePrincipalGrant::read(::apache::thrift::protocol::TProtocol* iprot)
 
 uint32_t RolePrincipalGrant::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("RolePrincipalGrant");
 
   xfer += oprot->writeFieldBegin("roleName", ::apache::thrift::protocol::T_STRING, 1);
@@ -1982,7 +1936,6 @@ uint32_t RolePrincipalGrant::write(::apache::thrift::protocol::TProtocol* oprot)
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2019,18 +1972,17 @@ RolePrincipalGrant& RolePrincipalGrant::operator=(const RolePrincipalGrant& othe
   __isset = other89.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const RolePrincipalGrant& obj) {
-  using apache::thrift::to_string;
+void RolePrincipalGrant::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "RolePrincipalGrant(";
-  out << "roleName=" << to_string(obj.roleName);
-  out << ", " << "principalName=" << to_string(obj.principalName);
-  out << ", " << "principalType=" << to_string(obj.principalType);
-  out << ", " << "grantOption=" << to_string(obj.grantOption);
-  out << ", " << "grantTime=" << to_string(obj.grantTime);
-  out << ", " << "grantorName=" << to_string(obj.grantorName);
-  out << ", " << "grantorPrincipalType=" << to_string(obj.grantorPrincipalType);
+  out << "roleName=" << to_string(roleName);
+  out << ", " << "principalName=" << to_string(principalName);
+  out << ", " << "principalType=" << to_string(principalType);
+  out << ", " << "grantOption=" << to_string(grantOption);
+  out << ", " << "grantTime=" << to_string(grantTime);
+  out << ", " << "grantorName=" << to_string(grantorName);
+  out << ", " << "grantorPrincipalType=" << to_string(grantorPrincipalType);
   out << ")";
-  return out;
 }
 
 
@@ -2046,11 +1998,9 @@ void GetRoleGrantsForPrincipalRequest::__set_principal_type(const PrincipalType:
   this->principal_type = val;
 }
 
-const char* GetRoleGrantsForPrincipalRequest::ascii_fingerprint = "D6FD826D949221396F4FFC3ECCD3D192";
-const uint8_t GetRoleGrantsForPrincipalRequest::binary_fingerprint[16] = {0xD6,0xFD,0x82,0x6D,0x94,0x92,0x21,0x39,0x6F,0x4F,0xFC,0x3E,0xCC,0xD3,0xD1,0x92};
-
 uint32_t GetRoleGrantsForPrincipalRequest::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2107,7 +2057,7 @@ uint32_t GetRoleGrantsForPrincipalRequest::read(::apache::thrift::protocol::TPro
 
 uint32_t GetRoleGrantsForPrincipalRequest::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("GetRoleGrantsForPrincipalRequest");
 
   xfer += oprot->writeFieldBegin("principal_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -2120,7 +2070,6 @@ uint32_t GetRoleGrantsForPrincipalRequest::write(::apache::thrift::protocol::TPr
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2139,13 +2088,12 @@ GetRoleGrantsForPrincipalRequest& GetRoleGrantsForPrincipalRequest::operator=(co
   principal_type = other92.principal_type;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const GetRoleGrantsForPrincipalRequest& obj) {
-  using apache::thrift::to_string;
+void GetRoleGrantsForPrincipalRequest::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "GetRoleGrantsForPrincipalRequest(";
-  out << "principal_name=" << to_string(obj.principal_name);
-  out << ", " << "principal_type=" << to_string(obj.principal_type);
+  out << "principal_name=" << to_string(principal_name);
+  out << ", " << "principal_type=" << to_string(principal_type);
   out << ")";
-  return out;
 }
 
 
@@ -2157,11 +2105,9 @@ void GetRoleGrantsForPrincipalResponse::__set_principalGrants(const std::vector<
   this->principalGrants = val;
 }
 
-const char* GetRoleGrantsForPrincipalResponse::ascii_fingerprint = "5926B4B3541A62E17663820C7E3BE690";
-const uint8_t GetRoleGrantsForPrincipalResponse::binary_fingerprint[16] = {0x59,0x26,0xB4,0xB3,0x54,0x1A,0x62,0xE1,0x76,0x63,0x82,0x0C,0x7E,0x3B,0xE6,0x90};
-
 uint32_t GetRoleGrantsForPrincipalResponse::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2217,7 +2163,7 @@ uint32_t GetRoleGrantsForPrincipalResponse::read(::apache::thrift::protocol::TPr
 
 uint32_t GetRoleGrantsForPrincipalResponse::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("GetRoleGrantsForPrincipalResponse");
 
   xfer += oprot->writeFieldBegin("principalGrants", ::apache::thrift::protocol::T_LIST, 1);
@@ -2234,7 +2180,6 @@ uint32_t GetRoleGrantsForPrincipalResponse::write(::apache::thrift::protocol::TP
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2250,12 +2195,11 @@ GetRoleGrantsForPrincipalResponse& GetRoleGrantsForPrincipalResponse::operator=(
   principalGrants = other100.principalGrants;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const GetRoleGrantsForPrincipalResponse& obj) {
-  using apache::thrift::to_string;
+void GetRoleGrantsForPrincipalResponse::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "GetRoleGrantsForPrincipalResponse(";
-  out << "principalGrants=" << to_string(obj.principalGrants);
+  out << "principalGrants=" << to_string(principalGrants);
   out << ")";
-  return out;
 }
 
 
@@ -2267,11 +2211,9 @@ void GetPrincipalsInRoleRequest::__set_roleName(const std::string& val) {
   this->roleName = val;
 }
 
-const char* GetPrincipalsInRoleRequest::ascii_fingerprint = "EFB929595D312AC8F305D5A794CFEDA1";
-const uint8_t GetPrincipalsInRoleRequest::binary_fingerprint[16] = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
 uint32_t GetPrincipalsInRoleRequest::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2315,7 +2257,7 @@ uint32_t GetPrincipalsInRoleRequest::read(::apache::thrift::protocol::TProtocol*
 
 uint32_t GetPrincipalsInRoleRequest::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("GetPrincipalsInRoleRequest");
 
   xfer += oprot->writeFieldBegin("roleName", ::apache::thrift::protocol::T_STRING, 1);
@@ -2324,7 +2266,6 @@ uint32_t GetPrincipalsInRoleRequest::write(::apache::thrift::protocol::TProtocol
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2340,12 +2281,11 @@ GetPrincipalsInRoleRequest& GetPrincipalsInRoleRequest::operator=(const GetPrinc
   roleName = other102.roleName;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const GetPrincipalsInRoleRequest& obj) {
-  using apache::thrift::to_string;
+void GetPrincipalsInRoleRequest::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "GetPrincipalsInRoleRequest(";
-  out << "roleName=" << to_string(obj.roleName);
+  out << "roleName=" << to_string(roleName);
   out << ")";
-  return out;
 }
 
 
@@ -2357,11 +2297,9 @@ void GetPrincipalsInRoleResponse::__set_principalGrants(const std::vector<RolePr
   this->principalGrants = val;
 }
 
-const char* GetPrincipalsInRoleResponse::ascii_fingerprint = "5926B4B3541A62E17663820C7E3BE690";
-const uint8_t GetPrincipalsInRoleResponse::binary_fingerprint[16] = {0x59,0x26,0xB4,0xB3,0x54,0x1A,0x62,0xE1,0x76,0x63,0x82,0x0C,0x7E,0x3B,0xE6,0x90};
-
 uint32_t GetPrincipalsInRoleResponse::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2417,7 +2355,7 @@ uint32_t GetPrincipalsInRoleResponse::read(::apache::thrift::protocol::TProtocol
 
 uint32_t GetPrincipalsInRoleResponse::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("GetPrincipalsInRoleResponse");
 
   xfer += oprot->writeFieldBegin("principalGrants", ::apache::thrift::protocol::T_LIST, 1);
@@ -2434,7 +2372,6 @@ uint32_t GetPrincipalsInRoleResponse::write(::apache::thrift::protocol::TProtoco
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2450,12 +2387,11 @@ GetPrincipalsInRoleResponse& GetPrincipalsInRoleResponse::operator=(const GetPri
   principalGrants = other110.principalGrants;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const GetPrincipalsInRoleResponse& obj) {
-  using apache::thrift::to_string;
+void GetPrincipalsInRoleResponse::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "GetPrincipalsInRoleResponse(";
-  out << "principalGrants=" << to_string(obj.principalGrants);
+  out << "principalGrants=" << to_string(principalGrants);
   out << ")";
-  return out;
 }
 
 
@@ -2494,11 +2430,9 @@ void GrantRevokeRoleRequest::__set_grantOption(const bool val) {
 __isset.grantOption = true;
 }
 
-const char* GrantRevokeRoleRequest::ascii_fingerprint = "907DEA796F2BA7AF76DC2566E75FAEE7";
-const uint8_t GrantRevokeRoleRequest::binary_fingerprint[16] = {0x90,0x7D,0xEA,0x79,0x6F,0x2B,0xA7,0xAF,0x76,0xDC,0x25,0x66,0xE7,0x5F,0xAE,0xE7};
-
 uint32_t GrantRevokeRoleRequest::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2593,7 +2527,7 @@ uint32_t GrantRevokeRoleRequest::read(::apache::thrift::protocol::TProtocol* ipr
 
 uint32_t GrantRevokeRoleRequest::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("GrantRevokeRoleRequest");
 
   xfer += oprot->writeFieldBegin("requestType", ::apache::thrift::protocol::T_I32, 1);
@@ -2629,7 +2563,6 @@ uint32_t GrantRevokeRoleRequest::write(::apache::thrift::protocol::TProtocol* op
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2666,18 +2599,17 @@ GrantRevokeRoleRequest& GrantRevokeRoleRequest::operator=(const GrantRevokeRoleR
   __isset = other115.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const GrantRevokeRoleRequest& obj) {
-  using apache::thrift::to_string;
+void GrantRevokeRoleRequest::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "GrantRevokeRoleRequest(";
-  out << "requestType=" << to_string(obj.requestType);
-  out << ", " << "roleName=" << to_string(obj.roleName);
-  out << ", " << "principalName=" << to_string(obj.principalName);
-  out << ", " << "principalType=" << to_string(obj.principalType);
-  out << ", " << "grantor="; (obj.__isset.grantor ? (out << to_string(obj.grantor)) : (out << "<null>"));
-  out << ", " << "grantorType="; (obj.__isset.grantorType ? (out << to_string(obj.grantorType)) : (out << "<null>"));
-  out << ", " << "grantOption="; (obj.__isset.grantOption ? (out << to_string(obj.grantOption)) : (out << "<null>"));
+  out << "requestType=" << to_string(requestType);
+  out << ", " << "roleName=" << to_string(roleName);
+  out << ", " << "principalName=" << to_string(principalName);
+  out << ", " << "principalType=" << to_string(principalType);
+  out << ", " << "grantor="; (__isset.grantor ? (out << to_string(grantor)) : (out << "<null>"));
+  out << ", " << "grantorType="; (__isset.grantorType ? (out << to_string(grantorType)) : (out << "<null>"));
+  out << ", " << "grantOption="; (__isset.grantOption ? (out << to_string(grantOption)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -2690,11 +2622,9 @@ void GrantRevokeRoleResponse::__set_success(const bool val) {
 __isset.success = true;
 }
 
-const char* GrantRevokeRoleResponse::ascii_fingerprint = "BF054652DEF86253C2BEE7D947F167DD";
-const uint8_t GrantRevokeRoleResponse::binary_fingerprint[16] = {0xBF,0x05,0x46,0x52,0xDE,0xF8,0x62,0x53,0xC2,0xBE,0xE7,0xD9,0x47,0xF1,0x67,0xDD};
-
 uint32_t GrantRevokeRoleResponse::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2735,7 +2665,7 @@ uint32_t GrantRevokeRoleResponse::read(::apache::thrift::protocol::TProtocol* ip
 
 uint32_t GrantRevokeRoleResponse::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("GrantRevokeRoleResponse");
 
   if (this->__isset.success) {
@@ -2745,7 +2675,6 @@ uint32_t GrantRevokeRoleResponse::write(::apache::thrift::protocol::TProtocol* o
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2764,12 +2693,11 @@ GrantRevokeRoleResponse& GrantRevokeRoleResponse::operator=(const GrantRevokeRol
   __isset = other117.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const GrantRevokeRoleResponse& obj) {
-  using apache::thrift::to_string;
+void GrantRevokeRoleResponse::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "GrantRevokeRoleResponse(";
-  out << "success="; (obj.__isset.success ? (out << to_string(obj.success)) : (out << "<null>"));
+  out << "success="; (__isset.success ? (out << to_string(success)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -2808,11 +2736,9 @@ void Database::__set_ownerType(const PrincipalType::type val) {
 __isset.ownerType = true;
 }
 
-const char* Database::ascii_fingerprint = "553495CAE243A1C583D5C3DD990AED53";
-const uint8_t Database::binary_fingerprint[16] = {0x55,0x34,0x95,0xCA,0xE2,0x43,0xA1,0xC5,0x83,0xD5,0xC3,0xDD,0x99,0x0A,0xED,0x53};
-
 uint32_t Database::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2918,7 +2844,7 @@ uint32_t Database::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Database::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Database");
 
   xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 1);
@@ -2963,7 +2889,6 @@ uint32_t Database::write(::apache::thrift::protocol::TProtocol* oprot) const {
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3000,18 +2925,17 @@ Database& Database::operator=(const Database& other128) {
   __isset = other128.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Database& obj) {
-  using apache::thrift::to_string;
+void Database::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Database(";
-  out << "name=" << to_string(obj.name);
-  out << ", " << "description=" << to_string(obj.description);
-  out << ", " << "locationUri=" << to_string(obj.locationUri);
-  out << ", " << "parameters=" << to_string(obj.parameters);
-  out << ", " << "privileges="; (obj.__isset.privileges ? (out << to_string(obj.privileges)) : (out << "<null>"));
-  out << ", " << "ownerName="; (obj.__isset.ownerName ? (out << to_string(obj.ownerName)) : (out << "<null>"));
-  out << ", " << "ownerType="; (obj.__isset.ownerType ? (out << to_string(obj.ownerType)) : (out << "<null>"));
+  out << "name=" << to_string(name);
+  out << ", " << "description=" << to_string(description);
+  out << ", " << "locationUri=" << to_string(locationUri);
+  out << ", " << "parameters=" << to_string(parameters);
+  out << ", " << "privileges="; (__isset.privileges ? (out << to_string(privileges)) : (out << "<null>"));
+  out << ", " << "ownerName="; (__isset.ownerName ? (out << to_string(ownerName)) : (out << "<null>"));
+  out << ", " << "ownerType="; (__isset.ownerType ? (out << to_string(ownerType)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -3031,11 +2955,9 @@ void SerDeInfo::__set_parameters(const std::map<std::string, std::string> & val)
   this->parameters = val;
 }
 
-const char* SerDeInfo::ascii_fingerprint = "B1021C32A35A2AEFCD2F57A5424159A7";
-const uint8_t SerDeInfo::binary_fingerprint[16] = {0xB1,0x02,0x1C,0x32,0xA3,0x5A,0x2A,0xEF,0xCD,0x2F,0x57,0xA5,0x42,0x41,0x59,0xA7};
-
 uint32_t SerDeInfo::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3107,7 +3029,7 @@ uint32_t SerDeInfo::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t SerDeInfo::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("SerDeInfo");
 
   xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 1);
@@ -3133,7 +3055,6 @@ uint32_t SerDeInfo::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3158,14 +3079,13 @@ SerDeInfo& SerDeInfo::operator=(const SerDeInfo& other138) {
   __isset = other138.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const SerDeInfo& obj) {
-  using apache::thrift::to_string;
+void SerDeInfo::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "SerDeInfo(";
-  out << "name=" << to_string(obj.name);
-  out << ", " << "serializationLib=" << to_string(obj.serializationLib);
-  out << ", " << "parameters=" << to_string(obj.parameters);
+  out << "name=" << to_string(name);
+  out << ", " << "serializationLib=" << to_string(serializationLib);
+  out << ", " << "parameters=" << to_string(parameters);
   out << ")";
-  return out;
 }
 
 
@@ -3181,11 +3101,9 @@ void Order::__set_order(const int32_t val) {
   this->order = val;
 }
 
-const char* Order::ascii_fingerprint = "EEBC915CE44901401D881E6091423036";
-const uint8_t Order::binary_fingerprint[16] = {0xEE,0xBC,0x91,0x5C,0xE4,0x49,0x01,0x40,0x1D,0x88,0x1E,0x60,0x91,0x42,0x30,0x36};
-
 uint32_t Order::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3234,7 +3152,7 @@ uint32_t Order::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Order::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Order");
 
   xfer += oprot->writeFieldBegin("col", ::apache::thrift::protocol::T_STRING, 1);
@@ -3247,7 +3165,6 @@ uint32_t Order::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3269,13 +3186,12 @@ Order& Order::operator=(const Order& other140) {
   __isset = other140.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Order& obj) {
-  using apache::thrift::to_string;
+void Order::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Order(";
-  out << "col=" << to_string(obj.col);
-  out << ", " << "order=" << to_string(obj.order);
+  out << "col=" << to_string(col);
+  out << ", " << "order=" << to_string(order);
   out << ")";
-  return out;
 }
 
 
@@ -3295,11 +3211,9 @@ void SkewedInfo::__set_skewedColValueLocationMaps(const std::map<std::vector<std
   this->skewedColValueLocationMaps = val;
 }
 
-const char* SkewedInfo::ascii_fingerprint = "4BF2ED84BC3C3EB297A2AE2FA8427EB1";
-const uint8_t SkewedInfo::binary_fingerprint[16] = {0x4B,0xF2,0xED,0x84,0xBC,0x3C,0x3E,0xB2,0x97,0xA2,0xAE,0x2F,0xA8,0x42,0x7E,0xB1};
-
 uint32_t SkewedInfo::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3419,7 +3333,7 @@ uint32_t SkewedInfo::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t SkewedInfo::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("SkewedInfo");
 
   xfer += oprot->writeFieldBegin("skewedColNames", ::apache::thrift::protocol::T_LIST, 1);
@@ -3477,7 +3391,6 @@ uint32_t SkewedInfo::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3502,14 +3415,13 @@ SkewedInfo& SkewedInfo::operator=(const SkewedInfo& other174) {
   __isset = other174.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const SkewedInfo& obj) {
-  using apache::thrift::to_string;
+void SkewedInfo::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "SkewedInfo(";
-  out << "skewedColNames=" << to_string(obj.skewedColNames);
-  out << ", " << "skewedColValues=" << to_string(obj.skewedColValues);
-  out << ", " << "skewedColValueLocationMaps=" << to_string(obj.skewedColValueLocationMaps);
+  out << "skewedColNames=" << to_string(skewedColNames);
+  out << ", " << "skewedColValues=" << to_string(skewedColValues);
+  out << ", " << "skewedColValueLocationMaps=" << to_string(skewedColValueLocationMaps);
   out << ")";
-  return out;
 }
 
 
@@ -3567,11 +3479,9 @@ void StorageDescriptor::__set_storedAsSubDirectories(const bool val) {
 __isset.storedAsSubDirectories = true;
 }
 
-const char* StorageDescriptor::ascii_fingerprint = "CA8C9AA5FE4C32643757D8639CEF0CD7";
-const uint8_t StorageDescriptor::binary_fingerprint[16] = {0xCA,0x8C,0x9A,0xA5,0xFE,0x4C,0x32,0x64,0x37,0x57,0xD8,0x63,0x9C,0xEF,0x0C,0xD7};
-
 uint32_t StorageDescriptor::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3751,7 +3661,7 @@ uint32_t StorageDescriptor::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t StorageDescriptor::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("StorageDescriptor");
 
   xfer += oprot->writeFieldBegin("cols", ::apache::thrift::protocol::T_LIST, 1);
@@ -3839,7 +3749,6 @@ uint32_t StorageDescriptor::write(::apache::thrift::protocol::TProtocol* oprot)
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3891,23 +3800,22 @@ StorageDescriptor& StorageDescriptor::operator=(const StorageDescriptor& other20
   __isset = other202.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const StorageDescriptor& obj) {
-  using apache::thrift::to_string;
+void StorageDescriptor::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "StorageDescriptor(";
-  out << "cols=" << to_string(obj.cols);
-  out << ", " << "location=" << to_string(obj.location);
-  out << ", " << "inputFormat=" << to_string(obj.inputFormat);
-  out << ", " << "outputFormat=" << to_string(obj.outputFormat);
-  out << ", " << "compressed=" << to_string(obj.compressed);
-  out << ", " << "numBuckets=" << to_string(obj.numBuckets);
-  out << ", " << "serdeInfo=" << to_string(obj.serdeInfo);
-  out << ", " << "bucketCols=" << to_string(obj.bucketCols);
-  out << ", " << "sortCols=" << to_string(obj.sortCols);
-  out << ", " << "parameters=" << to_string(obj.parameters);
-  out << ", " << "skewedInfo="; (obj.__isset.skewedInfo ? (out << to_string(obj.skewedInfo)) : (out << "<null>"));
-  out << ", " << "storedAsSubDirectories="; (obj.__isset.storedAsSubDirectories ? (out << to_string(obj.storedAsSubDirectories)) : (out << "<null>"));
+  out << "cols=" << to_string(cols);
+  out << ", " << "location=" << to_string(location);
+  out << ", " << "inputFormat=" << to_string(inputFormat);
+  out << ", " << "outputFormat=" << to_string(outputFormat);
+  out << ", " << "compressed=" << to_string(compressed);
+  out << ", " << "numBuckets=" << to_string(numBuckets);
+  out << ", " << "serdeInfo=" << to_string(serdeInfo);
+  out << ", " << "bucketCols=" << to_string(bucketCols);
+  out << ", " << "sortCols=" << to_string(sortCols);
+  out << ", " << "parameters=" << to_string(parameters);
+  out << ", " << "skewedInfo="; (__isset.skewedInfo ? (out << to_string(skewedInfo)) : (out << "<null>"));
+  out << ", " << "storedAsSubDirectories="; (__isset.storedAsSubDirectories ? (out << to_string(storedAsSubDirectories)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -3973,11 +3881,9 @@ void Table::__set_temporary(const bool val) {
 __isset.temporary = true;
 }
 
-const char* Table::ascii_fingerprint = "29EFB2A5970EF572039E5D94CC78AA85";
-const uint8_t Table::binary_fingerprint[16] = {0x29,0xEF,0xB2,0xA5,0x97,0x0E,0xF5,0x72,0x03,0x9E,0x5D,0x94,0xCC,0x78,0xAA,0x85};
-
 uint32_t Table::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4149,7 +4055,7 @@ uint32_t Table::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Table::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Table");
 
   xfer += oprot->writeFieldBegin("tableName", ::apache::thrift::protocol::T_STRING, 1);
@@ -4229,7 +4135,6 @@ uint32_t Table::write(::apache::thrift::protocol::TProtocol* oprot) const {
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4287,25 +4192,24 @@ Table& Table::operator=(const Table& other218) {
   __isset = other218.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Table& obj) {
-  using apache::thrift::to_string;
+void Table::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Table(";
-  out << "tableName=" << to_string(obj.tableName);
-  out << ", " << "dbName=" << to_string(obj.dbName);
-  out << ", " << "owner=" << to_string(obj.owner);
-  out << ", " << "createTime=" << to_string(obj.createTime);
-  out << ", " << "lastAccessTime=" << to_string(obj.lastAccessTime);
-  out << ", " << "retention=" << to_string(obj.retention);
-  out << ", " << "sd=" << to_string(obj.sd);
-  out << ", " << "partitionKeys=" << to_string(obj.partitionKeys);
-  out << ", " << "parameters=" << to_string(obj.parameters);
-  out << ", " << "viewOriginalText=" << to_string(obj.viewOriginalText);
-  out << ", " << "viewExpandedText=" << to_string(obj.viewExpandedText);
-  out << ", " << "tableType=" << to_string(obj.tableType);
-  out << ", " << "privileges="; (obj.__isset.privileges ? (out << to_string(obj.privileges)) : (out << "<null>"));
-  out << ", " << "temporary="; (obj.__isset.temporary ? (out << to_string(obj.temporary)) : (out << "<null>"));
+  out << "tableName=" << to_string(tableName);
+  out << ", " << "dbName=" << to_string(dbName);
+  out << ", " << "owner=" << to_string(owner);
+  out << ", " << "createTime=" << to_string(createTime);
+  out << ", " << "lastAccessTime=" << to_string(lastAccessTime);
+  out << ", " << "retention=" << to_string(retention);
+  out << ", " << "sd=" << to_string(sd);
+  out << ", " << "partitionKeys=" << to_string(partitionKeys);
+  out << ", " << "parameters=" << to_string(parameters);
+  out << ", " << "viewOriginalText=" << to_string(viewOriginalText);
+  out << ", " << "viewExpandedText=" << to_string(viewExpandedText);
+  out << ", " << "tableType=" << to_string(tableType);
+  out << ", " << "privileges="; (__isset.privileges ? (out << to_string(privileges)) : (out << "<null>"));
+  out << ", " << "temporary="; (__isset.temporary ? (out << to_string(temporary)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -4346,11 +4250,9 @@ void Partition::__set_privileges(const PrincipalPrivilegeSet& val) {
 __isset.privileges = true;
 }
 
-const char* Partition::ascii_fingerprint = "31A52241B88A426C34087FE38343FF51";
-const uint8_t Partition::binary_fingerprint[16] = {0x31,0xA5,0x22,0x41,0xB8,0x8A,0x42,0x6C,0x34,0x08,0x7F,0xE3,0x83,0x43,0xFF,0x51};
-
 uint32_t Partition::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4474,7 +4376,7 @@ uint32_t Partition::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Partition::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Partition");
 
   xfer += oprot->writeFieldBegin("values", ::apache::thrift::protocol::T_LIST, 1);
@@ -4529,7 +4431,6 @@ uint32_t Partition::write(::apache::thrift::protocol::TProtocol* oprot) const {
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4569,19 +4470,18 @@ Partition& Partition::operator=(const Partition& other234) {
   __isset = other234.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Partition& obj) {
-  using apache::thrift::to_string;
+void Partition::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Partition(";
-  out << "values=" << to_string(obj.values);
-  out << ", " << "dbName=" << to_string(obj.dbName);
-  out << ", " << "tableName=" << to_string(obj.tableName);
-  out << ", " << "createTime=" << to_string(obj.createTime);
-  out << ", " << "lastAccessTime=" << to_string(obj.lastAccessTime);
-  out << ", " << "sd=" << to_string(obj.sd);
-  out << ", " << "parameters=" << to_string(obj.parameters);
-  out << ", " << "privileges="; (obj.__isset.privileges ? (out << to_string(obj.privileges)) : (out << "<null>"));
+  out << "values=" << to_string(values);
+  out << ", " << "dbName=" << to_string(dbName);
+  out << ", " << "tableName=" << to_string(tableName);
+  out << ", " << "createTime=" << to_string(createTime);
+  out << ", " << "lastAccessTime=" << to_string(lastAccessTime);
+  out << ", " << "sd=" << to_string(sd);
+  out << ", " << "parameters=" << to_string(parameters);
+  out << ", " << "privileges="; (__isset.privileges ? (out << to_string(privileges)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -4614,11 +4514,9 @@ void PartitionWithoutSD::__set_privileges(const PrincipalPrivilegeSet& val) {
 __isset.privileges = true;
 }
 
-const char* PartitionWithoutSD::ascii_fingerprint = "D79FA44499888D0E50B5625E0C536DEA";
-const uint8_t PartitionWithoutSD::binary_fingerprint[16] = {0xD7,0x9F,0xA4,0x44,0x99,0x88,0x8D,0x0E,0x50,0xB5,0x62,0x5E,0x0C,0x53,0x6D,0xEA};
-
 uint32_t PartitionWithoutSD::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4726,7 +4624,7 @@ uint32_t PartitionWithoutSD::read(::apache::thrift::protocol::TProtocol* iprot)
 
 uint32_t PartitionWithoutSD::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("PartitionWithoutSD");
 
   xfer += oprot->writeFieldBegin("values", ::apache::thrift::protocol::T_LIST, 1);
@@ -4773,7 +4671,6 @@ uint32_t PartitionWithoutSD::write(::apache::thrift::protocol::TProtocol* oprot)
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4807,17 +4704,16 @@ PartitionWithoutSD& PartitionWithoutSD::operator=(const PartitionWithoutSD& othe
   __isset = other250.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const PartitionWithoutSD& obj) {
-  using apache::thrift::to_string;
+void PartitionWithoutSD::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "PartitionWithoutSD(";
-  out << "values=" << to_string(obj.values);
-  out << ", " << "createTime=" << to_string(obj.createTime);
-  out << ", " << "lastAccessTime=" << to_string(obj.lastAccessTime);
-  out << ", " << "relativePath=" << to_string(obj.relativePath);
-  out << ", " << "parameters=" << to_string(obj.parameters);
-  out << ", " << "privileges="; (obj.__isset.privileges ? (out << to_string(obj.privileges)) : (out << "<null>"));
+  out << "values=" << to_string(values);
+  out << ", " << "createTime=" << to_string(createTime);
+  out << ", " << "lastAccessTime=" << to_string(lastAccessTime);
+  out << ", " << "relativePath=" << to_string(relativePath);
+  out << ", " << "parameters=" << to_string(parameters);
+  out << ", " << "privileges="; (__isset.privileges ? (out << to_string(privileges)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -4833,11 +4729,9 @@ void PartitionSpecWithSharedSD::__set_sd(const StorageDescriptor& val) {
   this->sd = val;
 }
 
-const char* PartitionSpecWithSharedSD::ascii_fingerprint = "7BEE9305B42DCD083FF06BEE6DDC61CF";
-const uint8_t PartitionSpecWithSharedSD::binary_fingerprint[16] = {0x7B,0xEE,0x93,0x05,0xB4,0x2D,0xCD,0x08,0x3F,0xF0,0x6B,0xEE,0x6D,0xDC,0x61,0xCF};
-
 uint32_t PartitionSpecWithSharedSD::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4898,7 +4792,7 @@ uint32_t PartitionSpecWithSharedSD::read(::apache::thrift::protocol::TProtocol*
 
 uint32_t PartitionSpecWithSharedSD::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("PartitionSpecWithSharedSD");
 
   xfer += oprot->writeFieldBegin("partitions", ::apache::thrift::protocol::T_LIST, 1);
@@ -4919,7 +4813,6 @@ uint32_t PartitionSpecWithSharedSD::write(::apache::thrift::protocol::TProtocol*
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4941,13 +4834,12 @@ PartitionSpecWithSharedSD& PartitionSpecWithSharedSD::operator=(const PartitionS
   __isset = other258.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const PartitionSpecWithSharedSD& obj) {
-  using apache::thrift::to_string;
+void PartitionSpecWithSharedSD::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "PartitionSpecWithSharedSD(";
-  out << "partitions=" << to_string(obj.partitions);
-  out << ", " << "sd=" << to_string(obj.sd);
+  out << "partitions=" << to_string(partitions);
+  out << ", " << "sd=" << to_string(sd);
   out << ")";
-  return out;
 }
 
 
@@ -4959,11 +4851,9 @@ void PartitionListComposingSpec::__set_partitions(const std::vector<Partition> &
   this->partitions = val;
 }
 
-const char* PartitionListComposingSpec::ascii_fingerprint = "A048235CB9A257C8A74E3691BEFE0674";
-const uint8_t PartitionListComposingSpec::binary_fingerprint[16] = {0xA0,0x48,0x23,0x5C,0xB9,0xA2,0x57,0xC8,0xA7,0x4E,0x36,0x91,0xBE,0xFE,0x06,0x74};
-
 uint32_t PartitionListComposingSpec::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5016,7 +4906,7 @@ uint32_t PartitionListComposingSpec::read(::apache::thrift::protocol::TProtocol*
 
 uint32_t PartitionListComposingSpec::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("PartitionListComposingSpec");
 
   xfer += oprot->writeFieldBegin("partitions", ::apache::thrift::protocol::T_LIST, 1);
@@ -5033,7 +4923,6 @@ uint32_t PartitionListComposingSpec::write(::apache::thrift::protocol::TProtocol
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5052,12 +4941,11 @@ PartitionListComposingSpec& PartitionListComposingSpec::operator=(const Partitio
   __isset = other266.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const PartitionListComposingSpec& obj) {
-  using apache::thrift::to_string;
+void PartitionListComposingSpec::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "PartitionListComposingSpec(";
-  out << "partitions=" << to_string(obj.partitions);
+  out << "partitions=" << to_string(partitions);
   out << ")";
-  return out;
 }
 
 
@@ -5087,11 +4975,9 @@ void PartitionSpec::__set_partitionList(const PartitionListComposingSpec& val) {
 __isset.partitionList = true;
 }
 
-const char* PartitionSpec::ascii_fingerprint = "C3F548C24D072CF6422F25096143E3E8";
-const uint8_t PartitionSpec::binary_fingerprint[16] = {0xC3,0xF5,0x48,0xC2,0x4D,0x07,0x2C,0xF6,0x42,0x2F,0x25,0x09,0x61,0x43,0xE3,0xE8};
-
 uint32_t PartitionSpec::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5164,7 +5050,7 @@ uint32_t PartitionSpec::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t PartitionSpec::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("PartitionSpec");
 
   xfer += oprot->writeFieldBegin("dbName", ::apache::thrift::protocol::T_STRING, 1);
@@ -5191,7 +5077,6 @@ uint32_t PartitionSpec::write(::apache::thrift::protocol::TProtocol* oprot) cons
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5222,16 +5107,15 @@ PartitionSpec& PartitionSpec::operator=(const PartitionSpec& other268) {
   __isset = other268.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const PartitionSpec& obj) {
-  using apache::thrift::to_string;
+void PartitionSpec::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "PartitionSpec(";
-  out << "dbName=" << to_string(obj.dbName);
-  out << ", " << "tableName=" << to_string(obj.tableName);
-  out << ", " << "rootPath=" << to_string(obj.rootPath);
-  out << ", " << "sharedSDPartitionSpec="; (obj.__isset.sharedSDPartitionSpec ? (out << to_string(obj.sharedSDPartitionSpec)) : (out << "<null>"));
-  out << ", " << "partitionList="; (obj.__isset.partitionList ? (out << to_string(obj.partitionList)) : (out << "<null>"));
+  out << "dbName=" << to_string(dbName);
+  out << ", " << "tableName=" << to_string(tableName);
+  out << ", " << "rootPath=" << to_string(rootPath);
+  out << ", " << "sharedSDPartitionSpec="; (__isset.sharedSDPartitionSpec ? (out << to_string(sharedSDPartitionSpec)) : (out << "<null>"));
+  out << ", " << "partitionList="; (__isset.partitionList ? (out << to_string(partitionList)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -5279,11 +5163,9 @@ void Index::__set_deferredRebuild(const bool val) {
   this->deferredRebuild = val;
 }
 
-const char* Index::ascii_fingerprint = "09EEF655216AC81802850988D6C470A6";
-const uint8_t Index::binary_fingerprint[16] = {0x09,0xEE,0xF6,0x55,0x21,0x6A,0xC8,0x18,0x02,0x85,0x09,0x88,0xD6,0xC4,0x70,0xA6};
-
 uint32_t Index::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5411,7 +5293,7 @@ uint32_t Index::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Index::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Index");
 
   xfer += oprot->writeFieldBegin("indexName", ::apache::thrift::protocol::T_STRING, 1);
@@ -5465,7 +5347,6 @@ uint32_t Index::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5511,21 +5392,20 @@ Index& Index::operator=(const Index& other278) {
   __isset = other278.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Index& obj) {
-  using apache::thrift::to_string;
+void Index::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Index(";
-  out << "indexName=" << to_string(obj.indexName);
-  out << ", " << "indexHandlerClass=" << to_string(obj.indexHandlerClass);
-  out << ", " << "dbName=" << to_string(obj.dbName);
-  out << ", " << "origTableName=" << to_string(obj.origTableName);
-  out << ", " << "createTime=" << to_string(obj.createTime);
-  out << ", " << "lastAccessTime=" << to_string(obj.lastAccessTime);
-  out << ", " << "indexTableName=" << to_string(obj.indexTableName);
-  out << ", " << "sd=" << to_string(obj.sd);
-  out << ", " << "parameters=" << to_string(obj.parameters);
-  out << ", " << "deferredRebuild=" << to_string(obj.deferredRebuild);
+  out << "indexName=" << to_string(indexName);
+  out << ", " << "indexHandlerClass=" << to_string(indexHandlerClass);
+  out << ", " << "dbName=" << to_string(dbName);
+  out << ", " << "origTableName=" << to_string(origTableName);
+  out << ", " << "createTime=" << to_string(createTime);
+  out << ", " << "lastAccessTime=" << to_string(lastAccessTime);
+  out << ", " << "indexTableName=" << to_string(indexTableName);
+  out << ", " << "sd=" << to_string(sd);
+  out << ", " << "parameters=" << to_string(parameters);
+  out << ", " << "deferredRebuild=" << to_string(deferredRebuild);
   out << ")";
-  return out;
 }
 
 
@@ -5545,11 +5425,9 @@ void BooleanColumnStatsData::__set_numNulls(const int64_t val) {
   this->numNulls = val;
 }
 
-const char* BooleanColumnStatsData::ascii_fingerprint = "EA2D65F1E0BB78760205682082304B41";
-const uint8_t BooleanColumnStatsData::binary_fingerprint[16] = {0xEA,0x2D,0x65,0xF1,0xE0,0xBB,0x78,0x76,0x02,0x05,0x68,0x20,0x82,0x30,0x4B,0x41};
-
 uint32_t BooleanColumnStatsData::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5615,7 +5493,7 @@ uint32_t BooleanColumnStatsData::read(::apache::thrift::protocol::TProtocol* ipr
 
 uint32_t BooleanColumnStatsData::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("BooleanColumnStatsData");
 
   xfer += oprot->writeFieldBegin("numTrues", ::apache::thrift::protocol::T_I64, 1);
@@ -5632,7 +5510,6 @@ uint32_t BooleanColumnStatsData::write(::apache::thrift::protocol::TProtocol* op
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5654,14 +5531,13 @@ BooleanColumnStatsData& BooleanColumnStatsData::operator=(const BooleanColumnSta
   numNulls = other280.numNulls;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const BooleanColumnStatsData& obj) {
-  using apache::thrift::to_string;
+void BooleanColumnStatsData::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "BooleanColumnStatsData(";
-  out << "numTrues=" << to_string(obj.numTrues);
-  out << ", " << "numFalses=" << to_string(obj.numFalses);
-  out << ", " << "numNulls=" << to_string(obj.numNulls);
+  out << "numTrues=" << to_string(numTrues);
+  out << ", " << "numFalses=" << to_string(numFalses);
+  out << ", " << "numNulls=" << to_string(numNulls);
   out << ")";
-  return out;
 }
 
 
@@ -5687,11 +5563,9 @@ void DoubleColumnStatsData::__set_numDVs(const int64_t val) {
   this->numDVs = val;
 }
 
-const char* DoubleColumnStatsData::ascii_fingerprint = "DA7C011321D74C48396AA002E61A0CBB";
-const uint8_t DoubleColumnStatsData::binary_fingerprint[16] = {0xDA,0x7C,0x01,0x13,0x21,0xD7,0x4C,0x48,0x39,0x6A,0xA0,0x02,0xE6,0x1A,0x0C,0xBB};
-
 uint32_t DoubleColumnStatsData::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5762,7 +5636,7 @@ uint32_t DoubleColumnStatsData::read(::apache::thrift::protocol::TProtocol* ipro
 
 uint32_t DoubleColumnStatsData::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("DoubleColumnStatsData");
 
   if (this->__isset.lowValue) {
@@ -5785,7 +5659,6 @@ uint32_t DoubleColumnStatsData::write(::apache::thrift::protocol::TProtocol* opr
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5813,15 +5686,14 @@ DoubleColumnStatsData& DoubleColumnStatsData::operator=(const DoubleColumnStatsD
   __isset = other282.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const DoubleColumnStatsData& obj) {
-  using apache::thrift::to_string;
+void DoubleColumnStatsData::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "DoubleColumnStatsData(";
-  out << "lowValue="; (obj.__isset.lowValue ? (out << to_string(obj.lowValue)) : (out << "<null>"));
-  out << ", " << "highValue="; (obj.__isset.highValue ? (out << to_string(obj.highValue)) : (out << "<null>"));
-  out << ", " << "numNulls=" << to_string(obj.numNulls);
-  out << ", " << "numDVs=" << to_string(obj.numDVs);
+  out << "lowValue="; (__isset.lowValue ? (out << to_string(lowValue)) : (out << "<null>"));
+  out << ", " << "highValue="; (__isset.highValue ? (out << to_string(highValue)) : (out << "<null>"));
+  out << ", " << "numNulls=" << to_string(numNulls);
+  out << ", " << "numDVs=" << to_string(numDVs);
   out << ")";
-  return out;
 }
 
 
@@ -5847,11 +5719,9 @@ void LongColumnStatsData::__set_numDVs(const int64_t val) {
   this->numDVs = val;
 }
 
-const char* LongColumnStatsData::ascii_fingerprint = "E685FC220B24E3B8B93604790DCB9AEA";
-const uint8_t LongColumnStatsData::binary_fingerprint[16] = {0xE6,0x85,0xFC,0x22,0x0B,0x24,0xE3,0xB8,0xB9,0x36,0x04,0x79,0x0D,0xCB,0x9A,0xEA};
-
 uint32_t LongColumnStatsData::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5922,7 +5792,7 @@ uint32_t LongColumnStatsData::read(::apache::thrift::protocol::TProtocol* iprot)
 
 uint32_t LongColumnStatsData::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("LongColumnStatsData");
 
   if (this->__isset.lowValue) {
@@ -5945,7 +5815,6 @@ uint32_t LongColumnStatsData::write(::apache::thrift::protocol::TProtocol* oprot
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5973,15 +5842,14 @@ LongColumnStatsData& LongColumnStatsData::operator=(const LongColumnStatsData& o
   __isset = other284.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const LongColumnStatsData& obj) {
-  using apache::thrift::to_string;
+void LongColumnStatsData::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "LongColumnStatsData(";
-  out << "lowValue="; (obj.__isset.lowValue ? (out << to_string(obj.lowValue)) : (out << "<null>"));
-  out << ", " << "highValue="; (obj.__isset.highValue ? (out << to_string(obj.highValue)) : (out << "<null>"));
-  out << ", " << "numNulls=" << to_string(obj.numNulls);
-  out << ", " << "numDVs=" << to_string(obj.numDVs);
+  out << "lowValue="; (__isset.lowValue ? (out << to_string(lowValue)) : (out << "<null>"));
+  out << ", " << "highValue="; (__isset.highValue ? (out << to_string(highValue)) : (out << "<null>"));
+  out << ", " << "numNulls=" << to_string(numNulls);
+  out << ", " << "numDVs=" << to_string(numDVs);
   out << ")";
-  return out;
 }
 
 
@@ -6005,11 +5873,9 @@ void StringColumnStatsData::__set_numDVs(const int64_t val) {
   this->numDVs = val;
 }
 
-const char* StringColumnStatsData::ascii_fingerprint = "D017B08C3DF12C3AB98788B2E67DAAB3";
-const uint8_t StringColumnStatsData::binary_fingerprint[16] = {0xD0,0x17,0xB0,0x8C,0x3D,0xF1,0x2C,0x3A,0xB9,0x87,0x88,0xB2,0xE6,0x7D,0xAA,0xB3};
-
 uint32_t StringColumnStatsData::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6086,7 +5952,7 @@ uint32_t StringColumnStatsData::read(::apache::thrift::protocol::TProtocol* ipro
 
 uint32_t StringColumnStatsData::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("StringColumnStatsData");
 
   xfer += oprot->writeFieldBegin("maxColLen", ::apache::thrift::protocol::T_I64, 1);
@@ -6107,7 +5973,6 @@ uint32_t StringColumnStatsData::write(::apache::thrift::protocol::TProtocol* opr
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6132,15 +5997,14 @@ StringColumnStatsData& StringColumnStatsData::operator=(const StringColumnStatsD
   numDVs = other286.numDVs;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const StringColumnStatsData& obj) {
-  using apache::thrift::to_string;
+void StringColumnStatsData::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "StringColumnStatsData(";
-  out << "maxColLen=" << to_string(obj.maxColLen);
-  out << ", " << "avgColLen=" << to_string(obj.avgColLen);
-  out << ", " << "numNulls=" << to_string(obj.numNulls);
-  out << ", " << "numDVs=" << to_string(obj.numDVs);
+  out << "maxColLen=" << to_string(maxColLen);
+  out << ", " << "avgColLen=" << to_string(avgColLen);
+  out << ", " << "numNulls=" << to_string(numNulls);
+  out << ", " << "numDVs=" << to_string(numDVs);
   out << ")";
-  return out;
 }
 
 
@@ -6160,11 +6024,9 @@ void BinaryColumnStatsData::__set_numNulls(const int64_t val) {
   this->numNulls = val;
 }
 
-const char* BinaryColumnStatsData::ascii_fingerprint = "22B0CB67183FCDB945892B9974518D06";
-const uint8_t BinaryColumnStatsData::binary_fingerprint[16] = {0x22,0xB0,0xCB,0x67,0x18,0x3F,0xCD,0xB9,0x45,0x89,0x2B,0x99,0x74,0x51,0x8D,0x06};
-
 uint32_t BinaryColumnStatsData::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6230,7 +6092,7 @@ uint32_t BinaryColumnStatsData::read(::apache::thrift::protocol::TProtocol* ipro
 
 uint32_t BinaryColumnStatsData::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("BinaryColumnStatsData");
 
   xfer += oprot->writeFieldBegin("maxColLen", ::apache::thrift::protocol::T_I64, 1);
@@ -6247,7 +6109,6 @@ uint32_t BinaryColumnStatsData::write(::apache::thrift::protocol::TProtocol* opr
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6269,14 +6130,13 @@ BinaryColumnStatsData& BinaryColumnStatsData::operator=(const BinaryColumnStatsD
   numNulls = other288.numNulls;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const BinaryColumnStatsData& obj) {
-  using apache::thrift::to_string;
+void BinaryColumnStatsData::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "BinaryColumnStatsData(";
-  out << "maxColLen=" << to_string(obj.maxColLen);
-  out << ", " << "avgColLen=" << to_string(obj.avgColLen);
-  out << ", " << "numNulls=" << to_string(obj.numNulls);
+  out << "maxColLen=" << to_string(maxColLen);
+  out << ", " << "avgColLen=" << to_string(avgColLen);
+  out << ", " << "numNulls=" << to_string(numNulls);
   out << ")";
-  return out;
 }
 
 
@@ -6292,11 +6152,9 @@ void Decimal::__set_scale(const int16_t val) {
   this->scale = val;
 }
 
-const char* Decimal::ascii_fingerprint = "C4DDF6759F9B17C5C380806CE743DE8E";
-const uint8_t Decimal::binary_fingerprint[16] = {0xC4,0xDD,0xF6,0x75,0x9F,0x9B,0x17,0xC5,0xC3,0x80,0x80,0x6C,0xE7,0x43,0xDE,0x8E};
-
 uint32_t Decimal::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6351,7 +6209,7 @@ uint32_t Decimal::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Decimal::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Decimal");
 
   xfer += oprot->writeFieldBegin("unscaled", ::apache::thrift::protocol::T_STRING, 1);
@@ -6364,7 +6222,6 @@ uint32_t Decimal::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6383,13 +6240,12 @@ Decimal& Decimal::operator=(const Decimal& other290) {
   scale = other290.scale;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Decimal& obj) {
-  using apache::thrift::to_string;
+void Decimal::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Decimal(";
-  out << "unscaled=" << to_string(obj.unscaled);
-  out << ", " << "scale=" << to_string(obj.scale);
+  out << "unscaled=" << to_string(unscaled);
+  out << ", " << "scale=" << to_string(scale);
   out << ")";
-  return out;
 }
 
 
@@ -6415,11 +6271,9 @@ void DecimalColumnStatsData::__set_numDVs(const int64_t val) {
   this->numDVs = val;
 }
 
-const char* DecimalColumnStatsData::ascii_fingerprint = "B6D47E7A28922BFA93FE05E9F1B04748";
-const uint8_t DecimalColumnStatsData::binary_fingerprint[16] = {0xB6,0xD4,0x7E,0x7A,0x28,0x92,0x2B,0xFA,0x93,0xFE,0x05,0xE9,0xF1,0xB0,0x47,0x48};
-
 uint32_t DecimalColumnStatsData::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6490,7 +6344,7 @@ uint32_t DecimalColumnStatsData::read(::apache::thrift::protocol::TProtocol* ipr
 
 uint32_t DecimalColumnStatsData::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("DecimalColumnStatsData");
 
   if (this->__isset.lowValue) {
@@ -6513,7 +6367,6 @@ uint32_t DecimalColumnStatsData::write(::apache::thrift::protocol::TProtocol* op
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6541,15 +6394,14 @@ DecimalColumnStatsData& DecimalColumnStatsData::operator=(const DecimalColumnSta
   __isset = other292.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const DecimalColumnStatsData& obj) {
-  using apache::thrift::to_string;
+void DecimalColumnStatsData::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "DecimalColumnStatsData(";
-  out << "lowValue="; (obj.__isset.lowValue ? (out << to_string(obj.lowValue)) : (out << "<null>"));
-  out << ", " << "highValue="; (obj.__isset.highValue ? (out << to_string(obj.highValue)) : (out << "<null>"));
-  out << ", " << "numNulls=" << to_string(obj.numNulls);
-  out << ", " << "numDVs=" << to_string(obj.numDVs);
+  out << "lowValue="; (__isset.lowValue ? (out << to_string(lowValue)) : (out << "<null>"));
+  out << ", " << "highValue="; (__isset.highValue ? (out << to_string(highValue)) : (out << "<null>"));
+  out << ", " << "numNulls=" << to_string(numNulls);
+  out << ", " << "numDVs=" << to_string(numDVs);
   out << ")";
-  return out;
 }
 
 
@@ -6561,11 +6413,9 @@ void Date::__set_daysSinceEpoch(const int64_t val) {
   this->daysSinceEpoch = val;
 }
 
-const char* Date::ascii_fingerprint = "56A59CE7FFAF82BCA8A19FAACDE4FB75";
-const uint8_t Date::binary_fingerprint[16] = {0x56,0xA5,0x9C,0xE7,0xFF,0xAF,0x82,0xBC,0xA8,0xA1,0x9F,0xAA,0xCD,0xE4,0xFB,0x75};
-
 uint32_t Date::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6609,7 +6459,7 @@ uint32_t Date::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Date::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Date");
 
   xfer += oprot->writeFieldBegin("daysSinceEpoch", ::apache::thrift::protocol::T_I64, 1);
@@ -6618,7 +6468,6 @@ uint32_t Date::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6634,12 +6483,11 @@ Date& Date::operator=(const Date& other294) {
   daysSinceEpoch = other294.daysSinceEpoch;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Date& obj) {
-  using apache::thrift::to_string;
+void Date::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Date(";
-  out << "daysSinceEpoch=" << to_string(obj.daysSinceEpoch);
+  out << "daysSinceEpoch=" << to_string(daysSinceEpoch);
   out << ")";
-  return out;
 }
 
 
@@ -6665,11 +6513,9 @@ void DateColumnStatsData::__set_numDVs(const int64_t val) {
   this->numDVs = val;
 }
 
-const char* DateColumnStatsData::ascii_fingerprint = "D0719F3BBA8248297BB5287552897F59";
-const uint8_t DateColumnStatsData::binary_fingerprint[16] = {0xD0,0x71,0x9F,0x3B,0xBA,0x82,0x48,0x29,0x7B,0xB5,0x28,0x75,0x52,0x89,0x7F,0x59};
-
 uint32_t DateColumnStatsData::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6740,7 +6586,7 @@ uint32_t DateColumnStatsData::read(::apache::thrift::protocol::TProtocol* iprot)
 
 uint32_t DateColumnStatsData::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("DateColumnStatsData");
 
   if (this->__isset.lowValue) {
@@ -6763,7 +6609,6 @@ uint32_t DateColumnStatsData::write(::apache::thrift::protocol::TProtocol* oprot
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6791,15 +6636,14 @@ DateColumnStatsData& DateColumnStatsData::operator=(const DateColumnStatsData& o
   __isset = other296.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const DateColumnStatsData& obj) {
-  using apache::thrift::to_string;
+void DateColumnStatsData::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "DateColumnStatsData(";
-  out << "lowValue="; (obj.__isset.lowValue ? (out << to_string(obj.lowValue)) : (out << "<null>"));
-  out << ", " << "highValue="; (obj.__isset.highValue ? (out << to_string(obj.highValue)) : (out << "<null>"));
-  out << ", " << "numNulls=" << to_string(obj.numNulls);
-  out << ", " << "numDVs=" << to_string(obj.numDVs);
+  out << "lowValue="; (__isset.lowValue ? (out << to_string(lowValue)) : (out << "<null>"));
+  out << ", " << "highValue="; (__isset.highValue ? (out << to_string(highValue)) : (out << "<null>"));
+  out << ", " << "numNulls=" << to_string(numNulls);
+  out << ", " << "numDVs=" << to_string(numDVs);
   out << ")";
-  return out;
 }
 
 
@@ -6835,11 +6679,9 @@ void ColumnStatisticsData::__set_dateStats(const DateColumnStatsData& val) {
   this->dateStats = val;
 }
 
-const char* ColumnStatisticsData::ascii_fingerprint = "15E449CA15A23E37F2D54C31ACA52106";
-const uint8_t ColumnStatisticsData::binary_fingerprint[16] = {0x15,0xE4,0x49,0xCA,0x15,0xA2,0x3E,0x37,0xF2,0xD5,0x4C,0x31,0xAC,0xA5,0x21,0x06};
-
 uint32_t ColumnStatisticsData::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6928,7 +6770,7 @@ uint32_t ColumnStatisticsData::read(::apache::thrift::protocol::TProtocol* iprot
 
 uint32_t ColumnStatisticsData::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ColumnStatisticsData");
 
   xfer += oprot->writeFieldBegin("booleanStats", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -6961,7 +6803,6 @@ uint32_t ColumnStatisticsData::write(::apache::thrift::protocol::TProtocol* opro
 
   xfer += oprot->writeFieldStop();

<TRUNCATED>

[40/55] [abbrv] hive git commit: HIVE-11523: org.apache.hadoop.hive.ql.io.orc.FileDump should handle errors (Prasanth Jayachandran reviewed by Ashutosh Chauhan)

Posted by xu...@apache.org.
HIVE-11523: org.apache.hadoop.hive.ql.io.orc.FileDump should handle errors (Prasanth Jayachandran reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d84e393e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d84e393e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d84e393e

Branch: refs/heads/spark
Commit: d84e393ed66829fe0c8cc87254fef2a329b96163
Parents: a91e147
Author: Prasanth Jayachandran <j....@gmail.com>
Authored: Mon Oct 26 14:18:36 2015 -0500
Committer: Prasanth Jayachandran <j....@gmail.com>
Committed: Mon Oct 26 14:18:36 2015 -0500

----------------------------------------------------------------------
 .../apache/hadoop/hive/ql/io/orc/FileDump.java  | 193 +++++++++--------
 .../hadoop/hive/ql/io/orc/JsonFileDump.java     | 210 ++++++++++---------
 .../hadoop/hive/ql/io/orc/TestFileDump.java     |  50 -----
 3 files changed, 213 insertions(+), 240 deletions(-)
----------------------------------------------------------------------
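
For context: the patch below wraps the per-file work in FileDump (and JsonFileDump) in its own try/catch, so a single unreadable or corrupt ORC file is reported and skipped instead of aborting the dump of the remaining files. A minimal Java sketch of that pattern follows; the class and method names here are hypothetical stand-ins for illustration, not the actual Hive code.

import java.util.Arrays;
import java.util.List;

public class PerFileDumpSketch {

  // Stand-in for the per-file dump routine (e.g. what printJsonData does for one file);
  // throws for a "bad" file to simulate a corrupt or unreadable input.
  static void dumpOneFile(String file) throws Exception {
    if (file.endsWith(".bad")) {
      throw new Exception("simulated unreadable ORC file");
    }
    System.out.println("dumped " + file);
  }

  public static void main(String[] args) {
    List<String> files = Arrays.asList("a.orc", "b.bad", "c.orc");
    for (String file : files) {
      try {
        dumpOneFile(file);
      } catch (Exception e) {
        // Report the failure and continue with the remaining files,
        // mirroring the print-error-and-continue behavior in the diff below.
        System.err.println("Unable to dump data for file: " + file);
        e.printStackTrace();
      }
    }
  }
}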


http://git-wip-us.apache.org/repos/asf/hive/blob/d84e393e/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
index a1c5058..9c6538f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
@@ -136,9 +136,16 @@ public final class FileDump {
   private static void printData(List<String> files, Configuration conf) throws IOException,
       JSONException {
     for (String file : files) {
-      printJsonData(conf, file);
-      if (files.size() > 1) {
-        System.out.println(Strings.repeat("=", 80) + "\n");
+      try {
+        printJsonData(conf, file);
+        if (files.size() > 1) {
+          System.out.println(Strings.repeat("=", 80) + "\n");
+        }
+      } catch (Exception e) {
+        System.err.println("Unable to dump data for file: " + file);
+        e.printStackTrace();
+        System.err.println(Strings.repeat("=", 80) + "\n");
+        continue;
       }
     }
   }
@@ -146,103 +153,111 @@ public final class FileDump {
   private static void printMetaData(List<String> files, Configuration conf,
       List<Integer> rowIndexCols, boolean printTimeZone) throws IOException {
     for (String filename : files) {
-      System.out.println("Structure for " + filename);
-      Path path = new Path(filename);
-      Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(conf));
-      System.out.println("File Version: " + reader.getFileVersion().getName() +
-          " with " + reader.getWriterVersion());
-      RecordReaderImpl rows = (RecordReaderImpl) reader.rows();
-      System.out.println("Rows: " + reader.getNumberOfRows());
-      System.out.println("Compression: " + reader.getCompression());
-      if (reader.getCompression() != CompressionKind.NONE) {
-        System.out.println("Compression size: " + reader.getCompressionSize());
-      }
-      System.out.println("Type: " + reader.getObjectInspector().getTypeName());
-      System.out.println("\nStripe Statistics:");
-      List<StripeStatistics> stripeStats = reader.getStripeStatistics();
-      for (int n = 0; n < stripeStats.size(); n++) {
-        System.out.println("  Stripe " + (n + 1) + ":");
-        StripeStatistics ss = stripeStats.get(n);
-        for (int i = 0; i < ss.getColumnStatistics().length; ++i) {
-          System.out.println("    Column " + i + ": " +
-              ss.getColumnStatistics()[i].toString());
+      try {
+        Path path = new Path(filename);
+        Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(conf));
+        System.out.println("Structure for " + filename);
+        System.out.println("File Version: " + reader.getFileVersion().getName() +
+            " with " + reader.getWriterVersion());
+        RecordReaderImpl rows = (RecordReaderImpl) reader.rows();
+        System.out.println("Rows: " + reader.getNumberOfRows());
+        System.out.println("Compression: " + reader.getCompression());
+        if (reader.getCompression() != CompressionKind.NONE) {
+          System.out.println("Compression size: " + reader.getCompressionSize());
         }
-      }
-      ColumnStatistics[] stats = reader.getStatistics();
-      int colCount = stats.length;
-      System.out.println("\nFile Statistics:");
-      for (int i = 0; i < stats.length; ++i) {
-        System.out.println("  Column " + i + ": " + stats[i].toString());
-      }
-      System.out.println("\nStripes:");
-      int stripeIx = -1;
-      for (StripeInformation stripe : reader.getStripes()) {
-        ++stripeIx;
-        long stripeStart = stripe.getOffset();
-        OrcProto.StripeFooter footer = rows.readStripeFooter(stripe);
-        if (printTimeZone) {
-          String tz = footer.getWriterTimezone();
-          if (tz == null || tz.isEmpty()) {
-            tz = UNKNOWN;
+        System.out.println("Type: " + reader.getObjectInspector().getTypeName());
+        System.out.println("\nStripe Statistics:");
+        List<StripeStatistics> stripeStats = reader.getStripeStatistics();
+        for (int n = 0; n < stripeStats.size(); n++) {
+          System.out.println("  Stripe " + (n + 1) + ":");
+          StripeStatistics ss = stripeStats.get(n);
+          for (int i = 0; i < ss.getColumnStatistics().length; ++i) {
+            System.out.println("    Column " + i + ": " +
+                ss.getColumnStatistics()[i].toString());
           }
-          System.out.println("  Stripe: " + stripe.toString() + " timezone: " + tz);
-        } else {
-          System.out.println("  Stripe: " + stripe.toString());
         }
-        long sectionStart = stripeStart;
-        for (OrcProto.Stream section : footer.getStreamsList()) {
-          String kind = section.hasKind() ? section.getKind().name() : UNKNOWN;
-          System.out.println("    Stream: column " + section.getColumn() +
-              " section " + kind + " start: " + sectionStart +
-              " length " + section.getLength());
-          sectionStart += section.getLength();
+        ColumnStatistics[] stats = reader.getStatistics();
+        int colCount = stats.length;
+        System.out.println("\nFile Statistics:");
+        for (int i = 0; i < stats.length; ++i) {
+          System.out.println("  Column " + i + ": " + stats[i].toString());
         }
-        for (int i = 0; i < footer.getColumnsCount(); ++i) {
-          OrcProto.ColumnEncoding encoding = footer.getColumns(i);
-          StringBuilder buf = new StringBuilder();
-          buf.append("    Encoding column ");
-          buf.append(i);
-          buf.append(": ");
-          buf.append(encoding.getKind());
-          if (encoding.getKind() == OrcProto.ColumnEncoding.Kind.DICTIONARY ||
-              encoding.getKind() == OrcProto.ColumnEncoding.Kind.DICTIONARY_V2) {
-            buf.append("[");
-            buf.append(encoding.getDictionarySize());
-            buf.append("]");
+        System.out.println("\nStripes:");
+        int stripeIx = -1;
+        for (StripeInformation stripe : reader.getStripes()) {
+          ++stripeIx;
+          long stripeStart = stripe.getOffset();
+          OrcProto.StripeFooter footer = rows.readStripeFooter(stripe);
+          if (printTimeZone) {
+            String tz = footer.getWriterTimezone();
+            if (tz == null || tz.isEmpty()) {
+              tz = UNKNOWN;
+            }
+            System.out.println("  Stripe: " + stripe.toString() + " timezone: " + tz);
+          } else {
+            System.out.println("  Stripe: " + stripe.toString());
           }
-          System.out.println(buf);
-        }
-        if (rowIndexCols != null && !rowIndexCols.isEmpty()) {
-          // include the columns that are specified, only if the columns are included, bloom filter
-          // will be read
-          boolean[] sargColumns = new boolean[colCount];
-          for (int colIdx : rowIndexCols) {
-            sargColumns[colIdx] = true;
+          long sectionStart = stripeStart;
+          for (OrcProto.Stream section : footer.getStreamsList()) {
+            String kind = section.hasKind() ? section.getKind().name() : UNKNOWN;
+            System.out.println("    Stream: column " + section.getColumn() +
+                " section " + kind + " start: " + sectionStart +
+                " length " + section.getLength());
+            sectionStart += section.getLength();
           }
-          RecordReaderImpl.Index indices = rows.readRowIndex(stripeIx, null, null, null, sargColumns);
-          for (int col : rowIndexCols) {
+          for (int i = 0; i < footer.getColumnsCount(); ++i) {
+            OrcProto.ColumnEncoding encoding = footer.getColumns(i);
             StringBuilder buf = new StringBuilder();
-            String rowIdxString = getFormattedRowIndices(col, indices.getRowGroupIndex());
-            buf.append(rowIdxString);
-            String bloomFilString = getFormattedBloomFilters(col, indices.getBloomFilterIndex());
-            buf.append(bloomFilString);
+            buf.append("    Encoding column ");
+            buf.append(i);
+            buf.append(": ");
+            buf.append(encoding.getKind());
+            if (encoding.getKind() == OrcProto.ColumnEncoding.Kind.DICTIONARY ||
+                encoding.getKind() == OrcProto.ColumnEncoding.Kind.DICTIONARY_V2) {
+              buf.append("[");
+              buf.append(encoding.getDictionarySize());
+              buf.append("]");
+            }
             System.out.println(buf);
           }
+          if (rowIndexCols != null && !rowIndexCols.isEmpty()) {
+            // include the columns that are specified, only if the columns are included, bloom filter
+            // will be read
+            boolean[] sargColumns = new boolean[colCount];
+            for (int colIdx : rowIndexCols) {
+              sargColumns[colIdx] = true;
+            }
+            RecordReaderImpl.Index indices = rows
+                .readRowIndex(stripeIx, null, null, null, sargColumns);
+            for (int col : rowIndexCols) {
+              StringBuilder buf = new StringBuilder();
+              String rowIdxString = getFormattedRowIndices(col, indices.getRowGroupIndex());
+              buf.append(rowIdxString);
+              String bloomFilString = getFormattedBloomFilters(col, indices.getBloomFilterIndex());
+              buf.append(bloomFilString);
+              System.out.println(buf);
+            }
+          }
         }
-      }
 
-      FileSystem fs = path.getFileSystem(conf);
-      long fileLen = fs.getContentSummary(path).getLength();
-      long paddedBytes = getTotalPaddingSize(reader);
-      // empty ORC file is ~45 bytes. Assumption here is file length always >0
-      double percentPadding = ((double) paddedBytes / (double) fileLen) * 100;
-      DecimalFormat format = new DecimalFormat("##.##");
-      System.out.println("\nFile length: " + fileLen + " bytes");
-      System.out.println("Padding length: " + paddedBytes + " bytes");
-      System.out.println("Padding ratio: " + format.format(percentPadding) + "%");
-      rows.close();
-      if (files.size() > 1) {
-        System.out.println(Strings.repeat("=", 80) + "\n");
+        FileSystem fs = path.getFileSystem(conf);
+        long fileLen = fs.getContentSummary(path).getLength();
+        long paddedBytes = getTotalPaddingSize(reader);
+        // empty ORC file is ~45 bytes. Assumption here is file length always >0
+        double percentPadding = ((double) paddedBytes / (double) fileLen) * 100;
+        DecimalFormat format = new DecimalFormat("##.##");
+        System.out.println("\nFile length: " + fileLen + " bytes");
+        System.out.println("Padding length: " + paddedBytes + " bytes");
+        System.out.println("Padding ratio: " + format.format(percentPadding) + "%");
+        rows.close();
+        if (files.size() > 1) {
+          System.out.println(Strings.repeat("=", 80) + "\n");
+        }
+      } catch (Exception e) {
+        System.err.println("Unable to dump metadata for file: " + filename);
+        e.printStackTrace();
+        System.err.println(Strings.repeat("=", 80) + "\n");
+        continue;
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/d84e393e/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java
index a438855..02e01b4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java
@@ -45,130 +45,138 @@ public class JsonFileDump {
       writer.object();
     }
     for (String filename : files) {
-      if (multiFile) {
-        writer.object();
-      }
-      writer.key("fileName").value(filename);
-      Path path = new Path(filename);
-      Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(conf));
-      writer.key("fileVersion").value(reader.getFileVersion().getName());
-      writer.key("writerVersion").value(reader.getWriterVersion());
-      RecordReaderImpl rows = (RecordReaderImpl) reader.rows();
-      writer.key("numberOfRows").value(reader.getNumberOfRows());
-      writer.key("compression").value(reader.getCompression());
-      if (reader.getCompression() != CompressionKind.NONE) {
-        writer.key("compressionBufferSize").value(reader.getCompressionSize());
-      }
-      writer.key("schemaString").value(reader.getObjectInspector().getTypeName());
-      writer.key("schema").array();
-      writeSchema(writer, reader.getTypes());
-      writer.endArray();
-
-      writer.key("stripeStatistics").array();
-      List<StripeStatistics> stripeStatistics = reader.getStripeStatistics();
-      for (int n = 0; n < stripeStatistics.size(); n++) {
-        writer.object();
-        writer.key("stripeNumber").value(n + 1);
-        StripeStatistics ss = stripeStatistics.get(n);
-        writer.key("columnStatistics").array();
-        for (int i = 0; i < ss.getColumnStatistics().length; i++) {
+      try {
+        if (multiFile) {
           writer.object();
-          writer.key("columnId").value(i);
-          writeColumnStatistics(writer, ss.getColumnStatistics()[i]);
-          writer.endObject();
         }
-        writer.endArray();
-        writer.endObject();
-      }
-      writer.endArray();
-
-      ColumnStatistics[] stats = reader.getStatistics();
-      int colCount = stats.length;
-      writer.key("fileStatistics").array();
-      for (int i = 0; i < stats.length; ++i) {
-        writer.object();
-        writer.key("columnId").value(i);
-        writeColumnStatistics(writer, stats[i]);
-        writer.endObject();
-      }
-      writer.endArray();
-
-      writer.key("stripes").array();
-      int stripeIx = -1;
-      for (StripeInformation stripe : reader.getStripes()) {
-        ++stripeIx;
-        long stripeStart = stripe.getOffset();
-        OrcProto.StripeFooter footer = rows.readStripeFooter(stripe);
-        writer.object(); // start of stripe information
-        writer.key("stripeNumber").value(stripeIx + 1);
-        writer.key("stripeInformation");
-        writeStripeInformation(writer, stripe);
-        if (printTimeZone) {
-          writer.key("writerTimezone").value(
-              footer.hasWriterTimezone() ? footer.getWriterTimezone() : FileDump.UNKNOWN);
+        writer.key("fileName").value(filename);
+        Path path = new Path(filename);
+        Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(conf));
+        writer.key("fileVersion").value(reader.getFileVersion().getName());
+        writer.key("writerVersion").value(reader.getWriterVersion());
+        RecordReaderImpl rows = (RecordReaderImpl) reader.rows();
+        writer.key("numberOfRows").value(reader.getNumberOfRows());
+        writer.key("compression").value(reader.getCompression());
+        if (reader.getCompression() != CompressionKind.NONE) {
+          writer.key("compressionBufferSize").value(reader.getCompressionSize());
         }
-        long sectionStart = stripeStart;
+        writer.key("schemaString").value(reader.getObjectInspector().getTypeName());
+        writer.key("schema").array();
+        writeSchema(writer, reader.getTypes());
+        writer.endArray();
 
-        writer.key("streams").array();
-        for (OrcProto.Stream section : footer.getStreamsList()) {
+        writer.key("stripeStatistics").array();
+        List<StripeStatistics> stripeStatistics = reader.getStripeStatistics();
+        for (int n = 0; n < stripeStatistics.size(); n++) {
           writer.object();
-          String kind = section.hasKind() ? section.getKind().name() : FileDump.UNKNOWN;
-          writer.key("columnId").value(section.getColumn());
-          writer.key("section").value(kind);
-          writer.key("startOffset").value(sectionStart);
-          writer.key("length").value(section.getLength());
-          sectionStart += section.getLength();
+          writer.key("stripeNumber").value(n + 1);
+          StripeStatistics ss = stripeStatistics.get(n);
+          writer.key("columnStatistics").array();
+          for (int i = 0; i < ss.getColumnStatistics().length; i++) {
+            writer.object();
+            writer.key("columnId").value(i);
+            writeColumnStatistics(writer, ss.getColumnStatistics()[i]);
+            writer.endObject();
+          }
+          writer.endArray();
           writer.endObject();
         }
         writer.endArray();
 
-        writer.key("encodings").array();
-        for (int i = 0; i < footer.getColumnsCount(); ++i) {
+        ColumnStatistics[] stats = reader.getStatistics();
+        int colCount = stats.length;
+        writer.key("fileStatistics").array();
+        for (int i = 0; i < stats.length; ++i) {
           writer.object();
-          OrcProto.ColumnEncoding encoding = footer.getColumns(i);
           writer.key("columnId").value(i);
-          writer.key("kind").value(encoding.getKind());
-          if (encoding.getKind() == OrcProto.ColumnEncoding.Kind.DICTIONARY ||
-              encoding.getKind() == OrcProto.ColumnEncoding.Kind.DICTIONARY_V2) {
-            writer.key("dictionarySize").value(encoding.getDictionarySize());
-          }
+          writeColumnStatistics(writer, stats[i]);
           writer.endObject();
         }
         writer.endArray();
 
-        if (rowIndexCols != null && !rowIndexCols.isEmpty()) {
-          // include the columns that are specified, only if the columns are included, bloom filter
-          // will be read
-          boolean[] sargColumns = new boolean[colCount];
-          for (int colIdx : rowIndexCols) {
-            sargColumns[colIdx] = true;
+        writer.key("stripes").array();
+        int stripeIx = -1;
+        for (StripeInformation stripe : reader.getStripes()) {
+          ++stripeIx;
+          long stripeStart = stripe.getOffset();
+          OrcProto.StripeFooter footer = rows.readStripeFooter(stripe);
+          writer.object(); // start of stripe information
+          writer.key("stripeNumber").value(stripeIx + 1);
+          writer.key("stripeInformation");
+          writeStripeInformation(writer, stripe);
+          if (printTimeZone) {
+            writer.key("writerTimezone").value(
+                footer.hasWriterTimezone() ? footer.getWriterTimezone() : FileDump.UNKNOWN);
           }
-          RecordReaderImpl.Index indices = rows.readRowIndex(stripeIx, null, sargColumns);
-          writer.key("indexes").array();
-          for (int col : rowIndexCols) {
+          long sectionStart = stripeStart;
+
+          writer.key("streams").array();
+          for (OrcProto.Stream section : footer.getStreamsList()) {
             writer.object();
-            writer.key("columnId").value(col);
-            writeRowGroupIndexes(writer, col, indices.getRowGroupIndex());
-            writeBloomFilterIndexes(writer, col, indices.getBloomFilterIndex());
+            String kind = section.hasKind() ? section.getKind().name() : FileDump.UNKNOWN;
+            writer.key("columnId").value(section.getColumn());
+            writer.key("section").value(kind);
+            writer.key("startOffset").value(sectionStart);
+            writer.key("length").value(section.getLength());
+            sectionStart += section.getLength();
             writer.endObject();
           }
           writer.endArray();
+
+          writer.key("encodings").array();
+          for (int i = 0; i < footer.getColumnsCount(); ++i) {
+            writer.object();
+            OrcProto.ColumnEncoding encoding = footer.getColumns(i);
+            writer.key("columnId").value(i);
+            writer.key("kind").value(encoding.getKind());
+            if (encoding.getKind() == OrcProto.ColumnEncoding.Kind.DICTIONARY ||
+                encoding.getKind() == OrcProto.ColumnEncoding.Kind.DICTIONARY_V2) {
+              writer.key("dictionarySize").value(encoding.getDictionarySize());
+            }
+            writer.endObject();
+          }
+          writer.endArray();
+
+          if (rowIndexCols != null && !rowIndexCols.isEmpty()) {
+            // read row-group indexes only for the requested columns; a column's bloom filter
+            // is read only when that column is included
+            boolean[] sargColumns = new boolean[colCount];
+            for (int colIdx : rowIndexCols) {
+              sargColumns[colIdx] = true;
+            }
+            RecordReaderImpl.Index indices = rows.readRowIndex(stripeIx, null, sargColumns);
+            writer.key("indexes").array();
+            for (int col : rowIndexCols) {
+              writer.object();
+              writer.key("columnId").value(col);
+              writeRowGroupIndexes(writer, col, indices.getRowGroupIndex());
+              writeBloomFilterIndexes(writer, col, indices.getBloomFilterIndex());
+              writer.endObject();
+            }
+            writer.endArray();
+          }
+          writer.endObject(); // end of stripe information
         }
-        writer.endObject(); // end of stripe information
-      }
-      writer.endArray();
+        writer.endArray();
 
-      FileSystem fs = path.getFileSystem(conf);
-      long fileLen = fs.getContentSummary(path).getLength();
-      long paddedBytes = FileDump.getTotalPaddingSize(reader);
-      // empty ORC file is ~45 bytes. Assumption here is file length always >0
-      double percentPadding = ((double) paddedBytes / (double) fileLen) * 100;
-      writer.key("fileLength").value(fileLen);
-      writer.key("paddingLength").value(paddedBytes);
-      writer.key("paddingRatio").value(percentPadding);
-      rows.close();
+        FileSystem fs = path.getFileSystem(conf);
+        long fileLen = fs.getContentSummary(path).getLength();
+        long paddedBytes = FileDump.getTotalPaddingSize(reader);
+        // an empty ORC file is ~45 bytes, so the file length is assumed to always be > 0
+        double percentPadding = ((double) paddedBytes / (double) fileLen) * 100;
+        writer.key("fileLength").value(fileLen);
+        writer.key("paddingLength").value(paddedBytes);
+        writer.key("paddingRatio").value(percentPadding);
+        writer.key("status").value("OK");
+        rows.close();
 
-      writer.endObject();
+        writer.endObject();
+      } catch (Exception e) {
+        writer.key("status").value("FAILED");
+        System.err.println("Unable to dump data for file: " + filename);
+        e.printStackTrace();
+        throw e;
+      }
     }
     if (multiFile) {
       writer.endArray();
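
The JsonFileDump change above boils down to one pattern: each file's dump runs
inside its own try/catch, a "status" field of "OK" or "FAILED" is written into
that file's JSON object, the failure is logged to stderr, and the exception is
rethrown so the caller still sees it. The sketch below distills that pattern;
JsonDumpSketch, dumpOneFile and the StringBuilder output are hypothetical
stand-ins for FileDump's JSONWriter and ORC Reader calls, not the Hive APIs.

import java.util.Arrays;
import java.util.List;

// Minimal sketch (hypothetical names, not the Hive classes) of the per-file
// error handling added above: each file's dump runs in its own try/catch,
// a "status" field records OK or FAILED, and the exception is rethrown.
public class JsonDumpSketch {

  // Stand-in for the ORC Reader / JSONWriter calls made per file in FileDump.
  private static void dumpOneFile(StringBuilder out, String filename) throws Exception {
    if (filename.endsWith(".corrupt")) {
      throw new Exception("unreadable footer");      // simulate a broken file
    }
    out.append("\"fileName\":\"").append(filename).append("\",");
    out.append("\"numberOfRows\":42,");              // placeholder metadata
  }

  public static void dump(List<String> files, StringBuilder out) throws Exception {
    boolean multiFile = files.size() > 1;
    if (multiFile) {
      out.append("[");
    }
    for (String f : files) {
      out.append("{");
      try {
        dumpOneFile(out, f);
        out.append("\"status\":\"OK\"},");
      } catch (Exception e) {
        out.append("\"status\":\"FAILED\"}");
        System.err.println("Unable to dump data for file: " + f);
        throw e;                                      // abort, but the JSON records the failure
      }
    }
    if (out.charAt(out.length() - 1) == ',') {        // drop the trailing comma
      out.setLength(out.length() - 1);
    }
    if (multiFile) {
      out.append("]");
    }
  }

  public static void main(String[] args) {
    StringBuilder out = new StringBuilder();
    try {
      dump(Arrays.asList("good.orc", "bad.corrupt"), out);
    } catch (Exception expected) {
      // the partially written JSON still marks the failing file as FAILED
    }
    System.out.println(out);
  }
}
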

http://git-wip-us.apache.org/repos/asf/hive/blob/d84e393e/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
index 68d503e..40674ea 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
@@ -26,8 +26,6 @@ import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.FileReader;
-import java.io.IOException;
-import java.io.OutputStream;
 import java.io.PrintStream;
 import java.sql.Date;
 import java.sql.Timestamp;
@@ -252,54 +250,6 @@ public class TestFileDump {
     assertEquals("{\"b\":false,\"bt\":20,\"s\":200,\"i\":2000,\"l\":20000,\"f\":8,\"d\":40,\"de\":\"2.2222\",\"t\":\"2014-11-25 18:02:44\",\"dt\":\"2014-09-28\",\"str\":\"abcd\",\"c\":\"world                                                                                                                                                                                                                                                          \",\"vc\":\"world\",\"m\":[{\"_key\":\"k3\",\"_value\":\"v3\"}],\"a\":[200,300],\"st\":{\"i\":20,\"s\":\"bar\"}}", lines[1]);
   }
   
-  @Test(expected = IOException.class)
-  public void testDataDumpThrowsIOException() throws Exception {
-    PrintStream origOut = System.out;
-    try {
-      ObjectInspector inspector;
-      synchronized (TestOrcFile.class) {
-        inspector = ObjectInspectorFactory.getReflectionObjectInspector
-            (AllTypesRecord.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
-      }
-      Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-          100000, CompressionKind.NONE, 10000, 1000);
-      Map<String, String> m = new HashMap<String, String>(2);
-      m.put("k1", "v1");
-      writer.addRow(new AllTypesRecord(
-          true,
-          (byte) 10,
-          (short) 100,
-          1000,
-          10000L,
-          4.0f,
-          20.0,
-          HiveDecimal.create("4.2222"),
-          new Timestamp(1416967764000L),
-          new Date(1416967764000L),
-          "string",
-          new HiveChar("hello", 5),
-          new HiveVarchar("hello", 10),
-          m,
-          Arrays.asList(100, 200),
-          new AllTypesRecord.Struct(10, "foo")));
-      
-      writer.close();
-      
-      OutputStream myOut = new OutputStream() {
-        @Override
-        public void write(int b) throws IOException {
-          throw new IOException();
-        }
-      };
-      
-      // replace stdout and run command
-      System.setOut(new PrintStream(myOut));
-      FileDump.main(new String[]{testFilePath.toString(), "-d"});
-    } finally {
-      System.setOut(origOut);
-    }
-  }
-
   // Test that if the fraction of rows that have distinct strings is greater than the configured
   // threshold dictionary encoding is turned off.  If dictionary encoding is turned off the length
   // of the dictionary stream for the column will be 0 in the ORC file dump.
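
The test removed above relied on a small, reusable trick: temporarily replace
System.out with a stream whose writes always fail, run the dump, and expect the
IOException to surface rather than be swallowed. Below is a self-contained
sketch of that stdout-swapping technique; StdoutFailureSketch and reportingTool
are hypothetical stand-ins, not FileDump or the Hive test harness.

import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;

// Sketch of the stdout-swapping technique used by the removed test: System.out
// is replaced with a stream that always fails, so code that prints its report
// can be checked for IOException propagation. Stdout is restored in finally.
public class StdoutFailureSketch {

  // Stand-in for a tool (such as a file dumper) that reports via System.out.
  static void reportingTool() throws IOException {
    System.out.print("dump output");
    if (System.out.checkError()) {                 // PrintStream swallows write errors
      throw new IOException("failed to write dump output");
    }
  }

  public static void main(String[] args) {
    PrintStream origOut = System.out;
    try {
      System.setOut(new PrintStream(new OutputStream() {
        @Override
        public void write(int b) throws IOException {
          throw new IOException();                 // every write fails
        }
      }));
      reportingTool();
      System.err.println("no exception surfaced");
    } catch (IOException expected) {
      System.err.println("IOException surfaced as expected");
    } finally {
      System.setOut(origOut);                      // always restore stdout
    }
  }
}
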


[55/55] [abbrv] hive git commit: HIVE-12284: Merge branch 'master' into spark

Posted by xu...@apache.org.
HIVE-12284: Merge branch 'master' into spark


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c9073aad
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c9073aad
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c9073aad

Branch: refs/heads/spark
Commit: c9073aadc00b01372b85522e777acaea997d5936
Parents: 51f257a 3e21a6d
Author: Xuefu Zhang <xz...@Cloudera.com>
Authored: Wed Oct 28 05:08:53 2015 -0700
Committer: Xuefu Zhang <xz...@Cloudera.com>
Committed: Wed Oct 28 05:08:53 2015 -0700

----------------------------------------------------------------------
 LICENSE                                         |    34 +-
 NOTICE                                          |     3 +
 accumulo-handler/pom.xml                        |    61 +-
 .../hive/accumulo/HiveAccumuloHelper.java       |    55 +-
 .../mr/HiveAccumuloTableOutputFormat.java       |    50 +-
 .../hive/accumulo/TestHiveAccumuloHelper.java   |    69 +-
 .../mr/TestHiveAccumuloTableOutputFormat.java   |    86 +-
 .../apache/hadoop/hive/ant/GenVectorCode.java   |    61 +-
 .../apache/hadoop/hive/ant/QTestGenTask.java    |    13 +
 beeline/pom.xml                                 |    44 +-
 .../java/org/apache/hive/beeline/BeeLine.java   |   304 +-
 .../org/apache/hive/beeline/BeeLineOpts.java    |    52 +-
 .../hive/beeline/ClientCommandHookFactory.java  |    85 +
 .../org/apache/hive/beeline/ClientHook.java     |    33 +
 .../java/org/apache/hive/beeline/Commands.java  |   492 +-
 .../apache/hive/beeline/DatabaseConnection.java |     4 +-
 .../apache/hive/beeline/HiveSchemaHelper.java   |     4 +-
 .../org/apache/hive/beeline/HiveSchemaTool.java |    22 +-
 .../hive/beeline/cli/CliOptionsProcessor.java   |   104 +
 .../org/apache/hive/beeline/cli/HiveCli.java    |    37 +
 .../hive/beeline/TestBeelineArgParsing.java     |     1 +
 .../beeline/TestClientCommandHookFactory.java   |    32 +
 .../apache/hive/beeline/cli/TestHiveCli.java    |   289 +
 beeline/src/test/resources/hive-site.xml        |    37 +
 bin/beeline                                     |    10 +
 bin/ext/cli.cmd                                 |    39 +-
 bin/ext/cli.sh                                  |    35 +-
 bin/ext/hbaseimport.cmd                         |    35 +
 bin/ext/hbaseimport.sh                          |    27 +
 bin/ext/hbaseschematool.sh                      |    27 +
 bin/ext/llap.sh                                 |    49 +
 bin/ext/util/execHiveCmd.sh                     |    21 +-
 bin/hive                                        |     8 +-
 cli/pom.xml                                     |    63 +-
 .../org/apache/hadoop/hive/cli/CliDriver.java   |    33 +-
 common/pom.xml                                  |    41 +-
 .../hadoop/hive/common/CallableWithNdc.java     |    44 +
 .../hadoop/hive/common/CompressionUtils.java    |   131 +
 .../apache/hadoop/hive/common/DiskRange.java    |    99 -
 .../hadoop/hive/common/DiskRangeInfo.java       |    59 +
 .../hadoop/hive/common/DiskRangeList.java       |   205 -
 .../apache/hadoop/hive/common/FileUtils.java    |    38 +
 .../apache/hadoop/hive/common/ObjectPair.java   |     5 +
 .../hadoop/hive/common/RunnableWithNdc.java     |    43 +
 .../hadoop/hive/common/StatsSetupConst.java     |    16 -
 .../common/metrics/common/MetricsConstant.java  |    22 +-
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   253 +-
 .../hadoop/hive/conf/HiveVariableSource.java    |    24 +
 .../hadoop/hive/conf/VariableSubstitution.java  |    70 +
 .../apache/hadoop/hive/ql/log/PerfLogger.java   |   196 +
 .../apache/hive/common/util/BloomFilter.java    |    20 +-
 .../hive/common/util/FixedSizedObjectPool.java  |   315 +
 .../apache/hive/common/util/ReflectionUtil.java |     2 +-
 .../hive/common/util/ShutdownHookManager.java   |    52 +-
 common/src/main/resources/hive-log4j.properties |    88 +
 common/src/main/resources/hive-log4j2.xml       |     4 +-
 .../hive/common/type/TestHiveDecimal.java       |    12 +-
 .../apache/hadoop/hive/conf/TestHiveConf.java   |    25 +
 .../hive/conf/TestVariableSubstitution.java     |    63 +
 .../common/util/TestFixedSizedObjectPool.java   |   246 +
 .../common/util/TestShutdownHookManager.java    |    22 +-
 contrib/pom.xml                                 |    44 +-
 .../hive/contrib/serde2/MultiDelimitSerDe.java  |     1 +
 data/conf/hive-log4j2.xml                       |    27 +
 data/conf/hive-site.xml                         |    45 +-
 data/conf/llap/hive-site.xml                    |   266 +
 data/conf/llap/llap-daemon-site.xml             |    57 +
 data/conf/llap/tez-site.xml                     |     6 +
 data/conf/spark/standalone/hive-site.xml        |     6 -
 data/conf/spark/yarn-client/hive-site.xml       |     6 -
 data/conf/tez/hive-site.xml                     |    26 +-
 data/conf/tez/tez-site.xml                      |     6 +
 data/files/decimal_1_1.txt                      |    30 +
 data/files/escape_crlf.txt                      |     2 +
 data/files/identity_udf.jar                     |   Bin 0 -> 710 bytes
 data/files/mapNull.txt                          |     1 +
 data/files/sample2.json                         |     2 +
 errata.txt                                      |    68 +
 hbase-handler/pom.xml                           |   246 +-
 .../apache/hadoop/hive/hbase/HBaseSerDe.java    |     1 +
 .../hadoop/hive/hbase/HBaseSerDeHelper.java     |    21 +-
 .../hadoop/hive/hbase/HBaseSerDeParameters.java |    20 +-
 .../hadoop/hive/hbase/HBaseStatsAggregator.java |   128 -
 .../hadoop/hive/hbase/HBaseStatsPublisher.java  |   154 -
 .../hive/hbase/HBaseStatsSetupConstants.java    |    34 -
 .../hadoop/hive/hbase/HBaseStatsUtils.java      |   135 -
 .../hive/hbase/HiveHBaseTableInputFormat.java   |   105 +-
 .../hive/hbase/HiveHBaseTableOutputFormat.java  |     9 +
 .../HiveHBaseTableSnapshotInputFormat.java      |    21 +-
 .../hbase/struct/AvroHBaseValueFactory.java     |     3 +-
 .../hadoop/hive/hbase/TestHBaseSerDe.java       |    12 +-
 .../queries/positive/hbase_handler_snapshot.q   |     4 +
 .../src/test/queries/positive/hbase_queries.q   |    16 +
 .../src/test/queries/positive/hbase_stats.q     |    30 -
 .../src/test/queries/positive/hbase_stats2.q    |    31 -
 .../positive/hbase_stats_empty_partition.q      |    13 -
 .../positive/hbase_handler_snapshot.q.out       |    22 +
 .../test/results/positive/hbase_queries.q.out   |    99 +
 .../src/test/results/positive/hbase_stats.q.out |   311 -
 .../test/results/positive/hbase_stats2.q.out    |   311 -
 .../positive/hbase_stats_empty_partition.q.out  |    63 -
 hcatalog/conf/proto-hive-site.xml               |     2 +-
 hcatalog/core/pom.xml                           |   194 +-
 .../apache/hive/hcatalog/data/JsonSerDe.java    |     8 +-
 .../hive/hcatalog/data/TestJsonSerDe.java       |    36 +
 hcatalog/hcatalog-pig-adapter/pom.xml           |   160 +-
 .../apache/hive/hcatalog/pig/HCatLoader.java    |     9 +
 .../hive/hcatalog/pig/TestHCatLoader.java       |    72 +-
 .../hcatalog/pig/TestHCatLoaderEncryption.java  |    64 +-
 hcatalog/pom.xml                                |    65 +-
 hcatalog/server-extensions/pom.xml              |    29 +-
 .../listener/TestNotificationListener.java      |     4 +-
 .../templates/conf/hive-site.xml.template       |     2 +-
 hcatalog/streaming/pom.xml                      |    47 +-
 .../streaming/AbstractRecordWriter.java         |    93 +-
 .../streaming/DelimitedInputWriter.java         |    54 +-
 .../hive/hcatalog/streaming/HiveEndPoint.java   |    21 +
 .../hive/hcatalog/streaming/InvalidTable.java   |     8 +
 .../hcatalog/streaming/StrictJsonWriter.java    |    46 +-
 .../mutate/worker/BucketIdResolverImpl.java     |    16 +-
 .../hive/hcatalog/streaming/TestStreaming.java  |   764 +-
 .../mutate/worker/TestBucketIdResolverImpl.java |     2 +-
 hcatalog/webhcat/java-client/pom.xml            |    39 +-
 .../hive/hcatalog/api/TestHCatClient.java       |    39 +-
 hcatalog/webhcat/svr/pom.xml                    |    60 +-
 .../hive/hcatalog/templeton/AppConfig.java      |    21 +
 .../apache/hive/hcatalog/templeton/Server.java  |    19 +-
 .../hcatalog/templeton/tool/LaunchMapper.java   |     7 +-
 hplsql/pom.xml                                  |    31 +-
 .../antlr4/org/apache/hive/hplsql/Hplsql.g4     |    70 +-
 .../main/java/org/apache/hive/hplsql/Cmp.java   |   314 +
 .../java/org/apache/hive/hplsql/Column.java     |    29 +-
 .../main/java/org/apache/hive/hplsql/Conn.java  |    21 +
 .../main/java/org/apache/hive/hplsql/Copy.java  |    50 +-
 .../main/java/org/apache/hive/hplsql/Exec.java  |    66 +-
 .../java/org/apache/hive/hplsql/Expression.java |    33 +-
 .../main/java/org/apache/hive/hplsql/File.java  |    18 +-
 .../main/java/org/apache/hive/hplsql/Meta.java  |    28 +-
 .../main/java/org/apache/hive/hplsql/Query.java |    18 +
 .../java/org/apache/hive/hplsql/Select.java     |    23 +-
 .../main/java/org/apache/hive/hplsql/Stmt.java  |     8 +-
 .../main/java/org/apache/hive/hplsql/Var.java   |   110 +-
 .../apache/hive/hplsql/functions/Function.java  |     6 +-
 .../hive/hplsql/functions/FunctionMisc.java     |   121 +
 .../org/apache/hive/hplsql/TestHplsqlLocal.java |    18 +
 .../apache/hive/hplsql/TestHplsqlOffline.java   |     5 +
 hplsql/src/test/queries/db/cmp_row_count.sql    |     4 +
 hplsql/src/test/queries/db/cmp_sum.sql          |     3 +
 hplsql/src/test/queries/db/copy_to_file.sql     |     2 +
 hplsql/src/test/queries/db/copy_to_hdfs.sql     |     2 +
 hplsql/src/test/queries/db/copy_to_table.sql    |     2 +
 hplsql/src/test/queries/db/part_count.sql       |    17 +
 hplsql/src/test/queries/db/part_count_by.sql    |     4 +
 hplsql/src/test/queries/db/schema.sql           |    32 +
 hplsql/src/test/queries/db/select_into.sql      |    20 +-
 hplsql/src/test/queries/db/select_into2.sql     |    17 +
 .../test/queries/local/create_procedure2.sql    |    16 +
 hplsql/src/test/queries/local/if2.sql           |     5 +
 hplsql/src/test/queries/local/include.sql       |     2 +
 hplsql/src/test/queries/local/include_file.sql  |     1 +
 hplsql/src/test/queries/local/mult_div.sql      |     8 +
 hplsql/src/test/queries/offline/select_db2.sql  |     5 +
 .../src/test/results/db/cmp_row_count.out.txt   |    12 +
 hplsql/src/test/results/db/cmp_sum.out.txt      |   320 +
 hplsql/src/test/results/db/copy_to_file.out.txt |     6 +
 hplsql/src/test/results/db/copy_to_hdfs.out.txt |     4 +
 .../src/test/results/db/copy_to_table.out.txt   |     2 +
 hplsql/src/test/results/db/part_count.out.txt   |    15 +
 .../src/test/results/db/part_count_by.out.txt   |    13 +
 hplsql/src/test/results/db/select_into.out.txt  |    58 +-
 hplsql/src/test/results/db/select_into2.out.txt |    19 +
 .../results/local/create_procedure2.out.txt     |    10 +
 hplsql/src/test/results/local/if2.out.txt       |     4 +
 hplsql/src/test/results/local/include.out.txt   |     8 +
 hplsql/src/test/results/local/mult_div.out.txt  |     7 +
 .../src/test/results/offline/select_db2.out.txt |     6 +
 hwi/pom.xml                                     |    61 +-
 itests/custom-serde/pom.xml                     |    31 +-
 itests/hcatalog-unit/pom.xml                    |   389 +-
 itests/hive-jmh/pom.xml                         |    38 +-
 .../vectorization/VectorizationBench.java       |   198 +-
 itests/hive-minikdc/pom.xml                     |   181 +-
 itests/hive-unit-hadoop2/pom.xml                |    12 +-
 .../hive/thrift/TestHadoop20SAuthBridge.java    |   420 -
 .../hive/thrift/TestHadoopAuthBridge23.java     |   422 +
 itests/hive-unit/pom.xml                        |   323 +-
 .../org/apache/hive/jdbc/miniHS2/MiniHS2.java   |    14 +-
 .../hadoop/hive/metastore/TestAdminUser.java    |     4 +-
 .../hive/metastore/TestHiveMetaStore.java       |    66 +
 .../metastore/hbase/HBaseIntegrationTests.java  |   111 +
 .../TestHBaseAggrStatsCacheIntegration.java     |   691 +
 .../hive/metastore/hbase/TestHBaseImport.java   |   650 +
 .../metastore/hbase/TestHBaseMetastoreSql.java  |   223 +
 .../hbase/TestHBaseStoreIntegration.java        |  1796 +
 .../hbase/TestStorageDescriptorSharing.java     |   191 +
 .../hadoop/hive/ql/TestLocationQueries.java     |     2 +-
 .../hadoop/hive/ql/history/TestHiveHistory.java |     2 +-
 .../hive/ql/security/FolderPermissionBase.java  |    63 +-
 .../TestClientSideAuthorizationProvider.java    |     9 +
 ...ageBasedClientSideAuthorizationProvider.java |     6 +
 ...StorageBasedMetastoreAuthorizationReads.java |     7 +-
 .../hive/ql/txn/compactor/TestCompactor.java    |    13 +-
 .../hive/beeline/TestBeeLineWithArgs.java       |    21 +
 .../org/apache/hive/jdbc/TestJdbcDriver2.java   |   108 +-
 .../apache/hive/jdbc/TestJdbcWithMiniHS2.java   |   153 +-
 .../test/java/org/apache/hive/jdbc/TestSSL.java |    44 +-
 .../hive/jdbc/cbo_rp_TestJdbcDriver2.java       |  2419 ++
 .../apache/hive/jdbc/miniHS2/TestMiniHS2.java   |     8 +-
 itests/pom.xml                                  |     9 +-
 itests/qtest-accumulo/pom.xml                   |   404 +-
 itests/qtest-spark/pom.xml                      |    38 +-
 itests/qtest/pom.xml                            |   545 +-
 .../test/resources/testconfiguration.properties |    83 +-
 itests/test-serde/pom.xml                       |    32 +-
 itests/util/pom.xml                             |   131 +-
 .../hadoop/hive/hbase/HBaseQTestUtil.java       |     8 +-
 .../metastore/hbase/HBaseStoreTestUtil.java     |    45 +
 .../org/apache/hadoop/hive/ql/QTestUtil.java    |   153 +-
 .../hive/ql/stats/DummyStatsAggregator.java     |    18 +-
 .../hive/ql/stats/DummyStatsPublisher.java      |    15 +-
 .../ql/stats/KeyVerifyingStatsAggregator.java   |    13 +-
 jdbc/pom.xml                                    |    29 +-
 .../org/apache/hive/jdbc/HiveConnection.java    |    63 +-
 .../apache/hive/jdbc/HivePreparedStatement.java |     4 +-
 .../hive/jdbc/ZooKeeperHiveClientHelper.java    |    34 +-
 llap-client/pom.xml                             |   147 +
 .../llap/configuration/LlapConfiguration.java   |   176 +
 .../apache/hadoop/hive/llap/io/api/LlapIo.java  |    27 +
 .../hadoop/hive/llap/io/api/LlapIoProxy.java    |    78 +
 llap-server/bin/llap-daemon-env.sh              |    44 +
 llap-server/bin/llapDaemon.sh                   |   152 +
 llap-server/bin/runLlapDaemon.sh                |   130 +
 llap-server/pom.xml                             |   272 +
 .../daemon/rpc/LlapDaemonProtocolProtos.java    | 12674 ++++++
 .../hadoop/hive/llap/ConsumerFeedback.java      |    33 +
 .../llap/IncrementalObjectSizeEstimator.java    |   628 +
 .../org/apache/hadoop/hive/llap/LlapNodeId.java |    86 +
 .../hadoop/hive/llap/cache/BuddyAllocator.java  |   547 +
 .../hive/llap/cache/BuddyAllocatorMXBean.java   |    62 +
 .../apache/hadoop/hive/llap/cache/Cache.java    |    27 +
 .../hive/llap/cache/EvictionAwareAllocator.java |    30 +
 .../hive/llap/cache/EvictionDispatcher.java     |    52 +
 .../hive/llap/cache/EvictionListener.java       |    23 +
 .../hive/llap/cache/LlapCacheableBuffer.java    |    56 +
 .../hadoop/hive/llap/cache/LlapDataBuffer.java  |   142 +
 .../hive/llap/cache/LlapOomDebugDump.java       |    23 +
 .../hadoop/hive/llap/cache/LowLevelCache.java   |    76 +
 .../hive/llap/cache/LowLevelCacheCounters.java  |    26 +
 .../hive/llap/cache/LowLevelCacheImpl.java      |   540 +
 .../llap/cache/LowLevelCacheMemoryManager.java  |   111 +
 .../hive/llap/cache/LowLevelCachePolicy.java    |    30 +
 .../llap/cache/LowLevelFifoCachePolicy.java     |   116 +
 .../llap/cache/LowLevelLrfuCachePolicy.java     |   432 +
 .../hadoop/hive/llap/cache/MemoryManager.java   |    25 +
 .../hadoop/hive/llap/cache/NoopCache.java       |    33 +
 .../hive/llap/cli/LlapOptionsProcessor.java     |   184 +
 .../hadoop/hive/llap/cli/LlapServiceDriver.java |   283 +
 .../llap/counters/QueryFragmentCounters.java    |   143 +
 .../hive/llap/daemon/ContainerRunner.java       |    34 +
 .../daemon/FinishableStateUpdateHandler.java    |    21 +
 .../llap/daemon/FragmentCompletionHandler.java  |    22 +
 .../hadoop/hive/llap/daemon/HistoryLogger.java  |   154 +
 .../hive/llap/daemon/KilledTaskHandler.java     |    29 +
 .../daemon/LlapDaemonProtocolBlockingPB.java    |    22 +
 .../hive/llap/daemon/QueryFailedHandler.java    |    20 +
 .../hive/llap/daemon/impl/AMReporter.java       |   474 +
 .../llap/daemon/impl/ContainerRunnerImpl.java   |   353 +
 .../impl/EvictingPriorityBlockingQueue.java     |    76 +
 .../hive/llap/daemon/impl/LlapDaemon.java       |   435 +
 .../hive/llap/daemon/impl/LlapDaemonMXBean.java |    83 +
 .../impl/LlapDaemonProtocolClientImpl.java      |   126 +
 .../impl/LlapDaemonProtocolServerImpl.java      |   155 +
 .../hive/llap/daemon/impl/LlapTaskReporter.java |   451 +
 .../llap/daemon/impl/PriorityBlockingDeque.java |   767 +
 .../hive/llap/daemon/impl/QueryFileCleaner.java |    94 +
 .../llap/daemon/impl/QueryFragmentInfo.java     |   181 +
 .../hadoop/hive/llap/daemon/impl/QueryInfo.java |   252 +
 .../hive/llap/daemon/impl/QueryTracker.java     |   242 +
 .../hadoop/hive/llap/daemon/impl/Scheduler.java |    42 +
 .../llap/daemon/impl/TaskExecutorService.java   |   715 +
 .../llap/daemon/impl/TaskRunnerCallable.java    |   491 +
 .../comparator/FirstInFirstOutComparator.java   |    81 +
 .../comparator/ShortestJobFirstComparator.java  |    70 +
 .../llap/daemon/registry/ServiceInstance.java   |    73 +
 .../daemon/registry/ServiceInstanceSet.java     |    57 +
 .../llap/daemon/registry/ServiceRegistry.java   |    59 +
 .../registry/impl/LlapFixedRegistryImpl.java    |   222 +
 .../registry/impl/LlapRegistryService.java      |    86 +
 .../registry/impl/LlapYarnRegistryImpl.java     |   383 +
 .../llap/daemon/services/impl/LlapWebApp.java   |    12 +
 .../daemon/services/impl/LlapWebServices.java   |    53 +
 .../llap/io/api/impl/ColumnVectorBatch.java     |    46 +
 .../hive/llap/io/api/impl/LlapInputFormat.java  |   322 +
 .../hive/llap/io/api/impl/LlapIoImpl.java       |   152 +
 .../llap/io/decode/ColumnVectorProducer.java    |    36 +
 .../llap/io/decode/EncodedDataConsumer.java     |   194 +
 .../llap/io/decode/OrcColumnVectorProducer.java |    79 +
 .../llap/io/decode/OrcEncodedDataConsumer.java  |   161 +
 .../hive/llap/io/decode/ReadPipeline.java       |    27 +
 .../llap/io/encoded/OrcEncodedDataReader.java   |   971 +
 .../llap/io/metadata/CompressionBuffer.java     |   119 +
 .../hive/llap/io/metadata/OrcFileMetadata.java  |   231 +
 .../hive/llap/io/metadata/OrcMetadataCache.java |    91 +
 .../llap/io/metadata/OrcStripeMetadata.java     |   163 +
 .../hive/llap/metrics/LlapDaemonCacheInfo.java  |    56 +
 .../llap/metrics/LlapDaemonCacheMetrics.java    |   154 +
 .../metrics/LlapDaemonCustomMetricsInfo.java    |    43 +
 .../llap/metrics/LlapDaemonExecutorInfo.java    |    56 +
 .../llap/metrics/LlapDaemonExecutorMetrics.java |   196 +
 .../hive/llap/metrics/LlapDaemonQueueInfo.java  |    50 +
 .../llap/metrics/LlapDaemonQueueMetrics.java    |   116 +
 .../hive/llap/metrics/LlapMetricsSystem.java    |    57 +
 .../hadoop/hive/llap/metrics/MetricsUtils.java  |    44 +
 .../hadoop/hive/llap/old/BufferInProgress.java  |    82 +
 .../apache/hadoop/hive/llap/old/BufferPool.java |   225 +
 .../hadoop/hive/llap/old/CachePolicy.java       |    34 +
 .../apache/hadoop/hive/llap/old/ChunkPool.java  |   237 +
 .../protocol/LlapTaskUmbilicalProtocol.java     |    39 +
 .../AttemptRegistrationListener.java            |    24 +
 .../hive/llap/shufflehandler/DirWatcher.java    |   414 +
 .../shufflehandler/FadvisedChunkedFile.java     |    78 +
 .../llap/shufflehandler/FadvisedFileRegion.java |   160 +
 .../hive/llap/shufflehandler/IndexCache.java    |   199 +
 .../llap/shufflehandler/ShuffleHandler.java     |  1047 +
 .../hadoop/hive/llap/tezplugins/Converters.java |   266 +
 .../llap/tezplugins/LlapContainerLauncher.java  |    43 +
 .../llap/tezplugins/LlapTaskCommunicator.java   |   617 +
 .../hive/llap/tezplugins/TaskCommunicator.java  |   479 +
 .../tezplugins/helpers/SourceStateTracker.java  |   291 +
 .../apache/tez/dag/app/rm/ContainerFactory.java |    51 +
 .../dag/app/rm/LlapTaskSchedulerService.java    |  1363 +
 .../main/resources/llap-daemon-log4j.properties |    78 +
 llap-server/src/main/resources/llap.py          |    75 +
 llap-server/src/main/resources/package.py       |   153 +
 llap-server/src/main/resources/params.py        |    39 +
 llap-server/src/main/resources/templates.py     |   123 +
 .../src/main/resources/webapps/llap/.keep       |     0
 .../src/protobuf/LlapDaemonProtocol.proto       |   125 +
 .../hive/llap/cache/TestBuddyAllocator.java     |   287 +
 .../TestIncrementalObjectSizeEstimator.java     |   247 +
 .../hive/llap/cache/TestLowLevelCacheImpl.java  |   520 +
 .../llap/cache/TestLowLevelLrfuCachePolicy.java |   322 +
 .../hive/llap/cache/TestOrcMetadataCache.java   |   112 +
 .../hive/llap/daemon/MiniLlapCluster.java       |   192 +
 .../daemon/impl/TaskExecutorTestHelpers.java    |   243 +
 .../impl/TestLlapDaemonProtocolServerImpl.java  |    61 +
 .../daemon/impl/TestTaskExecutorService.java    |   290 +
 .../TestFirstInFirstOutComparator.java          |   321 +
 .../TestShortestJobFirstComparator.java         |   199 +
 .../llap/tezplugins/TestTaskCommunicator.java   |   143 +
 .../app/rm/TestLlapTaskSchedulerService.java    |   454 +
 .../test/resources/llap-daemon-log4j.properties |    94 +
 .../src/test/resources/llap-daemon-site.xml     |    73 +
 llap-server/src/test/resources/log4j.properties |    19 +
 .../src/test/resources/webapps/llap/.keep       |     0
 metastore/if/hive_metastore.thrift              |    60 +
 metastore/pom.xml                               |   110 +-
 .../upgrade/derby/021-HIVE-11970.derby.sql      |     6 +
 .../upgrade/derby/hive-schema-1.3.0.derby.sql   |    12 +-
 .../upgrade/derby/hive-schema-2.0.0.derby.sql   |    12 +-
 .../derby/upgrade-1.2.0-to-1.3.0.derby.sql      |     1 +
 .../derby/upgrade-1.2.0-to-2.0.0.derby.sql      |     3 +-
 .../upgrade/mssql/007-HIVE-11970.mssql.sql      |     6 +
 .../upgrade/mssql/hive-schema-1.3.0.mssql.sql   |    12 +-
 .../upgrade/mssql/hive-schema-2.0.0.mssql.sql   |    12 +-
 .../mssql/upgrade-1.2.0-to-1.3.0.mssql.sql      |     1 +
 .../mssql/upgrade-1.2.0-to-2.0.0.mssql.sql      |     7 +-
 .../upgrade/mysql/022-HIVE-11970.mysql.sql      |     6 +
 .../upgrade/mysql/hive-schema-1.3.0.mysql.sql   |    12 +-
 .../upgrade/mysql/hive-schema-2.0.0.mysql.sql   |    12 +-
 .../mysql/upgrade-1.2.0-to-1.3.0.mysql.sql      |     1 +
 .../mysql/upgrade-1.2.0-to-2.0.0.mysql.sql      |     2 +
 .../upgrade/oracle/022-HIVE-11970.oracle.sql    |    23 +
 .../oracle/hive-schema-0.13.0.oracle.sql        |    10 +-
 .../oracle/hive-schema-0.14.0.oracle.sql        |    10 +-
 .../upgrade/oracle/hive-schema-1.3.0.oracle.sql |    12 +-
 .../upgrade/oracle/hive-schema-2.0.0.oracle.sql |    12 +-
 .../oracle/hive-txn-schema-0.13.0.oracle.sql    |    10 +-
 .../oracle/hive-txn-schema-0.14.0.oracle.sql    |    10 +-
 .../oracle/upgrade-1.2.0-to-1.3.0.oracle.sql    |     2 +
 .../oracle/upgrade-1.2.0-to-2.0.0.oracle.sql    |     2 +
 .../postgres/021-HIVE-11970.postgres.sql        |     6 +
 .../postgres/hive-schema-1.3.0.postgres.sql     |    12 +-
 .../postgres/hive-schema-2.0.0.postgres.sql     |    12 +-
 .../upgrade-1.2.0-to-1.3.0.postgres.sql         |     1 +
 .../upgrade-1.2.0-to-2.0.0.postgres.sql         |     1 +
 .../apache/hadoop/hive/metastore/Metastore.java |  1331 +
 .../metastore/hbase/HbaseMetastoreProto.java    | 34901 +++++++++++++++++
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp  | 32381 ++++++++++-----
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.h    |  2989 +-
 .../ThriftHiveMetastore_server.skeleton.cpp     |    25 +
 .../thrift/gen-cpp/hive_metastore_constants.cpp |     2 +-
 .../thrift/gen-cpp/hive_metastore_constants.h   |     2 +-
 .../gen/thrift/gen-cpp/hive_metastore_types.cpp |  3516 +-
 .../gen/thrift/gen-cpp/hive_metastore_types.h   |  1666 +-
 .../hive/metastore/api/AbortTxnRequest.java     |     6 +-
 .../metastore/api/AddDynamicPartitions.java     |     6 +-
 .../metastore/api/AddPartitionsRequest.java     |     8 +-
 .../hive/metastore/api/AddPartitionsResult.java |     4 +-
 .../hadoop/hive/metastore/api/AggrStats.java    |     6 +-
 .../metastore/api/AlreadyExistsException.java   |     4 +-
 .../metastore/api/BinaryColumnStatsData.java    |    10 +-
 .../metastore/api/BooleanColumnStatsData.java   |    10 +-
 .../hive/metastore/api/CheckLockRequest.java    |     6 +-
 .../metastore/api/ClearFileMetadataRequest.java |   438 +
 .../metastore/api/ClearFileMetadataResult.java  |   283 +
 .../hive/metastore/api/ColumnStatistics.java    |     4 +-
 .../metastore/api/ColumnStatisticsData.java     |     2 +-
 .../metastore/api/ColumnStatisticsDesc.java     |     8 +-
 .../hive/metastore/api/ColumnStatisticsObj.java |     4 +-
 .../hive/metastore/api/CommitTxnRequest.java    |     6 +-
 .../hive/metastore/api/CompactionRequest.java   |     4 +-
 .../hive/metastore/api/CompactionType.java      |     2 +-
 .../api/ConfigValSecurityException.java         |     4 +-
 .../api/CurrentNotificationEventId.java         |     6 +-
 .../hadoop/hive/metastore/api/Database.java     |     4 +-
 .../apache/hadoop/hive/metastore/api/Date.java  |     6 +-
 .../hive/metastore/api/DateColumnStatsData.java |     8 +-
 .../hadoop/hive/metastore/api/Decimal.java      |     6 +-
 .../metastore/api/DecimalColumnStatsData.java   |     8 +-
 .../metastore/api/DoubleColumnStatsData.java    |    12 +-
 .../hive/metastore/api/DropPartitionsExpr.java  |     6 +-
 .../metastore/api/DropPartitionsRequest.java    |    12 +-
 .../metastore/api/DropPartitionsResult.java     |     4 +-
 .../hive/metastore/api/EnvironmentContext.java  |     4 +-
 .../hive/metastore/api/EventRequestType.java    |     2 +-
 .../hadoop/hive/metastore/api/FieldSchema.java  |     4 +-
 .../metastore/api/FileMetadataExprType.java     |    42 +
 .../hive/metastore/api/FireEventRequest.java    |     6 +-
 .../metastore/api/FireEventRequestData.java     |     2 +-
 .../hive/metastore/api/FireEventResponse.java   |     4 +-
 .../hadoop/hive/metastore/api/Function.java     |     6 +-
 .../hadoop/hive/metastore/api/FunctionType.java |     2 +-
 .../metastore/api/GetAllFunctionsResponse.java  |    40 +-
 .../api/GetFileMetadataByExprRequest.java       |   773 +
 .../api/GetFileMetadataByExprResult.java        |   553 +
 .../metastore/api/GetFileMetadataRequest.java   |   438 +
 .../metastore/api/GetFileMetadataResult.java    |   540 +
 .../metastore/api/GetOpenTxnsInfoResponse.java  |     6 +-
 .../hive/metastore/api/GetOpenTxnsResponse.java |     6 +-
 .../api/GetPrincipalsInRoleRequest.java         |     4 +-
 .../api/GetPrincipalsInRoleResponse.java        |     4 +-
 .../api/GetRoleGrantsForPrincipalRequest.java   |     4 +-
 .../api/GetRoleGrantsForPrincipalResponse.java  |     4 +-
 .../api/GrantRevokePrivilegeRequest.java        |     6 +-
 .../api/GrantRevokePrivilegeResponse.java       |     6 +-
 .../metastore/api/GrantRevokeRoleRequest.java   |     6 +-
 .../metastore/api/GrantRevokeRoleResponse.java  |     6 +-
 .../hive/metastore/api/GrantRevokeType.java     |     2 +-
 .../hive/metastore/api/HeartbeatRequest.java    |     8 +-
 .../metastore/api/HeartbeatTxnRangeRequest.java |     8 +-
 .../api/HeartbeatTxnRangeResponse.java          |     4 +-
 .../hive/metastore/api/HiveObjectPrivilege.java |     4 +-
 .../hive/metastore/api/HiveObjectRef.java       |     4 +-
 .../hive/metastore/api/HiveObjectType.java      |     2 +-
 .../apache/hadoop/hive/metastore/api/Index.java |    10 +-
 .../api/IndexAlreadyExistsException.java        |     4 +-
 .../metastore/api/InsertEventRequestData.java   |     4 +-
 .../metastore/api/InvalidInputException.java    |     4 +-
 .../metastore/api/InvalidObjectException.java   |     4 +-
 .../api/InvalidOperationException.java          |     4 +-
 .../api/InvalidPartitionException.java          |     4 +-
 .../hive/metastore/api/LockComponent.java       |     4 +-
 .../hadoop/hive/metastore/api/LockLevel.java    |     2 +-
 .../hadoop/hive/metastore/api/LockRequest.java  |     6 +-
 .../hadoop/hive/metastore/api/LockResponse.java |     6 +-
 .../hadoop/hive/metastore/api/LockState.java    |     2 +-
 .../hadoop/hive/metastore/api/LockType.java     |     2 +-
 .../hive/metastore/api/LongColumnStatsData.java |    12 +-
 .../hive/metastore/api/MetaException.java       |     4 +-
 .../hive/metastore/api/MetadataPpdResult.java   |   517 +
 .../hive/metastore/api/NoSuchLockException.java |     4 +-
 .../metastore/api/NoSuchObjectException.java    |     4 +-
 .../hive/metastore/api/NoSuchTxnException.java  |     4 +-
 .../hive/metastore/api/NotificationEvent.java   |     8 +-
 .../metastore/api/NotificationEventRequest.java |     8 +-
 .../api/NotificationEventResponse.java          |     4 +-
 .../hive/metastore/api/OpenTxnRequest.java      |     6 +-
 .../hive/metastore/api/OpenTxnsResponse.java    |     4 +-
 .../apache/hadoop/hive/metastore/api/Order.java |     6 +-
 .../hadoop/hive/metastore/api/Partition.java    |     8 +-
 .../hive/metastore/api/PartitionEventType.java  |     2 +-
 .../api/PartitionListComposingSpec.java         |     4 +-
 .../hive/metastore/api/PartitionSpec.java       |     4 +-
 .../api/PartitionSpecWithSharedSD.java          |     4 +-
 .../hive/metastore/api/PartitionWithoutSD.java  |     8 +-
 .../metastore/api/PartitionsByExprRequest.java  |     6 +-
 .../metastore/api/PartitionsByExprResult.java   |     6 +-
 .../metastore/api/PartitionsStatsRequest.java   |     4 +-
 .../metastore/api/PartitionsStatsResult.java    |     4 +-
 .../metastore/api/PrincipalPrivilegeSet.java    |     4 +-
 .../hive/metastore/api/PrincipalType.java       |     2 +-
 .../hadoop/hive/metastore/api/PrivilegeBag.java |     4 +-
 .../hive/metastore/api/PrivilegeGrantInfo.java  |     8 +-
 .../metastore/api/PutFileMetadataRequest.java   |   588 +
 .../metastore/api/PutFileMetadataResult.java    |   283 +
 .../hive/metastore/api/RequestPartsSpec.java    |     2 +-
 .../hadoop/hive/metastore/api/ResourceType.java |     2 +-
 .../hadoop/hive/metastore/api/ResourceUri.java  |     4 +-
 .../apache/hadoop/hive/metastore/api/Role.java  |     6 +-
 .../hive/metastore/api/RolePrincipalGrant.java  |     8 +-
 .../hadoop/hive/metastore/api/Schema.java       |     4 +-
 .../hadoop/hive/metastore/api/SerDeInfo.java    |     4 +-
 .../api/SetPartitionsStatsRequest.java          |     4 +-
 .../hive/metastore/api/ShowCompactRequest.java  |     4 +-
 .../hive/metastore/api/ShowCompactResponse.java |     4 +-
 .../api/ShowCompactResponseElement.java         |     6 +-
 .../hive/metastore/api/ShowLocksRequest.java    |     4 +-
 .../hive/metastore/api/ShowLocksResponse.java   |     4 +-
 .../metastore/api/ShowLocksResponseElement.java |    12 +-
 .../hadoop/hive/metastore/api/SkewedInfo.java   |     4 +-
 .../hive/metastore/api/StorageDescriptor.java   |    10 +-
 .../metastore/api/StringColumnStatsData.java    |    12 +-
 .../apache/hadoop/hive/metastore/api/Table.java |    12 +-
 .../hive/metastore/api/TableStatsRequest.java   |     4 +-
 .../hive/metastore/api/TableStatsResult.java    |     4 +-
 .../hive/metastore/api/ThriftHiveMetastore.java |  7884 +++-
 .../hive/metastore/api/TxnAbortedException.java |     4 +-
 .../hadoop/hive/metastore/api/TxnInfo.java      |     6 +-
 .../hive/metastore/api/TxnOpenException.java    |     4 +-
 .../hadoop/hive/metastore/api/TxnState.java     |     2 +-
 .../apache/hadoop/hive/metastore/api/Type.java  |     4 +-
 .../hive/metastore/api/UnknownDBException.java  |     4 +-
 .../api/UnknownPartitionException.java          |     4 +-
 .../metastore/api/UnknownTableException.java    |     4 +-
 .../hive/metastore/api/UnlockRequest.java       |     6 +-
 .../hadoop/hive/metastore/api/Version.java      |     4 +-
 .../metastore/api/hive_metastoreConstants.java  |     2 +-
 .../gen-php/metastore/ThriftHiveMetastore.php   |  2812 +-
 .../src/gen/thrift/gen-php/metastore/Types.php  |  1037 +-
 .../hive_metastore/ThriftHiveMetastore-remote   |    51 +-
 .../hive_metastore/ThriftHiveMetastore.py       |  4109 +-
 .../thrift/gen-py/hive_metastore/constants.py   |     2 +-
 .../gen/thrift/gen-py/hive_metastore/ttypes.py  |  1304 +-
 .../thrift/gen-rb/hive_metastore_constants.rb   |     2 +-
 .../gen/thrift/gen-rb/hive_metastore_types.rb   |   177 +-
 .../gen/thrift/gen-rb/thrift_hive_metastore.rb  |   269 +-
 .../hive/metastore/FileMetadataHandler.java     |    30 +
 .../hadoop/hive/metastore/HiveAlterHandler.java |    40 +-
 .../hadoop/hive/metastore/HiveMetaStore.java    |   422 +-
 .../hive/metastore/HiveMetaStoreClient.java     |   216 +-
 .../hadoop/hive/metastore/IMetaStoreClient.java |    33 +
 .../hive/metastore/MetaStoreDirectSql.java      |    39 +-
 .../hive/metastore/MetaStoreSchemaInfo.java     |    47 +
 .../hadoop/hive/metastore/MetaStoreUtils.java   |    50 +-
 .../hadoop/hive/metastore/ObjectStore.java      |   479 +-
 .../hive/metastore/PartFilterExprUtil.java      |   149 +
 .../metastore/PartitionExpressionProxy.java     |    20 +
 .../apache/hadoop/hive/metastore/RawStore.java  |    98 +-
 .../hadoop/hive/metastore/RawStoreProxy.java    |     5 +-
 .../hive/metastore/RetryingHMSHandler.java      |    33 +-
 .../apache/hadoop/hive/metastore/Warehouse.java |     4 +-
 .../filemeta/OrcFileMetadataHandler.java        |    63 +
 .../hbase/AggrStatsInvalidatorFilter.java       |   121 +
 .../hadoop/hive/metastore/hbase/Counter.java    |    53 +
 .../hive/metastore/hbase/HBaseConnection.java   |    96 +
 .../metastore/hbase/HBaseFilterPlanUtil.java    |   612 +
 .../hive/metastore/hbase/HBaseImport.java       |   535 +
 .../hive/metastore/hbase/HBaseReadWrite.java    |  2121 +
 .../hive/metastore/hbase/HBaseSchemaTool.java   |   240 +
 .../hadoop/hive/metastore/hbase/HBaseStore.java |  2430 ++
 .../hadoop/hive/metastore/hbase/HBaseUtils.java |  1340 +
 .../hive/metastore/hbase/ObjectCache.java       |    81 +
 .../hive/metastore/hbase/PartitionCache.java    |   168 +
 .../metastore/hbase/PartitionKeyComparator.java |   292 +
 .../hbase/SharedStorageDescriptor.java          |   251 +
 .../hadoop/hive/metastore/hbase/StatsCache.java |   326 +
 .../metastore/hbase/TephraHBaseConnection.java  |   127 +
 .../metastore/hbase/VanillaHBaseConnection.java |   137 +
 .../stats/BinaryColumnStatsAggregator.java      |    35 +
 .../stats/BooleanColumnStatsAggregator.java     |    35 +
 .../hbase/stats/ColumnStatsAggregator.java      |    26 +
 .../stats/ColumnStatsAggregatorFactory.java     |    94 +
 .../stats/DecimalColumnStatsAggregator.java     |    43 +
 .../stats/DoubleColumnStatsAggregator.java      |    36 +
 .../hbase/stats/LongColumnStatsAggregator.java  |    36 +
 .../stats/StringColumnStatsAggregator.java      |    36 +
 .../hive/metastore/parser/ExpressionTree.java   |     9 +-
 .../metastore/txn/CompactionTxnHandler.java     |    36 +-
 .../hadoop/hive/metastore/txn/TxnDbUtil.java    |     4 +-
 .../hadoop/hive/metastore/txn/TxnHandler.java   |   225 +-
 .../metastore/hbase/hbase_metastore_proto.proto |   282 +
 .../hadoop/hive/metastore/metastore.proto       |    29 +
 .../metastore/AlternateFailurePreListener.java  |     2 +-
 .../DummyRawStoreControlledCommit.java          |    68 +-
 .../DummyRawStoreForJdoConnection.java          |    61 +-
 .../MockPartitionExpressionForMetastore.java    |    12 +
 .../hadoop/hive/metastore/TestObjectStore.java  |    55 +-
 .../hadoop/hive/metastore/hbase/MockUtils.java  |   211 +
 .../hbase/TestHBaseAggregateStatsCache.java     |   316 +
 .../hbase/TestHBaseFilterPlanUtil.java          |   483 +
 .../hive/metastore/hbase/TestHBaseStore.java    |  1307 +
 .../metastore/hbase/TestHBaseStoreCached.java   |   378 +
 .../hbase/TestSharedStorageDescriptor.java      |   153 +
 packaging/pom.xml                               |    15 +
 packaging/src/main/assembly/bin.xml             |    25 +
 pom.xml                                         |   255 +-
 ql/pom.xml                                      |   163 +-
 .../gen/thrift/gen-cpp/queryplan_constants.cpp  |     2 +-
 ql/src/gen/thrift/gen-cpp/queryplan_constants.h |     2 +-
 ql/src/gen/thrift/gen-cpp/queryplan_types.cpp   |   162 +-
 ql/src/gen/thrift/gen-cpp/queryplan_types.h     |    79 +-
 .../hadoop/hive/ql/plan/api/Adjacency.java      |     4 +-
 .../hadoop/hive/ql/plan/api/AdjacencyType.java  |     2 +-
 .../apache/hadoop/hive/ql/plan/api/Graph.java   |     4 +-
 .../hadoop/hive/ql/plan/api/NodeType.java       |     2 +-
 .../hadoop/hive/ql/plan/api/Operator.java       |     8 +-
 .../hadoop/hive/ql/plan/api/OperatorType.java   |     2 +-
 .../apache/hadoop/hive/ql/plan/api/Query.java   |     8 +-
 .../hadoop/hive/ql/plan/api/QueryPlan.java      |     8 +-
 .../apache/hadoop/hive/ql/plan/api/Stage.java   |     8 +-
 .../hadoop/hive/ql/plan/api/StageType.java      |     2 +-
 .../apache/hadoop/hive/ql/plan/api/Task.java    |     8 +-
 .../hadoop/hive/ql/plan/api/TaskType.java       |     2 +-
 ql/src/gen/thrift/gen-php/Types.php             |     4 +-
 ql/src/gen/thrift/gen-py/queryplan/constants.py |     2 +-
 ql/src/gen/thrift/gen-py/queryplan/ttypes.py    |    80 +-
 ql/src/gen/thrift/gen-rb/queryplan_constants.rb |     2 +-
 ql/src/gen/thrift/gen-rb/queryplan_types.rb     |     2 +-
 .../ExpressionTemplates/IfExprColumnColumn.txt  |   186 -
 .../org/apache/hadoop/hive/llap/DebugUtils.java |    78 +
 .../org/apache/hadoop/hive/llap/LogLevels.java  |    53 +
 .../java/org/apache/hadoop/hive/ql/Driver.java  |   130 +-
 .../org/apache/hadoop/hive/ql/ErrorMsg.java     |     6 +-
 .../apache/hadoop/hive/ql/QueryProperties.java  |    10 -
 .../hive/ql/exec/AbstractFileMergeOperator.java |     5 +-
 .../hive/ql/exec/AbstractMapJoinOperator.java   |     6 +-
 .../hive/ql/exec/AppMasterEventOperator.java    |     5 +-
 .../hadoop/hive/ql/exec/CollectOperator.java    |     5 +-
 .../apache/hadoop/hive/ql/exec/ColumnInfo.java  |     2 +-
 .../hadoop/hive/ql/exec/CommonJoinOperator.java |     5 +-
 .../hive/ql/exec/CommonMergeJoinOperator.java   |     6 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |   355 +-
 .../hadoop/hive/ql/exec/DemuxOperator.java      |     6 +-
 .../hadoop/hive/ql/exec/DummyStoreOperator.java |     5 +-
 .../hadoop/hive/ql/exec/FetchOperator.java      |     7 +-
 .../hadoop/hive/ql/exec/FileSinkOperator.java   |    69 +-
 .../hadoop/hive/ql/exec/FilterOperator.java     |     5 +-
 .../hadoop/hive/ql/exec/ForwardOperator.java    |     4 +-
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |   100 +-
 .../hive/ql/exec/GlobalWorkMapFactory.java      |   105 +
 .../hadoop/hive/ql/exec/GroupByOperator.java    |     5 +-
 .../hive/ql/exec/HashTableDummyOperator.java    |     5 +-
 .../hive/ql/exec/HashTableSinkOperator.java     |     6 +-
 .../hadoop/hive/ql/exec/JoinOperator.java       |     5 +-
 .../ql/exec/LateralViewForwardOperator.java     |     4 +-
 .../hive/ql/exec/LateralViewJoinOperator.java   |     6 +-
 .../hadoop/hive/ql/exec/LimitOperator.java      |     5 +-
 .../hadoop/hive/ql/exec/ListSinkOperator.java   |     5 +-
 .../hadoop/hive/ql/exec/MapJoinOperator.java    |    31 +-
 .../apache/hadoop/hive/ql/exec/MapOperator.java |    20 +-
 .../apache/hadoop/hive/ql/exec/MoveTask.java    |    13 +-
 .../apache/hadoop/hive/ql/exec/MuxOperator.java |     5 +-
 .../hadoop/hive/ql/exec/ObjectCacheFactory.java |    51 +-
 .../hadoop/hive/ql/exec/ObjectCacheWrapper.java |    57 +
 .../apache/hadoop/hive/ql/exec/Operator.java    |    70 +-
 .../hadoop/hive/ql/exec/OperatorUtils.java      |    21 +
 .../apache/hadoop/hive/ql/exec/PTFOperator.java |     5 +-
 .../hadoop/hive/ql/exec/ReduceSinkOperator.java |    28 +-
 .../hadoop/hive/ql/exec/SMBMapJoinOperator.java |     5 +-
 .../hadoop/hive/ql/exec/ScriptOperator.java     |     5 +-
 .../hadoop/hive/ql/exec/SelectOperator.java     |     7 +-
 .../ql/exec/SparkHashTableSinkOperator.java     |     8 +-
 .../hadoop/hive/ql/exec/StatsNoJobTask.java     |    25 +-
 .../apache/hadoop/hive/ql/exec/StatsTask.java   |    59 +-
 .../hadoop/hive/ql/exec/TableScanOperator.java  |    30 +-
 .../org/apache/hadoop/hive/ql/exec/Task.java    |     8 +
 .../apache/hadoop/hive/ql/exec/TopNHash.java    |     2 +-
 .../hadoop/hive/ql/exec/UDTFOperator.java       |     5 +-
 .../hadoop/hive/ql/exec/UnionOperator.java      |     5 +-
 .../apache/hadoop/hive/ql/exec/Utilities.java   |   135 +-
 .../hadoop/hive/ql/exec/mr/ExecDriver.java      |    16 +-
 .../hadoop/hive/ql/exec/mr/ExecMapper.java      |     2 +-
 .../hadoop/hive/ql/exec/mr/ExecReducer.java     |     3 +-
 .../persistence/MapJoinBytesTableContainer.java |     4 +-
 .../ql/exec/persistence/PTFRowContainer.java    |    14 +-
 .../hive/ql/exec/persistence/RowContainer.java  |    12 +-
 .../ql/exec/spark/RemoteHiveSparkClient.java    |    22 +
 .../ql/exec/spark/SparkMapRecordHandler.java    |     2 +-
 .../hadoop/hive/ql/exec/spark/SparkPlan.java    |     3 +-
 .../hive/ql/exec/spark/SparkPlanGenerator.java  |    31 +-
 .../hive/ql/exec/spark/SparkRecordHandler.java  |     3 +-
 .../ql/exec/spark/SparkReduceRecordHandler.java |     2 +-
 .../hadoop/hive/ql/exec/spark/SparkTask.java    |     2 +-
 .../ql/exec/spark/status/SparkJobMonitor.java   |     2 +-
 .../ql/exec/tez/ColumnarSplitSizeEstimator.java |    59 +
 .../hive/ql/exec/tez/CustomPartitionVertex.java |     6 +-
 .../hadoop/hive/ql/exec/tez/DagUtils.java       |    35 +-
 .../hive/ql/exec/tez/HashTableLoader.java       |    25 +-
 .../hive/ql/exec/tez/HiveSplitGenerator.java    |    62 +-
 .../hadoop/hive/ql/exec/tez/InPlaceUpdates.java |     6 +-
 .../hive/ql/exec/tez/LlapObjectCache.java       |   141 +
 .../hive/ql/exec/tez/MapRecordProcessor.java    |    95 +-
 .../hive/ql/exec/tez/MapRecordSource.java       |    18 +
 .../ql/exec/tez/MergeFileRecordProcessor.java   |    15 +-
 .../hive/ql/exec/tez/RecordProcessor.java       |    28 +-
 .../hive/ql/exec/tez/ReduceRecordProcessor.java |    46 +-
 .../hive/ql/exec/tez/ReduceRecordSource.java    |     3 +-
 .../hadoop/hive/ql/exec/tez/SplitGrouper.java   |    32 +-
 .../hadoop/hive/ql/exec/tez/TezJobMonitor.java  |    67 +-
 .../hadoop/hive/ql/exec/tez/TezProcessor.java   |    43 +-
 .../hive/ql/exec/tez/TezSessionPoolManager.java |    40 +-
 .../hive/ql/exec/tez/TezSessionState.java       |   271 +-
 .../apache/hadoop/hive/ql/exec/tez/TezTask.java |    71 +-
 .../ql/exec/tez/tools/KeyValuesInputMerger.java |     1 -
 .../vector/VectorAppMasterEventOperator.java    |     8 +-
 .../ql/exec/vector/VectorFileSinkOperator.java  |     9 +-
 .../ql/exec/vector/VectorFilterOperator.java    |     6 +-
 .../ql/exec/vector/VectorGroupByOperator.java   |    11 +-
 .../exec/vector/VectorMapJoinBaseOperator.java  |     7 +-
 .../ql/exec/vector/VectorMapJoinOperator.java   |     6 +-
 .../VectorMapJoinOuterFilteredOperator.java     |     6 +-
 .../exec/vector/VectorReduceSinkOperator.java   |     7 +-
 .../exec/vector/VectorSMBMapJoinOperator.java   |    20 +-
 .../ql/exec/vector/VectorSelectOperator.java    |     7 +-
 .../VectorSparkHashTableSinkOperator.java       |     7 +-
 ...VectorSparkPartitionPruningSinkOperator.java |     7 +-
 .../ql/exec/vector/VectorizationContext.java    |   296 +-
 .../ql/exec/vector/VectorizedBatchUtil.java     |   183 +-
 .../ql/exec/vector/VectorizedRowBatchCtx.java   |    55 +-
 .../expressions/FilterStringColumnInList.java   |    13 +-
 .../expressions/FilterStructColumnInList.java   |   178 +
 .../exec/vector/expressions/IStructInExpr.java  |    36 +
 .../IfExprDoubleColumnDoubleColumn.java         |   167 +
 .../expressions/IfExprLongColumnLongColumn.java |   166 +
 .../expressions/LongColEqualLongColumn.java     |   169 +
 .../expressions/LongColEqualLongScalar.java     |   151 +
 .../LongColGreaterEqualLongColumn.java          |   169 +
 .../LongColGreaterEqualLongScalar.java          |   151 +
 .../expressions/LongColGreaterLongColumn.java   |   169 +
 .../expressions/LongColGreaterLongScalar.java   |   151 +
 .../expressions/LongColLessEqualLongColumn.java |   169 +
 .../expressions/LongColLessEqualLongScalar.java |   151 +
 .../expressions/LongColLessLongColumn.java      |   169 +
 .../expressions/LongColLessLongScalar.java      |   151 +
 .../expressions/LongColNotEqualLongColumn.java  |   169 +
 .../expressions/LongColNotEqualLongScalar.java  |   151 +
 .../expressions/LongScalarEqualLongColumn.java  |   151 +
 .../LongScalarGreaterEqualLongColumn.java       |   151 +
 .../LongScalarGreaterLongColumn.java            |   151 +
 .../LongScalarLessEqualLongColumn.java          |   151 +
 .../expressions/LongScalarLessLongColumn.java   |   151 +
 .../LongScalarNotEqualLongColumn.java           |   151 +
 .../ql/exec/vector/expressions/NullUtil.java    |    27 +
 .../vector/expressions/StringColumnInList.java  |     4 +
 .../vector/expressions/StructColumnInList.java  |   174 +
 .../mapjoin/VectorMapJoinCommonOperator.java    |     9 +-
 .../hadoop/hive/ql/hooks/LineageLogger.java     |    93 +-
 .../hive/ql/index/AggregateIndexHandler.java    |     1 -
 .../hive/ql/index/TableBasedIndexHandler.java   |     7 -
 .../ql/index/bitmap/BitmapIndexHandler.java     |     1 -
 .../ql/index/compact/CompactIndexHandler.java   |     1 -
 .../org/apache/hadoop/hive/ql/io/AcidUtils.java |    15 +-
 .../apache/hadoop/hive/ql/io/ColumnarSplit.java |    33 +
 .../hive/ql/io/CombineHiveInputFormat.java      |    10 +-
 .../hive/ql/io/DefaultHivePartitioner.java      |     3 +-
 .../org/apache/hadoop/hive/ql/io/HdfsUtils.java |    61 +
 .../hadoop/hive/ql/io/HiveFileFormatUtils.java  |    95 +-
 .../hadoop/hive/ql/io/HiveInputFormat.java      |    60 +-
 .../apache/hadoop/hive/ql/io/IOContextMap.java  |    39 +-
 .../hadoop/hive/ql/io/InputFormatChecker.java   |     5 +-
 .../io/LlapWrappableInputFormatInterface.java   |    22 +
 .../hadoop/hive/ql/io/RCFileInputFormat.java    |     3 +-
 .../ql/io/SequenceFileInputFormatChecker.java   |     3 +-
 .../hive/ql/io/VectorizedRCFileInputFormat.java |     3 +-
 .../ql/io/avro/AvroGenericRecordReader.java     |    27 +-
 .../hadoop/hive/ql/io/orc/BitFieldReader.java   |   106 +-
 .../hive/ql/io/orc/ColumnStatisticsImpl.java    |    55 +-
 .../hadoop/hive/ql/io/orc/DataReader.java       |    58 +
 .../hadoop/hive/ql/io/orc/DynamicByteArray.java |     2 +-
 .../apache/hadoop/hive/ql/io/orc/FileDump.java  |   197 +-
 .../hadoop/hive/ql/io/orc/FileMetaInfo.java     |    64 +
 .../hadoop/hive/ql/io/orc/FileMetadata.java     |    63 +
 .../apache/hadoop/hive/ql/io/orc/InStream.java  |    93 +-
 .../hadoop/hive/ql/io/orc/IntegerReader.java    |     5 +-
 .../hadoop/hive/ql/io/orc/JsonFileDump.java     |   216 +-
 .../apache/hadoop/hive/ql/io/orc/Metadata.java  |    45 -
 .../hadoop/hive/ql/io/orc/MetadataReader.java   |   105 +-
 .../hive/ql/io/orc/MetadataReaderImpl.java      |   123 +
 .../apache/hadoop/hive/ql/io/orc/OrcFile.java   |    75 +-
 .../hadoop/hive/ql/io/orc/OrcInputFormat.java   |   531 +-
 .../hadoop/hive/ql/io/orc/OrcNewSplit.java      |     6 +-
 .../hadoop/hive/ql/io/orc/OrcOutputFormat.java  |   145 +-
 .../hive/ql/io/orc/OrcRawRecordMerger.java      |     3 +
 .../hadoop/hive/ql/io/orc/OrcRecordUpdater.java |    34 +-
 .../apache/hadoop/hive/ql/io/orc/OrcSplit.java  |    18 +-
 .../apache/hadoop/hive/ql/io/orc/OrcUtils.java  |   177 +-
 .../apache/hadoop/hive/ql/io/orc/OutStream.java |     4 +-
 .../apache/hadoop/hive/ql/io/orc/Reader.java    |    48 +-
 .../hadoop/hive/ql/io/orc/ReaderImpl.java       |   288 +-
 .../hadoop/hive/ql/io/orc/RecordReaderImpl.java |   228 +-
 .../hive/ql/io/orc/RecordReaderUtils.java       |   101 +-
 .../hive/ql/io/orc/RunLengthByteReader.java     |    28 +-
 .../hive/ql/io/orc/RunLengthIntegerReader.java  |    28 +-
 .../ql/io/orc/RunLengthIntegerReaderV2.java     |    76 +-
 .../hive/ql/io/orc/SerializationUtils.java      |     6 +-
 .../ql/io/orc/SettableUncompressedStream.java   |    44 +
 .../hadoop/hive/ql/io/orc/StreamName.java       |     2 +-
 .../hive/ql/io/orc/TreeReaderFactory.java       |   104 +-
 .../hadoop/hive/ql/io/orc/TypeDescription.java  |   466 +
 .../ql/io/orc/VectorizedOrcInputFormat.java     |     2 +-
 .../apache/hadoop/hive/ql/io/orc/Writer.java    |     9 +
 .../hadoop/hive/ql/io/orc/WriterImpl.java       |   549 +-
 .../hive/ql/io/orc/encoded/CacheChunk.java      |    69 +
 .../hadoop/hive/ql/io/orc/encoded/Consumer.java |    30 +
 .../hive/ql/io/orc/encoded/EncodedOrcFile.java  |    30 +
 .../hive/ql/io/orc/encoded/EncodedReader.java   |    59 +
 .../ql/io/orc/encoded/EncodedReaderImpl.java    |  1326 +
 .../orc/encoded/EncodedTreeReaderFactory.java   |  1924 +
 .../hive/ql/io/orc/encoded/OrcBatchKey.java     |    60 +
 .../hive/ql/io/orc/encoded/OrcCacheKey.java     |    58 +
 .../hadoop/hive/ql/io/orc/encoded/Reader.java   |    72 +
 .../hive/ql/io/orc/encoded/ReaderImpl.java      |    42 +
 .../hive/ql/io/orc/encoded/StreamUtils.java     |    71 +
 .../serde/ArrayWritableObjectInspector.java     |     7 +
 .../ql/io/rcfile/stats/PartialScanMapper.java   |    12 +-
 .../ql/io/rcfile/stats/PartialScanTask.java     |    11 +-
 .../ql/io/rcfile/stats/PartialScanWork.java     |    14 +
 .../hive/ql/io/sarg/ConvertAstToSearchArg.java  |    11 +-
 .../apache/hadoop/hive/ql/lib/RuleRegExp.java   |    61 +-
 .../hadoop/hive/ql/lockmgr/DbLockManager.java   |    21 +
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java    |    34 +-
 .../hadoop/hive/ql/lockmgr/DummyTxnManager.java |     3 +-
 .../hadoop/hive/ql/lockmgr/HiveLockObject.java  |     6 +-
 .../zookeeper/ZooKeeperHiveLockManager.java     |    41 +
 .../apache/hadoop/hive/ql/log/PerfLogger.java   |   195 -
 .../apache/hadoop/hive/ql/metadata/Hive.java    |   262 +-
 .../hadoop/hive/ql/metadata/Partition.java      |    29 +-
 .../ql/metadata/SessionHiveMetaStoreClient.java |     2 +-
 .../apache/hadoop/hive/ql/metadata/Table.java   |     5 +-
 .../formatting/MetaDataPrettyFormatUtils.java   |    24 +-
 .../hive/ql/optimizer/ColumnPrunerProcCtx.java  |     2 +-
 .../ql/optimizer/ColumnPrunerProcFactory.java   |     9 +-
 .../optimizer/ConstantPropagateProcFactory.java |    11 +-
 .../hive/ql/optimizer/ConvertJoinMapJoin.java   |   103 +-
 .../hive/ql/optimizer/GenMRTableScan1.java      |     3 +
 .../hive/ql/optimizer/GenMapRedUtils.java       |    56 +-
 .../hive/ql/optimizer/SimpleFetchOptimizer.java |    11 +-
 .../optimizer/SortedDynPartitionOptimizer.java  |     7 +-
 .../hive/ql/optimizer/StatsOptimizer.java       |    84 +-
 .../ql/optimizer/calcite/HiveCalciteUtil.java   |    12 +-
 .../ql/optimizer/calcite/HiveConfigContext.java |    37 -
 .../calcite/HiveHepPlannerContext.java          |    37 +
 .../calcite/HiveVolcanoPlannerContext.java      |    37 +
 .../ql/optimizer/calcite/RelOptHiveTable.java   |    15 +
 .../calcite/cost/HiveVolcanoPlanner.java        |     6 +-
 .../functions/HiveSqlCountAggFunction.java      |    72 +
 .../functions/HiveSqlMinMaxAggFunction.java     |    49 +
 .../functions/HiveSqlSumAggFunction.java        |   125 +
 .../calcite/reloperators/HiveBetween.java       |    75 +
 .../optimizer/calcite/reloperators/HiveIn.java  |    41 +
 .../rules/HiveAggregateJoinTransposeRule.java   |   372 +
 .../calcite/rules/HivePreFilteringRule.java     |   135 +-
 .../calcite/rules/HiveRulesRegistry.java        |    44 +
 .../calcite/translator/HiveGBOpConvUtil.java    |    43 +-
 .../translator/PlanModifierForASTConv.java      |    11 +
 .../translator/PlanModifierForReturnPath.java   |    26 +
 .../translator/SqlFunctionConverter.java        |    56 +-
 .../correlation/CorrelationUtilities.java       |    33 -
 .../correlation/ReduceSinkDeDuplication.java    |    15 +-
 .../RewriteQueryUsingAggregateIndexCtx.java     |     2 +-
 .../hive/ql/optimizer/lineage/LineageCtx.java   |     9 +-
 .../hive/ql/optimizer/physical/LlapDecider.java |   447 +
 .../hive/ql/optimizer/physical/Vectorizer.java  |    91 +-
 .../ql/optimizer/physical/Vectorizer.java.orig  |  1744 +
 .../ppr/PartitionExpressionForMetastore.java    |    40 +
 .../hive/ql/optimizer/ppr/PartitionPruner.java  |    33 +-
 .../hadoop/hive/ql/parse/CalcitePlanner.java    |    44 +-
 .../ql/parse/ColumnStatsSemanticAnalyzer.java   |    16 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |    17 +
 .../hadoop/hive/ql/parse/FromClauseParser.g     |    30 +-
 .../hadoop/hive/ql/parse/GenTezUtils.java       |     2 +-
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |     1 -
 .../apache/hadoop/hive/ql/parse/HiveParser.g    |    17 +-
 .../hive/ql/parse/LoadSemanticAnalyzer.java     |    12 -
 .../hive/ql/parse/ProcessAnalyzeTable.java      |     4 +-
 .../hadoop/hive/ql/parse/QBParseInfo.java       |     9 -
 .../apache/hadoop/hive/ql/parse/QBSubQuery.java |     7 -
 .../hadoop/hive/ql/parse/SelectClauseParser.g   |     1 -
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |   149 +-
 .../hive/ql/parse/SemanticAnalyzerFactory.java  |     2 +
 .../hadoop/hive/ql/parse/SplitSample.java       |     4 +
 .../hadoop/hive/ql/parse/SubQueryUtils.java     |    11 -
 .../hadoop/hive/ql/parse/TaskCompiler.java      |     1 +
 .../hadoop/hive/ql/parse/TezCompiler.java       |     7 +
 .../hive/ql/parse/VariableSubstitution.java     |    60 -
 .../hive/ql/parse/spark/SparkCompiler.java      |     3 +-
 .../SparkPartitionPruningSinkOperator.java      |     5 +-
 .../parse/spark/SparkProcessAnalyzeTable.java   |     2 +
 .../apache/hadoop/hive/ql/plan/BaseWork.java    |    19 +
 .../org/apache/hadoop/hive/ql/plan/DDLWork.java |    21 +
 .../hive/ql/plan/DynamicPartitionCtx.java       |    27 -
 .../apache/hadoop/hive/ql/plan/ExplainWork.java |     4 +-
 .../hive/ql/plan/ExprNodeGenericFuncDesc.java   |    10 +-
 .../hadoop/hive/ql/plan/FileSinkDesc.java       |    16 +-
 .../hadoop/hive/ql/plan/HiveOperation.java      |     1 +
 .../hadoop/hive/ql/plan/LoadTableDesc.java      |    14 -
 .../org/apache/hadoop/hive/ql/plan/MapWork.java |    50 +-
 .../hadoop/hive/ql/plan/MergeJoinWork.java      |    34 +-
 .../apache/hadoop/hive/ql/plan/PlanUtils.java   |     3 +-
 .../apache/hadoop/hive/ql/plan/ReduceWork.java  |    20 +-
 .../hive/ql/plan/ShowCreateDatabaseDesc.java    |    94 +
 .../apache/hadoop/hive/ql/plan/StatsWork.java   |    15 +-
 .../hadoop/hive/ql/plan/TableScanDesc.java      |    12 +-
 .../org/apache/hadoop/hive/ql/plan/TezWork.java |    17 +-
 .../hadoop/hive/ql/plan/VectorGroupByDesc.java  |    10 +
 .../hadoop/hive/ql/ppd/ExprWalkerInfo.java      |    23 +-
 .../hadoop/hive/ql/ppd/OpProcFactory.java       |    10 +-
 .../ql/processors/AddResourceProcessor.java     |    11 +-
 .../ql/processors/CommandProcessorFactory.java  |     3 +
 .../hive/ql/processors/CompileProcessor.java    |    11 +-
 .../ql/processors/DeleteResourceProcessor.java  |    11 +-
 .../hadoop/hive/ql/processors/DfsProcessor.java |    11 +-
 .../hadoop/hive/ql/processors/SetProcessor.java |    62 +-
 .../AuthorizationPreEventListener.java          |     2 +-
 .../StorageBasedAuthorizationProvider.java      |    10 +-
 .../authorization/plugin/HiveOperationType.java |     1 +
 .../plugin/sqlstd/Operation2Privilege.java      |     2 +
 .../hadoop/hive/ql/session/SessionState.java    |   162 +-
 .../hive/ql/stats/CounterStatsAggregator.java   |    15 +-
 .../ql/stats/CounterStatsAggregatorSpark.java   |    13 +-
 .../ql/stats/CounterStatsAggregatorTez.java     |    17 +-
 .../hive/ql/stats/CounterStatsPublisher.java    |     9 +-
 .../hadoop/hive/ql/stats/StatsAggregator.java   |    23 +-
 .../hive/ql/stats/StatsCollectionContext.java   |    63 +
 .../stats/StatsCollectionTaskIndependent.java   |    25 -
 .../hadoop/hive/ql/stats/StatsFactory.java      |     3 +-
 .../hadoop/hive/ql/stats/StatsPublisher.java    |     8 +-
 .../apache/hadoop/hive/ql/stats/StatsUtils.java |   185 +-
 .../hive/ql/stats/fs/FSStatsAggregator.java     |    31 +-
 .../hive/ql/stats/fs/FSStatsPublisher.java      |    35 +-
 .../hive/ql/stats/jdbc/JDBCStatsAggregator.java |   264 -
 .../hive/ql/stats/jdbc/JDBCStatsPublisher.java  |   338 -
 .../ql/stats/jdbc/JDBCStatsSetupConstants.java  |    39 -
 .../hive/ql/stats/jdbc/JDBCStatsUtils.java      |   212 -
 .../hive/ql/txn/AcidHouseKeeperService.java     |     5 +-
 .../hadoop/hive/ql/txn/compactor/Cleaner.java   |     3 +-
 .../hive/ql/txn/compactor/CompactorMR.java      |   104 +-
 .../hive/ql/txn/compactor/CompactorThread.java  |     9 +-
 .../hadoop/hive/ql/txn/compactor/Initiator.java |     5 +-
 .../hadoop/hive/ql/txn/compactor/Worker.java    |     6 +-
 .../org/apache/hadoop/hive/ql/udf/UDFJson.java  |     2 +
 .../hive/ql/udf/generic/GenericUDAFSum.java     |     2 +-
 .../udf/generic/GenericUDAFSumEmptyIsZero.java  |    63 +
 .../udf/generic/GenericUDFBaseNwayCompare.java  |   115 +
 .../hive/ql/udf/generic/GenericUDFGreatest.java |    75 +-
 .../hive/ql/udf/generic/GenericUDFHash.java     |    11 +-
 .../hive/ql/udf/generic/GenericUDFIf.java       |     4 +-
 .../hive/ql/udf/generic/GenericUDFLeast.java    |     9 +-
 .../hive/ql/udf/generic/GenericUDFOPEqual.java  |     3 +
 .../generic/GenericUDFOPEqualOrGreaterThan.java |     3 +
 .../generic/GenericUDFOPEqualOrLessThan.java    |     3 +
 .../ql/udf/generic/GenericUDFOPGreaterThan.java |     3 +
 .../ql/udf/generic/GenericUDFOPLessThan.java    |     3 +
 .../ql/udf/generic/GenericUDFOPNotEqual.java    |     3 +
 .../hive/ql/udf/generic/GenericUDFUtils.java    |     3 +-
 .../hadoop/hive/ql/util/JavaDataModel.java      |    75 +-
 .../main/resources/hive-exec-log4j.properties   |    77 +
 ql/src/main/resources/hive-exec-log4j2.xml      |     4 +-
 .../hadoop/hive/ql/io/orc/orc_proto.proto       |     1 +
 .../hive/metastore/TestMetastoreExpr.java       |     2 +-
 .../hadoop/hive/ql/exec/TestExecDriver.java     |     2 +-
 .../hive/ql/exec/TestFileSinkOperator.java      |   405 +-
 .../ql/exec/TestStatsPublisherEnhanced.java     |   377 -
 .../persistence/TestBytesBytesMultiHashMap.java |     3 +
 .../ql/exec/persistence/TestHashPartition.java  |    29 +
 .../exec/persistence/TestPTFRowContainer.java   |    31 +-
 .../hive/ql/exec/tez/TestTezSessionPool.java    |    79 +-
 .../hadoop/hive/ql/exec/tez/TestTezTask.java    |     2 +
 .../exec/vector/TestVectorizationContext.java   |   102 +-
 .../TestVectorConditionalExpressions.java       |     3 +-
 .../vector/util/FakeCaptureOutputOperator.java  |     5 +-
 .../util/FakeVectorDataSourceOperator.java      |     4 +-
 .../hadoop/hive/ql/io/TestIOContextMap.java     |    76 +-
 .../ql/io/avro/TestAvroGenericRecordReader.java |    59 +
 .../hive/ql/io/orc/TestBitFieldReader.java      |    11 +-
 .../hadoop/hive/ql/io/orc/TestBitPack.java      |     2 +-
 .../hive/ql/io/orc/TestColumnStatistics.java    |    45 +-
 .../hadoop/hive/ql/io/orc/TestFileDump.java     |    50 -
 .../hadoop/hive/ql/io/orc/TestInStream.java     |    20 +-
 .../hive/ql/io/orc/TestInputOutputFormat.java   |    45 +-
 .../ql/io/orc/TestIntegerCompressionReader.java |     5 +-
 .../hadoop/hive/ql/io/orc/TestOrcFile.java      |    58 +-
 .../hive/ql/io/orc/TestOrcRawRecordMerger.java  |     2 +-
 .../hadoop/hive/ql/io/orc/TestOrcWideTable.java |   246 +-
 .../hive/ql/io/orc/TestRecordReaderImpl.java    |     2 +-
 .../hive/ql/io/orc/TestRunLengthByteReader.java |    10 +-
 .../ql/io/orc/TestRunLengthIntegerReader.java   |     8 +-
 .../hive/ql/io/orc/TestTypeDescription.java     |    67 +
 .../hive/ql/io/sarg/TestSearchArgumentImpl.java |    87 +-
 .../hive/ql/lockmgr/TestDbTxnManager.java       |     8 +-
 .../zookeeper/TestZookeeperLockManager.java     |    50 +
 .../hadoop/hive/ql/metadata/StringAppender.java |     2 +-
 .../hadoop/hive/ql/metadata/TestHive.java       |     2 +-
 .../calcite/TestCBORuleFiredOnlyOnce.java       |   168 +
 .../TestNegativePartitionPrunerCompactExpr.java |    27 +
 .../TestPositivePartitionPrunerCompactExpr.java |   115 +
 .../hadoop/hive/ql/parse/TestSplitSample.java   |    60 +
 .../hive/ql/processors/TestSetProcessor.java    |    54 +
 .../hive/ql/txn/compactor/CompactorTest.java    |     6 +-
 .../hive/ql/txn/compactor/TestWorker.java       |   120 +-
 .../ql/udf/generic/TestGenericUDFGreatest.java  |   153 +-
 .../ql/udf/generic/TestGenericUDFLeast.java     |   149 +-
 ql/src/test/queries/clientcompare/llap_0.q      |    12 +
 ql/src/test/queries/clientcompare/llap_0_00.qv  |     1 +
 ql/src/test/queries/clientcompare/llap_0_01.qv  |     1 +
 .../test/queries/clientnegative/ctasnullcol.q   |     2 +
 ql/src/test/queries/clientnegative/ddltime.q    |     6 -
 .../special_character_in_tabnames_1.q           |    13 +
 .../clientnegative/udf_greatest_error_2.q       |     2 +-
 .../clientnegative/udf_greatest_error_3.q       |     1 -
 .../clientnegative/udf_greatest_error_4.q       |     1 -
 .../acid_vectorization_partition.q              |     2 +-
 .../test/queries/clientpositive/add_jar_pfile.q |     8 +
 .../clientpositive/alter_table_update_status.q  |     4 +-
 .../queries/clientpositive/analyze_tbl_part.q   |     4 +-
 .../clientpositive/auto_sortmerge_join_6.q      |     1 +
 .../queries/clientpositive/avrocountemptytbl.q  |     8 +
 .../cbo_rp_gby2_map_multi_distinct.q            |    38 +
 .../queries/clientpositive/cbo_rp_lineage2.q    |   117 +
 .../cbo_rp_udaf_percentile_approx_23.q          |    97 +
 .../test/queries/clientpositive/constprog_dpp.q |     3 +-
 ql/src/test/queries/clientpositive/cross_join.q |     8 +
 ql/src/test/queries/clientpositive/ddltime.q    |    45 -
 .../test/queries/clientpositive/decimal_1_1.q   |     9 +
 .../clientpositive/drop_table_with_index.q      |    35 +
 .../dynpart_sort_opt_vectorization.q            |     4 +-
 .../clientpositive/dynpart_sort_optimization.q  |     4 +-
 .../clientpositive/dynpart_sort_optimization2.q |     2 -
 .../test/queries/clientpositive/escape_crlf.q   |    19 +
 .../test/queries/clientpositive/explainuser_1.q |     2 +
 .../clientpositive/filter_cond_pushdown.q       |     5 +
 .../clientpositive/groupby_join_pushdown.q      |    55 +
 .../identity_project_remove_skip.q              |     1 +
 .../infer_bucket_sort_multi_insert.q            |     1 +
 ql/src/test/queries/clientpositive/insert1.q    |     2 +
 .../test/queries/clientpositive/insert_into1.q  |    18 +-
 .../test/queries/clientpositive/insert_into2.q  |     8 +
 .../clientpositive/insertvalues_espchars.q      |     5 +
 ql/src/test/queries/clientpositive/join44.q     |    12 +
 .../queries/clientpositive/join_grp_diff_keys.q |    21 +
 ql/src/test/queries/clientpositive/join_parse.q |    20 +
 .../test/queries/clientpositive/json_serde1.q   |    36 +
 .../test/queries/clientpositive/lb_fs_stats.q   |     2 -
 ql/src/test/queries/clientpositive/lineage3.q   |    26 +
 .../queries/clientpositive/llap_partitioned.q   |    66 +
 .../queries/clientpositive/llap_uncompressed.q  |    48 +
 .../test/queries/clientpositive/llapdecider.q   |    64 +
 .../queries/clientpositive/load_non_hdfs_path.q |     6 +
 .../test/queries/clientpositive/load_orc_part.q |     5 +
 .../clientpositive/metadata_only_queries.q      |    17 +-
 .../metadata_only_queries_with_filters.q        |     2 +-
 ql/src/test/queries/clientpositive/mrr.q        |     8 +-
 .../test/queries/clientpositive/multi_insert.q  |     2 +-
 .../queries/clientpositive/multi_insert_gby2.q  |     2 +-
 .../queries/clientpositive/multi_insert_gby3.q  |     2 +-
 .../clientpositive/multi_insert_lateral_view.q  |     1 +
 .../queries/clientpositive/multi_insert_mixed.q |     2 +-
 ...multi_insert_move_tasks_share_dependencies.q |     2 +-
 .../clientpositive/multi_insert_union_src.q     |     2 +-
 ql/src/test/queries/clientpositive/nullMap.q    |    14 +
 ql/src/test/queries/clientpositive/orc_llap.q   |   148 +
 .../parquet_mixed_partition_formats2.q          |    31 +
 .../clientpositive/parquet_ppd_boolean.q        |     4 +-
 .../queries/clientpositive/parquet_ppd_char.q   |    12 +-
 .../queries/clientpositive/parquet_ppd_date.q   |    16 +-
 .../clientpositive/parquet_ppd_decimal.q        |    32 +-
 .../clientpositive/parquet_ppd_timestamp.q      |    16 +-
 .../clientpositive/parquet_ppd_varchar.q        |    12 +-
 .../test/queries/clientpositive/scriptfile1.q   |     3 +
 .../test/queries/clientpositive/selectindate.q  |     9 +
 ql/src/test/queries/clientpositive/show_conf.q  |     2 +-
 .../clientpositive/show_create_database.q       |     3 +
 .../clientpositive/skewjoin_onesideskew.q       |    22 +
 .../special_character_in_tabnames_1.q           |  1075 +
 .../special_character_in_tabnames_2.q           |    40 +
 ql/src/test/queries/clientpositive/stats19.q    |     4 +-
 .../queries/clientpositive/stats_only_null.q    |     1 -
 ql/src/test/queries/clientpositive/statsfs.q    |     2 -
 .../queries/clientpositive/subquery_views.q     |    22 +-
 ql/src/test/queries/clientpositive/temp_table.q |    26 +
 ql/src/test/queries/clientpositive/tez_fsstat.q |     2 -
 ql/src/test/queries/clientpositive/topn.q       |    13 +
 .../test/queries/clientpositive/udf_greatest.q  |    20 +
 ql/src/test/queries/clientpositive/udf_least.q  |    20 +
 .../queries/clientpositive/udtf_nofetchtask.q   |    10 +
 ql/src/test/queries/clientpositive/union36.q    |    10 +
 .../queries/clientpositive/unionDistinct_1.q    |     5 +
 .../queries/clientpositive/union_fast_stats.q   |    68 +
 ql/src/test/queries/clientpositive/union_view.q |     2 +
 .../queries/clientpositive/update_all_types.q   |     2 +-
 .../clientpositive/vector_auto_smb_mapjoin_14.q |   297 +
 .../queries/clientpositive/vector_char_cast.q   |     9 +
 .../queries/clientpositive/vector_coalesce.q    |    22 +
 .../clientpositive/vector_groupby_reduce.q      |    62 +-
 .../clientpositive/vector_leftsemi_mapjoin.q    |     4 +-
 .../clientpositive/vector_multi_insert.q        |     2 +
 ql/src/test/queries/clientpositive/vector_nvl.q |    36 +
 .../queries/clientpositive/vector_struct_in.q   |   247 +
 .../clientpositive/vectorization_limit.q        |     4 +-
 .../clientpositive/windowing_windowspec2.q      |    16 +-
 .../resources/orc-file-dump-bloomfilter.out     |   114 +-
 .../resources/orc-file-dump-bloomfilter2.out    |   146 +-
 .../orc-file-dump-dictionary-threshold.out      |   182 +-
 ql/src/test/resources/orc-file-dump.json        |   189 +-
 ql/src/test/resources/orc-file-dump.out         |   160 +-
 ql/src/test/resources/orc-file-has-null.out     |    82 +-
 .../results/clientnegative/ctasnullcol.q.out    |     5 +
 .../clientnegative/cte_with_in_subquery.q.out   |     2 +-
 .../test/results/clientnegative/ddltime.q.out   |     9 -
 .../results/clientnegative/insertsel_fail.q.out |     2 +-
 .../special_character_in_tabnames_1.q.out       |    10 +
 .../subquery_exists_implicit_gby.q.out          |     8 +-
 .../subquery_nested_subquery.q.out              |     4 +-
 .../subquery_notexists_implicit_gby.q.out       |     8 +-
 .../subquery_windowing_corr.q.out               |     7 +-
 .../clientnegative/udf_greatest_error_2.q.out   |     2 +-
 .../clientnegative/udf_greatest_error_3.q.out   |     1 -
 .../clientnegative/udf_greatest_error_4.q.out   |     1 -
 .../acid_vectorization_partition.q.out          |    14 +-
 .../results/clientpositive/add_jar_pfile.q.out  |    12 +
 .../alter_partition_coltype.q.out               |     3 +
 .../clientpositive/annotate_stats_part.q.out    |     6 +-
 .../clientpositive/annotate_stats_table.q.out   |     4 +-
 .../auto_join_reordering_values.q.out           |     7 +-
 .../clientpositive/auto_sortmerge_join_1.q.out  |     5 +
 .../clientpositive/auto_sortmerge_join_11.q.out |     4 +
 .../clientpositive/auto_sortmerge_join_12.q.out |     1 +
 .../clientpositive/auto_sortmerge_join_2.q.out  |     4 +
 .../clientpositive/auto_sortmerge_join_3.q.out  |     5 +
 .../clientpositive/auto_sortmerge_join_4.q.out  |     5 +
 .../clientpositive/auto_sortmerge_join_5.q.out  |     5 +
 .../clientpositive/auto_sortmerge_join_7.q.out  |     5 +
 .../clientpositive/auto_sortmerge_join_8.q.out  |     5 +
 .../clientpositive/avrocountemptytbl.q.out      |    58 +
 .../clientpositive/binary_output_format.q.out   |     4 +-
 .../test/results/clientpositive/bucket4.q.out   |     8 +-
 .../test/results/clientpositive/bucket5.q.out   |    10 +-
 .../results/clientpositive/bucket_many.q.out    |     8 +-
 .../clientpositive/bucket_map_join_1.q.out      |     5 +-
 .../clientpositive/bucket_map_join_2.q.out      |     5 +-
 .../clientpositive/bucket_map_join_spark4.q.out |     2 +
 .../clientpositive/bucketcontext_1.q.out        |     2 +
 .../clientpositive/bucketcontext_2.q.out        |     2 +
 .../clientpositive/bucketcontext_3.q.out        |     2 +
 .../clientpositive/bucketcontext_4.q.out        |     2 +
 .../clientpositive/bucketcontext_5.q.out        |     2 +
 .../clientpositive/bucketcontext_6.q.out        |     2 +
 .../clientpositive/bucketcontext_7.q.out        |     2 +
 .../clientpositive/bucketcontext_8.q.out        |     2 +
 .../results/clientpositive/bucketmapjoin1.q.out |    10 +-
 .../clientpositive/bucketmapjoin10.q.out        |     1 +
 .../clientpositive/bucketmapjoin11.q.out        |     2 +
 .../clientpositive/bucketmapjoin12.q.out        |     2 +
 .../clientpositive/bucketmapjoin13.q.out        |     4 +
 .../results/clientpositive/bucketmapjoin2.q.out |    12 +-
 .../results/clientpositive/bucketmapjoin3.q.out |     8 +-
 .../results/clientpositive/bucketmapjoin4.q.out |     8 +-
 .../results/clientpositive/bucketmapjoin5.q.out |     8 +-
 .../results/clientpositive/bucketmapjoin7.q.out |     1 +
 .../results/clientpositive/bucketmapjoin8.q.out |     2 +
 .../results/clientpositive/bucketmapjoin9.q.out |     2 +
 .../clientpositive/bucketmapjoin_negative.q.out |     4 +-
 .../bucketmapjoin_negative2.q.out               |     4 +-
 .../bucketmapjoin_negative3.q.out               |     9 +
 .../clientpositive/cbo_rp_auto_join1.q.out      |   151 +-
 .../cbo_rp_cross_product_check_2.q.out          |    18 +-
 .../cbo_rp_gby2_map_multi_distinct.q.out        |   236 +
 .../clientpositive/cbo_rp_lineage2.q.out        |   677 +
 .../cbo_rp_outer_join_ppr.q.java1.7.out         |     2 +
 .../cbo_rp_udaf_percentile_approx_23.q.out      |   628 +
 .../clientpositive/columnstats_partlvl.q.out    |     2 +
 .../clientpositive/columnstats_tbllvl.q.out     |     2 +
 .../test/results/clientpositive/combine2.q.out  |     1 +
 .../constantPropagateForSubQuery.q.out          |     1 +
 .../results/clientpositive/cross_join.q.out     |   196 +
 ql/src/test/results/clientpositive/ctas.q.out   |     2 +-
 .../test/results/clientpositive/ddltime.q.out   |   188 -
 .../results/clientpositive/decimal_1_1.q.out    |   104 +
 .../clientpositive/describe_pretty.q.out        |   366 +-
 .../disable_merge_for_bucketing.q.out           |    12 +-
 .../display_colstats_tbllvl.q.out               |     1 +
 .../clientpositive/drop_table_with_index.q.out  |   152 +
 .../clientpositive/dynamic_rdd_cache.q.out      |    28 +-
 .../dynpart_sort_opt_vectorization.q.out        |    28 +-
 .../dynpart_sort_optimization.q.out             |    12 +-
 .../dynpart_sort_optimization2.q.out            |     8 +-
 .../encryption_insert_partition_dynamic.q.out   |    24 +-
 .../encryption_join_unencrypted_tbl.q.out       |    27 +-
 ...on_join_with_different_encryption_keys.q.out |    37 +-
 .../results/clientpositive/escape_crlf.q.out    |    98 +
 .../extrapolate_part_stats_full.q.out           |    24 +-
 .../extrapolate_part_stats_partial.q.out        |    76 +-
 .../extrapolate_part_stats_partial_ndv.q.out    |    38 +-
 .../clientpositive/filter_cond_pushdown.q.out   |    80 +
 .../clientpositive/filter_join_breaktask.q.out  |     3 +-
 .../clientpositive/fouter_join_ppr.q.out        |     4 +
 .../clientpositive/groupby_join_pushdown.q.out  |  1522 +
 .../clientpositive/groupby_sort_1_23.q.out      |    45 +-
 .../clientpositive/groupby_sort_skew_1_23.q.out |    59 +-
 .../infer_bucket_sort_bucketed_table.q.out      |     4 +-
 .../infer_bucket_sort_dyn_part.q.out            |    32 +-
 .../infer_bucket_sort_map_operators.q.out       |    38 +-
 .../infer_bucket_sort_merge.q.out               |     8 +-
 .../infer_bucket_sort_num_buckets.q.out         |     8 +-
 .../infer_bucket_sort_reducers_power_two.q.out  |    24 +-
 .../test/results/clientpositive/input23.q.out   |     1 +
 .../results/clientpositive/input_part1.q.out    |     4 +-
 .../results/clientpositive/input_part2.q.out    |     8 +-
 .../results/clientpositive/input_part7.q.out    |     1 +
 .../results/clientpositive/insert_into1.q.out   |   355 +
 .../results/clientpositive/insert_into2.q.out   |    69 +
 .../clientpositive/insertvalues_espchars.q.out  |    30 +
 ql/src/test/results/clientpositive/join26.q.out |     4 +-
 .../clientpositive/join32_lessSize.q.out        |    10 +-
 ql/src/test/results/clientpositive/join35.q.out |     4 +-
 ql/src/test/results/clientpositive/join44.q.out |    88 +
 .../join_cond_pushdown_unqual1.q.out            |    12 +-
 .../join_cond_pushdown_unqual3.q.out            |    18 +-
 .../join_cond_pushdown_unqual4.q.out            |     2 +-
 .../clientpositive/join_filters_overlap.q.out   |     5 +
 .../clientpositive/join_grp_diff_keys.q.out     |   190 +
 .../results/clientpositive/join_map_ppr.q.out   |     8 +-
 .../results/clientpositive/join_parse.q.out     |   516 +
 .../results/clientpositive/json_serde1.q.out    |   113 +
 .../clientpositive/lateral_view_noalias.q.out   |   120 +-
 .../test/results/clientpositive/lineage3.q.out  |    68 +-
 .../list_bucket_dml_10.q.java1.7.out            |     4 +-
 .../clientpositive/list_bucket_dml_14.q.out     |     1 +
 .../list_bucket_dml_4.q.java1.8.out             |    76 +-
 .../list_bucket_dml_6.q.java1.8.out             |     8 +-
 .../list_bucket_dml_9.q.java1.8.out             |    76 +-
 .../list_bucket_query_multiskew_3.q.out         |     1 +
 .../list_bucket_query_oneskew_2.q.out           |     2 +
 .../llap/acid_vectorization.q.out               |    62 +
 .../llap/acid_vectorization_partition.q.out     |    60 +
 .../llap/acid_vectorization_project.q.out       |    73 +
 .../clientpositive/llap/alter_merge_2_orc.q.out |   123 +
 .../clientpositive/llap/alter_merge_orc.q.out   |   263 +
 .../llap/alter_merge_stats_orc.q.out            |   382 +
 .../clientpositive/llap/auto_join0.q.out        |   172 +
 .../clientpositive/llap/auto_join1.q.out        |   125 +
 .../clientpositive/llap/auto_join21.q.out       |   615 +
 .../clientpositive/llap/auto_join29.q.out       |  3556 ++
 .../clientpositive/llap/auto_join30.q.out       |  1361 +
 .../clientpositive/llap/auto_join_filters.q.out |   540 +
 .../clientpositive/llap/auto_join_nulls.q.out   |   217 +
 .../llap/auto_sortmerge_join_1.q.out            |  1034 +
 .../llap/auto_sortmerge_join_10.q.out           |   369 +
 .../llap/auto_sortmerge_join_11.q.out           |  1485 +
 .../llap/auto_sortmerge_join_12.q.out           |   645 +
 .../llap/auto_sortmerge_join_13.q.out           |   692 +
 .../llap/auto_sortmerge_join_14.q.out           |   224 +
 .../llap/auto_sortmerge_join_15.q.out           |   188 +
 .../llap/auto_sortmerge_join_16.q.out           |   256 +
 .../llap/auto_sortmerge_join_2.q.out            |   707 +
 .../llap/auto_sortmerge_join_3.q.out            |  1014 +
 .../llap/auto_sortmerge_join_4.q.out            |  1030 +
 .../llap/auto_sortmerge_join_5.q.out            |   780 +
 .../llap/auto_sortmerge_join_6.q.out            |  1200 +
 .../llap/auto_sortmerge_join_7.q.out            |  1200 +
 .../llap/auto_sortmerge_join_8.q.out            |  1202 +
 .../llap/auto_sortmerge_join_9.q.out            |  3521 ++
 .../results/clientpositive/llap/bucket2.q.out   |   473 +
 .../results/clientpositive/llap/bucket3.q.out   |   498 +
 .../results/clientpositive/llap/bucket4.q.out   |   472 +
 .../llap/bucket_map_join_tez1.q.out             |  1602 +
 .../llap/bucket_map_join_tez2.q.out             |   684 +
 .../results/clientpositive/llap/cbo_gby.q.out   |   124 +
 .../clientpositive/llap/cbo_gby_empty.q.out     |    77 +
 .../results/clientpositive/llap/cbo_join.q.out  | 15028 +++++++
 .../results/clientpositive/llap/cbo_limit.q.out |    90 +
 .../clientpositive/llap/cbo_semijoin.q.out      |   440 +
 .../clientpositive/llap/cbo_simple_select.q.out |   755 +
 .../results/clientpositive/llap/cbo_stats.q.out |    14 +
 .../clientpositive/llap/cbo_subq_exists.q.out   |   297 +
 .../clientpositive/llap/cbo_subq_in.q.out       |   151 +
 .../clientpositive/llap/cbo_subq_not_in.q.out   |   365 +
 .../clientpositive/llap/cbo_udf_udaf.q.out      |   125 +
 .../results/clientpositive/llap/cbo_union.q.out |   920 +
 .../results/clientpositive/llap/cbo_views.q.out |   237 +
 .../clientpositive/llap/cbo_windowing.q.out     |   293 +
 .../clientpositive/llap/constprog_dpp.q.out     |   113 +
 .../llap/correlationoptimizer1.q.out            |  3084 ++
 .../results/clientpositive/llap/count.q.out     |   298 +
 .../llap/create_merge_compressed.q.out          |   138 +
 .../clientpositive/llap/cross_join.q.out        |   214 +
 .../llap/cross_product_check_1.q.out            |   575 +
 .../llap/cross_product_check_2.q.out            |   534 +
 .../test/results/clientpositive/llap/ctas.q.out |   930 +
 .../llap/custom_input_output_format.q.out       |   102 +
 .../llap/delete_all_non_partitioned.q.out       |    52 +
 .../llap/delete_all_partitioned.q.out           |    86 +
 .../clientpositive/llap/delete_orig_table.q.out |    61 +
 .../clientpositive/llap/delete_tmp_table.q.out  |    60 +
 .../llap/delete_where_no_match.q.out            |    62 +
 .../llap/delete_where_non_partitioned.q.out     |    61 +
 .../llap/delete_where_partitioned.q.out         |   105 +
 .../llap/delete_whole_partition.q.out           |    92 +
 .../llap/disable_merge_for_bucketing.q.out      |   502 +
 .../llap/dynamic_partition_pruning.q.out        |  5341 +++
 .../llap/dynamic_partition_pruning_2.q.out      |  1114 +
 .../llap/dynpart_sort_opt_vectorization.q.out   |  2616 ++
 .../llap/dynpart_sort_optimization.q.out        |  2401 ++
 .../llap/dynpart_sort_optimization2.q.out       |  1844 +
 .../clientpositive/llap/enforce_order.q.out     |    80 +
 .../clientpositive/llap/explainuser_1.q.out     |  8937 +++++
 .../clientpositive/llap/explainuser_2.q.out     |  5521 +++
 .../clientpositive/llap/explainuser_3.q.out     |   522 +
 .../llap/filter_join_breaktask.q.out            |   445 +
 .../llap/filter_join_breaktask2.q.out           |   272 +
 .../results/clientpositive/llap/groupby1.q.out  |   428 +
 .../results/clientpositive/llap/groupby2.q.out  |   133 +
 .../results/clientpositive/llap/groupby3.q.out  |   158 +
 .../results/clientpositive/llap/having.q.out    |  1290 +
 .../llap/hybridgrace_hashjoin_1.q.out           |  1617 +
 .../llap/hybridgrace_hashjoin_2.q.out           |  1477 +
 .../llap/identity_project_remove_skip.q.out     |   124 +
 .../results/clientpositive/llap/insert1.q.out   |   445 +
 .../llap/insert_acid_dynamic_partition.q.out    |    48 +
 .../llap/insert_acid_not_bucketed.q.out         |    36 +
 .../clientpositive/llap/insert_into1.q.out      |   381 +
 .../clientpositive/llap/insert_into2.q.out      |   440 +
 .../clientpositive/llap/insert_orig_table.q.out |    80 +
 .../llap/insert_update_delete.q.out             |    78 +
 .../llap/insert_values_acid_not_bucketed.q.out  |    28 +
 .../insert_values_dynamic_partitioned.q.out     |    45 +
 .../llap/insert_values_non_partitioned.q.out    |    70 +
 .../llap/insert_values_orig_table.q.out         |    82 +
 .../llap/insert_values_partitioned.q.out        |    66 +
 .../llap/insert_values_tmp_table.q.out          |    36 +
 .../clientpositive/llap/join0.q.java1.7.out     |   242 +
 .../clientpositive/llap/join0.q.java1.8.out     |   242 +
 .../results/clientpositive/llap/join1.q.out     |  1158 +
 .../clientpositive/llap/join_nullsafe.q.out     |  1667 +
 .../clientpositive/llap/leftsemijoin.q.out      |   114 +
 .../clientpositive/llap/limit_pushdown.q.out    |  1487 +
 .../clientpositive/llap/llapdecider.q.out       |  1195 +
 .../clientpositive/llap/load_dyn_part1.q.out    |  2215 ++
 .../clientpositive/llap/load_dyn_part2.q.out    |  2152 +
 .../clientpositive/llap/load_dyn_part3.q.out    |  2138 +
 .../clientpositive/llap/lvj_mapjoin.q.out       |   296 +
 .../clientpositive/llap/mapjoin_decimal.q.out   |   393 +
 .../clientpositive/llap/mapjoin_mapjoin.q.out   |   821 +
 .../clientpositive/llap/mapreduce1.q.out        |   621 +
 .../clientpositive/llap/mapreduce2.q.out        |   616 +
 .../results/clientpositive/llap/merge1.q.out    |   596 +
 .../results/clientpositive/llap/merge2.q.out    |   596 +
 .../results/clientpositive/llap/mergejoin.q.out |  3150 ++
 .../llap/metadata_only_queries.q.out            |   504 +
 .../metadata_only_queries_with_filters.q.out    |   224 +
 .../clientpositive/llap/metadataonly1.q.out     |  2050 +
 .../test/results/clientpositive/llap/mrr.q.out  |  2294 ++
 .../clientpositive/llap/optimize_nullscan.q.out |  2159 +
 .../clientpositive/llap/orc_analyze.q.out       |  1726 +
 .../results/clientpositive/llap/orc_llap.q.out  |  1013 +
 .../clientpositive/llap/orc_merge1.q.out        |   500 +
 .../clientpositive/llap/orc_merge2.q.out        |   231 +
 .../clientpositive/llap/orc_merge3.q.out        |   170 +
 .../clientpositive/llap/orc_merge4.q.out        |   186 +
 .../clientpositive/llap/orc_merge5.q.out        |   344 +
 .../clientpositive/llap/orc_merge6.q.out        |   518 +
 .../clientpositive/llap/orc_merge7.q.out        |   629 +
 .../clientpositive/llap/orc_merge8.q.out        |   130 +
 .../clientpositive/llap/orc_merge9.q.out        |   186 +
 .../llap/orc_merge_incompat1.q.out              |   245 +
 .../llap/orc_merge_incompat2.q.out              |   375 +
 .../clientpositive/llap/orc_ppd_basic.q.out     |   701 +
 .../llap/orc_vectorization_ppd.q.out            |   288 +
 .../results/clientpositive/llap/parallel.q.out  |  1444 +
 .../test/results/clientpositive/llap/ptf.q.out  |  4895 +++
 .../clientpositive/llap/ptf_matchpath.q.out     |   403 +
 .../clientpositive/llap/ptf_streaming.q.out     |  2640 ++
 .../results/clientpositive/llap/sample1.q.out   |   727 +
 .../clientpositive/llap/script_env_var1.q.out   |    18 +
 .../clientpositive/llap/script_env_var2.q.out   |    16 +
 .../clientpositive/llap/script_pipe.q.out       |   126 +
 .../clientpositive/llap/scriptfile1.q.out       |    53 +
 .../llap/selectDistinctStar.q.out               |  4910 +++
 .../llap/select_dummy_source.q.out              |   229 +
 .../llap/show_create_database.q.out             |    19 +
 .../results/clientpositive/llap/skewjoin.q.out  |  1195 +
 .../clientpositive/llap/stats_counter.q.out     |   102 +
 .../llap/stats_counter_partitioned.q.out        |   465 +
 .../clientpositive/llap/stats_noscan_1.q.out    |   520 +
 .../clientpositive/llap/stats_only_null.q.out   |   422 +
 .../clientpositive/llap/subquery_exists.q.out   |   214 +
 .../clientpositive/llap/subquery_in.q.out       |   961 +
 .../clientpositive/llap/temp_table.q.out        |   469 +
 .../llap/tez_bmj_schema_evolution.q.out         |  2214 ++
 .../results/clientpositive/llap/tez_dml.q.out   |  1526 +
 .../llap/tez_dynpart_hashjoin_1.q.out           |   817 +
 .../llap/tez_dynpart_hashjoin_2.q.out           |   579 +
 .../clientpositive/llap/tez_fsstat.q.out        |   102 +
 ...tez_insert_overwrite_local_directory_1.q.out |    20 +
 .../results/clientpositive/llap/tez_join.q.out  |   150 +
 .../clientpositive/llap/tez_join_hash.q.out     |   980 +
 .../llap/tez_join_result_complex.q.out          |  2163 +
 .../clientpositive/llap/tez_join_tests.q.out    |  2227 ++
 .../clientpositive/llap/tez_joins_explain.q.out |   715 +
 .../clientpositive/llap/tez_multi_union.q.out   |   833 +
 .../llap/tez_schema_evolution.q.out             |   114 +
 .../clientpositive/llap/tez_self_join.q.out     |   210 +
 .../results/clientpositive/llap/tez_smb_1.q.out |   616 +
 .../clientpositive/llap/tez_smb_main.q.out      |  1422 +
 .../results/clientpositive/llap/tez_union.q.out |  1438 +
 .../clientpositive/llap/tez_union2.q.out        |   820 +
 .../clientpositive/llap/tez_union_decimal.q.out |   101 +
 .../llap/tez_union_dynamic_partition.q.out      |   158 +
 .../llap/tez_union_group_by.q.out               |   410 +
 .../llap/tez_union_multiinsert.q.out            |  4399 +++
 .../llap/tez_vector_dynpart_hashjoin_1.q.out    |   817 +
 .../llap/tez_vector_dynpart_hashjoin_2.q.out    |   579 +
 .../clientpositive/llap/transform1.q.out        |   138 +
 .../clientpositive/llap/transform2.q.out        |    11 +
 .../clientpositive/llap/transform_ppr1.q.out    |   569 +
 .../clientpositive/llap/transform_ppr2.q.out    |   475 +
 .../results/clientpositive/llap/union2.q.out    |   104 +
 .../results/clientpositive/llap/union3.q.out    |   251 +
 .../results/clientpositive/llap/union4.q.out    |   175 +
 .../results/clientpositive/llap/union5.q.out    |   154 +
 .../results/clientpositive/llap/union6.q.out    |   172 +
 .../results/clientpositive/llap/union7.q.out    |   150 +
 .../results/clientpositive/llap/union8.q.out    |  1601 +
 .../results/clientpositive/llap/union9.q.out    |   130 +
 .../clientpositive/llap/unionDistinct_1.q.out   | 16453 ++++++++
 .../clientpositive/llap/unionDistinct_2.q.out   |   545 +
 .../clientpositive/llap/union_fast_stats.q.out  |   526 +
 .../clientpositive/llap/union_view.q.out        |  1209 +
 .../llap/update_after_multiple_inserts.q.out    |    78 +
 .../llap/update_all_non_partitioned.q.out       |    62 +
 .../llap/update_all_partitioned.q.out           |   106 +
 .../clientpositive/llap/update_all_types.q.out  |   196 +
 .../clientpositive/llap/update_orig_table.q.out |    62 +
 .../clientpositive/llap/update_tmp_table.q.out  |    62 +
 .../clientpositive/llap/update_two_cols.q.out   |    63 +
 .../llap/update_where_no_match.q.out            |    62 +
 .../llap/update_where_non_partitioned.q.out     |    62 +
 .../llap/update_where_partitioned.q.out         |   106 +
 .../clientpositive/llap/vector_acid3.q.out      |    31 +
 .../llap/vector_aggregate_9.q.out               |   174 +
 .../llap/vector_auto_smb_mapjoin_14.q.out       |  1918 +
 .../clientpositive/llap/vector_between_in.q.out |   691 +
 .../llap/vector_binary_join_groupby.q.out       |   305 +
 .../clientpositive/llap/vector_bucket.q.out     |   104 +
 .../llap/vector_cast_constant.q.java1.7.out     |   217 +
 .../llap/vector_cast_constant.q.java1.8.out     |   217 +
 .../clientpositive/llap/vector_char_2.q.out     |   292 +
 .../clientpositive/llap/vector_char_4.q.out     |   175 +
 .../clientpositive/llap/vector_char_cast.q.out  |    35 +
 .../llap/vector_char_mapjoin1.q.out             |   470 +
 .../llap/vector_char_simple.q.out               |   342 +
 .../clientpositive/llap/vector_coalesce.q.out   |   362 +
 .../clientpositive/llap/vector_coalesce_2.q.out |   304 +
 .../llap/vector_count_distinct.q.out            |  1381 +
 .../clientpositive/llap/vector_data_types.q.out |   285 +
 .../clientpositive/llap/vector_date_1.q.out     |   719 +
 .../clientpositive/llap/vector_decimal_1.q.out  |   591 +
 .../llap/vector_decimal_10_0.q.out              |   112 +
 .../clientpositive/llap/vector_decimal_2.q.out  |  1658 +
 .../clientpositive/llap/vector_decimal_3.q.out  |   390 +
 .../clientpositive/llap/vector_decimal_4.q.out  |   250 +
 .../clientpositive/llap/vector_decimal_5.q.out  |   239 +
 .../clientpositive/llap/vector_decimal_6.q.out  |   303 +
 .../llap/vector_decimal_aggregate.q.out         |   232 +
 .../llap/vector_decimal_cast.q.out              |    41 +
 .../llap/vector_decimal_expressions.q.out       |    96 +
 .../llap/vector_decimal_mapjoin.q.out           |   264 +
 .../llap/vector_decimal_math_funcs.q.out        |   192 +
 .../llap/vector_decimal_precision.q.out         |   672 +
 .../llap/vector_decimal_round.q.out             |   460 +
 .../llap/vector_decimal_round_2.q.out           |   500 +
 .../llap/vector_decimal_trailing.q.out          |   121 +
 .../llap/vector_decimal_udf.q.out               |  2756 ++
 .../llap/vector_decimal_udf2.q.out              |   188 +
 .../clientpositive/llap/vector_distinct_2.q.out |  1870 +
 .../clientpositive/llap/vector_elt.q.out        |   121 +
 .../clientpositive/llap/vector_groupby_3.q.out  |  1873 +
 .../llap/vector_groupby_reduce.q.out            |  1882 +
 .../llap/vector_grouping_sets.q.out             |   269 +
 .../clientpositive/llap/vector_if_expr.q.out    |    82 +
 .../clientpositive/llap/vector_inner_join.q.out |   806 +
 .../clientpositive/llap/vector_interval_1.q.out |   822 +
 .../clientpositive/llap/vector_interval_2.q.out |  1620 +
 .../llap/vector_interval_mapjoin.q.out          |   281 +
 .../clientpositive/llap/vector_join30.q.out     |  1375 +
 .../llap/vector_join_filters.q.out              |   222 +
 .../clientpositive/llap/vector_join_nulls.q.out |   195 +
 .../llap/vector_left_outer_join.q.out           |   141 +
 .../llap/vector_left_outer_join2.q.out          |   559 +
 .../llap/vector_leftsemi_mapjoin.q.out          | 13973 +++++++
 .../llap/vector_mapjoin_reduce.q.out            |   319 +
 .../llap/vector_mr_diff_schema_alias.q.out      |   383 +
 .../llap/vector_multi_insert.q.out              |   233 +
 .../llap/vector_non_string_partition.q.out      |   182 +
 .../llap/vector_null_projection.q.out           |   186 +
 .../llap/vector_nullsafe_join.q.out             |  1210 +
 .../clientpositive/llap/vector_orderby_5.q.out  |   189 +
 .../llap/vector_outer_join0.q.out               |   232 +
 .../llap/vector_outer_join1.q.out               |   613 +
 .../llap/vector_outer_join2.q.out               |   316 +
 .../llap/vector_outer_join3.q.out               |   609 +
 .../llap/vector_outer_join4.q.out               |   982 +
 .../llap/vector_outer_join5.q.out               |  1330 +
 .../llap/vector_partition_diff_num_cols.q.out   |   614 +
 .../llap/vector_partitioned_date_time.q.out     |  2047 +
 .../llap/vector_reduce_groupby_decimal.q.out    |   201 +
 .../llap/vector_string_concat.q.out             |   415 +
 .../clientpositive/llap/vector_varchar_4.q.out  |   175 +
 .../llap/vector_varchar_mapjoin1.q.out          |   454 +
 .../llap/vector_varchar_simple.q.out            |   342 +
 .../clientpositive/llap/vectorization_0.q.out   |  1099 +
 .../clientpositive/llap/vectorization_1.q.out   |    49 +
 .../clientpositive/llap/vectorization_10.q.out  |   298 +
 .../clientpositive/llap/vectorization_11.q.out  |    80 +
 .../clientpositive/llap/vectorization_12.q.out  |   602 +
 .../clientpositive/llap/vectorization_13.q.out  |   510 +
 .../clientpositive/llap/vectorization_14.q.out  |   836 +
 .../clientpositive/llap/vectorization_15.q.out  |   253 +
 .../clientpositive/llap/vectorization_16.q.out  |   671 +
 .../clientpositive/llap/vectorization_17.q.out  |   507 +
 .../clientpositive/llap/vectorization_2.q.out   |    53 +
 .../clientpositive/llap/vectorization_3.q.out   |    59 +
 .../clientpositive/llap/vectorization_4.q.out   |    53 +
 .../clientpositive/llap/vectorization_5.q.out   |    47 +
 .../clientpositive/llap/vectorization_6.q.out   |  1624 +
 .../clientpositive/llap/vectorization_7.q.out   |   380 +
 .../clientpositive/llap/vectorization_8.q.out   |   354 +
 .../clientpositive/llap/vectorization_9.q.out   |   671 +
 .../llap/vectorization_decimal_date.q.out       |    51 +
 .../llap/vectorization_div0.q.out               |   485 +
 .../llap/vectorization_limit.q.out              |   554 +
 .../llap/vectorization_nested_udf.q.out         |     9 +
 .../clientpositive/llap/vectorization_not.q.out |    58 +
 .../llap/vectorization_part.q.out               |    72 +
 .../llap/vectorization_part_project.q.out       |   123 +
 .../llap/vectorization_pushdown.q.out           |    71 +
 .../llap/vectorization_short_regress.q.out      |  3414 ++
 .../llap/vectorized_bucketmapjoin1.q.out        |   376 +
 .../clientpositive/llap/vectorized_case.q.out   |    95 +
 .../clientpositive/llap/vectorized_casts.q.out  |   370 +
 .../llap/vectorized_context.q.out               |   332 +
 .../llap/vectorized_date_funcs.q.out            |  1019 +
 .../llap/vectorized_distinct_gby.q.out          |   172 +
 .../vectorized_dynamic_partition_pruning.q.out  |  5341 +++
 .../llap/vectorized_mapjoin.q.out               |   114 +
 .../llap/vectorized_math_funcs.q.out            |   247 +
 .../llap/vectorized_nested_mapjoin.q.out        |   136 +
 .../llap/vectorized_parquet.q.out               |   325 +
 .../llap/vectorized_parquet_types.q.out         |   349 +
 .../clientpositive/llap/vectorized_ptf.q.out    |  8992 +++++
 .../llap/vectorized_rcfile_columnar.q.out       |    62 +
 .../llap/vectorized_shufflejoin.q.out           |   132 +
 .../llap/vectorized_string_funcs.q.out          |   123 +
 .../llap/vectorized_timestamp_funcs.q.out       |   883 +
 .../llap/vectorized_timestamp_ints_casts.q.out  |   234 +
 .../clientpositive/llap_partitioned.q.out       |  1999 +
 .../clientpositive/llap_uncompressed.q.out      |   228 +
 .../clientpositive/load_non_hdfs_path.q.out     |    16 +
 .../results/clientpositive/load_orc_part.q.out  |    26 +
 .../clientpositive/louter_join_ppr.q.out        |     4 +
 .../clientpositive/mapjoin_mapjoin.q.out        |     1 +
 ql/src/test/results/clientpositive/merge3.q.out |    12 +-
 .../clientpositive/metadata_only_queries.q.out  |   158 +
 .../results/clientpositive/metadataonly1.q.out  |    97 +-
 .../results/clientpositive/nonmr_fetch.q.out    |    78 +-
 .../test/results/clientpositive/nullMap.q.out   |    46 +
 .../clientpositive/optimize_nullscan.q.out      |   116 +-
 .../results/clientpositive/orc_analyze.q.out    |    46 +-
 .../results/clientpositive/orc_file_dump.q.out  |    18 +-
 .../clientpositive/orc_int_type_promotion.q.out |     6 +-
 .../test/results/clientpositive/orc_llap.q.out  |  1095 +
 .../clientpositive/outer_join_ppr.q.java1.7.out |     2 +
 .../clientpositive/parallel_orderby.q.out       |     8 +-
 .../parquet_mixed_partition_formats2.q.out      |    99 +
 .../clientpositive/parquet_ppd_boolean.q.out    |    28 +-
 .../clientpositive/parquet_ppd_char.q.out       |    84 +-
 .../clientpositive/parquet_ppd_date.q.out       |   112 +-
 .../clientpositive/parquet_ppd_decimal.q.out    |   224 +-
 .../clientpositive/parquet_ppd_timestamp.q.out  |   112 +-
 .../clientpositive/parquet_ppd_varchar.q.out    |    84 +-
 ql/src/test/results/clientpositive/pcr.q.out    |    37 +-
 .../results/clientpositive/pointlookup2.q.out   |    25 +-
 .../results/clientpositive/pointlookup3.q.out   |    23 +-
 .../clientpositive/ppd_join_filter.q.out        |    12 +-
 .../results/clientpositive/ppd_union_view.q.out |     8 +-
 ql/src/test/results/clientpositive/ppd_vc.q.out |     4 +-
 .../clientpositive/ppr_allchildsarenull.q.out   |     2 +
 .../test/results/clientpositive/push_or.q.out   |     1 +
 .../query_result_fileformat.q.out               |     6 +-
 .../clientpositive/rand_partitionpruner1.q.out  |     1 +
 .../clientpositive/rand_partitionpruner2.q.out  |     4 +-
 .../results/clientpositive/regexp_extract.q.out |     2 +
 .../results/clientpositive/remote_script.q.out  |     8 +-
 .../clientpositive/router_join_ppr.q.out        |     4 +
 .../test/results/clientpositive/sample1.q.out   |     4 +-
 .../test/results/clientpositive/sample10.q.out  |     3 +-
 .../test/results/clientpositive/sample2.q.out   |     4 +-
 .../test/results/clientpositive/sample4.q.out   |     4 +-
 .../test/results/clientpositive/sample5.q.out   |     4 +-
 .../test/results/clientpositive/sample6.q.out   |    11 +-
 .../test/results/clientpositive/sample7.q.out   |     4 +-
 .../test/results/clientpositive/sample8.q.out   |     1 +
 .../test/results/clientpositive/sample9.q.out   |     1 +
 .../results/clientpositive/scriptfile1.q.out    |     6 +
 .../clientpositive/select_dummy_source.q.out    |    38 +-
 .../results/clientpositive/selectindate.q.out   |    70 +
 .../clientpositive/serde_user_properties.q.out  |     3 +
 .../test/results/clientpositive/show_conf.q.out |     6 +-
 .../clientpositive/show_create_database.q.out   |    19 +
 .../results/clientpositive/show_functions.q.out |     1 +
 .../clientpositive/skewjoin_onesideskew.q.out   |   212 +
 .../results/clientpositive/smb_mapjoin9.q.out   |     2 +
 .../results/clientpositive/smb_mapjoin_11.q.out |     2 +
 .../results/clientpositive/smb_mapjoin_13.q.out |     2 +
 .../results/clientpositive/smb_mapjoin_15.q.out |     4 +
 .../clientpositive/sort_merge_join_desc_5.q.out |     1 +
 .../clientpositive/sort_merge_join_desc_6.q.out |     1 +
 .../clientpositive/sort_merge_join_desc_7.q.out |     1 +
 .../spark/auto_join_reordering_values.q.out     |     1 +
 .../spark/auto_sortmerge_join_1.q.out           |     3 +
 .../spark/auto_sortmerge_join_12.q.out          |     1 +
 .../spark/auto_sortmerge_join_2.q.out           |     2 +
 .../spark/auto_sortmerge_join_3.q.out           |     3 +
 .../spark/auto_sortmerge_join_4.q.out           |     3 +
 .../spark/auto_sortmerge_join_5.q.out           |     3 +
 .../spark/auto_sortmerge_join_7.q.out           |     3 +
 .../spark/auto_sortmerge_join_8.q.out           |     3 +
 .../spark/bucket_map_join_1.q.out               |     9 +-
 .../spark/bucket_map_join_2.q.out               |     9 +-
 .../spark/bucket_map_join_spark4.q.out          |     2 +
 .../clientpositive/spark/bucketmapjoin1.q.out   |     2 +
 .../clientpositive/spark/bucketmapjoin10.q.out  |     1 +
 .../clientpositive/spark/bucketmapjoin11.q.out  |     2 +
 .../clientpositive/spark/bucketmapjoin12.q.out  |     2 +
 .../clientpositive/spark/bucketmapjoin13.q.out  |     4 +
 .../clientpositive/spark/bucketmapjoin7.q.out   |     1 +
 .../clientpositive/spark/bucketmapjoin8.q.out   |     2 +
 .../clientpositive/spark/bucketmapjoin9.q.out   |     2 +
 .../spark/bucketmapjoin_negative3.q.out         |     9 +
 .../spark/column_access_stats.q.out             |    46 +-
 .../clientpositive/spark/cross_join.q.out       |   211 +
 .../clientpositive/spark/decimal_1_1.q.out      |   104 +
 .../spark/dynamic_rdd_cache.q.out               |    28 +-
 .../spark/filter_join_breaktask.q.out           |     1 +
 .../spark/groupby_sort_1_23.q.out               |     1 +
 .../spark/groupby_sort_skew_1_23.q.out          |     1 +
 .../clientpositive/spark/insert_into1.q.out     |   238 +
 .../clientpositive/spark/insert_into2.q.out     |    75 +
 .../spark/join_cond_pushdown_unqual1.q.out      |    12 +-
 .../spark/join_cond_pushdown_unqual3.q.out      |    18 +-
 .../spark/join_cond_pushdown_unqual4.q.out      |     2 +-
 .../spark/join_filters_overlap.q.out            |     5 +
 .../clientpositive/spark/louter_join_ppr.q.out  |     4 +
 .../clientpositive/spark/mapjoin_mapjoin.q.out  |     1 +
 .../spark/metadata_only_queries.q.out           |   170 +
 .../spark/optimize_nullscan.q.out               |     9 +
 .../spark/outer_join_ppr.q.java1.7.out          |     2 +
 .../test/results/clientpositive/spark/pcr.q.out |    33 +-
 .../clientpositive/spark/ppd_join5.q.out        |    58 +-
 .../clientpositive/spark/ppd_join_filter.q.out  |     4 +
 .../clientpositive/spark/remote_script.q.out    |     8 +-
 .../clientpositive/spark/router_join_ppr.q.out  |     4 +
 .../results/clientpositive/spark/sample10.q.out |     1 +
 .../results/clientpositive/spark/sample6.q.out  |     7 +
 .../results/clientpositive/spark/sample8.q.out  |     1 +
 .../clientpositive/spark/scriptfile1.q.out      |     6 +
 .../clientpositive/spark/smb_mapjoin_12.q.out   |     6 +-
 .../clientpositive/spark/smb_mapjoin_13.q.out   |    38 +-
 .../clientpositive/spark/smb_mapjoin_15.q.out   |    16 +-
 .../clientpositive/spark/smb_mapjoin_16.q.out   |     2 +-
 .../results/clientpositive/spark/stats3.q.out   |     2 -
 .../clientpositive/spark/temp_table.q.out       |   107 +
 .../clientpositive/spark/transform_ppr1.q.out   |     1 +
 .../clientpositive/spark/transform_ppr2.q.out   |     1 +
 .../results/clientpositive/spark/union24.q.out  |    10 +
 .../results/clientpositive/spark/union34.q.out  |    68 +-
 .../clientpositive/spark/union_ppr.q.out        |     1 +
 .../spark/vector_between_in.q.out               |    16 +-
 .../spark/vector_cast_constant.q.java1.7.out    |     2 +-
 .../spark/vector_count_distinct.q.out           |     4 +-
 .../spark/vector_data_types.q.out               |     2 +-
 .../spark/vector_decimal_aggregate.q.out        |     2 +-
 .../spark/vector_decimal_mapjoin.q.out          |     4 +-
 .../spark/vector_distinct_2.q.out               |     2 +-
 .../clientpositive/spark/vector_groupby_3.q.out |     2 +-
 .../spark/vector_left_outer_join.q.out          |     8 +-
 .../spark/vector_mapjoin_reduce.q.out           |     4 +-
 .../clientpositive/spark/vector_orderby_5.q.out |     4 +-
 .../spark/vector_string_concat.q.out            |     4 +-
 .../clientpositive/spark/vectorization_0.q.out  |    30 +-
 .../clientpositive/spark/vectorization_13.q.out |     4 +-
 .../clientpositive/spark/vectorization_14.q.out |     2 +-
 .../clientpositive/spark/vectorization_15.q.out |     2 +-
 .../clientpositive/spark/vectorization_17.q.out |     2 +-
 .../spark/vectorization_div0.q.out              |     4 +-
 .../spark/vectorization_part_project.q.out      |     2 +-
 .../spark/vectorization_short_regress.q.out     |    32 +-
 .../spark/vectorized_mapjoin.q.out              |     4 +-
 .../spark/vectorized_nested_mapjoin.q.out       |     8 +-
 .../clientpositive/spark/vectorized_ptf.q.out   |   182 +-
 .../spark/vectorized_shufflejoin.q.out          |     2 +-
 .../spark/vectorized_timestamp_funcs.q.out      |    12 +-
 .../special_character_in_tabnames_1.q.out       | 19550 +++++++++
 .../special_character_in_tabnames_2.q.out       |   304 +
 ql/src/test/results/clientpositive/stats0.q.out |     4 +-
 .../test/results/clientpositive/stats11.q.out   |     8 +-
 ql/src/test/results/clientpositive/stats3.q.out |     2 -
 .../subquery_notin_having.q.java1.8.out         |    86 +-
 .../results/clientpositive/subquery_views.q.out |   116 +
 .../results/clientpositive/temp_table.q.out     |   107 +
 .../temp_table_display_colstats_tbllvl.q.out    |     1 +
 .../tez/acid_vectorization_partition.q.out      |    18 +-
 .../results/clientpositive/tez/auto_join0.q.out |     1 -
 .../clientpositive/tez/auto_join_nulls.q.out    |     2 +-
 .../tez/auto_sortmerge_join_1.q.out             |     3 +
 .../tez/auto_sortmerge_join_10.q.out            |    57 +-
 .../tez/auto_sortmerge_join_11.q.out            |     4 +
 .../tez/auto_sortmerge_join_12.q.out            |    97 +-
 .../tez/auto_sortmerge_join_2.q.out             |     2 +
 .../tez/auto_sortmerge_join_3.q.out             |     3 +
 .../tez/auto_sortmerge_join_4.q.out             |     3 +
 .../tez/auto_sortmerge_join_5.q.out             |     3 +
 .../tez/auto_sortmerge_join_6.q.out             |   160 +-
 .../tez/auto_sortmerge_join_7.q.out             |     3 +
 .../tez/auto_sortmerge_join_8.q.out             |     3 +
 .../tez/bucket_map_join_tez1.q.out              |   236 +-
 .../tez/bucket_map_join_tez2.q.out              |   108 +-
 .../results/clientpositive/tez/cross_join.q.out |   187 +
 .../tez/cross_product_check_2.q.out             |   201 +-
 .../tez/dynamic_partition_pruning.q.out         |   133 +-
 .../tez/dynamic_partition_pruning_2.q.out       |    54 +-
 .../tez/dynpart_sort_opt_vectorization.q.out    |    74 +-
 .../tez/dynpart_sort_optimization.q.out         |    12 +-
 .../tez/dynpart_sort_optimization2.q.out        |    16 +-
 .../clientpositive/tez/explainuser_1.q.out      |   125 +-
 .../clientpositive/tez/explainuser_2.q.out      |  1070 +-
 .../clientpositive/tez/explainuser_3.q.out      |    55 +-
 .../clientpositive/tez/fileformat_mix.q.out     |   573 -
 .../tez/filter_join_breaktask.q.out             |     1 +
 .../tez/identity_project_remove_skip.q.out      |     3 +-
 .../results/clientpositive/tez/insert1.q.out    |   392 +-
 .../clientpositive/tez/insert_into1.q.out       |   250 +
 .../clientpositive/tez/insert_into2.q.out       |    75 +
 .../clientpositive/tez/llapdecider.q.out        |  1195 +
 .../clientpositive/tez/mapjoin_mapjoin.q.out    |     1 +
 .../results/clientpositive/tez/mergejoin.q.out  |    26 +-
 .../tez/metadata_only_queries.q.out             |   170 +
 .../metadata_only_queries_with_filters.q.out    |    32 +-
 .../clientpositive/tez/metadataonly1.q.out      |     9 +
 .../test/results/clientpositive/tez/mrr.q.out   |    83 +-
 .../clientpositive/tez/optimize_nullscan.q.out  |     9 +
 .../clientpositive/tez/orc_analyze.q.out        |    46 +-
 .../clientpositive/tez/scriptfile1.q.out        |     6 +
 .../tez/select_dummy_source.q.out               |    76 +-
 .../tez/show_create_database.q.out              |    19 +
 .../results/clientpositive/tez/temp_table.q.out |   107 +
 .../tez/tez_dynpart_hashjoin_1.q.out            |    10 +-
 .../clientpositive/tez/tez_join_hash.q.out      |     4 +-
 .../tez/tez_vector_dynpart_hashjoin_1.q.out     |    24 +-
 .../tez/tez_vector_dynpart_hashjoin_2.q.out     |     8 +-
 .../clientpositive/tez/transform_ppr1.q.out     |     1 +
 .../clientpositive/tez/transform_ppr2.q.out     |     1 +
 .../clientpositive/tez/unionDistinct_1.q.out    |    93 +-
 .../clientpositive/tez/union_fast_stats.q.out   |   526 +
 .../results/clientpositive/tez/union_view.q.out |   167 +
 .../clientpositive/tez/update_all_types.q.out   |     4 +-
 .../tez/vector_auto_smb_mapjoin_14.q.out        |  1576 +
 .../clientpositive/tez/vector_between_in.q.out  |    16 +-
 .../tez/vector_binary_join_groupby.q.out        |     4 +-
 .../clientpositive/tez/vector_bucket.q.out      |     2 +-
 .../tez/vector_cast_constant.q.java1.7.out      |     2 +-
 .../tez/vector_cast_constant.q.java1.8.out      |    21 +-
 .../tez/vector_cast_constant.q.out              |   199 -
 .../clientpositive/tez/vector_char_2.q.out      |     8 +-
 .../clientpositive/tez/vector_char_cast.q.out   |    35 +
 .../tez/vector_char_mapjoin1.q.out              |     6 +-
 .../clientpositive/tez/vector_char_simple.q.out |     6 +-
 .../clientpositive/tez/vector_coalesce.q.out    |   149 +-
 .../clientpositive/tez/vector_coalesce_2.q.out  |     2 +-
 .../tez/vector_count_distinct.q.out             |     4 +-
 .../clientpositive/tez/vector_data_types.q.out  |     2 +-
 .../clientpositive/tez/vector_date_1.q.out      |    12 +-
 .../clientpositive/tez/vector_decimal_1.q.out   |    18 +-
 .../tez/vector_decimal_10_0.q.out               |     2 +-
 .../clientpositive/tez/vector_decimal_2.q.out   |    54 +-
 .../tez/vector_decimal_aggregate.q.out          |     2 +-
 .../tez/vector_decimal_expressions.q.out        |     2 +-
 .../tez/vector_decimal_round.q.out              |    12 +-
 .../tez/vector_decimal_round_2.q.out            |     8 +-
 .../clientpositive/tez/vector_decimal_udf.q.out |     8 +-
 .../clientpositive/tez/vector_distinct_2.q.out  |     2 +-
 .../clientpositive/tez/vector_groupby_3.q.out   |     2 +-
 .../tez/vector_groupby_reduce.q.out             |  1474 +-
 .../tez/vector_grouping_sets.q.out              |     2 +-
 .../clientpositive/tez/vector_if_expr.q.out     |     2 +-
 .../clientpositive/tez/vector_interval_1.q.out  |    16 +-
 .../clientpositive/tez/vector_interval_2.q.out  |    20 +-
 .../clientpositive/tez/vector_join30.q.out      |    50 +-
 .../tez/vector_left_outer_join.q.out            |     2 +-
 .../tez/vector_left_outer_join3.q.out           |   222 -
 .../tez/vector_leftsemi_mapjoin.q.out           |   160 +-
 .../tez/vector_mapjoin_reduce.q.out             |     4 +-
 .../tez/vector_mr_diff_schema_alias.q.out       |     6 +-
 .../tez/vector_multi_insert.q.out               |     8 +
 .../tez/vector_non_string_partition.q.out       |     4 +-
 .../clientpositive/tez/vector_orderby_5.q.out   |     4 +-
 .../clientpositive/tez/vector_outer_join.q.out  |  2204 --
 .../clientpositive/tez/vector_outer_join1.q.out |    50 +-
 .../clientpositive/tez/vector_outer_join2.q.out |     2 +-
 .../clientpositive/tez/vector_outer_join3.q.out |     6 +-
 .../clientpositive/tez/vector_outer_join4.q.out |    50 +-
 .../clientpositive/tez/vector_outer_join5.q.out |    20 +-
 .../tez/vector_partition_diff_num_cols.q.out    |    10 +-
 .../tez/vector_partitioned_date_time.q.out      |    18 +-
 .../tez/vector_reduce_groupby_decimal.q.out     |     4 +-
 .../tez/vector_string_concat.q.out              |     4 +-
 .../tez/vector_varchar_mapjoin1.q.out           |     6 +-
 .../tez/vector_varchar_simple.q.out             |     6 +-
 .../clientpositive/tez/vectorization_0.q.out    |    30 +-
 .../clientpositive/tez/vectorization_13.q.out   |     4 +-
 .../clientpositive/tez/vectorization_14.q.out   |     2 +-
 .../clientpositive/tez/vectorization_15.q.out   |     2 +-
 .../clientpositive/tez/vectorization_17.q.out   |     2 +-
 .../clientpositive/tez/vectorization_7.q.out    |     4 +-
 .../clientpositive/tez/vectorization_8.q.out    |     4 +-
 .../clientpositive/tez/vectorization_div0.q.out |     4 +-
 .../tez/vectorization_limit.q.out               |    16 +-
 .../tez/vectorization_part_project.q.out        |     2 +-
 .../tez/vectorization_short_regress.q.out       |    32 +-
 .../tez/vectorized_date_funcs.q.out             |     4 +-
 .../tez/vectorized_distinct_gby.q.out           |     4 +-
 .../vectorized_dynamic_partition_pruning.q.out  |   231 +-
 .../tez/vectorized_nested_mapjoin.q.out         |     2 +-
 .../clientpositive/tez/vectorized_ptf.q.out     |   182 +-
 .../tez/vectorized_shufflejoin.q.out            |     2 +-
 .../tez/vectorized_timestamp_funcs.q.out        |    12 +-
 ql/src/test/results/clientpositive/topn.q.out   |    42 +
 .../results/clientpositive/transform_ppr1.q.out |     1 +
 .../results/clientpositive/transform_ppr2.q.out |     1 +
 .../results/clientpositive/udf_explode.q.out    |   210 +-
 .../results/clientpositive/udf_greatest.q.out   |    70 +-
 .../results/clientpositive/udf_inline.q.out     |    42 +-
 .../test/results/clientpositive/udf_least.q.out |    70 +-
 .../results/clientpositive/udtf_explode.q.out   |   156 +-
 .../clientpositive/udtf_nofetchtask.q.out       |    30 +
 .../test/results/clientpositive/union22.q.out   |     2 +-
 .../test/results/clientpositive/union24.q.out   |    18 +-
 .../test/results/clientpositive/union36.q.out   |    28 +
 .../clientpositive/unionDistinct_1.q.out        |    49 +-
 .../clientpositive/union_fast_stats.q.out       |   526 +
 .../test/results/clientpositive/union_ppr.q.out |     1 +
 .../clientpositive/update_all_types.q.out       |     4 +-
 .../vector_auto_smb_mapjoin_14.q.out            |  1792 +
 .../clientpositive/vector_char_cast.q.out       |    35 +
 .../clientpositive/vector_char_mapjoin1.q.out   |     6 +-
 .../clientpositive/vector_coalesce.q.out        |   151 +
 .../clientpositive/vector_decimal_mapjoin.q.out |     2 +-
 .../clientpositive/vector_groupby_reduce.q.out  |  1495 +-
 .../clientpositive/vector_inner_join.q.out      |    18 +-
 .../vector_interval_mapjoin.q.out               |     2 +-
 .../clientpositive/vector_left_outer_join.q.out |     2 +-
 .../vector_left_outer_join2.q.out               |     8 +-
 .../vector_leftsemi_mapjoin.q.out               |   152 +-
 .../vector_mr_diff_schema_alias.q.out           |     2 +-
 .../clientpositive/vector_multi_insert.q.out    |     8 +
 .../clientpositive/vector_nullsafe_join.q.out   |    20 +-
 .../results/clientpositive/vector_nvl.q.out     |   233 +
 .../clientpositive/vector_outer_join0.q.out     |     4 +-
 .../clientpositive/vector_outer_join1.q.out     |     6 +-
 .../clientpositive/vector_outer_join2.q.out     |     2 +-
 .../clientpositive/vector_outer_join3.q.out     |     6 +-
 .../clientpositive/vector_outer_join4.q.out     |     6 +-
 .../clientpositive/vector_outer_join5.q.out     |    20 +-
 .../clientpositive/vector_struct_in.q.out       |   825 +
 .../vector_varchar_mapjoin1.q.out               |     6 +-
 .../clientpositive/vectorization_limit.q.out    |     8 +-
 .../clientpositive/vectorized_context.q.out     |     2 +-
 .../clientpositive/vectorized_mapjoin.q.out     |     2 +-
 .../vectorized_nested_mapjoin.q.out             |     2 +-
 .../results/clientpositive/vectorized_ptf.q.out |   236 +-
 .../clientpositive/windowing_windowspec2.q.out  |   198 +-
 ql/src/test/templates/TestCliDriver.vm          |     3 +-
 ql/src/test/templates/TestCompareCliDriver.vm   |     4 +-
 serde/if/serde.thrift                           |     1 +
 serde/pom.xml                                   |    90 +-
 .../gen/thrift/gen-cpp/complex_constants.cpp    |     2 +-
 .../src/gen/thrift/gen-cpp/complex_constants.h  |     2 +-
 serde/src/gen/thrift/gen-cpp/complex_types.cpp  |    94 +-
 serde/src/gen/thrift/gen-cpp/complex_types.h    |    46 +-
 .../gen/thrift/gen-cpp/megastruct_constants.cpp |     2 +-
 .../gen/thrift/gen-cpp/megastruct_constants.h   |     2 +-
 .../src/gen/thrift/gen-cpp/megastruct_types.cpp |    70 +-
 serde/src/gen/thrift/gen-cpp/megastruct_types.h |    24 +-
 .../src/gen/thrift/gen-cpp/serde_constants.cpp  |     4 +-
 serde/src/gen/thrift/gen-cpp/serde_constants.h  |     3 +-
 serde/src/gen/thrift/gen-cpp/serde_types.cpp    |     2 +-
 serde/src/gen/thrift/gen-cpp/serde_types.h      |     2 +-
 .../gen/thrift/gen-cpp/testthrift_constants.cpp |     2 +-
 .../gen/thrift/gen-cpp/testthrift_constants.h   |     2 +-
 .../src/gen/thrift/gen-cpp/testthrift_types.cpp |    34 +-
 serde/src/gen/thrift/gen-cpp/testthrift_types.h |    24 +-
 .../hadoop/hive/serde/serdeConstants.java       |     4 +-
 .../hadoop/hive/serde/test/InnerStruct.java     |     6 +-
 .../hadoop/hive/serde/test/ThriftTestObj.java   |     6 +-
 .../hadoop/hive/serde2/thrift/test/Complex.java |     6 +-
 .../hive/serde2/thrift/test/IntString.java      |     8 +-
 .../hive/serde2/thrift/test/MegaStruct.java     |    16 +-
 .../hive/serde2/thrift/test/MiniStruct.java     |     4 +-
 .../hadoop/hive/serde2/thrift/test/MyEnum.java  |     2 +-
 .../hive/serde2/thrift/test/PropValueUnion.java |     2 +-
 .../hive/serde2/thrift/test/SetIntString.java   |     4 +-
 serde/src/gen/thrift/gen-php/Types.php          |     4 +-
 .../org/apache/hadoop/hive/serde/Types.php      |     7 +-
 .../src/gen/thrift/gen-py/complex/constants.py  |     2 +-
 serde/src/gen/thrift/gen-py/complex/ttypes.py   |    44 +-
 .../gen/thrift/gen-py/megastruct/constants.py   |     2 +-
 .../src/gen/thrift/gen-py/megastruct/ttypes.py  |    50 +-
 .../org_apache_hadoop_hive_serde/constants.py   |     3 +-
 .../org_apache_hadoop_hive_serde/ttypes.py      |     2 +-
 .../gen/thrift/gen-py/testthrift/constants.py   |     2 +-
 .../src/gen/thrift/gen-py/testthrift/ttypes.py  |     8 +-
 .../src/gen/thrift/gen-rb/complex_constants.rb  |     2 +-
 serde/src/gen/thrift/gen-rb/complex_types.rb    |     2 +-
 .../gen/thrift/gen-rb/megastruct_constants.rb   |     2 +-
 serde/src/gen/thrift/gen-rb/megastruct_types.rb |     2 +-
 serde/src/gen/thrift/gen-rb/serde_constants.rb  |     4 +-
 serde/src/gen/thrift/gen-rb/serde_types.rb      |     2 +-
 .../gen/thrift/gen-rb/testthrift_constants.rb   |     2 +-
 serde/src/gen/thrift/gen-rb/testthrift_types.rb |     2 +-
 .../hive/serde2/ColumnProjectionUtils.java      |    20 +-
 .../apache/hadoop/hive/serde2/WriteBuffers.java |     4 +-
 .../hive/serde2/avro/AvroDeserializer.java      |     2 +-
 .../hadoop/hive/serde2/avro/AvroSerDe.java      |    13 +-
 .../hadoop/hive/serde2/avro/AvroSerdeUtils.java |    31 +-
 .../hadoop/hive/serde2/avro/AvroSerializer.java |     2 -
 .../BinarySortableSerDeWithEndPrefix.java       |    41 +
 .../hive/serde2/columnar/ColumnarSerDe.java     |     6 +-
 .../hive/serde2/lazy/LazySerDeParameters.java   |    45 +-
 .../hive/serde2/lazy/LazySimpleSerDe.java       |     7 +-
 .../hadoop/hive/serde2/lazy/LazyUtils.java      |    32 +-
 .../objectinspector/ObjectInspectorUtils.java   |    34 +
 .../hadoop/hive/serde2/avro/TestAvroSerde.java  |    28 +-
 .../hive/serde2/avro/TestAvroSerdeUtils.java    |    18 +-
 .../TestObjectInspectorUtils.java               |    25 +
 service/pom.xml                                 |    45 +-
 service/src/gen/thrift/gen-cpp/TCLIService.cpp  |  1770 +-
 service/src/gen/thrift/gen-cpp/TCLIService.h    |   409 +-
 .../thrift/gen-cpp/TCLIService_constants.cpp    |     2 +-
 .../gen/thrift/gen-cpp/TCLIService_constants.h  |     2 +-
 .../gen/thrift/gen-cpp/TCLIService_types.cpp    |  1226 +-
 .../src/gen/thrift/gen-cpp/TCLIService_types.h  |   816 +-
 service/src/gen/thrift/gen-cpp/ThriftHive.cpp   |   853 +-
 service/src/gen/thrift/gen-cpp/ThriftHive.h     |   199 +-
 .../thrift/gen-cpp/hive_service_constants.cpp   |     2 +-
 .../gen/thrift/gen-cpp/hive_service_constants.h |     2 +-
 .../gen/thrift/gen-cpp/hive_service_types.cpp   |    55 +-
 .../src/gen/thrift/gen-cpp/hive_service_types.h |    26 +-
 .../hadoop/hive/service/HiveClusterStatus.java  |    14 +-
 .../hive/service/HiveServerException.java       |     6 +-
 .../hadoop/hive/service/JobTrackerState.java    |     2 +-
 .../apache/hadoop/hive/service/ThriftHive.java  |     6 +-
 .../service/cli/thrift/TArrayTypeEntry.java     |     6 +-
 .../hive/service/cli/thrift/TBinaryColumn.java  |     6 +-
 .../hive/service/cli/thrift/TBoolColumn.java    |     4 +-
 .../hive/service/cli/thrift/TBoolValue.java     |     6 +-
 .../hive/service/cli/thrift/TByteColumn.java    |     4 +-
 .../hive/service/cli/thrift/TByteValue.java     |     6 +-
 .../hive/service/cli/thrift/TCLIService.java    |     4 +-
 .../cli/thrift/TCLIServiceConstants.java        |     2 +-
 .../cli/thrift/TCancelDelegationTokenReq.java   |     4 +-
 .../cli/thrift/TCancelDelegationTokenResp.java  |     4 +-
 .../service/cli/thrift/TCancelOperationReq.java |     4 +-
 .../cli/thrift/TCancelOperationResp.java        |     4 +-
 .../service/cli/thrift/TCloseOperationReq.java  |     4 +-
 .../service/cli/thrift/TCloseOperationResp.java |     4 +-
 .../service/cli/thrift/TCloseSessionReq.java    |     4 +-
 .../service/cli/thrift/TCloseSessionResp.java   |     4 +-
 .../apache/hive/service/cli/thrift/TColumn.java |     2 +-
 .../hive/service/cli/thrift/TColumnDesc.java    |     6 +-
 .../hive/service/cli/thrift/TColumnValue.java   |     2 +-
 .../hive/service/cli/thrift/TDoubleColumn.java  |     4 +-
 .../hive/service/cli/thrift/TDoubleValue.java   |     6 +-
 .../cli/thrift/TExecuteStatementReq.java        |     6 +-
 .../cli/thrift/TExecuteStatementResp.java       |     4 +-
 .../service/cli/thrift/TFetchOrientation.java   |     2 +-
 .../service/cli/thrift/TFetchResultsReq.java    |     8 +-
 .../service/cli/thrift/TFetchResultsResp.java   |     6 +-
 .../service/cli/thrift/TGetCatalogsReq.java     |     4 +-
 .../service/cli/thrift/TGetCatalogsResp.java    |     4 +-
 .../hive/service/cli/thrift/TGetColumnsReq.java |     4 +-
 .../service/cli/thrift/TGetColumnsResp.java     |     4 +-
 .../cli/thrift/TGetDelegationTokenReq.java      |     4 +-
 .../cli/thrift/TGetDelegationTokenResp.java     |     4 +-
 .../service/cli/thrift/TGetFunctionsReq.java    |     4 +-
 .../service/cli/thrift/TGetFunctionsResp.java   |     4 +-
 .../hive/service/cli/thrift/TGetInfoReq.java    |     4 +-
 .../hive/service/cli/thrift/TGetInfoResp.java   |     4 +-
 .../hive/service/cli/thrift/TGetInfoType.java   |     2 +-
 .../hive/service/cli/thrift/TGetInfoValue.java  |     2 +-
 .../cli/thrift/TGetOperationStatusReq.java      |     4 +-
 .../cli/thrift/TGetOperationStatusResp.java     |     6 +-
 .../cli/thrift/TGetResultSetMetadataReq.java    |     4 +-
 .../cli/thrift/TGetResultSetMetadataResp.java   |     4 +-
 .../hive/service/cli/thrift/TGetSchemasReq.java |     4 +-
 .../service/cli/thrift/TGetSchemasResp.java     |     4 +-
 .../service/cli/thrift/TGetTableTypesReq.java   |     4 +-
 .../service/cli/thrift/TGetTableTypesResp.java  |     4 +-
 .../hive/service/cli/thrift/TGetTablesReq.java  |     4 +-
 .../hive/service/cli/thrift/TGetTablesResp.java |     4 +-
 .../service/cli/thrift/TGetTypeInfoReq.java     |     4 +-
 .../service/cli/thrift/TGetTypeInfoResp.java    |     4 +-
 .../service/cli/thrift/THandleIdentifier.java   |     4 +-
 .../hive/service/cli/thrift/TI16Column.java     |     4 +-
 .../hive/service/cli/thrift/TI16Value.java      |     6 +-
 .../hive/service/cli/thrift/TI32Column.java     |     4 +-
 .../hive/service/cli/thrift/TI32Value.java      |     6 +-
 .../hive/service/cli/thrift/TI64Column.java     |     4 +-
 .../hive/service/cli/thrift/TI64Value.java      |     6 +-
 .../hive/service/cli/thrift/TMapTypeEntry.java  |     8 +-
 .../service/cli/thrift/TOpenSessionReq.java     |     4 +-
 .../service/cli/thrift/TOpenSessionResp.java    |     4 +-
 .../service/cli/thrift/TOperationHandle.java    |     8 +-
 .../service/cli/thrift/TOperationState.java     |     2 +-
 .../hive/service/cli/thrift/TOperationType.java |     2 +-
 .../service/cli/thrift/TPrimitiveTypeEntry.java |     4 +-
 .../service/cli/thrift/TProtocolVersion.java    |     2 +-
 .../cli/thrift/TRenewDelegationTokenReq.java    |     4 +-
 .../cli/thrift/TRenewDelegationTokenResp.java   |     4 +-
 .../apache/hive/service/cli/thrift/TRow.java    |     4 +-
 .../apache/hive/service/cli/thrift/TRowSet.java |     6 +-
 .../hive/service/cli/thrift/TSessionHandle.java |     4 +-
 .../apache/hive/service/cli/thrift/TStatus.java |     6 +-
 .../hive/service/cli/thrift/TStatusCode.java    |     2 +-
 .../hive/service/cli/thrift/TStringColumn.java  |     4 +-
 .../hive/service/cli/thrift/TStringValue.java   |     4 +-
 .../service/cli/thrift/TStructTypeEntry.java    |     4 +-
 .../hive/service/cli/thrift/TTableSchema.java   |     4 +-
 .../hive/service/cli/thrift/TTypeDesc.java      |     4 +-
 .../hive/service/cli/thrift/TTypeEntry.java     |     2 +-
 .../apache/hive/service/cli/thrift/TTypeId.java |     2 +-
 .../service/cli/thrift/TTypeQualifierValue.java |     2 +-
 .../service/cli/thrift/TTypeQualifiers.java     |     4 +-
 .../service/cli/thrift/TUnionTypeEntry.java     |     4 +-
 .../cli/thrift/TUserDefinedTypeEntry.java       |     4 +-
 service/src/gen/thrift/gen-php/TCLIService.php  |     3 +-
 service/src/gen/thrift/gen-php/ThriftHive.php   |     3 +-
 service/src/gen/thrift/gen-php/Types.php        |     4 +-
 .../gen-py/TCLIService/TCLIService-remote       |     2 +-
 .../thrift/gen-py/TCLIService/TCLIService.py    |   269 +-
 .../gen/thrift/gen-py/TCLIService/constants.py  |     2 +-
 .../src/gen/thrift/gen-py/TCLIService/ttypes.py |   190 +-
 .../gen-py/hive_service/ThriftHive-remote       |    51 +-
 .../thrift/gen-py/hive_service/ThriftHive.py    |   135 +-
 .../gen/thrift/gen-py/hive_service/constants.py |     2 +-
 .../gen/thrift/gen-py/hive_service/ttypes.py    |    20 +-
 .../gen/thrift/gen-rb/hive_service_constants.rb |     2 +-
 .../src/gen/thrift/gen-rb/hive_service_types.rb |     2 +-
 .../src/gen/thrift/gen-rb/t_c_l_i_service.rb    |     2 +-
 .../thrift/gen-rb/t_c_l_i_service_constants.rb  |     2 +-
 .../gen/thrift/gen-rb/t_c_l_i_service_types.rb  |     2 +-
 service/src/gen/thrift/gen-rb/thrift_hive.rb    |     2 +-
 .../auth/LdapAuthenticationProviderImpl.java    |    93 +-
 .../cli/operation/HiveCommandOperation.java     |    34 +-
 .../cli/operation/LogDivertAppender.java        |     2 +-
 .../hive/service/cli/operation/Operation.java   |    11 +
 .../service/cli/operation/OperationManager.java |    11 +
 .../service/cli/operation/SQLOperation.java     |    33 +-
 .../service/cli/session/HiveSessionImpl.java    |    12 +
 .../cli/session/HiveSessionImplwithUGI.java     |     3 +-
 .../service/cli/session/HiveSessionProxy.java   |     6 +
 .../service/cli/session/SessionManager.java     |     6 +-
 .../thrift/EmbeddedThriftBinaryCLIService.java  |     2 +-
 .../thrift/ThreadPoolExecutorWithOomHook.java   |    55 +
 .../cli/thrift/ThriftBinaryCLIService.java      |    12 +-
 .../service/cli/thrift/ThriftCLIService.java    |     8 +-
 .../cli/thrift/ThriftHttpCLIService.java        |    17 +-
 .../apache/hive/service/server/HiveServer2.java |    12 +-
 .../hive/service/auth/TestPlainSaslHelper.java  |     2 +-
 .../session/TestPluggableHiveSessionImpl.java   |     2 +-
 .../cli/session/TestSessionGlobalInitFile.java  |     2 +-
 shims/0.20S/pom.xml                             |    63 -
 .../hadoop/hive/shims/Hadoop20SShims.java       |   733 -
 .../apache/hadoop/hive/shims/Jetty20SShims.java |    53 -
 .../apache/hadoop/mapred/WebHCatJTShim20S.java  |   123 -
 shims/0.23/pom.xml                              |    25 +-
 .../apache/hadoop/hive/shims/Hadoop23Shims.java |   106 +-
 shims/aggregator/pom.xml                        |     6 -
 shims/common/pom.xml                            |     4 +-
 .../apache/hadoop/hive/shims/HadoopShims.java   |     2 +-
 .../apache/hadoop/hive/shims/ShimLoader.java    |    17 +-
 .../hive/thrift/HadoopThriftAuthBridge.java     |    14 +-
 shims/pom.xml                                   |     1 -
 shims/scheduler/pom.xml                         |    14 +-
 .../apache/hive/spark/client/SparkClient.java   |     5 +
 .../hive/spark/client/SparkClientImpl.java      |     5 +
 .../org/apache/hive/spark/client/rpc/Rpc.java   |     4 +
 storage-api/pom.xml                             |    31 +-
 .../org/apache/hadoop/hive/common/Pool.java     |    32 +
 .../apache/hadoop/hive/common/io/Allocator.java |    53 +
 .../apache/hadoop/hive/common/io/DataCache.java |   100 +
 .../apache/hadoop/hive/common/io/DiskRange.java |   102 +
 .../hadoop/hive/common/io/DiskRangeList.java    |   210 +
 .../common/io/encoded/EncodedColumnBatch.java   |   142 +
 .../hive/common/io/encoded/MemoryBuffer.java    |    28 +
 .../hadoop/hive/common/type/HiveDecimal.java    |    10 +-
 .../hive/ql/exec/vector/BytesColumnVector.java  |    47 +-
 .../hive/ql/exec/vector/ColumnVector.java       |    49 +-
 .../ql/exec/vector/DecimalColumnVector.java     |    59 +-
 .../hive/ql/exec/vector/DoubleColumnVector.java |    37 +-
 .../hive/ql/exec/vector/ListColumnVector.java   |   119 +
 .../hive/ql/exec/vector/LongColumnVector.java   |    37 +-
 .../hive/ql/exec/vector/MapColumnVector.java    |   131 +
 .../ql/exec/vector/MultiValuedColumnVector.java |   150 +
 .../hive/ql/exec/vector/StructColumnVector.java |   124 +
 .../hive/ql/exec/vector/UnionColumnVector.java  |   134 +
 .../hive/ql/io/sarg/SearchArgumentImpl.java     |     2 +-
 .../ql/exec/vector/TestListColumnVector.java    |   200 +
 .../ql/exec/vector/TestMapColumnVector.java     |   224 +
 .../ql/exec/vector/TestStructColumnVector.java  |    95 +
 .../ql/exec/vector/TestUnionColumnVector.java   |    93 +
 .../hive/ptest/execution/JIRAService.java       |   187 +-
 .../org/apache/hive/ptest/execution/PTest.java  |    11 +-
 .../hive/ptest/execution/TestCheckPhase.java    |    77 +
 .../ptest2/src/main/resources/batch-exec.vm     |     2 +
 .../hive/ptest/execution/TestJIRAService.java   |    89 +-
 ...RAService.testErrorWithMessages.approved.txt |    20 +
 ...ervice.testErrorWithoutMessages.approved.txt |    14 +
 .../TestJIRAService.testFailAdd.approved.txt    |    21 +
 .../TestJIRAService.testFailNoAdd.approved.txt  |    21 +
 .../TestJIRAService.testSuccessAdd.approved.txt |    16 +
 ...estJIRAService.testSuccessNoAdd.approved.txt |    16 +
 .../ptest/execution/TestTestCheckPhase.java     |    91 +
 .../src/test/resources/HIVE-10761.6.patch       |  2539 ++
 .../src/test/resources/HIVE-11271.4.patch       |   606 +
 .../ptest2/src/test/resources/HIVE-9377.1.patch |    25 +
 .../ptest2/src/test/resources/remove-test.patch |    33 +
 .../resources/test-configuration.properties     |     2 +
 2138 files changed, 506231 insertions(+), 42284 deletions(-)
----------------------------------------------------------------------



[12/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
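
The hunks below are the Thrift 0.9.3 regeneration of the Python metastore client: each recv_* method now ends its final raise without the trailing semicolon the 0.9.2 generator emitted, and the module gains an "import logging" line alongside the Thrift runtime imports. A minimal hand-written sketch of that pattern, not the generated code itself, assuming a deserialized result struct with the same field names as the generated stubs (success, o1, o2):

    import logging  # newly imported by the 0.9.3-generated modules

    from thrift.Thrift import TApplicationException

    def recv_get_table(result):
        # Mirrors the generated recv_* flow: return the payload if present,
        # re-raise any declared exception field, otherwise signal a missing
        # result. The final raise now carries no trailing semicolon.
        if result.success is not None:
            return result.success
        if result.o1 is not None:
            raise result.o1
        if result.o2 is not None:
            raise result.o2
        raise TApplicationException(TApplicationException.MISSING_RESULT,
                                    "get_table failed: unknown result")
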
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py b/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
index 8354d38..59c7b94 100644
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #
@@ -8,6 +8,7 @@
 
 from thrift.Thrift import TType, TMessageType, TException, TApplicationException
 import fb303.FacebookService
+import logging
 from ttypes import *
 from thrift.Thrift import TProcessor
 from thrift.transport import TTransport
@@ -1091,7 +1092,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "getMetaConf failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getMetaConf failed: unknown result")
 
   def setMetaConf(self, key, value):
     """
@@ -1194,7 +1195,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_database failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_database failed: unknown result")
 
   def drop_database(self, name, deleteData, cascade):
     """
@@ -1266,7 +1267,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_databases failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_databases failed: unknown result")
 
   def get_all_databases(self):
     self.send_get_all_databases()
@@ -1294,7 +1295,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_all_databases failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_all_databases failed: unknown result")
 
   def alter_database(self, dbname, db):
     """
@@ -1364,7 +1365,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_type failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_type failed: unknown result")
 
   def create_type(self, type):
     """
@@ -1401,7 +1402,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "create_type failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "create_type failed: unknown result")
 
   def drop_type(self, type):
     """
@@ -1436,7 +1437,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_type failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_type failed: unknown result")
 
   def get_type_all(self, name):
     """
@@ -1469,7 +1470,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_type_all failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_type_all failed: unknown result")
 
   def get_fields(self, db_name, table_name):
     """
@@ -1508,7 +1509,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_fields failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_fields failed: unknown result")
 
   def get_fields_with_environment_context(self, db_name, table_name, environment_context):
     """
@@ -1549,7 +1550,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_fields_with_environment_context failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_fields_with_environment_context failed: unknown result")
 
   def get_schema(self, db_name, table_name):
     """
@@ -1588,7 +1589,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_schema failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_schema failed: unknown result")
 
   def get_schema_with_environment_context(self, db_name, table_name, environment_context):
     """
@@ -1629,7 +1630,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_schema_with_environment_context failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_schema_with_environment_context failed: unknown result")
 
   def create_table(self, tbl):
     """
@@ -1816,7 +1817,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_tables failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_tables failed: unknown result")
 
   def get_all_tables(self, db_name):
     """
@@ -1849,7 +1850,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_all_tables failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_all_tables failed: unknown result")
 
   def get_table(self, dbname, tbl_name):
     """
@@ -1886,7 +1887,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table failed: unknown result")
 
   def get_table_objects_by_name(self, dbname, tbl_names):
     """
@@ -1925,7 +1926,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_objects_by_name failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_objects_by_name failed: unknown result")
 
   def get_table_names_by_filter(self, dbname, filter, max_tables):
     """
@@ -1966,7 +1967,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_names_by_filter failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_names_by_filter failed: unknown result")
 
   def alter_table(self, dbname, tbl_name, new_tbl):
     """
@@ -2118,7 +2119,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partition failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partition failed: unknown result")
 
   def add_partition_with_environment_context(self, new_part, environment_context):
     """
@@ -2157,7 +2158,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partition_with_environment_context failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partition_with_environment_context failed: unknown result")
 
   def add_partitions(self, new_parts):
     """
@@ -2194,7 +2195,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partitions failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partitions failed: unknown result")
 
   def add_partitions_pspec(self, new_parts):
     """
@@ -2231,7 +2232,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partitions_pspec failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partitions_pspec failed: unknown result")
 
   def append_partition(self, db_name, tbl_name, part_vals):
     """
@@ -2272,7 +2273,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition failed: unknown result")
 
   def add_partitions_req(self, request):
     """
@@ -2309,7 +2310,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partitions_req failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partitions_req failed: unknown result")
 
   def append_partition_with_environment_context(self, db_name, tbl_name, part_vals, environment_context):
     """
@@ -2352,7 +2353,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition_with_environment_context failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition_with_environment_context failed: unknown result")
 
   def append_partition_by_name(self, db_name, tbl_name, part_name):
     """
@@ -2393,7 +2394,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition_by_name failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition_by_name failed: unknown result")
 
   def append_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, environment_context):
     """
@@ -2436,7 +2437,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition_by_name_with_environment_context failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition_by_name_with_environment_context failed: unknown result")
 
   def drop_partition(self, db_name, tbl_name, part_vals, deleteData):
     """
@@ -2477,7 +2478,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition failed: unknown result")
 
   def drop_partition_with_environment_context(self, db_name, tbl_name, part_vals, deleteData, environment_context):
     """
@@ -2520,7 +2521,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_with_environment_context failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_with_environment_context failed: unknown result")
 
   def drop_partition_by_name(self, db_name, tbl_name, part_name, deleteData):
     """
@@ -2561,7 +2562,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_by_name failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_by_name failed: unknown result")
 
   def drop_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, deleteData, environment_context):
     """
@@ -2604,7 +2605,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_by_name_with_environment_context failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_by_name_with_environment_context failed: unknown result")
 
   def drop_partitions_req(self, req):
     """
@@ -2639,7 +2640,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partitions_req failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partitions_req failed: unknown result")
 
   def get_partition(self, db_name, tbl_name, part_vals):
     """
@@ -2678,7 +2679,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition failed: unknown result")
 
   def exchange_partition(self, partitionSpecs, source_db, source_table_name, dest_db, dest_table_name):
     """
@@ -2725,7 +2726,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o3
     if result.o4 is not None:
       raise result.o4
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "exchange_partition failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "exchange_partition failed: unknown result")
 
   def get_partition_with_auth(self, db_name, tbl_name, part_vals, user_name, group_names):
     """
@@ -2768,7 +2769,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_with_auth failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_with_auth failed: unknown result")
 
   def get_partition_by_name(self, db_name, tbl_name, part_name):
     """
@@ -2807,7 +2808,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_by_name failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_by_name failed: unknown result")
 
   def get_partitions(self, db_name, tbl_name, max_parts):
     """
@@ -2846,7 +2847,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions failed: unknown result")
 
   def get_partitions_with_auth(self, db_name, tbl_name, max_parts, user_name, group_names):
     """
@@ -2889,7 +2890,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_with_auth failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_with_auth failed: unknown result")
 
   def get_partitions_pspec(self, db_name, tbl_name, max_parts):
     """
@@ -2928,7 +2929,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_pspec failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_pspec failed: unknown result")
 
   def get_partition_names(self, db_name, tbl_name, max_parts):
     """
@@ -2965,7 +2966,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_names failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_names failed: unknown result")
 
   def get_partitions_ps(self, db_name, tbl_name, part_vals, max_parts):
     """
@@ -3006,7 +3007,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_ps failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_ps failed: unknown result")
 
   def get_partitions_ps_with_auth(self, db_name, tbl_name, part_vals, max_parts, user_name, group_names):
     """
@@ -3051,7 +3052,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_ps_with_auth failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_ps_with_auth failed: unknown result")
 
   def get_partition_names_ps(self, db_name, tbl_name, part_vals, max_parts):
     """
@@ -3092,7 +3093,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_names_ps failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_names_ps failed: unknown result")
 
   def get_partitions_by_filter(self, db_name, tbl_name, filter, max_parts):
     """
@@ -3133,7 +3134,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_by_filter failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_by_filter failed: unknown result")
 
   def get_part_specs_by_filter(self, db_name, tbl_name, filter, max_parts):
     """
@@ -3174,7 +3175,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_part_specs_by_filter failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_part_specs_by_filter failed: unknown result")
 
   def get_partitions_by_expr(self, req):
     """
@@ -3209,7 +3210,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_by_expr failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_by_expr failed: unknown result")
 
   def get_partitions_by_names(self, db_name, tbl_name, names):
     """
@@ -3248,7 +3249,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_by_names failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_by_names failed: unknown result")
 
   def alter_partition(self, db_name, tbl_name, new_part):
     """
@@ -3435,7 +3436,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "partition_name_has_valid_characters failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "partition_name_has_valid_characters failed: unknown result")
 
   def get_config_value(self, name, defaultValue):
     """
@@ -3470,7 +3471,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_config_value failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_config_value failed: unknown result")
 
   def partition_name_to_vals(self, part_name):
     """
@@ -3503,7 +3504,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "partition_name_to_vals failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "partition_name_to_vals failed: unknown result")
 
   def partition_name_to_spec(self, part_name):
     """
@@ -3536,7 +3537,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "partition_name_to_spec failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "partition_name_to_spec failed: unknown result")
 
   def markPartitionForEvent(self, db_name, tbl_name, part_vals, eventType):
     """
@@ -3632,7 +3633,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o5
     if result.o6 is not None:
       raise result.o6
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "isPartitionMarkedForEvent failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "isPartitionMarkedForEvent failed: unknown result")
 
   def add_index(self, new_index, index_table):
     """
@@ -3671,7 +3672,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_index failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_index failed: unknown result")
 
   def alter_index(self, dbname, base_tbl_name, idx_name, new_idx):
     """
@@ -3751,7 +3752,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_index_by_name failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_index_by_name failed: unknown result")
 
   def get_index_by_name(self, db_name, tbl_name, index_name):
     """
@@ -3790,7 +3791,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_index_by_name failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_index_by_name failed: unknown result")
 
   def get_indexes(self, db_name, tbl_name, max_indexes):
     """
@@ -3829,7 +3830,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_indexes failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_indexes failed: unknown result")
 
   def get_index_names(self, db_name, tbl_name, max_indexes):
     """
@@ -3866,7 +3867,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_index_names failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_index_names failed: unknown result")
 
   def update_table_column_statistics(self, stats_obj):
     """
@@ -3905,7 +3906,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o3
     if result.o4 is not None:
       raise result.o4
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "update_table_column_statistics failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "update_table_column_statistics failed: unknown result")
 
   def update_partition_column_statistics(self, stats_obj):
     """
@@ -3944,7 +3945,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o3
     if result.o4 is not None:
       raise result.o4
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "update_partition_column_statistics failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "update_partition_column_statistics failed: unknown result")
 
   def get_table_column_statistics(self, db_name, tbl_name, col_name):
     """
@@ -3987,7 +3988,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o3
     if result.o4 is not None:
       raise result.o4
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_column_statistics failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_column_statistics failed: unknown result")
 
   def get_partition_column_statistics(self, db_name, tbl_name, part_name, col_name):
     """
@@ -4032,7 +4033,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o3
     if result.o4 is not None:
       raise result.o4
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_column_statistics failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_column_statistics failed: unknown result")
 
   def get_table_statistics_req(self, request):
     """
@@ -4067,7 +4068,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_statistics_req failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_statistics_req failed: unknown result")
 
   def get_partitions_statistics_req(self, request):
     """
@@ -4102,7 +4103,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_statistics_req failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_statistics_req failed: unknown result")
 
   def get_aggr_stats_for(self, request):
     """
@@ -4137,7 +4138,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_aggr_stats_for failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_aggr_stats_for failed: unknown result")
 
   def set_aggr_stats_for(self, request):
     """
@@ -4176,7 +4177,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o3
     if result.o4 is not None:
       raise result.o4
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "set_aggr_stats_for failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "set_aggr_stats_for failed: unknown result")
 
   def delete_partition_column_statistics(self, db_name, tbl_name, part_name, col_name):
     """
@@ -4221,7 +4222,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o3
     if result.o4 is not None:
       raise result.o4
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "delete_partition_column_statistics failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "delete_partition_column_statistics failed: unknown result")
 
   def delete_table_column_statistics(self, db_name, tbl_name, col_name):
     """
@@ -4264,7 +4265,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o3
     if result.o4 is not None:
       raise result.o4
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "delete_table_column_statistics failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "delete_table_column_statistics failed: unknown result")
 
   def create_function(self, func):
     """
@@ -4408,7 +4409,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_functions failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_functions failed: unknown result")
 
   def get_function(self, dbName, funcName):
     """
@@ -4445,7 +4446,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_function failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_function failed: unknown result")
 
   def get_all_functions(self):
     self.send_get_all_functions()
@@ -4473,7 +4474,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_all_functions failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_all_functions failed: unknown result")
 
   def create_role(self, role):
     """
@@ -4506,7 +4507,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "create_role failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "create_role failed: unknown result")
 
   def drop_role(self, role_name):
     """
@@ -4539,7 +4540,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_role failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_role failed: unknown result")
 
   def get_role_names(self):
     self.send_get_role_names()
@@ -4567,7 +4568,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_role_names failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_role_names failed: unknown result")
 
   def grant_role(self, role_name, principal_name, principal_type, grantor, grantorType, grant_option):
     """
@@ -4610,7 +4611,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "grant_role failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "grant_role failed: unknown result")
 
   def revoke_role(self, role_name, principal_name, principal_type):
     """
@@ -4647,7 +4648,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "revoke_role failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "revoke_role failed: unknown result")
 
   def list_roles(self, principal_name, principal_type):
     """
@@ -4682,7 +4683,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "list_roles failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "list_roles failed: unknown result")
 
   def grant_revoke_role(self, request):
     """
@@ -4715,7 +4716,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "grant_revoke_role failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "grant_revoke_role failed: unknown result")
 
   def get_principals_in_role(self, request):
     """
@@ -4748,7 +4749,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_principals_in_role failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_principals_in_role failed: unknown result")
 
   def get_role_grants_for_principal(self, request):
     """
@@ -4781,7 +4782,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_role_grants_for_principal failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_role_grants_for_principal failed: unknown result")
 
   def get_privilege_set(self, hiveObject, user_name, group_names):
     """
@@ -4818,7 +4819,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_privilege_set failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_privilege_set failed: unknown result")
 
   def list_privileges(self, principal_name, principal_type, hiveObject):
     """
@@ -4855,7 +4856,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "list_privileges failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "list_privileges failed: unknown result")
 
   def grant_privileges(self, privileges):
     """
@@ -4888,7 +4889,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "grant_privileges failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "grant_privileges failed: unknown result")
 
   def revoke_privileges(self, privileges):
     """
@@ -4921,7 +4922,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "revoke_privileges failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "revoke_privileges failed: unknown result")
 
   def grant_revoke_privileges(self, request):
     """
@@ -4954,7 +4955,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "grant_revoke_privileges failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "grant_revoke_privileges failed: unknown result")
 
   def set_ugi(self, user_name, group_names):
     """
@@ -4989,7 +4990,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "set_ugi failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "set_ugi failed: unknown result")
 
   def get_delegation_token(self, token_owner, renewer_kerberos_principal_name):
     """
@@ -5024,7 +5025,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_delegation_token failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_delegation_token failed: unknown result")
 
   def renew_delegation_token(self, token_str_form):
     """
@@ -5057,7 +5058,7 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 is not None:
       raise result.o1
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "renew_delegation_token failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "renew_delegation_token failed: unknown result")
 
   def cancel_delegation_token(self, token_str_form):
     """
@@ -5114,7 +5115,7 @@ class Client(fb303.FacebookService.Client, Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_open_txns failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_open_txns failed: unknown result")
 
   def get_open_txns_info(self):
     self.send_get_open_txns_info()
@@ -5140,7 +5141,7 @@ class Client(fb303.FacebookService.Client, Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_open_txns_info failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_open_txns_info failed: unknown result")
 
   def open_txns(self, rqst):
     """
@@ -5171,7 +5172,7 @@ class Client(fb303.FacebookService.Client, Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "open_txns failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "open_txns failed: unknown result")
 
   def abort_txn(self, rqst):
     """
@@ -5270,7 +5271,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o1
     if result.o2 is not None:
       raise result.o2
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "lock failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "lock failed: unknown result")
 
   def check_lock(self, rqst):
     """
@@ -5307,7 +5308,7 @@ class Client(fb303.FacebookService.Client, Iface):
       raise result.o2
     if result.o3 is not None:
       raise result.o3
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "check_lock failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "check_lock failed: unknown result")
 
   def unlock(self, rqst):
     """
@@ -5371,7 +5372,7 @@ class Client(fb303.FacebookService.Client, Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "show_locks failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "show_locks failed: unknown result")
 
   def heartbeat(self, ids):
     """
@@ -5437,7 +5438,7 @@ class Client(fb303.FacebookService.Client, Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "heartbeat_txn_range failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "heartbeat_txn_range failed: unknown result")
 
   def compact(self, rqst):
     """
@@ -5497,7 +5498,7 @@ class Client(fb303.FacebookService.Client, Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "show_compact failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "show_compact failed: unknown result")
 
   def add_dynamic_partitions(self, rqst):
     """
@@ -5561,7 +5562,7 @@ class Client(fb303.FacebookService.Client, Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_next_notification failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_next_notification failed: unknown result")
 
   def get_current_notificationEventId(self):
     self.send_get_current_notificationEventId()
@@ -5587,7 +5588,7 @@ class Client(fb303.FacebookService.Client, Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_current_notificationEventId failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_current_notificationEventId failed: unknown result")
 
   def fire_listener_event(self, rqst):
     """
@@ -5618,7 +5619,7 @@ class Client(fb303.FacebookService.Client, Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "fire_listener_event failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "fire_listener_event failed: unknown result")
 
   def flushCache(self):
     self.send_flushCache()
@@ -5673,7 +5674,7 @@ class Client(fb303.FacebookService.Client, Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_file_metadata_by_expr failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_file_metadata_by_expr failed: unknown result")
 
   def get_file_metadata(self, req):
     """
@@ -5704,7 +5705,7 @@ class Client(fb303.FacebookService.Client, Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_file_metadata failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_file_metadata failed: unknown result")
 
   def put_file_metadata(self, req):
     """
@@ -5735,7 +5736,7 @@ class Client(fb303.FacebookService.Client, Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "put_file_metadata failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "put_file_metadata failed: unknown result")
 
   def clear_file_metadata(self, req):
     """
@@ -5766,7 +5767,7 @@ class Client(fb303.FacebookService.Client, Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "clear_file_metadata failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "clear_file_metadata failed: unknown result")
 
 
 class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
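
(Editorial note, not part of the patch: the Client-side hunks above are purely cosmetic -- the 0.9.3 generator drops the trailing semicolons that 0.9.2 emitted after the final raise. A minimal sketch of the result handling that every generated recv_* method follows is below; unpack_result, method_name and the field-name loop are illustrative placeholders, not names from the generated code.)

    # Sketch of the recv_* result handling pattern, assuming `result` is one of
    # the generated *_result structs (success plus declared exception fields).
    from thrift.Thrift import TApplicationException

    def unpack_result(result, method_name):
        # Return the success value if the server produced one.
        if getattr(result, 'success', None) is not None:
            return result.success
        # Re-raise any declared (IDL-level) exception carried in the result.
        for field in ('o1', 'o2', 'o3', 'o4', 'ex'):
            declared = getattr(result, field, None)
            if declared is not None:
                raise declared
        # Otherwise the reply carried nothing usable; note the statement no
        # longer ends with a semicolon in the 0.9.3 output.
        raise TApplicationException(
            TApplicationException.MISSING_RESULT,
            "%s failed: unknown result" % method_name)
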
@@ -5924,9 +5925,17 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = getMetaConf_result()
     try:
       result.success = self._handler.getMetaConf(args.key)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    oprot.writeMessageBegin("getMetaConf", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("getMetaConf", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -5938,9 +5947,17 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = setMetaConf_result()
     try:
       self._handler.setMetaConf(args.key, args.value)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    oprot.writeMessageBegin("setMetaConf", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("setMetaConf", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -5952,13 +5969,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = create_database_result()
     try:
       self._handler.create_database(args.database)
-    except AlreadyExistsException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except AlreadyExistsException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except InvalidObjectException, o2:
+    except InvalidObjectException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("create_database", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("create_database", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -5970,11 +5997,20 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_database_result()
     try:
       result.success = self._handler.get_database(args.name)
-    except NoSuchObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except NoSuchObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except MetaException, o2:
+    except MetaException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    oprot.writeMessageBegin("get_database", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_database", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -5986,13 +6022,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = drop_database_result()
     try:
       self._handler.drop_database(args.name, args.deleteData, args.cascade)
-    except NoSuchObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except NoSuchObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except InvalidOperationException, o2:
+    except InvalidOperationException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("drop_database", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("drop_database", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6004,9 +6050,17 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_databases_result()
     try:
       result.success = self._handler.get_databases(args.pattern)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    oprot.writeMessageBegin("get_databases", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_databases", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6018,9 +6072,17 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_all_databases_result()
     try:
       result.success = self._handler.get_all_databases()
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    oprot.writeMessageBegin("get_all_databases", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_all_databases", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6032,11 +6094,20 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = alter_database_result()
     try:
       self._handler.alter_database(args.dbname, args.db)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except NoSuchObjectException, o2:
+    except NoSuchObjectException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    oprot.writeMessageBegin("alter_database", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("alter_database", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6048,11 +6119,20 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_type_result()
     try:
       result.success = self._handler.get_type(args.name)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except NoSuchObjectException, o2:
+    except NoSuchObjectException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    oprot.writeMessageBegin("get_type", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_type", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6064,13 +6144,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = create_type_result()
     try:
       result.success = self._handler.create_type(args.type)
-    except AlreadyExistsException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except AlreadyExistsException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except InvalidObjectException, o2:
+    except InvalidObjectException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("create_type", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("create_type", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6082,11 +6172,20 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = drop_type_result()
     try:
       result.success = self._handler.drop_type(args.type)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except NoSuchObjectException, o2:
+    except NoSuchObjectException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    oprot.writeMessageBegin("drop_type", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("drop_type", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6098,9 +6197,17 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_type_all_result()
     try:
       result.success = self._handler.get_type_all(args.name)
-    except MetaException, o2:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    oprot.writeMessageBegin("get_type_all", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_type_all", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6112,13 +6219,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_fields_result()
     try:
       result.success = self._handler.get_fields(args.db_name, args.table_name)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except UnknownTableException, o2:
+    except UnknownTableException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except UnknownDBException, o3:
+    except UnknownDBException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("get_fields", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_fields", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6130,13 +6247,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_fields_with_environment_context_result()
     try:
       result.success = self._handler.get_fields_with_environment_context(args.db_name, args.table_name, args.environment_context)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except UnknownTableException, o2:
+    except UnknownTableException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except UnknownDBException, o3:
+    except UnknownDBException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("get_fields_with_environment_context", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_fields_with_environment_context", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6148,13 +6275,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_schema_result()
     try:
       result.success = self._handler.get_schema(args.db_name, args.table_name)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except UnknownTableException, o2:
+    except UnknownTableException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except UnknownDBException, o3:
+    except UnknownDBException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("get_schema", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_schema", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6166,13 +6303,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_schema_with_environment_context_result()
     try:
       result.success = self._handler.get_schema_with_environment_context(args.db_name, args.table_name, args.environment_context)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except UnknownTableException, o2:
+    except UnknownTableException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except UnknownDBException, o3:
+    except UnknownDBException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("get_schema_with_environment_context", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_schema_with_environment_context", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6184,15 +6331,26 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = create_table_result()
     try:
       self._handler.create_table(args.tbl)
-    except AlreadyExistsException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except AlreadyExistsException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except InvalidObjectException, o2:
+    except InvalidObjectException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    except NoSuchObjectException, o4:
+    except NoSuchObjectException as o4:
+      msg_type = TMessageType.REPLY
       result.o4 = o4
-    oprot.writeMessageBegin("create_table", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("create_table", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6204,15 +6362,26 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = create_table_with_environment_context_result()
     try:
       self._handler.create_table_with_environment_context(args.tbl, args.environment_context)
-    except AlreadyExistsException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except AlreadyExistsException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except InvalidObjectException, o2:
+    except InvalidObjectException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    except NoSuchObjectException, o4:
+    except NoSuchObjectException as o4:
+      msg_type = TMessageType.REPLY
       result.o4 = o4
-    oprot.writeMessageBegin("create_table_with_environment_context", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("create_table_with_environment_context", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6224,11 +6393,20 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = drop_table_result()
     try:
       self._handler.drop_table(args.dbname, args.name, args.deleteData)
-    except NoSuchObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except NoSuchObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("drop_table", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("drop_table", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6240,11 +6418,20 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = drop_table_with_environment_context_result()
     try:
       self._handler.drop_table_with_environment_context(args.dbname, args.name, args.deleteData, args.environment_context)
-    except NoSuchObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except NoSuchObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("drop_table_with_environment_context", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("drop_table_with_environment_context", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6256,9 +6443,17 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_tables_result()
     try:
       result.success = self._handler.get_tables(args.db_name, args.pattern)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    oprot.writeMessageBegin("get_tables", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_tables", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6270,9 +6465,17 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_all_tables_result()
     try:
       result.success = self._handler.get_all_tables(args.db_name)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    oprot.writeMessageBegin("get_all_tables", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_all_tables", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6284,11 +6487,20 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_table_result()
     try:
       result.success = self._handler.get_table(args.dbname, args.tbl_name)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except NoSuchObjectException, o2:
+    except NoSuchObjectException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    oprot.writeMessageBegin("get_table", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_table", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6300,13 +6512,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_table_objects_by_name_result()
     try:
       result.success = self._handler.get_table_objects_by_name(args.dbname, args.tbl_names)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except InvalidOperationException, o2:
+    except InvalidOperationException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except UnknownDBException, o3:
+    except UnknownDBException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("get_table_objects_by_name", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_table_objects_by_name", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6318,13 +6540,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = get_table_names_by_filter_result()
     try:
       result.success = self._handler.get_table_names_by_filter(args.dbname, args.filter, args.max_tables)
-    except MetaException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except MetaException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except InvalidOperationException, o2:
+    except InvalidOperationException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except UnknownDBException, o3:
+    except UnknownDBException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("get_table_names_by_filter", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("get_table_names_by_filter", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6336,11 +6568,20 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = alter_table_result()
     try:
       self._handler.alter_table(args.dbname, args.tbl_name, args.new_tbl)
-    except InvalidOperationException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except InvalidOperationException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except MetaException, o2:
+    except MetaException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    oprot.writeMessageBegin("alter_table", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("alter_table", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6352,11 +6593,20 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = alter_table_with_environment_context_result()
     try:
       self._handler.alter_table_with_environment_context(args.dbname, args.tbl_name, args.new_tbl, args.environment_context)
-    except InvalidOperationException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except InvalidOperationException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except MetaException, o2:
+    except MetaException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    oprot.writeMessageBegin("alter_table_with_environment_context", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("alter_table_with_environment_context", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6368,11 +6618,20 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = alter_table_with_cascade_result()
     try:
       self._handler.alter_table_with_cascade(args.dbname, args.tbl_name, args.new_tbl, args.cascade)
-    except InvalidOperationException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except InvalidOperationException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except MetaException, o2:
+    except MetaException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    oprot.writeMessageBegin("alter_table_with_cascade", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("alter_table_with_cascade", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6384,13 +6643,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = add_partition_result()
     try:
       result.success = self._handler.add_partition(args.new_part)
-    except InvalidObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except InvalidObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except AlreadyExistsException, o2:
+    except AlreadyExistsException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("add_partition", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("add_partition", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6402,13 +6671,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = add_partition_with_environment_context_result()
     try:
       result.success = self._handler.add_partition_with_environment_context(args.new_part, args.environment_context)
-    except InvalidObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except InvalidObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except AlreadyExistsException, o2:
+    except AlreadyExistsException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("add_partition_with_environment_context", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("add_partition_with_environment_context", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6420,13 +6699,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = add_partitions_result()
     try:
       result.success = self._handler.add_partitions(args.new_parts)
-    except InvalidObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except InvalidObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except AlreadyExistsException, o2:
+    except AlreadyExistsException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("add_partitions", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("add_partitions", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6438,13 +6727,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = add_partitions_pspec_result()
     try:
       result.success = self._handler.add_partitions_pspec(args.new_parts)
-    except InvalidObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except InvalidObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except AlreadyExistsException, o2:
+    except AlreadyExistsException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("add_partitions_pspec", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("add_partitions_pspec", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6456,13 +6755,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = append_partition_result()
     try:
       result.success = self._handler.append_partition(args.db_name, args.tbl_name, args.part_vals)
-    except InvalidObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except InvalidObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except AlreadyExistsException, o2:
+    except AlreadyExistsException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("append_partition", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("append_partition", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6474,13 +6783,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = add_partitions_req_result()
     try:
       result.success = self._handler.add_partitions_req(args.request)
-    except InvalidObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except InvalidObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except AlreadyExistsException, o2:
+    except AlreadyExistsException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("add_partitions_req", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("add_partitions_req", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6492,13 +6811,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = append_partition_with_environment_context_result()
     try:
       result.success = self._handler.append_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.environment_context)
-    except InvalidObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except InvalidObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except AlreadyExistsException, o2:
+    except AlreadyExistsException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("append_partition_with_environment_context", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("append_partition_with_environment_context", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6510,13 +6839,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = append_partition_by_name_result()
     try:
       result.success = self._handler.append_partition_by_name(args.db_name, args.tbl_name, args.part_name)
-    except InvalidObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except InvalidObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except AlreadyExistsException, o2:
+    except AlreadyExistsException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("append_partition_by_name", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("append_partition_by_name", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6528,13 +6867,23 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = append_partition_by_name_with_environment_context_result()
     try:
       result.success = self._handler.append_partition_by_name_with_environment_context(args.db_name, args.tbl_name, args.part_name, args.environment_context)
-    except InvalidObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except InvalidObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except AlreadyExistsException, o2:
+    except AlreadyExistsException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    except MetaException, o3:
+    except MetaException as o3:
+      msg_type = TMessageType.REPLY
       result.o3 = o3
-    oprot.writeMessageBegin("append_partition_by_name_with_environment_context", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("append_partition_by_name_with_environment_context", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6546,11 +6895,20 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = drop_partition_result()
     try:
       result.success = self._handler.drop_partition(args.db_name, args.tbl_name, args.part_vals, args.deleteData)
-    except NoSuchObjectException, o1:
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except NoSuchObjectException as o1:
+      msg_type = TMessageType.REPLY
       result.o1 = o1
-    except MetaException, o2:
+    except MetaException as o2:
+      msg_type = TMessageType.REPLY
       result.o2 = o2
-    oprot.writeMessageBegin("drop_partition", TMessageType.REPLY, seqid)
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("drop_partition", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -6562,11 +6920,20 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = drop_partition_with_environment_context_result()
     try:
       result.success = self._handler.drop_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.deleteData, args.environment_context)
-    except NoSuchObjectException, o1:
+      msg_type = TMe

<TRUNCATED>
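
(Editorial note, not part of the patch: the Processor hunks above all follow one new shape in the 0.9.3 output -- Python 3-compatible "except ... as ..." syntax, a msg_type flag per call, pass-through of transport errors and interpreter shutdown, REPLY for declared IDL exceptions, and a logged TApplicationException INTERNAL_ERROR for anything unexpected. A condensed sketch of that shape follows; ExampleHandler-style names such as handle_call, get_thing and DeclaredError are stand-ins for illustration, not code from the patch.)

    # Condensed sketch of the process_* exception-handling pattern emitted by
    # the Thrift 0.9.3 generator, as shown in the hunks above.
    import logging

    from thrift.Thrift import TApplicationException, TMessageType
    from thrift.transport import TTransport

    class DeclaredError(Exception):
        """Stands in for an IDL-declared exception such as MetaException."""

    def handle_call(handler, args, result):
        try:
            result.success = handler.get_thing(args)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            # Transport failures and interpreter shutdown propagate untouched.
            raise
        except DeclaredError as o1:
            # Declared exceptions still travel back inside the result struct.
            msg_type = TMessageType.REPLY
            result.o1 = o1
        except Exception as ex:
            # Anything unexpected is logged and reported as an internal error
            # instead of crashing the processor thread.
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(
                TApplicationException.INTERNAL_ERROR, 'Internal error')
        return msg_type, result
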

[22/55] [abbrv] hive git commit: HIVE-12253 : revert HIVE-12061 (Sergey Shelukhin, reviewed by Prasanth Jayachandran)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
index 5fd4a90..cb0ee7a 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
@@ -151,14 +151,6 @@ const char* _kResourceTypeNames[] = {
 };
 const std::map<int, const char*> _ResourceType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(3, _kResourceTypeValues, _kResourceTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
 
-int _kFileMetadataExprTypeValues[] = {
-  FileMetadataExprType::ORC_SARG
-};
-const char* _kFileMetadataExprTypeNames[] = {
-  "ORC_SARG"
-};
-const std::map<int, const char*> _FileMetadataExprType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(1, _kFileMetadataExprTypeValues, _kFileMetadataExprTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
-
 
 Version::~Version() throw() {
 }
@@ -14270,11 +14262,6 @@ void GetFileMetadataByExprRequest::__set_doGetFooters(const bool val) {
 __isset.doGetFooters = true;
 }
 
-void GetFileMetadataByExprRequest::__set_type(const FileMetadataExprType::type val) {
-  this->type = val;
-__isset.type = true;
-}
-
 uint32_t GetFileMetadataByExprRequest::read(::apache::thrift::protocol::TProtocol* iprot) {
 
   apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
@@ -14334,16 +14321,6 @@ uint32_t GetFileMetadataByExprRequest::read(::apache::thrift::protocol::TProtoco
           xfer += iprot->skip(ftype);
         }
         break;
-      case 4:
-        if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast626;
-          xfer += iprot->readI32(ecast626);
-          this->type = (FileMetadataExprType::type)ecast626;
-          this->__isset.type = true;
-        } else {
-          xfer += iprot->skip(ftype);
-        }
-        break;
       default:
         xfer += iprot->skip(ftype);
         break;
@@ -14368,10 +14345,10 @@ uint32_t GetFileMetadataByExprRequest::write(::apache::thrift::protocol::TProtoc
   xfer += oprot->writeFieldBegin("fileIds", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_I64, static_cast<uint32_t>(this->fileIds.size()));
-    std::vector<int64_t> ::const_iterator _iter627;
-    for (_iter627 = this->fileIds.begin(); _iter627 != this->fileIds.end(); ++_iter627)
+    std::vector<int64_t> ::const_iterator _iter626;
+    for (_iter626 = this->fileIds.begin(); _iter626 != this->fileIds.end(); ++_iter626)
     {
-      xfer += oprot->writeI64((*_iter627));
+      xfer += oprot->writeI64((*_iter626));
     }
     xfer += oprot->writeListEnd();
   }
@@ -14386,11 +14363,6 @@ uint32_t GetFileMetadataByExprRequest::write(::apache::thrift::protocol::TProtoc
     xfer += oprot->writeBool(this->doGetFooters);
     xfer += oprot->writeFieldEnd();
   }
-  if (this->__isset.type) {
-    xfer += oprot->writeFieldBegin("type", ::apache::thrift::protocol::T_I32, 4);
-    xfer += oprot->writeI32((int32_t)this->type);
-    xfer += oprot->writeFieldEnd();
-  }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
   return xfer;
@@ -14401,23 +14373,20 @@ void swap(GetFileMetadataByExprRequest &a, GetFileMetadataByExprRequest &b) {
   swap(a.fileIds, b.fileIds);
   swap(a.expr, b.expr);
   swap(a.doGetFooters, b.doGetFooters);
-  swap(a.type, b.type);
   swap(a.__isset, b.__isset);
 }
 
-GetFileMetadataByExprRequest::GetFileMetadataByExprRequest(const GetFileMetadataByExprRequest& other628) {
+GetFileMetadataByExprRequest::GetFileMetadataByExprRequest(const GetFileMetadataByExprRequest& other627) {
+  fileIds = other627.fileIds;
+  expr = other627.expr;
+  doGetFooters = other627.doGetFooters;
+  __isset = other627.__isset;
+}
+GetFileMetadataByExprRequest& GetFileMetadataByExprRequest::operator=(const GetFileMetadataByExprRequest& other628) {
   fileIds = other628.fileIds;
   expr = other628.expr;
   doGetFooters = other628.doGetFooters;
-  type = other628.type;
   __isset = other628.__isset;
-}
-GetFileMetadataByExprRequest& GetFileMetadataByExprRequest::operator=(const GetFileMetadataByExprRequest& other629) {
-  fileIds = other629.fileIds;
-  expr = other629.expr;
-  doGetFooters = other629.doGetFooters;
-  type = other629.type;
-  __isset = other629.__isset;
   return *this;
 }
 void GetFileMetadataByExprRequest::printTo(std::ostream& out) const {
@@ -14426,7 +14395,6 @@ void GetFileMetadataByExprRequest::printTo(std::ostream& out) const {
   out << "fileIds=" << to_string(fileIds);
   out << ", " << "expr=" << to_string(expr);
   out << ", " << "doGetFooters="; (__isset.doGetFooters ? (out << to_string(doGetFooters)) : (out << "<null>"));
-  out << ", " << "type="; (__isset.type ? (out << to_string(type)) : (out << "<null>"));
   out << ")";
 }
 
@@ -14470,17 +14438,17 @@ uint32_t GetFileMetadataResult::read(::apache::thrift::protocol::TProtocol* ipro
         if (ftype == ::apache::thrift::protocol::T_MAP) {
           {
             this->metadata.clear();
-            uint32_t _size630;
-            ::apache::thrift::protocol::TType _ktype631;
-            ::apache::thrift::protocol::TType _vtype632;
-            xfer += iprot->readMapBegin(_ktype631, _vtype632, _size630);
-            uint32_t _i634;
-            for (_i634 = 0; _i634 < _size630; ++_i634)
+            uint32_t _size629;
+            ::apache::thrift::protocol::TType _ktype630;
+            ::apache::thrift::protocol::TType _vtype631;
+            xfer += iprot->readMapBegin(_ktype630, _vtype631, _size629);
+            uint32_t _i633;
+            for (_i633 = 0; _i633 < _size629; ++_i633)
             {
-              int64_t _key635;
-              xfer += iprot->readI64(_key635);
-              std::string& _val636 = this->metadata[_key635];
-              xfer += iprot->readBinary(_val636);
+              int64_t _key634;
+              xfer += iprot->readI64(_key634);
+              std::string& _val635 = this->metadata[_key634];
+              xfer += iprot->readBinary(_val635);
             }
             xfer += iprot->readMapEnd();
           }
@@ -14521,11 +14489,11 @@ uint32_t GetFileMetadataResult::write(::apache::thrift::protocol::TProtocol* opr
   xfer += oprot->writeFieldBegin("metadata", ::apache::thrift::protocol::T_MAP, 1);
   {
     xfer += oprot->writeMapBegin(::apache::thrift::protocol::T_I64, ::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->metadata.size()));
-    std::map<int64_t, std::string> ::const_iterator _iter637;
-    for (_iter637 = this->metadata.begin(); _iter637 != this->metadata.end(); ++_iter637)
+    std::map<int64_t, std::string> ::const_iterator _iter636;
+    for (_iter636 = this->metadata.begin(); _iter636 != this->metadata.end(); ++_iter636)
     {
-      xfer += oprot->writeI64(_iter637->first);
-      xfer += oprot->writeBinary(_iter637->second);
+      xfer += oprot->writeI64(_iter636->first);
+      xfer += oprot->writeBinary(_iter636->second);
     }
     xfer += oprot->writeMapEnd();
   }
@@ -14546,13 +14514,13 @@ void swap(GetFileMetadataResult &a, GetFileMetadataResult &b) {
   swap(a.isSupported, b.isSupported);
 }
 
-GetFileMetadataResult::GetFileMetadataResult(const GetFileMetadataResult& other638) {
+GetFileMetadataResult::GetFileMetadataResult(const GetFileMetadataResult& other637) {
+  metadata = other637.metadata;
+  isSupported = other637.isSupported;
+}
+GetFileMetadataResult& GetFileMetadataResult::operator=(const GetFileMetadataResult& other638) {
   metadata = other638.metadata;
   isSupported = other638.isSupported;
-}
-GetFileMetadataResult& GetFileMetadataResult::operator=(const GetFileMetadataResult& other639) {
-  metadata = other639.metadata;
-  isSupported = other639.isSupported;
   return *this;
 }
 void GetFileMetadataResult::printTo(std::ostream& out) const {
@@ -14598,14 +14566,14 @@ uint32_t GetFileMetadataRequest::read(::apache::thrift::protocol::TProtocol* ipr
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->fileIds.clear();
-            uint32_t _size640;
-            ::apache::thrift::protocol::TType _etype643;
-            xfer += iprot->readListBegin(_etype643, _size640);
-            this->fileIds.resize(_size640);
-            uint32_t _i644;
-            for (_i644 = 0; _i644 < _size640; ++_i644)
+            uint32_t _size639;
+            ::apache::thrift::protocol::TType _etype642;
+            xfer += iprot->readListBegin(_etype642, _size639);
+            this->fileIds.resize(_size639);
+            uint32_t _i643;
+            for (_i643 = 0; _i643 < _size639; ++_i643)
             {
-              xfer += iprot->readI64(this->fileIds[_i644]);
+              xfer += iprot->readI64(this->fileIds[_i643]);
             }
             xfer += iprot->readListEnd();
           }
@@ -14636,10 +14604,10 @@ uint32_t GetFileMetadataRequest::write(::apache::thrift::protocol::TProtocol* op
   xfer += oprot->writeFieldBegin("fileIds", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_I64, static_cast<uint32_t>(this->fileIds.size()));
-    std::vector<int64_t> ::const_iterator _iter645;
-    for (_iter645 = this->fileIds.begin(); _iter645 != this->fileIds.end(); ++_iter645)
+    std::vector<int64_t> ::const_iterator _iter644;
+    for (_iter644 = this->fileIds.begin(); _iter644 != this->fileIds.end(); ++_iter644)
     {
-      xfer += oprot->writeI64((*_iter645));
+      xfer += oprot->writeI64((*_iter644));
     }
     xfer += oprot->writeListEnd();
   }
@@ -14655,11 +14623,11 @@ void swap(GetFileMetadataRequest &a, GetFileMetadataRequest &b) {
   swap(a.fileIds, b.fileIds);
 }
 
-GetFileMetadataRequest::GetFileMetadataRequest(const GetFileMetadataRequest& other646) {
-  fileIds = other646.fileIds;
+GetFileMetadataRequest::GetFileMetadataRequest(const GetFileMetadataRequest& other645) {
+  fileIds = other645.fileIds;
 }
-GetFileMetadataRequest& GetFileMetadataRequest::operator=(const GetFileMetadataRequest& other647) {
-  fileIds = other647.fileIds;
+GetFileMetadataRequest& GetFileMetadataRequest::operator=(const GetFileMetadataRequest& other646) {
+  fileIds = other646.fileIds;
   return *this;
 }
 void GetFileMetadataRequest::printTo(std::ostream& out) const {
@@ -14718,11 +14686,11 @@ void swap(PutFileMetadataResult &a, PutFileMetadataResult &b) {
   (void) b;
 }
 
-PutFileMetadataResult::PutFileMetadataResult(const PutFileMetadataResult& other648) {
-  (void) other648;
+PutFileMetadataResult::PutFileMetadataResult(const PutFileMetadataResult& other647) {
+  (void) other647;
 }
-PutFileMetadataResult& PutFileMetadataResult::operator=(const PutFileMetadataResult& other649) {
-  (void) other649;
+PutFileMetadataResult& PutFileMetadataResult::operator=(const PutFileMetadataResult& other648) {
+  (void) other648;
   return *this;
 }
 void PutFileMetadataResult::printTo(std::ostream& out) const {
@@ -14771,14 +14739,14 @@ uint32_t PutFileMetadataRequest::read(::apache::thrift::protocol::TProtocol* ipr
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->fileIds.clear();
-            uint32_t _size650;
-            ::apache::thrift::protocol::TType _etype653;
-            xfer += iprot->readListBegin(_etype653, _size650);
-            this->fileIds.resize(_size650);
-            uint32_t _i654;
-            for (_i654 = 0; _i654 < _size650; ++_i654)
+            uint32_t _size649;
+            ::apache::thrift::protocol::TType _etype652;
+            xfer += iprot->readListBegin(_etype652, _size649);
+            this->fileIds.resize(_size649);
+            uint32_t _i653;
+            for (_i653 = 0; _i653 < _size649; ++_i653)
             {
-              xfer += iprot->readI64(this->fileIds[_i654]);
+              xfer += iprot->readI64(this->fileIds[_i653]);
             }
             xfer += iprot->readListEnd();
           }
@@ -14791,14 +14759,14 @@ uint32_t PutFileMetadataRequest::read(::apache::thrift::protocol::TProtocol* ipr
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->metadata.clear();
-            uint32_t _size655;
-            ::apache::thrift::protocol::TType _etype658;
-            xfer += iprot->readListBegin(_etype658, _size655);
-            this->metadata.resize(_size655);
-            uint32_t _i659;
-            for (_i659 = 0; _i659 < _size655; ++_i659)
+            uint32_t _size654;
+            ::apache::thrift::protocol::TType _etype657;
+            xfer += iprot->readListBegin(_etype657, _size654);
+            this->metadata.resize(_size654);
+            uint32_t _i658;
+            for (_i658 = 0; _i658 < _size654; ++_i658)
             {
-              xfer += iprot->readBinary(this->metadata[_i659]);
+              xfer += iprot->readBinary(this->metadata[_i658]);
             }
             xfer += iprot->readListEnd();
           }
@@ -14831,10 +14799,10 @@ uint32_t PutFileMetadataRequest::write(::apache::thrift::protocol::TProtocol* op
   xfer += oprot->writeFieldBegin("fileIds", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_I64, static_cast<uint32_t>(this->fileIds.size()));
-    std::vector<int64_t> ::const_iterator _iter660;
-    for (_iter660 = this->fileIds.begin(); _iter660 != this->fileIds.end(); ++_iter660)
+    std::vector<int64_t> ::const_iterator _iter659;
+    for (_iter659 = this->fileIds.begin(); _iter659 != this->fileIds.end(); ++_iter659)
     {
-      xfer += oprot->writeI64((*_iter660));
+      xfer += oprot->writeI64((*_iter659));
     }
     xfer += oprot->writeListEnd();
   }
@@ -14843,10 +14811,10 @@ uint32_t PutFileMetadataRequest::write(::apache::thrift::protocol::TProtocol* op
   xfer += oprot->writeFieldBegin("metadata", ::apache::thrift::protocol::T_LIST, 2);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->metadata.size()));
-    std::vector<std::string> ::const_iterator _iter661;
-    for (_iter661 = this->metadata.begin(); _iter661 != this->metadata.end(); ++_iter661)
+    std::vector<std::string> ::const_iterator _iter660;
+    for (_iter660 = this->metadata.begin(); _iter660 != this->metadata.end(); ++_iter660)
     {
-      xfer += oprot->writeBinary((*_iter661));
+      xfer += oprot->writeBinary((*_iter660));
     }
     xfer += oprot->writeListEnd();
   }
@@ -14863,13 +14831,13 @@ void swap(PutFileMetadataRequest &a, PutFileMetadataRequest &b) {
   swap(a.metadata, b.metadata);
 }
 
-PutFileMetadataRequest::PutFileMetadataRequest(const PutFileMetadataRequest& other662) {
+PutFileMetadataRequest::PutFileMetadataRequest(const PutFileMetadataRequest& other661) {
+  fileIds = other661.fileIds;
+  metadata = other661.metadata;
+}
+PutFileMetadataRequest& PutFileMetadataRequest::operator=(const PutFileMetadataRequest& other662) {
   fileIds = other662.fileIds;
   metadata = other662.metadata;
-}
-PutFileMetadataRequest& PutFileMetadataRequest::operator=(const PutFileMetadataRequest& other663) {
-  fileIds = other663.fileIds;
-  metadata = other663.metadata;
   return *this;
 }
 void PutFileMetadataRequest::printTo(std::ostream& out) const {
@@ -14929,11 +14897,11 @@ void swap(ClearFileMetadataResult &a, ClearFileMetadataResult &b) {
   (void) b;
 }
 
-ClearFileMetadataResult::ClearFileMetadataResult(const ClearFileMetadataResult& other664) {
-  (void) other664;
+ClearFileMetadataResult::ClearFileMetadataResult(const ClearFileMetadataResult& other663) {
+  (void) other663;
 }
-ClearFileMetadataResult& ClearFileMetadataResult::operator=(const ClearFileMetadataResult& other665) {
-  (void) other665;
+ClearFileMetadataResult& ClearFileMetadataResult::operator=(const ClearFileMetadataResult& other664) {
+  (void) other664;
   return *this;
 }
 void ClearFileMetadataResult::printTo(std::ostream& out) const {
@@ -14977,14 +14945,14 @@ uint32_t ClearFileMetadataRequest::read(::apache::thrift::protocol::TProtocol* i
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->fileIds.clear();
-            uint32_t _size666;
-            ::apache::thrift::protocol::TType _etype669;
-            xfer += iprot->readListBegin(_etype669, _size666);
-            this->fileIds.resize(_size666);
-            uint32_t _i670;
-            for (_i670 = 0; _i670 < _size666; ++_i670)
+            uint32_t _size665;
+            ::apache::thrift::protocol::TType _etype668;
+            xfer += iprot->readListBegin(_etype668, _size665);
+            this->fileIds.resize(_size665);
+            uint32_t _i669;
+            for (_i669 = 0; _i669 < _size665; ++_i669)
             {
-              xfer += iprot->readI64(this->fileIds[_i670]);
+              xfer += iprot->readI64(this->fileIds[_i669]);
             }
             xfer += iprot->readListEnd();
           }
@@ -15015,10 +14983,10 @@ uint32_t ClearFileMetadataRequest::write(::apache::thrift::protocol::TProtocol*
   xfer += oprot->writeFieldBegin("fileIds", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_I64, static_cast<uint32_t>(this->fileIds.size()));
-    std::vector<int64_t> ::const_iterator _iter671;
-    for (_iter671 = this->fileIds.begin(); _iter671 != this->fileIds.end(); ++_iter671)
+    std::vector<int64_t> ::const_iterator _iter670;
+    for (_iter670 = this->fileIds.begin(); _iter670 != this->fileIds.end(); ++_iter670)
     {
-      xfer += oprot->writeI64((*_iter671));
+      xfer += oprot->writeI64((*_iter670));
     }
     xfer += oprot->writeListEnd();
   }
@@ -15034,11 +15002,11 @@ void swap(ClearFileMetadataRequest &a, ClearFileMetadataRequest &b) {
   swap(a.fileIds, b.fileIds);
 }
 
-ClearFileMetadataRequest::ClearFileMetadataRequest(const ClearFileMetadataRequest& other672) {
-  fileIds = other672.fileIds;
+ClearFileMetadataRequest::ClearFileMetadataRequest(const ClearFileMetadataRequest& other671) {
+  fileIds = other671.fileIds;
 }
-ClearFileMetadataRequest& ClearFileMetadataRequest::operator=(const ClearFileMetadataRequest& other673) {
-  fileIds = other673.fileIds;
+ClearFileMetadataRequest& ClearFileMetadataRequest::operator=(const ClearFileMetadataRequest& other672) {
+  fileIds = other672.fileIds;
   return *this;
 }
 void ClearFileMetadataRequest::printTo(std::ostream& out) const {
@@ -15083,14 +15051,14 @@ uint32_t GetAllFunctionsResponse::read(::apache::thrift::protocol::TProtocol* ip
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->functions.clear();
-            uint32_t _size674;
-            ::apache::thrift::protocol::TType _etype677;
-            xfer += iprot->readListBegin(_etype677, _size674);
-            this->functions.resize(_size674);
-            uint32_t _i678;
-            for (_i678 = 0; _i678 < _size674; ++_i678)
+            uint32_t _size673;
+            ::apache::thrift::protocol::TType _etype676;
+            xfer += iprot->readListBegin(_etype676, _size673);
+            this->functions.resize(_size673);
+            uint32_t _i677;
+            for (_i677 = 0; _i677 < _size673; ++_i677)
             {
-              xfer += this->functions[_i678].read(iprot);
+              xfer += this->functions[_i677].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -15120,10 +15088,10 @@ uint32_t GetAllFunctionsResponse::write(::apache::thrift::protocol::TProtocol* o
     xfer += oprot->writeFieldBegin("functions", ::apache::thrift::protocol::T_LIST, 1);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->functions.size()));
-      std::vector<Function> ::const_iterator _iter679;
-      for (_iter679 = this->functions.begin(); _iter679 != this->functions.end(); ++_iter679)
+      std::vector<Function> ::const_iterator _iter678;
+      for (_iter678 = this->functions.begin(); _iter678 != this->functions.end(); ++_iter678)
       {
-        xfer += (*_iter679).write(oprot);
+        xfer += (*_iter678).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -15140,13 +15108,13 @@ void swap(GetAllFunctionsResponse &a, GetAllFunctionsResponse &b) {
   swap(a.__isset, b.__isset);
 }
 
-GetAllFunctionsResponse::GetAllFunctionsResponse(const GetAllFunctionsResponse& other680) {
+GetAllFunctionsResponse::GetAllFunctionsResponse(const GetAllFunctionsResponse& other679) {
+  functions = other679.functions;
+  __isset = other679.__isset;
+}
+GetAllFunctionsResponse& GetAllFunctionsResponse::operator=(const GetAllFunctionsResponse& other680) {
   functions = other680.functions;
   __isset = other680.__isset;
-}
-GetAllFunctionsResponse& GetAllFunctionsResponse::operator=(const GetAllFunctionsResponse& other681) {
-  functions = other681.functions;
-  __isset = other681.__isset;
   return *this;
 }
 void GetAllFunctionsResponse::printTo(std::ostream& out) const {
@@ -15226,13 +15194,13 @@ void swap(MetaException &a, MetaException &b) {
   swap(a.__isset, b.__isset);
 }
 
-MetaException::MetaException(const MetaException& other682) : TException() {
+MetaException::MetaException(const MetaException& other681) : TException() {
+  message = other681.message;
+  __isset = other681.__isset;
+}
+MetaException& MetaException::operator=(const MetaException& other682) {
   message = other682.message;
   __isset = other682.__isset;
-}
-MetaException& MetaException::operator=(const MetaException& other683) {
-  message = other683.message;
-  __isset = other683.__isset;
   return *this;
 }
 void MetaException::printTo(std::ostream& out) const {
@@ -15323,13 +15291,13 @@ void swap(UnknownTableException &a, UnknownTableException &b) {
   swap(a.__isset, b.__isset);
 }
 
-UnknownTableException::UnknownTableException(const UnknownTableException& other684) : TException() {
+UnknownTableException::UnknownTableException(const UnknownTableException& other683) : TException() {
+  message = other683.message;
+  __isset = other683.__isset;
+}
+UnknownTableException& UnknownTableException::operator=(const UnknownTableException& other684) {
   message = other684.message;
   __isset = other684.__isset;
-}
-UnknownTableException& UnknownTableException::operator=(const UnknownTableException& other685) {
-  message = other685.message;
-  __isset = other685.__isset;
   return *this;
 }
 void UnknownTableException::printTo(std::ostream& out) const {
@@ -15420,13 +15388,13 @@ void swap(UnknownDBException &a, UnknownDBException &b) {
   swap(a.__isset, b.__isset);
 }
 
-UnknownDBException::UnknownDBException(const UnknownDBException& other686) : TException() {
+UnknownDBException::UnknownDBException(const UnknownDBException& other685) : TException() {
+  message = other685.message;
+  __isset = other685.__isset;
+}
+UnknownDBException& UnknownDBException::operator=(const UnknownDBException& other686) {
   message = other686.message;
   __isset = other686.__isset;
-}
-UnknownDBException& UnknownDBException::operator=(const UnknownDBException& other687) {
-  message = other687.message;
-  __isset = other687.__isset;
   return *this;
 }
 void UnknownDBException::printTo(std::ostream& out) const {
@@ -15517,13 +15485,13 @@ void swap(AlreadyExistsException &a, AlreadyExistsException &b) {
   swap(a.__isset, b.__isset);
 }
 
-AlreadyExistsException::AlreadyExistsException(const AlreadyExistsException& other688) : TException() {
+AlreadyExistsException::AlreadyExistsException(const AlreadyExistsException& other687) : TException() {
+  message = other687.message;
+  __isset = other687.__isset;
+}
+AlreadyExistsException& AlreadyExistsException::operator=(const AlreadyExistsException& other688) {
   message = other688.message;
   __isset = other688.__isset;
-}
-AlreadyExistsException& AlreadyExistsException::operator=(const AlreadyExistsException& other689) {
-  message = other689.message;
-  __isset = other689.__isset;
   return *this;
 }
 void AlreadyExistsException::printTo(std::ostream& out) const {
@@ -15614,13 +15582,13 @@ void swap(InvalidPartitionException &a, InvalidPartitionException &b) {
   swap(a.__isset, b.__isset);
 }
 
-InvalidPartitionException::InvalidPartitionException(const InvalidPartitionException& other690) : TException() {
+InvalidPartitionException::InvalidPartitionException(const InvalidPartitionException& other689) : TException() {
+  message = other689.message;
+  __isset = other689.__isset;
+}
+InvalidPartitionException& InvalidPartitionException::operator=(const InvalidPartitionException& other690) {
   message = other690.message;
   __isset = other690.__isset;
-}
-InvalidPartitionException& InvalidPartitionException::operator=(const InvalidPartitionException& other691) {
-  message = other691.message;
-  __isset = other691.__isset;
   return *this;
 }
 void InvalidPartitionException::printTo(std::ostream& out) const {
@@ -15711,13 +15679,13 @@ void swap(UnknownPartitionException &a, UnknownPartitionException &b) {
   swap(a.__isset, b.__isset);
 }
 
-UnknownPartitionException::UnknownPartitionException(const UnknownPartitionException& other692) : TException() {
+UnknownPartitionException::UnknownPartitionException(const UnknownPartitionException& other691) : TException() {
+  message = other691.message;
+  __isset = other691.__isset;
+}
+UnknownPartitionException& UnknownPartitionException::operator=(const UnknownPartitionException& other692) {
   message = other692.message;
   __isset = other692.__isset;
-}
-UnknownPartitionException& UnknownPartitionException::operator=(const UnknownPartitionException& other693) {
-  message = other693.message;
-  __isset = other693.__isset;
   return *this;
 }
 void UnknownPartitionException::printTo(std::ostream& out) const {
@@ -15808,13 +15776,13 @@ void swap(InvalidObjectException &a, InvalidObjectException &b) {
   swap(a.__isset, b.__isset);
 }
 
-InvalidObjectException::InvalidObjectException(const InvalidObjectException& other694) : TException() {
+InvalidObjectException::InvalidObjectException(const InvalidObjectException& other693) : TException() {
+  message = other693.message;
+  __isset = other693.__isset;
+}
+InvalidObjectException& InvalidObjectException::operator=(const InvalidObjectException& other694) {
   message = other694.message;
   __isset = other694.__isset;
-}
-InvalidObjectException& InvalidObjectException::operator=(const InvalidObjectException& other695) {
-  message = other695.message;
-  __isset = other695.__isset;
   return *this;
 }
 void InvalidObjectException::printTo(std::ostream& out) const {
@@ -15905,13 +15873,13 @@ void swap(NoSuchObjectException &a, NoSuchObjectException &b) {
   swap(a.__isset, b.__isset);
 }
 
-NoSuchObjectException::NoSuchObjectException(const NoSuchObjectException& other696) : TException() {
+NoSuchObjectException::NoSuchObjectException(const NoSuchObjectException& other695) : TException() {
+  message = other695.message;
+  __isset = other695.__isset;
+}
+NoSuchObjectException& NoSuchObjectException::operator=(const NoSuchObjectException& other696) {
   message = other696.message;
   __isset = other696.__isset;
-}
-NoSuchObjectException& NoSuchObjectException::operator=(const NoSuchObjectException& other697) {
-  message = other697.message;
-  __isset = other697.__isset;
   return *this;
 }
 void NoSuchObjectException::printTo(std::ostream& out) const {
@@ -16002,13 +15970,13 @@ void swap(IndexAlreadyExistsException &a, IndexAlreadyExistsException &b) {
   swap(a.__isset, b.__isset);
 }
 
-IndexAlreadyExistsException::IndexAlreadyExistsException(const IndexAlreadyExistsException& other698) : TException() {
+IndexAlreadyExistsException::IndexAlreadyExistsException(const IndexAlreadyExistsException& other697) : TException() {
+  message = other697.message;
+  __isset = other697.__isset;
+}
+IndexAlreadyExistsException& IndexAlreadyExistsException::operator=(const IndexAlreadyExistsException& other698) {
   message = other698.message;
   __isset = other698.__isset;
-}
-IndexAlreadyExistsException& IndexAlreadyExistsException::operator=(const IndexAlreadyExistsException& other699) {
-  message = other699.message;
-  __isset = other699.__isset;
   return *this;
 }
 void IndexAlreadyExistsException::printTo(std::ostream& out) const {
@@ -16099,13 +16067,13 @@ void swap(InvalidOperationException &a, InvalidOperationException &b) {
   swap(a.__isset, b.__isset);
 }
 
-InvalidOperationException::InvalidOperationException(const InvalidOperationException& other700) : TException() {
+InvalidOperationException::InvalidOperationException(const InvalidOperationException& other699) : TException() {
+  message = other699.message;
+  __isset = other699.__isset;
+}
+InvalidOperationException& InvalidOperationException::operator=(const InvalidOperationException& other700) {
   message = other700.message;
   __isset = other700.__isset;
-}
-InvalidOperationException& InvalidOperationException::operator=(const InvalidOperationException& other701) {
-  message = other701.message;
-  __isset = other701.__isset;
   return *this;
 }
 void InvalidOperationException::printTo(std::ostream& out) const {
@@ -16196,13 +16164,13 @@ void swap(ConfigValSecurityException &a, ConfigValSecurityException &b) {
   swap(a.__isset, b.__isset);
 }
 
-ConfigValSecurityException::ConfigValSecurityException(const ConfigValSecurityException& other702) : TException() {
+ConfigValSecurityException::ConfigValSecurityException(const ConfigValSecurityException& other701) : TException() {
+  message = other701.message;
+  __isset = other701.__isset;
+}
+ConfigValSecurityException& ConfigValSecurityException::operator=(const ConfigValSecurityException& other702) {
   message = other702.message;
   __isset = other702.__isset;
-}
-ConfigValSecurityException& ConfigValSecurityException::operator=(const ConfigValSecurityException& other703) {
-  message = other703.message;
-  __isset = other703.__isset;
   return *this;
 }
 void ConfigValSecurityException::printTo(std::ostream& out) const {
@@ -16293,13 +16261,13 @@ void swap(InvalidInputException &a, InvalidInputException &b) {
   swap(a.__isset, b.__isset);
 }
 
-InvalidInputException::InvalidInputException(const InvalidInputException& other704) : TException() {
+InvalidInputException::InvalidInputException(const InvalidInputException& other703) : TException() {
+  message = other703.message;
+  __isset = other703.__isset;
+}
+InvalidInputException& InvalidInputException::operator=(const InvalidInputException& other704) {
   message = other704.message;
   __isset = other704.__isset;
-}
-InvalidInputException& InvalidInputException::operator=(const InvalidInputException& other705) {
-  message = other705.message;
-  __isset = other705.__isset;
   return *this;
 }
 void InvalidInputException::printTo(std::ostream& out) const {
@@ -16390,13 +16358,13 @@ void swap(NoSuchTxnException &a, NoSuchTxnException &b) {
   swap(a.__isset, b.__isset);
 }
 
-NoSuchTxnException::NoSuchTxnException(const NoSuchTxnException& other706) : TException() {
+NoSuchTxnException::NoSuchTxnException(const NoSuchTxnException& other705) : TException() {
+  message = other705.message;
+  __isset = other705.__isset;
+}
+NoSuchTxnException& NoSuchTxnException::operator=(const NoSuchTxnException& other706) {
   message = other706.message;
   __isset = other706.__isset;
-}
-NoSuchTxnException& NoSuchTxnException::operator=(const NoSuchTxnException& other707) {
-  message = other707.message;
-  __isset = other707.__isset;
   return *this;
 }
 void NoSuchTxnException::printTo(std::ostream& out) const {
@@ -16487,13 +16455,13 @@ void swap(TxnAbortedException &a, TxnAbortedException &b) {
   swap(a.__isset, b.__isset);
 }
 
-TxnAbortedException::TxnAbortedException(const TxnAbortedException& other708) : TException() {
+TxnAbortedException::TxnAbortedException(const TxnAbortedException& other707) : TException() {
+  message = other707.message;
+  __isset = other707.__isset;
+}
+TxnAbortedException& TxnAbortedException::operator=(const TxnAbortedException& other708) {
   message = other708.message;
   __isset = other708.__isset;
-}
-TxnAbortedException& TxnAbortedException::operator=(const TxnAbortedException& other709) {
-  message = other709.message;
-  __isset = other709.__isset;
   return *this;
 }
 void TxnAbortedException::printTo(std::ostream& out) const {
@@ -16584,13 +16552,13 @@ void swap(TxnOpenException &a, TxnOpenException &b) {
   swap(a.__isset, b.__isset);
 }
 
-TxnOpenException::TxnOpenException(const TxnOpenException& other710) : TException() {
+TxnOpenException::TxnOpenException(const TxnOpenException& other709) : TException() {
+  message = other709.message;
+  __isset = other709.__isset;
+}
+TxnOpenException& TxnOpenException::operator=(const TxnOpenException& other710) {
   message = other710.message;
   __isset = other710.__isset;
-}
-TxnOpenException& TxnOpenException::operator=(const TxnOpenException& other711) {
-  message = other711.message;
-  __isset = other711.__isset;
   return *this;
 }
 void TxnOpenException::printTo(std::ostream& out) const {
@@ -16681,13 +16649,13 @@ void swap(NoSuchLockException &a, NoSuchLockException &b) {
   swap(a.__isset, b.__isset);
 }
 
-NoSuchLockException::NoSuchLockException(const NoSuchLockException& other712) : TException() {
+NoSuchLockException::NoSuchLockException(const NoSuchLockException& other711) : TException() {
+  message = other711.message;
+  __isset = other711.__isset;
+}
+NoSuchLockException& NoSuchLockException::operator=(const NoSuchLockException& other712) {
   message = other712.message;
   __isset = other712.__isset;
-}
-NoSuchLockException& NoSuchLockException::operator=(const NoSuchLockException& other713) {
-  message = other713.message;
-  __isset = other713.__isset;
   return *this;
 }
 void NoSuchLockException::printTo(std::ostream& out) const {

http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
index 53ab272..c20badd 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
@@ -137,14 +137,6 @@ struct ResourceType {
 
 extern const std::map<int, const char*> _ResourceType_VALUES_TO_NAMES;
 
-struct FileMetadataExprType {
-  enum type {
-    ORC_SARG = 1
-  };
-};
-
-extern const std::map<int, const char*> _FileMetadataExprType_VALUES_TO_NAMES;
-
 class Version;
 
 class FieldSchema;
@@ -5805,9 +5797,8 @@ inline std::ostream& operator<<(std::ostream& out, const GetFileMetadataByExprRe
 }
 
 typedef struct _GetFileMetadataByExprRequest__isset {
-  _GetFileMetadataByExprRequest__isset() : doGetFooters(false), type(false) {}
+  _GetFileMetadataByExprRequest__isset() : doGetFooters(false) {}
   bool doGetFooters :1;
-  bool type :1;
 } _GetFileMetadataByExprRequest__isset;
 
 class GetFileMetadataByExprRequest {
@@ -5815,14 +5806,13 @@ class GetFileMetadataByExprRequest {
 
   GetFileMetadataByExprRequest(const GetFileMetadataByExprRequest&);
   GetFileMetadataByExprRequest& operator=(const GetFileMetadataByExprRequest&);
-  GetFileMetadataByExprRequest() : expr(), doGetFooters(0), type((FileMetadataExprType::type)0) {
+  GetFileMetadataByExprRequest() : expr(), doGetFooters(0) {
   }
 
   virtual ~GetFileMetadataByExprRequest() throw();
   std::vector<int64_t>  fileIds;
   std::string expr;
   bool doGetFooters;
-  FileMetadataExprType::type type;
 
   _GetFileMetadataByExprRequest__isset __isset;
 
@@ -5832,8 +5822,6 @@ class GetFileMetadataByExprRequest {
 
   void __set_doGetFooters(const bool val);
 
-  void __set_type(const FileMetadataExprType::type val);
-
   bool operator == (const GetFileMetadataByExprRequest & rhs) const
   {
     if (!(fileIds == rhs.fileIds))
@@ -5844,10 +5832,6 @@ class GetFileMetadataByExprRequest {
       return false;
     else if (__isset.doGetFooters && !(doGetFooters == rhs.doGetFooters))
       return false;
-    if (__isset.type != rhs.__isset.type)
-      return false;
-    else if (__isset.type && !(type == rhs.type))
-      return false;
     return true;
   }
   bool operator != (const GetFileMetadataByExprRequest &rhs) const {

http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FileMetadataExprType.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FileMetadataExprType.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FileMetadataExprType.java
deleted file mode 100644
index 4e393e2..0000000
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FileMetadataExprType.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.9.3)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- *  @generated
- */
-package org.apache.hadoop.hive.metastore.api;
-
-
-import java.util.Map;
-import java.util.HashMap;
-import org.apache.thrift.TEnum;
-
-public enum FileMetadataExprType implements org.apache.thrift.TEnum {
-  ORC_SARG(1);
-
-  private final int value;
-
-  private FileMetadataExprType(int value) {
-    this.value = value;
-  }
-
-  /**
-   * Get the integer value of this enum value, as defined in the Thrift IDL.
-   */
-  public int getValue() {
-    return value;
-  }
-
-  /**
-   * Find a the enum type by its integer value, as defined in the Thrift IDL.
-   * @return null if the value is not found.
-   */
-  public static FileMetadataExprType findByValue(int value) { 
-    switch (value) {
-      case 1:
-        return ORC_SARG;
-      default:
-        return null;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
index 0236b4a..b880093 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
@@ -41,7 +41,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
   private static final org.apache.thrift.protocol.TField FILE_IDS_FIELD_DESC = new org.apache.thrift.protocol.TField("fileIds", org.apache.thrift.protocol.TType.LIST, (short)1);
   private static final org.apache.thrift.protocol.TField EXPR_FIELD_DESC = new org.apache.thrift.protocol.TField("expr", org.apache.thrift.protocol.TType.STRING, (short)2);
   private static final org.apache.thrift.protocol.TField DO_GET_FOOTERS_FIELD_DESC = new org.apache.thrift.protocol.TField("doGetFooters", org.apache.thrift.protocol.TType.BOOL, (short)3);
-  private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("type", org.apache.thrift.protocol.TType.I32, (short)4);
 
   private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
   static {
@@ -52,18 +51,12 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
   private List<Long> fileIds; // required
   private ByteBuffer expr; // required
   private boolean doGetFooters; // optional
-  private FileMetadataExprType type; // optional
 
   /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
   public enum _Fields implements org.apache.thrift.TFieldIdEnum {
     FILE_IDS((short)1, "fileIds"),
     EXPR((short)2, "expr"),
-    DO_GET_FOOTERS((short)3, "doGetFooters"),
-    /**
-     * 
-     * @see FileMetadataExprType
-     */
-    TYPE((short)4, "type");
+    DO_GET_FOOTERS((short)3, "doGetFooters");
 
     private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 
@@ -84,8 +77,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
           return EXPR;
         case 3: // DO_GET_FOOTERS
           return DO_GET_FOOTERS;
-        case 4: // TYPE
-          return TYPE;
         default:
           return null;
       }
@@ -128,7 +119,7 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
   // isset id assignments
   private static final int __DOGETFOOTERS_ISSET_ID = 0;
   private byte __isset_bitfield = 0;
-  private static final _Fields optionals[] = {_Fields.DO_GET_FOOTERS,_Fields.TYPE};
+  private static final _Fields optionals[] = {_Fields.DO_GET_FOOTERS};
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -139,8 +130,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING        , true)));
     tmpMap.put(_Fields.DO_GET_FOOTERS, new org.apache.thrift.meta_data.FieldMetaData("doGetFooters", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
-    tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData("type", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
-        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, FileMetadataExprType.class)));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
     org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(GetFileMetadataByExprRequest.class, metaDataMap);
   }
@@ -170,9 +159,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
       this.expr = org.apache.thrift.TBaseHelper.copyBinary(other.expr);
     }
     this.doGetFooters = other.doGetFooters;
-    if (other.isSetType()) {
-      this.type = other.type;
-    }
   }
 
   public GetFileMetadataByExprRequest deepCopy() {
@@ -185,7 +171,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
     this.expr = null;
     setDoGetFootersIsSet(false);
     this.doGetFooters = false;
-    this.type = null;
   }
 
   public int getFileIdsSize() {
@@ -280,37 +265,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
     __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __DOGETFOOTERS_ISSET_ID, value);
   }
 
-  /**
-   * 
-   * @see FileMetadataExprType
-   */
-  public FileMetadataExprType getType() {
-    return this.type;
-  }
-
-  /**
-   * 
-   * @see FileMetadataExprType
-   */
-  public void setType(FileMetadataExprType type) {
-    this.type = type;
-  }
-
-  public void unsetType() {
-    this.type = null;
-  }
-
-  /** Returns true if field type is set (has been assigned a value) and false otherwise */
-  public boolean isSetType() {
-    return this.type != null;
-  }
-
-  public void setTypeIsSet(boolean value) {
-    if (!value) {
-      this.type = null;
-    }
-  }
-
   public void setFieldValue(_Fields field, Object value) {
     switch (field) {
     case FILE_IDS:
@@ -337,14 +291,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
       }
       break;
 
-    case TYPE:
-      if (value == null) {
-        unsetType();
-      } else {
-        setType((FileMetadataExprType)value);
-      }
-      break;
-
     }
   }
 
@@ -359,9 +305,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
     case DO_GET_FOOTERS:
       return isDoGetFooters();
 
-    case TYPE:
-      return getType();
-
     }
     throw new IllegalStateException();
   }
@@ -379,8 +322,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
       return isSetExpr();
     case DO_GET_FOOTERS:
       return isSetDoGetFooters();
-    case TYPE:
-      return isSetType();
     }
     throw new IllegalStateException();
   }
@@ -425,15 +366,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
         return false;
     }
 
-    boolean this_present_type = true && this.isSetType();
-    boolean that_present_type = true && that.isSetType();
-    if (this_present_type || that_present_type) {
-      if (!(this_present_type && that_present_type))
-        return false;
-      if (!this.type.equals(that.type))
-        return false;
-    }
-
     return true;
   }
 
@@ -456,11 +388,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
     if (present_doGetFooters)
       list.add(doGetFooters);
 
-    boolean present_type = true && (isSetType());
-    list.add(present_type);
-    if (present_type)
-      list.add(type.getValue());
-
     return list.hashCode();
   }
 
@@ -502,16 +429,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
         return lastComparison;
       }
     }
-    lastComparison = Boolean.valueOf(isSetType()).compareTo(other.isSetType());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetType()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type, other.type);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
     return 0;
   }
 
@@ -553,16 +470,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
       sb.append(this.doGetFooters);
       first = false;
     }
-    if (isSetType()) {
-      if (!first) sb.append(", ");
-      sb.append("type:");
-      if (this.type == null) {
-        sb.append("null");
-      } else {
-        sb.append(this.type);
-      }
-      first = false;
-    }
     sb.append(")");
     return sb.toString();
   }
@@ -650,14 +557,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
-          case 4: // TYPE
-            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
-              struct.type = org.apache.hadoop.hive.metastore.api.FileMetadataExprType.findByValue(iprot.readI32());
-              struct.setTypeIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
           default:
             org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
         }
@@ -693,13 +592,6 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
         oprot.writeBool(struct.doGetFooters);
         oprot.writeFieldEnd();
       }
-      if (struct.type != null) {
-        if (struct.isSetType()) {
-          oprot.writeFieldBegin(TYPE_FIELD_DESC);
-          oprot.writeI32(struct.type.getValue());
-          oprot.writeFieldEnd();
-        }
-      }
       oprot.writeFieldStop();
       oprot.writeStructEnd();
     }
@@ -729,16 +621,10 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
       if (struct.isSetDoGetFooters()) {
         optionals.set(0);
       }
-      if (struct.isSetType()) {
-        optionals.set(1);
-      }
-      oprot.writeBitSet(optionals, 2);
+      oprot.writeBitSet(optionals, 1);
       if (struct.isSetDoGetFooters()) {
         oprot.writeBool(struct.doGetFooters);
       }
-      if (struct.isSetType()) {
-        oprot.writeI32(struct.type.getValue());
-      }
     }
 
     @Override
@@ -757,15 +643,11 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
       struct.setFileIdsIsSet(true);
       struct.expr = iprot.readBinary();
       struct.setExprIsSet(true);
-      BitSet incoming = iprot.readBitSet(2);
+      BitSet incoming = iprot.readBitSet(1);
       if (incoming.get(0)) {
         struct.doGetFooters = iprot.readBool();
         struct.setDoGetFootersIsSet(true);
       }
-      if (incoming.get(1)) {
-        struct.type = org.apache.hadoop.hive.metastore.api.FileMetadataExprType.findByValue(iprot.readI32());
-        struct.setTypeIsSet(true);
-      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/gen/thrift/gen-php/metastore/Types.php
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-php/metastore/Types.php b/metastore/src/gen/thrift/gen-php/metastore/Types.php
index e63213d..3ec2b1c 100644
--- a/metastore/src/gen/thrift/gen-php/metastore/Types.php
+++ b/metastore/src/gen/thrift/gen-php/metastore/Types.php
@@ -143,13 +143,6 @@ final class ResourceType {
   );
 }
 
-final class FileMetadataExprType {
-  const ORC_SARG = 1;
-  static public $__names = array(
-    1 => 'ORC_SARG',
-  );
-}
-
 class Version {
   static $_TSPEC;
 
@@ -14115,10 +14108,6 @@ class GetFileMetadataByExprRequest {
    * @var bool
    */
   public $doGetFooters = null;
-  /**
-   * @var int
-   */
-  public $type = null;
 
   public function __construct($vals=null) {
     if (!isset(self::$_TSPEC)) {
@@ -14139,10 +14128,6 @@ class GetFileMetadataByExprRequest {
           'var' => 'doGetFooters',
           'type' => TType::BOOL,
           ),
-        4 => array(
-          'var' => 'type',
-          'type' => TType::I32,
-          ),
         );
     }
     if (is_array($vals)) {
@@ -14155,9 +14140,6 @@ class GetFileMetadataByExprRequest {
       if (isset($vals['doGetFooters'])) {
         $this->doGetFooters = $vals['doGetFooters'];
       }
-      if (isset($vals['type'])) {
-        $this->type = $vals['type'];
-      }
     }
   }
 
@@ -14211,13 +14193,6 @@ class GetFileMetadataByExprRequest {
             $xfer += $input->skip($ftype);
           }
           break;
-        case 4:
-          if ($ftype == TType::I32) {
-            $xfer += $input->readI32($this->type);
-          } else {
-            $xfer += $input->skip($ftype);
-          }
-          break;
         default:
           $xfer += $input->skip($ftype);
           break;
@@ -14258,11 +14233,6 @@ class GetFileMetadataByExprRequest {
       $xfer += $output->writeBool($this->doGetFooters);
       $xfer += $output->writeFieldEnd();
     }
-    if ($this->type !== null) {
-      $xfer += $output->writeFieldBegin('type', TType::I32, 4);
-      $xfer += $output->writeI32($this->type);
-      $xfer += $output->writeFieldEnd();
-    }
     $xfer += $output->writeFieldStop();
     $xfer += $output->writeStructEnd();
     return $xfer;

http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
index 8940dff..221d602 100644
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
@@ -213,17 +213,6 @@ class ResourceType:
     "ARCHIVE": 3,
   }
 
-class FileMetadataExprType:
-  ORC_SARG = 1
-
-  _VALUES_TO_NAMES = {
-    1: "ORC_SARG",
-  }
-
-  _NAMES_TO_VALUES = {
-    "ORC_SARG": 1,
-  }
-
 
 class Version:
   """
@@ -9938,7 +9927,6 @@ class GetFileMetadataByExprRequest:
    - fileIds
    - expr
    - doGetFooters
-   - type
   """
 
   thrift_spec = (
@@ -9946,14 +9934,12 @@ class GetFileMetadataByExprRequest:
     (1, TType.LIST, 'fileIds', (TType.I64,None), None, ), # 1
     (2, TType.STRING, 'expr', None, None, ), # 2
     (3, TType.BOOL, 'doGetFooters', None, None, ), # 3
-    (4, TType.I32, 'type', None, None, ), # 4
   )
 
-  def __init__(self, fileIds=None, expr=None, doGetFooters=None, type=None,):
+  def __init__(self, fileIds=None, expr=None, doGetFooters=None,):
     self.fileIds = fileIds
     self.expr = expr
     self.doGetFooters = doGetFooters
-    self.type = type
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -9984,11 +9970,6 @@ class GetFileMetadataByExprRequest:
           self.doGetFooters = iprot.readBool()
         else:
           iprot.skip(ftype)
-      elif fid == 4:
-        if ftype == TType.I32:
-          self.type = iprot.readI32()
-        else:
-          iprot.skip(ftype)
       else:
         iprot.skip(ftype)
       iprot.readFieldEnd()
@@ -10014,10 +9995,6 @@ class GetFileMetadataByExprRequest:
       oprot.writeFieldBegin('doGetFooters', TType.BOOL, 3)
       oprot.writeBool(self.doGetFooters)
       oprot.writeFieldEnd()
-    if self.type is not None:
-      oprot.writeFieldBegin('type', TType.I32, 4)
-      oprot.writeI32(self.type)
-      oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
@@ -10034,7 +10011,6 @@ class GetFileMetadataByExprRequest:
     value = (value * 31) ^ hash(self.fileIds)
     value = (value * 31) ^ hash(self.expr)
     value = (value * 31) ^ hash(self.doGetFooters)
-    value = (value * 31) ^ hash(self.type)
     return value
 
   def __repr__(self):
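
For orientation, this is what building the reverted Python request looks like once HIVE-12253 is applied: the struct takes only fileIds, expr and doGetFooters, and the former optional type field (FileMetadataExprType) is gone. A minimal usage sketch, assuming the generated hive_metastore package is importable; the IDs and expression bytes below are placeholders, not real values.

from hive_metastore.ttypes import GetFileMetadataByExprRequest

req = GetFileMetadataByExprRequest(
    fileIds=[101, 102],   # metastore file IDs to look up (placeholder values)
    expr=b'\x00',         # serialized filter expression (placeholder bytes)
    doGetFooters=True,    # also return the cached footers
)
# Passing type=FileMetadataExprType.ORC_SARG here would now raise a TypeError.
print(req)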

http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
index 08b9b06..cfabbb8 100644
--- a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
+++ b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
@@ -101,12 +101,6 @@ module ResourceType
   VALID_VALUES = Set.new([JAR, FILE, ARCHIVE]).freeze
 end
 
-module FileMetadataExprType
-  ORC_SARG = 1
-  VALUE_MAP = {1 => "ORC_SARG"}
-  VALID_VALUES = Set.new([ORC_SARG]).freeze
-end
-
 class Version
   include ::Thrift::Struct, ::Thrift::Struct_Union
   VERSION = 1
@@ -2280,13 +2274,11 @@ class GetFileMetadataByExprRequest
   FILEIDS = 1
   EXPR = 2
   DOGETFOOTERS = 3
-  TYPE = 4
 
   FIELDS = {
     FILEIDS => {:type => ::Thrift::Types::LIST, :name => 'fileIds', :element => {:type => ::Thrift::Types::I64}},
     EXPR => {:type => ::Thrift::Types::STRING, :name => 'expr', :binary => true},
-    DOGETFOOTERS => {:type => ::Thrift::Types::BOOL, :name => 'doGetFooters', :optional => true},
-    TYPE => {:type => ::Thrift::Types::I32, :name => 'type', :optional => true, :enum_class => ::FileMetadataExprType}
+    DOGETFOOTERS => {:type => ::Thrift::Types::BOOL, :name => 'doGetFooters', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -2294,9 +2286,6 @@ class GetFileMetadataByExprRequest
   def validate
     raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field fileIds is unset!') unless @fileIds
     raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field expr is unset!') unless @expr
-    unless @type.nil? || ::FileMetadataExprType::VALID_VALUES.include?(@type)
-      raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Invalid value of field type!')
-    end
   end
 
   ::Thrift::Struct.generate_accessors self

http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java
deleted file mode 100644
index 7c3525a..0000000
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.metastore;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.List;
-
-public interface FileMetadataHandler {
-
-  void getFileMetadataByExpr(List<Long> fileIds, byte[] expr,
-      ByteBuffer[] metadatas, ByteBuffer[] results, boolean[] eliminated) throws IOException;
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 8ed4310..40e6e62 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -5617,16 +5617,13 @@ public class HiveMetaStore extends ThriftHiveMetastore {
         return result;
       }
       result.setIsSupported(true);
-
       List<Long> fileIds = req.getFileIds();
-      boolean needMetadata = !req.isSetDoGetFooters() || req.isDoGetFooters();
-      FileMetadataExprType type = req.isSetType() ? req.getType() : FileMetadataExprType.ORC_SARG;
-
-      ByteBuffer[] metadatas = needMetadata ? new ByteBuffer[fileIds.size()] : null;
-      ByteBuffer[] ppdResults = new ByteBuffer[fileIds.size()];
+      byte[] expr = req.getExpr();
+      boolean needMetadata = req.isDoGetFooters();
+      ByteBuffer[] metadatas = new ByteBuffer[fileIds.size()];
+      ByteBuffer[] stripeBitsets = new ByteBuffer[fileIds.size()];
       boolean[] eliminated = new boolean[fileIds.size()];
-
-      getMS().getFileMetadataByExpr(fileIds, type, req.getExpr(), metadatas, ppdResults, eliminated);
+      getMS().getFileMetadataByExpr(fileIds, expr, metadatas, stripeBitsets, eliminated);
       for (int i = 0; i < metadatas.length; ++i) {
         long fileId = fileIds.get(i);
         ByteBuffer metadata = metadatas[i];
@@ -5634,7 +5631,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
         metadata = (eliminated[i] || !needMetadata) ? null
             : handleReadOnlyBufferForThrift(metadata);
         MetadataPpdResult mpr = new MetadataPpdResult();
-        ByteBuffer bitset = eliminated[i] ? null : handleReadOnlyBufferForThrift(ppdResults[i]);
+        ByteBuffer bitset = eliminated[i] ? null : handleReadOnlyBufferForThrift(stripeBitsets[i]);
         mpr.setMetadata(metadata);
         mpr.setIncludeBitset(bitset);
         result.putToMetadata(fileId, mpr);

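On the way back, the handler above fills the result struct (GetFileMetadataByExprResult in the generated API) with isSupported plus one MetadataPpdResult per file ID; per the hunk, metadata is null when the file was eliminated or footers were not requested, and includeBitset is null when the file was eliminated. A minimal client-side sketch of reading that result follows; the getter names are assumed from the usual Thrift bean conventions and are not code from this patch.

import java.util.Map;
import org.apache.hadoop.hive.metastore.api.GetFileMetadataByExprResult;
import org.apache.hadoop.hive.metastore.api.MetadataPpdResult;

public class ReadFileMetadataResult {
  // Illustrative only: interpret the per-file PPD results returned by the metastore.
  public static void dump(GetFileMetadataByExprResult result) {
    if (!result.isIsSupported() || result.getMetadata() == null) {
      return; // this metastore does not support file-metadata PPD, or nothing came back
    }
    for (Map.Entry<Long, MetadataPpdResult> e : result.getMetadata().entrySet()) {
      MetadataPpdResult ppd = e.getValue();
      if (ppd.getIncludeBitset() == null) {
        System.out.println("file " + e.getKey() + " eliminated entirely");
      } else {
        // getMetadata() is the cached footer (null unless doGetFooters was requested);
        // getIncludeBitset() is the format-specific include bitset for the file.
        System.out.println("file " + e.getKey() + " kept");
      }
    }
  }
}
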
http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 0f98963..f0c1893 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -79,7 +79,6 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.FunctionType;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
@@ -7667,7 +7666,7 @@ public class ObjectStore implements RawStore, Configurable {
   }
 
   @Override
-  public void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr,
+  public void getFileMetadataByExpr(List<Long> fileIds, byte[] expr,
       ByteBuffer[] metadatas, ByteBuffer[] stripeBitsets, boolean[] eliminated) {
     throw new UnsupportedOperationException();
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
index 4aa17a5..45428ed 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -618,7 +617,6 @@ public interface RawStore extends Configurable {
    * produce additional information based on file metadata and also filter the file list.
    * @param fileIds List of file IDs from the filesystem.
    * @param expr Format-specific serialized expression applicable to the files' metadatas.
-   * @param type Expression type; used to determine the class that handles the metadata.
    * @param metadatas Output parameter; fileIds-sized array to receive the metadatas
    *                  for corresponding files, if any.
    * @param exprResults Output parameter; fileIds-sized array to receive the format-specific
@@ -626,7 +624,7 @@ public interface RawStore extends Configurable {
    * @param eliminated Output parameter; fileIds-sized array to receive the indication of whether
    *                   the corresponding files are entirely eliminated by the expression.
    */
-  void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr,
-      ByteBuffer[] metadatas, ByteBuffer[] exprResults, boolean[] eliminated)
-          throws MetaException;
+  void getFileMetadataByExpr(
+      List<Long> fileIds, byte[] expr, ByteBuffer[] metadatas,
+      ByteBuffer[] exprResults, boolean[] eliminated) throws MetaException;
 }

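The javadoc above spells out the output-parameter contract of the simplified RawStore method. Below is a minimal sketch of a caller honoring that contract, using only the signature shown in this diff; the wrapper class and variable names are illustrative, not part of the patch.

import java.nio.ByteBuffer;
import java.util.List;
import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.MetaException;

public class RawStorePpdSketch {
  // Illustrative only: allocate the fileIds-sized output arrays and invoke the store.
  static void filterByExpr(RawStore store, List<Long> fileIds, byte[] expr) throws MetaException {
    ByteBuffer[] metadatas = new ByteBuffer[fileIds.size()];    // per-file metadata, if any is cached
    ByteBuffer[] exprResults = new ByteBuffer[fileIds.size()];  // format-specific PPD results
    boolean[] eliminated = new boolean[fileIds.size()];         // true if the expression prunes the file
    store.getFileMetadataByExpr(fileIds, expr, metadatas, exprResults, eliminated);
    for (int i = 0; i < fileIds.size(); ++i) {
      if (eliminated[i]) {
        continue;  // file i is entirely eliminated by the expression
      }
      // metadatas[i] / exprResults[i] may still be null if nothing was cached for file i.
    }
  }
}
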
http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/java/org/apache/hadoop/hive/metastore/filemeta/OrcFileMetadataHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/filemeta/OrcFileMetadataHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/filemeta/OrcFileMetadataHandler.java
deleted file mode 100644
index 14189da..0000000
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/filemeta/OrcFileMetadataHandler.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.metastore.filemeta;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.List;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.FileMetadataHandler;
-import org.apache.hadoop.hive.metastore.PartitionExpressionProxy;
-import org.apache.hadoop.hive.metastore.hbase.HBaseReadWrite;
-import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
-
-public class OrcFileMetadataHandler implements FileMetadataHandler {
-  private final Configuration conf;
-  private final PartitionExpressionProxy expressionProxy;
-  private final HBaseReadWrite hbase;
-
-  public OrcFileMetadataHandler(Configuration conf,
-      PartitionExpressionProxy expressionProxy, HBaseReadWrite hbase) {
-    this.conf = conf;
-    this.expressionProxy = expressionProxy;
-    this.hbase = hbase;
-  }
-
-  @Override
-  public void getFileMetadataByExpr(List<Long> fileIds, byte[] expr,
-      ByteBuffer[] metadatas, ByteBuffer[] results, boolean[] eliminated) throws IOException {
-    SearchArgument sarg = expressionProxy.createSarg(expr);
-    // For now, don't push anything into HBase, nor store anything special in HBase
-    if (metadatas == null) {
-      // null means don't return metadata; we'd need the array anyway for now.
-      metadatas = new ByteBuffer[results.length];
-    }
-    hbase.getFileMetadata(fileIds, metadatas);
-    for (int i = 0; i < metadatas.length;  ++i) {
-      if (metadatas[i] == null) continue;
-      ByteBuffer result = expressionProxy.applySargToFileMetadata(sarg, metadatas[i]);
-      eliminated[i] = (result == null);
-      if (!eliminated[i]) {
-        results[i] = result;
-      }
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
index ffd3ee5..781f562 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
@@ -1747,7 +1747,7 @@ public class HBaseReadWrite {
    * @param fileIds file ID list.
    * @return Serialized file metadata.
    */
-  public void getFileMetadata(List<Long> fileIds, ByteBuffer[] result) throws IOException {
+  void getFileMetadata(List<Long> fileIds, ByteBuffer[] result) throws IOException {
     byte[][] keys = new byte[fileIds.size()][];
     for (int i = 0; i < fileIds.size(); ++i) {
       keys[i] = HBaseUtils.makeLongKey(fileIds.get(i));

http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
index 67a02d9..09e57e5 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
@@ -27,7 +27,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.FileMetadataHandler;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.PartFilterExprUtil;
 import org.apache.hadoop.hive.metastore.PartitionExpressionProxy;
@@ -39,7 +38,6 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
@@ -66,7 +64,6 @@ import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
-import org.apache.hadoop.hive.metastore.filemeta.OrcFileMetadataHandler;
 import org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.PlanResult;
 import org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan;
 import org.apache.hadoop.hive.metastore.parser.ExpressionTree;
@@ -99,7 +96,6 @@ public class HBaseStore implements RawStore {
   private Configuration conf;
   private int txnNestLevel = 0;
   private PartitionExpressionProxy expressionProxy = null;
-  private Map<FileMetadataExprType, FileMetadataHandler> fmHandlers = new HashMap<>();
 
   public HBaseStore() {
   }
@@ -2245,24 +2241,10 @@ public class HBaseStore implements RawStore {
     // initialize expressionProxy. Also re-initialize it if
     // setConf is being called with new configuration object (though that
     // is not expected to happen, doing it just for safety)
-    // TODO: why not re-intialize HBaseReadWrite?
-    if (expressionProxy == null || conf != configuration) {
+    if(expressionProxy == null || conf != configuration) {
       expressionProxy = PartFilterExprUtil.createExpressionProxy(configuration);
     }
     conf = configuration;
-    createFileMetadataHandlers();
-  }
-
-  private void createFileMetadataHandlers() {
-    for (FileMetadataExprType v : FileMetadataExprType.values()) {
-      switch (v) {
-      case ORC_SARG:
-        fmHandlers.put(v, new OrcFileMetadataHandler(conf, expressionProxy, getHBase()));
-        break;
-      default:
-        throw new AssertionError("Unsupported type " + v);
-      }
-    }
   }
 
   @Override
@@ -2398,16 +2380,25 @@ public class HBaseStore implements RawStore {
   }
 
   @Override
-  public void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr,
-      ByteBuffer[] metadatas, ByteBuffer[] results, boolean[] eliminated) throws MetaException {
-    FileMetadataHandler fmh = fmHandlers.get(type);
+  public void getFileMetadataByExpr(List<Long> fileIds, byte[] expr, ByteBuffer[] metadatas,
+      ByteBuffer[] results, boolean[] eliminated) throws MetaException {
+    SearchArgument sarg = expressionProxy.createSarg(expr);
     boolean commit = true;
     try {
-      fmh.getFileMetadataByExpr(fileIds, expr, metadatas, results, eliminated);
+      // For now, don't push anything into HBase, nor store anything special in HBase
+      getHBase().getFileMetadata(fileIds, metadatas);
+      for (int i = 0; i < metadatas.length;  ++i) {
+        if (metadatas[i] == null) continue;
+        ByteBuffer result = expressionProxy.applySargToFileMetadata(sarg, metadatas[i]);
+        eliminated[i] = (result == null);
+        if (!eliminated[i]) {
+          results[i] = result;
+        }
+      }
     } catch (IOException e) {
-      LOG.error("Unable to get file metadata by expr", e);
       commit = false;
-      throw new MetaException("Error reading file metadata by expr" + e.getMessage());
+      LOG.error("Unable to get file metadata", e);
+      throw new MetaException("Error reading file metadata " + e.getMessage());
     } finally {
       commitOrRoleBack(commit);
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
index a100e9f..d11c0d5 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
@@ -30,7 +30,6 @@ import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -778,7 +777,7 @@ public class DummyRawStoreControlledCommit implements RawStore, Configurable {
 
 
   @Override
-  public void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr,
+  public void getFileMetadataByExpr(List<Long> fileIds, byte[] expr,
       ByteBuffer[] metadatas, ByteBuffer[] stripeBitsets, boolean[] eliminated) {
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
index f6100e6..2de049a 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -794,7 +793,7 @@ public class DummyRawStoreForJdoConnection implements RawStore {
   }
 
   @Override
-  public void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr,
+  public void getFileMetadataByExpr(List<Long> fileIds, byte[] expr,
       ByteBuffer[] metadatas, ByteBuffer[] stripeBitsets, boolean[] eliminated) {
   }
 }


[03/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/JobTrackerState.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/JobTrackerState.java b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/JobTrackerState.java
index ab48bf7..46c71ee 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/JobTrackerState.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/JobTrackerState.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
index 6235efd..934a8a5 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ThriftHive {
 
   public interface Iface extends org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface {
@@ -2951,7 +2951,7 @@ public class ThriftHive {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case NUM_ROWS:
-        return Integer.valueOf(getNumRows());
+        return getNumRows();
 
       }
       throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java
index c6ee2c5..0fb33c7 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TArrayTypeEntry implements org.apache.thrift.TBase<TArrayTypeEntry, TArrayTypeEntry._Fields>, java.io.Serializable, Cloneable, Comparable<TArrayTypeEntry> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TArrayTypeEntry");
 
@@ -185,7 +185,7 @@ public class TArrayTypeEntry implements org.apache.thrift.TBase<TArrayTypeEntry,
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case OBJECT_TYPE_PTR:
-      return Integer.valueOf(getObjectTypePtr());
+      return getObjectTypePtr();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java
index c58a260..788bc89 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TBinaryColumn implements org.apache.thrift.TBase<TBinaryColumn, TBinaryColumn._Fields>, java.io.Serializable, Cloneable, Comparable<TBinaryColumn> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TBinaryColumn");
 
@@ -380,7 +380,7 @@ public class TBinaryColumn implements org.apache.thrift.TBase<TBinaryColumn, TBi
     if (this.values == null) {
       sb.append("null");
     } else {
-      sb.append(this.values);
+      org.apache.thrift.TBaseHelper.toString(this.values, sb);
     }
     first = false;
     if (!first) sb.append(", ");

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolColumn.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolColumn.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolColumn.java
index 1d2d345..c0c5cbe 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolColumn.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolColumn.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TBoolColumn implements org.apache.thrift.TBase<TBoolColumn, TBoolColumn._Fields>, java.io.Serializable, Cloneable, Comparable<TBoolColumn> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TBoolColumn");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolValue.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolValue.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolValue.java
index c4b3087..3670a1e 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolValue.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolValue.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TBoolValue implements org.apache.thrift.TBase<TBoolValue, TBoolValue._Fields>, java.io.Serializable, Cloneable, Comparable<TBoolValue> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TBoolValue");
 
@@ -178,7 +178,7 @@ public class TBoolValue implements org.apache.thrift.TBase<TBoolValue, TBoolValu
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case VALUE:
-      return Boolean.valueOf(isValue());
+      return isValue();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteColumn.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteColumn.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteColumn.java
index 7f03951..85c881e 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteColumn.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteColumn.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TByteColumn implements org.apache.thrift.TBase<TByteColumn, TByteColumn._Fields>, java.io.Serializable, Cloneable, Comparable<TByteColumn> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TByteColumn");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteValue.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteValue.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteValue.java
index abb0381..82209f1 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteValue.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteValue.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TByteValue implements org.apache.thrift.TBase<TByteValue, TByteValue._Fields>, java.io.Serializable, Cloneable, Comparable<TByteValue> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TByteValue");
 
@@ -178,7 +178,7 @@ public class TByteValue implements org.apache.thrift.TBase<TByteValue, TByteValu
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case VALUE:
-      return Byte.valueOf(getValue());
+      return getValue();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIService.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIService.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIService.java
index 91a5be7..ded848f 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIService.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIService.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TCLIService {
 
   public interface Iface {

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIServiceConstants.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIServiceConstants.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIServiceConstants.java
index 046eb15..3f46013 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIServiceConstants.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIServiceConstants.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenReq.java
index 94a4b08..fde0c47 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TCancelDelegationTokenReq implements org.apache.thrift.TBase<TCancelDelegationTokenReq, TCancelDelegationTokenReq._Fields>, java.io.Serializable, Cloneable, Comparable<TCancelDelegationTokenReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCancelDelegationTokenReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenResp.java
index 10a52f5..65d0551 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TCancelDelegationTokenResp implements org.apache.thrift.TBase<TCancelDelegationTokenResp, TCancelDelegationTokenResp._Fields>, java.io.Serializable, Cloneable, Comparable<TCancelDelegationTokenResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCancelDelegationTokenResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationReq.java
index 1a0738e..f532d09 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TCancelOperationReq implements org.apache.thrift.TBase<TCancelOperationReq, TCancelOperationReq._Fields>, java.io.Serializable, Cloneable, Comparable<TCancelOperationReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCancelOperationReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationResp.java
index 76d1c0b..f7f8325 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TCancelOperationResp implements org.apache.thrift.TBase<TCancelOperationResp, TCancelOperationResp._Fields>, java.io.Serializable, Cloneable, Comparable<TCancelOperationResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCancelOperationResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationReq.java
index cb4bc66..c0c2c2c 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TCloseOperationReq implements org.apache.thrift.TBase<TCloseOperationReq, TCloseOperationReq._Fields>, java.io.Serializable, Cloneable, Comparable<TCloseOperationReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCloseOperationReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationResp.java
index 81334b4..83bcd03 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TCloseOperationResp implements org.apache.thrift.TBase<TCloseOperationResp, TCloseOperationResp._Fields>, java.io.Serializable, Cloneable, Comparable<TCloseOperationResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCloseOperationResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseSessionReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseSessionReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseSessionReq.java
index 208dda9..d9c989a 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseSessionReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseSessionReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TCloseSessionReq implements org.apache.thrift.TBase<TCloseSessionReq, TCloseSessionReq._Fields>, java.io.Serializable, Cloneable, Comparable<TCloseSessionReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCloseSessionReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseSessionResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseSessionResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseSessionResp.java
index 5e9f479..7c64d4d 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseSessionResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseSessionResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TCloseSessionResp implements org.apache.thrift.TBase<TCloseSessionResp, TCloseSessionResp._Fields>, java.io.Serializable, Cloneable, Comparable<TCloseSessionResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCloseSessionResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumn.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumn.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumn.java
index 1bda69d..082b9b4 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumn.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumn.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumnDesc.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumnDesc.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumnDesc.java
index cc792f5..b01fadb 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumnDesc.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumnDesc.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TColumnDesc implements org.apache.thrift.TBase<TColumnDesc, TColumnDesc._Fields>, java.io.Serializable, Cloneable, Comparable<TColumnDesc> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnDesc");
 
@@ -322,7 +322,7 @@ public class TColumnDesc implements org.apache.thrift.TBase<TColumnDesc, TColumn
       return getTypeDesc();
 
     case POSITION:
-      return Integer.valueOf(getPosition());
+      return getPosition();
 
     case COMMENT:
       return getComment();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumnValue.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumnValue.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumnValue.java
index 61aee4d..2c2c6bb 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumnValue.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TColumnValue.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TDoubleColumn.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TDoubleColumn.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TDoubleColumn.java
index 3306175..dffad80 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TDoubleColumn.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TDoubleColumn.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TDoubleColumn implements org.apache.thrift.TBase<TDoubleColumn, TDoubleColumn._Fields>, java.io.Serializable, Cloneable, Comparable<TDoubleColumn> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TDoubleColumn");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TDoubleValue.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TDoubleValue.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TDoubleValue.java
index b043717..6524f93 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TDoubleValue.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TDoubleValue.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TDoubleValue implements org.apache.thrift.TBase<TDoubleValue, TDoubleValue._Fields>, java.io.Serializable, Cloneable, Comparable<TDoubleValue> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TDoubleValue");
 
@@ -178,7 +178,7 @@ public class TDoubleValue implements org.apache.thrift.TBase<TDoubleValue, TDoub
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case VALUE:
-      return Double.valueOf(getValue());
+      return getValue();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TExecuteStatementReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TExecuteStatementReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TExecuteStatementReq.java
index 14847a7..30cf243 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TExecuteStatementReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TExecuteStatementReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TExecuteStatementReq implements org.apache.thrift.TBase<TExecuteStatementReq, TExecuteStatementReq._Fields>, java.io.Serializable, Cloneable, Comparable<TExecuteStatementReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TExecuteStatementReq");
 
@@ -338,7 +338,7 @@ public class TExecuteStatementReq implements org.apache.thrift.TBase<TExecuteSta
       return getConfOverlay();
 
     case RUN_ASYNC:
-      return Boolean.valueOf(isRunAsync());
+      return isRunAsync();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TExecuteStatementResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TExecuteStatementResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TExecuteStatementResp.java
index e764838..0b9aa0f 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TExecuteStatementResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TExecuteStatementResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TExecuteStatementResp implements org.apache.thrift.TBase<TExecuteStatementResp, TExecuteStatementResp._Fields>, java.io.Serializable, Cloneable, Comparable<TExecuteStatementResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TExecuteStatementResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchOrientation.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchOrientation.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchOrientation.java
index 64713e8..d3fd3f0 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchOrientation.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchOrientation.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsReq.java
index 2be02ce..23cfd54 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TFetchResultsReq implements org.apache.thrift.TBase<TFetchResultsReq, TFetchResultsReq._Fields>, java.io.Serializable, Cloneable, Comparable<TFetchResultsReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TFetchResultsReq");
 
@@ -338,10 +338,10 @@ public class TFetchResultsReq implements org.apache.thrift.TBase<TFetchResultsRe
       return getOrientation();
 
     case MAX_ROWS:
-      return Long.valueOf(getMaxRows());
+      return getMaxRows();
 
     case FETCH_TYPE:
-      return Short.valueOf(getFetchType());
+      return getFetchType();
 
     }
     throw new IllegalStateException();
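
For context on the recurring getFieldValue() change in these regenerated classes (Boolean.valueOf(...)/Long.valueOf(...)/Short.valueOf(...)/Integer.valueOf(...) replaced by the bare accessor call): a minimal standalone sketch, not Hive code, using a hypothetical bean with a primitive long accessor. Returning the primitive from an Object-typed method autoboxes it to the same wrapper the removed valueOf(...) call produced, so the Thrift 0.9.3 generator output is behavior-preserving here.

  // Illustrative only; names are hypothetical and do not appear in the Hive sources.
  public class AutoboxingSketch {
    private long maxRows = 100L;          // hypothetical field

    public long getMaxRows() {
      return maxRows;
    }

    public Object getFieldValueOld() {
      return Long.valueOf(getMaxRows());  // explicit boxing, as in the 0.9.2-generated code
    }

    public Object getFieldValueNew() {
      return getMaxRows();                // autoboxed to Long, as in the 0.9.3-generated code
    }

    public static void main(String[] args) {
      AutoboxingSketch s = new AutoboxingSketch();
      // Both forms yield equal Long values.
      System.out.println(s.getFieldValueOld().equals(s.getFieldValueNew())); // prints: true
    }
  }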

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsResp.java
index b177fbf..2d325fc 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TFetchResultsResp implements org.apache.thrift.TBase<TFetchResultsResp, TFetchResultsResp._Fields>, java.io.Serializable, Cloneable, Comparable<TFetchResultsResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TFetchResultsResp");
 
@@ -272,7 +272,7 @@ public class TFetchResultsResp implements org.apache.thrift.TBase<TFetchResultsR
       return getStatus();
 
     case HAS_MORE_ROWS:
-      return Boolean.valueOf(isHasMoreRows());
+      return isHasMoreRows();
 
     case RESULTS:
       return getResults();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetCatalogsReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetCatalogsReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetCatalogsReq.java
index 18d6ece..02e5cbc 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetCatalogsReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetCatalogsReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetCatalogsReq implements org.apache.thrift.TBase<TGetCatalogsReq, TGetCatalogsReq._Fields>, java.io.Serializable, Cloneable, Comparable<TGetCatalogsReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetCatalogsReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetCatalogsResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetCatalogsResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetCatalogsResp.java
index 85437d8..2372d3a 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetCatalogsResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetCatalogsResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetCatalogsResp implements org.apache.thrift.TBase<TGetCatalogsResp, TGetCatalogsResp._Fields>, java.io.Serializable, Cloneable, Comparable<TGetCatalogsResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetCatalogsResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetColumnsReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetColumnsReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetColumnsReq.java
index 60e6547..60872f4 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetColumnsReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetColumnsReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetColumnsReq implements org.apache.thrift.TBase<TGetColumnsReq, TGetColumnsReq._Fields>, java.io.Serializable, Cloneable, Comparable<TGetColumnsReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetColumnsReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetColumnsResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetColumnsResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetColumnsResp.java
index 1177458..7992a24 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetColumnsResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetColumnsResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetColumnsResp implements org.apache.thrift.TBase<TGetColumnsResp, TGetColumnsResp._Fields>, java.io.Serializable, Cloneable, Comparable<TGetColumnsResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetColumnsResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetDelegationTokenReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetDelegationTokenReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetDelegationTokenReq.java
index 0a8ba93..363bf4b 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetDelegationTokenReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetDelegationTokenReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetDelegationTokenReq implements org.apache.thrift.TBase<TGetDelegationTokenReq, TGetDelegationTokenReq._Fields>, java.io.Serializable, Cloneable, Comparable<TGetDelegationTokenReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetDelegationTokenReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetDelegationTokenResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetDelegationTokenResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetDelegationTokenResp.java
index 0332a35..5234dcd 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetDelegationTokenResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetDelegationTokenResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetDelegationTokenResp implements org.apache.thrift.TBase<TGetDelegationTokenResp, TGetDelegationTokenResp._Fields>, java.io.Serializable, Cloneable, Comparable<TGetDelegationTokenResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetDelegationTokenResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetFunctionsReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetFunctionsReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetFunctionsReq.java
index 3362d87..d366529 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetFunctionsReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetFunctionsReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetFunctionsReq implements org.apache.thrift.TBase<TGetFunctionsReq, TGetFunctionsReq._Fields>, java.io.Serializable, Cloneable, Comparable<TGetFunctionsReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetFunctionsReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetFunctionsResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetFunctionsResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetFunctionsResp.java
index cbe42cb..6ef2960 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetFunctionsResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetFunctionsResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetFunctionsResp implements org.apache.thrift.TBase<TGetFunctionsResp, TGetFunctionsResp._Fields>, java.io.Serializable, Cloneable, Comparable<TGetFunctionsResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetFunctionsResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoReq.java
index 296c74a..80b80c7 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetInfoReq implements org.apache.thrift.TBase<TGetInfoReq, TGetInfoReq._Fields>, java.io.Serializable, Cloneable, Comparable<TGetInfoReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetInfoReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoResp.java
index b40af4c..f6a07fc 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetInfoResp implements org.apache.thrift.TBase<TGetInfoResp, TGetInfoResp._Fields>, java.io.Serializable, Cloneable, Comparable<TGetInfoResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetInfoResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoType.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoType.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoType.java
index f4dc73d..72385a7 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoType.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoValue.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoValue.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoValue.java
index af1224d..361e39f 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoValue.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetInfoValue.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetOperationStatusReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetOperationStatusReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetOperationStatusReq.java
index 60a23d1..183ff6d 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetOperationStatusReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetOperationStatusReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetOperationStatusReq implements org.apache.thrift.TBase<TGetOperationStatusReq, TGetOperationStatusReq._Fields>, java.io.Serializable, Cloneable, Comparable<TGetOperationStatusReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetOperationStatusReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetOperationStatusResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetOperationStatusResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetOperationStatusResp.java
index 574e903..99c2409 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetOperationStatusResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetOperationStatusResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetOperationStatusResp implements org.apache.thrift.TBase<TGetOperationStatusResp, TGetOperationStatusResp._Fields>, java.io.Serializable, Cloneable, Comparable<TGetOperationStatusResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetOperationStatusResp");
 
@@ -374,7 +374,7 @@ public class TGetOperationStatusResp implements org.apache.thrift.TBase<TGetOper
       return getSqlState();
 
     case ERROR_CODE:
-      return Integer.valueOf(getErrorCode());
+      return getErrorCode();
 
     case ERROR_MESSAGE:
       return getErrorMessage();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetResultSetMetadataReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetResultSetMetadataReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetResultSetMetadataReq.java
index 2ce442e..626b59f 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetResultSetMetadataReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetResultSetMetadataReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetResultSetMetadataReq implements org.apache.thrift.TBase<TGetResultSetMetadataReq, TGetResultSetMetadataReq._Fields>, java.io.Serializable, Cloneable, Comparable<TGetResultSetMetadataReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetResultSetMetadataReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetResultSetMetadataResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetResultSetMetadataResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetResultSetMetadataResp.java
index 0f32b30..8e2d462 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetResultSetMetadataResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetResultSetMetadataResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetResultSetMetadataResp implements org.apache.thrift.TBase<TGetResultSetMetadataResp, TGetResultSetMetadataResp._Fields>, java.io.Serializable, Cloneable, Comparable<TGetResultSetMetadataResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetResultSetMetadataResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetSchemasReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetSchemasReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetSchemasReq.java
index 2c272c0..11988e2 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetSchemasReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetSchemasReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetSchemasReq implements org.apache.thrift.TBase<TGetSchemasReq, TGetSchemasReq._Fields>, java.io.Serializable, Cloneable, Comparable<TGetSchemasReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetSchemasReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetSchemasResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetSchemasResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetSchemasResp.java
index 7d177ac..27060b7 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetSchemasResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetSchemasResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetSchemasResp implements org.apache.thrift.TBase<TGetSchemasResp, TGetSchemasResp._Fields>, java.io.Serializable, Cloneable, Comparable<TGetSchemasResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetSchemasResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTableTypesReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTableTypesReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTableTypesReq.java
index 3498fa1..8c9106e 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTableTypesReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTableTypesReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetTableTypesReq implements org.apache.thrift.TBase<TGetTableTypesReq, TGetTableTypesReq._Fields>, java.io.Serializable, Cloneable, Comparable<TGetTableTypesReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetTableTypesReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTableTypesResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTableTypesResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTableTypesResp.java
index 6c7ad75..440e080 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTableTypesResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTableTypesResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetTableTypesResp implements org.apache.thrift.TBase<TGetTableTypesResp, TGetTableTypesResp._Fields>, java.io.Serializable, Cloneable, Comparable<TGetTableTypesResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetTableTypesResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTablesReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTablesReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTablesReq.java
index 0a2b268..4f22732 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTablesReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTablesReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetTablesReq implements org.apache.thrift.TBase<TGetTablesReq, TGetTablesReq._Fields>, java.io.Serializable, Cloneable, Comparable<TGetTablesReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetTablesReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTablesResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTablesResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTablesResp.java
index b4f6b15..4878228 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTablesResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTablesResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetTablesResp implements org.apache.thrift.TBase<TGetTablesResp, TGetTablesResp._Fields>, java.io.Serializable, Cloneable, Comparable<TGetTablesResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetTablesResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTypeInfoReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTypeInfoReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTypeInfoReq.java
index bddf074..3fa8edc 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTypeInfoReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTypeInfoReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetTypeInfoReq implements org.apache.thrift.TBase<TGetTypeInfoReq, TGetTypeInfoReq._Fields>, java.io.Serializable, Cloneable, Comparable<TGetTypeInfoReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetTypeInfoReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTypeInfoResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTypeInfoResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTypeInfoResp.java
index 7390669..c0dd1f8 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTypeInfoResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTypeInfoResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TGetTypeInfoResp implements org.apache.thrift.TBase<TGetTypeInfoResp, TGetTypeInfoResp._Fields>, java.io.Serializable, Cloneable, Comparable<TGetTypeInfoResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGetTypeInfoResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/THandleIdentifier.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/THandleIdentifier.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/THandleIdentifier.java
index dd1dba0..d959456 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/THandleIdentifier.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/THandleIdentifier.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class THandleIdentifier implements org.apache.thrift.TBase<THandleIdentifier, THandleIdentifier._Fields>, java.io.Serializable, Cloneable, Comparable<THandleIdentifier> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("THandleIdentifier");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI16Column.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI16Column.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI16Column.java
index 48e0261..1a7f13a 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI16Column.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI16Column.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TI16Column implements org.apache.thrift.TBase<TI16Column, TI16Column._Fields>, java.io.Serializable, Cloneable, Comparable<TI16Column> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TI16Column");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI16Value.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI16Value.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI16Value.java
index ccf1b56..ece0199 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI16Value.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI16Value.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TI16Value implements org.apache.thrift.TBase<TI16Value, TI16Value._Fields>, java.io.Serializable, Cloneable, Comparable<TI16Value> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TI16Value");
 
@@ -178,7 +178,7 @@ public class TI16Value implements org.apache.thrift.TBase<TI16Value, TI16Value._
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case VALUE:
-      return Short.valueOf(getValue());
+      return getValue();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI32Column.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI32Column.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI32Column.java
index 4ac7977..78fb651 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI32Column.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI32Column.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TI32Column implements org.apache.thrift.TBase<TI32Column, TI32Column._Fields>, java.io.Serializable, Cloneable, Comparable<TI32Column> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TI32Column");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI32Value.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI32Value.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI32Value.java
index 69c2d5b..cf63261 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI32Value.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI32Value.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TI32Value implements org.apache.thrift.TBase<TI32Value, TI32Value._Fields>, java.io.Serializable, Cloneable, Comparable<TI32Value> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TI32Value");
 
@@ -178,7 +178,7 @@ public class TI32Value implements org.apache.thrift.TBase<TI32Value, TI32Value._
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case VALUE:
-      return Integer.valueOf(getValue());
+      return getValue();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI64Column.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI64Column.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI64Column.java
index 0bb14d1..0f649fb 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI64Column.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI64Column.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TI64Column implements org.apache.thrift.TBase<TI64Column, TI64Column._Fields>, java.io.Serializable, Cloneable, Comparable<TI64Column> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TI64Column");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI64Value.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI64Value.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI64Value.java
index 30436d6..75ada87 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI64Value.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI64Value.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TI64Value implements org.apache.thrift.TBase<TI64Value, TI64Value._Fields>, java.io.Serializable, Cloneable, Comparable<TI64Value> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TI64Value");
 
@@ -178,7 +178,7 @@ public class TI64Value implements org.apache.thrift.TBase<TI64Value, TI64Value._
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case VALUE:
-      return Long.valueOf(getValue());
+      return getValue();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TMapTypeEntry.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TMapTypeEntry.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TMapTypeEntry.java
index c764a41..9777683 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TMapTypeEntry.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TMapTypeEntry.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TMapTypeEntry implements org.apache.thrift.TBase<TMapTypeEntry, TMapTypeEntry._Fields>, java.io.Serializable, Cloneable, Comparable<TMapTypeEntry> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TMapTypeEntry");
 
@@ -229,10 +229,10 @@ public class TMapTypeEntry implements org.apache.thrift.TBase<TMapTypeEntry, TMa
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case KEY_TYPE_PTR:
-      return Integer.valueOf(getKeyTypePtr());
+      return getKeyTypePtr();
 
     case VALUE_TYPE_PTR:
-      return Integer.valueOf(getValueTypePtr());
+      return getValueTypePtr();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionReq.java
index b2a9d74..a43c1d0 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TOpenSessionReq implements org.apache.thrift.TBase<TOpenSessionReq, TOpenSessionReq._Fields>, java.io.Serializable, Cloneable, Comparable<TOpenSessionReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TOpenSessionReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionResp.java
index f3e3ed1..3199ec3 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TOpenSessionResp implements org.apache.thrift.TBase<TOpenSessionResp, TOpenSessionResp._Fields>, java.io.Serializable, Cloneable, Comparable<TOpenSessionResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TOpenSessionResp");
 


[48/55] [abbrv] hive git commit: HIVE-11489 : Jenkins PreCommit-HIVE-SPARK-Build fails with TestCliDriver.initializationError (Szehon, reviewed by Sergio Pena)

Posted by xu...@apache.org.
HIVE-11489 : Jenkins PreCommit-HIVE-SPARK-Build fails with TestCliDriver.initializationError (Szehon, reviewed by Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6df90903
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6df90903
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6df90903

Branch: refs/heads/spark
Commit: 6df909035c474bd2634481f8f2f9ab626ec79b8a
Parents: d5e8544
Author: Szehon Ho <sz...@cloudera.com>
Authored: Tue Oct 27 14:03:24 2015 -0700
Committer: Szehon Ho <sz...@cloudera.com>
Committed: Tue Oct 27 14:03:24 2015 -0700

----------------------------------------------------------------------
 testutils/ptest2/src/main/resources/batch-exec.vm | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/6df90903/testutils/ptest2/src/main/resources/batch-exec.vm
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/main/resources/batch-exec.vm b/testutils/ptest2/src/main/resources/batch-exec.vm
index da3e0ac..c155851 100644
--- a/testutils/ptest2/src/main/resources/batch-exec.vm
+++ b/testutils/ptest2/src/main/resources/batch-exec.vm
@@ -62,6 +62,8 @@ then
     testModule=./
   fi
   pushd $testModule
+  #clean to force regeneration of class files (maven sometimes skips generation)
+  mvn clean -Dmaven.repo.local=$localDir/$instanceName/maven $mavenArgs
   timeout 2h mvn -B test -Dmaven.repo.local=$localDir/$instanceName/maven \
     $mavenArgs $mavenTestArgs $testArguments 1>$logDir/maven-test.txt 2>&1 </dev/null &
 #[[


[11/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py b/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py
index f86fda9..d1c07a5 100644
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
index 56ce527..8940dff 100644
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #
@@ -253,12 +253,12 @@ class Version:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.version = iprot.readString();
+          self.version = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.comments = iprot.readString();
+          self.comments = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -334,17 +334,17 @@ class FieldSchema:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.name = iprot.readString();
+          self.name = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.type = iprot.readString();
+          self.type = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.comment = iprot.readString();
+          self.comment = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -428,17 +428,17 @@ class Type:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.name = iprot.readString();
+          self.name = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.type1 = iprot.readString();
+          self.type1 = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.type2 = iprot.readString();
+          self.type2 = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
@@ -544,17 +544,17 @@ class HiveObjectRef:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.objectType = iprot.readI32();
+          self.objectType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.objectName = iprot.readString();
+          self.objectName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
@@ -562,14 +562,14 @@ class HiveObjectRef:
           self.partValues = []
           (_etype10, _size7) = iprot.readListBegin()
           for _i11 in xrange(_size7):
-            _elem12 = iprot.readString();
+            _elem12 = iprot.readString()
             self.partValues.append(_elem12)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.STRING:
-          self.columnName = iprot.readString();
+          self.columnName = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -669,27 +669,27 @@ class PrivilegeGrantInfo:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.privilege = iprot.readString();
+          self.privilege = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.createTime = iprot.readI32();
+          self.createTime = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.grantor = iprot.readString();
+          self.grantor = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I32:
-          self.grantorType = iprot.readI32();
+          self.grantorType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.BOOL:
-          self.grantOption = iprot.readBool();
+          self.grantOption = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -789,12 +789,12 @@ class HiveObjectPrivilege:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.principalName = iprot.readString();
+          self.principalName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I32:
-          self.principalType = iprot.readI32();
+          self.principalType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 4:
@@ -963,7 +963,7 @@ class PrincipalPrivilegeSet:
           self.userPrivileges = {}
           (_ktype22, _vtype23, _size21 ) = iprot.readMapBegin()
           for _i25 in xrange(_size21):
-            _key26 = iprot.readString();
+            _key26 = iprot.readString()
             _val27 = []
             (_etype31, _size28) = iprot.readListBegin()
             for _i32 in xrange(_size28):
@@ -980,7 +980,7 @@ class PrincipalPrivilegeSet:
           self.groupPrivileges = {}
           (_ktype35, _vtype36, _size34 ) = iprot.readMapBegin()
           for _i38 in xrange(_size34):
-            _key39 = iprot.readString();
+            _key39 = iprot.readString()
             _val40 = []
             (_etype44, _size41) = iprot.readListBegin()
             for _i45 in xrange(_size41):
@@ -997,7 +997,7 @@ class PrincipalPrivilegeSet:
           self.rolePrivileges = {}
           (_ktype48, _vtype49, _size47 ) = iprot.readMapBegin()
           for _i51 in xrange(_size47):
-            _key52 = iprot.readString();
+            _key52 = iprot.readString()
             _val53 = []
             (_etype57, _size54) = iprot.readListBegin()
             for _i58 in xrange(_size54):
@@ -1108,7 +1108,7 @@ class GrantRevokePrivilegeRequest:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.requestType = iprot.readI32();
+          self.requestType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
@@ -1119,7 +1119,7 @@ class GrantRevokePrivilegeRequest:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.BOOL:
-          self.revokeGrantOption = iprot.readBool();
+          self.revokeGrantOption = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -1194,7 +1194,7 @@ class GrantRevokePrivilegeResponse:
         break
       if fid == 1:
         if ftype == TType.BOOL:
-          self.success = iprot.readBool();
+          self.success = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -1265,17 +1265,17 @@ class Role:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.roleName = iprot.readString();
+          self.roleName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.createTime = iprot.readI32();
+          self.createTime = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.ownerName = iprot.readString();
+          self.ownerName = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -1368,37 +1368,37 @@ class RolePrincipalGrant:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.roleName = iprot.readString();
+          self.roleName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.principalName = iprot.readString();
+          self.principalName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I32:
-          self.principalType = iprot.readI32();
+          self.principalType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.BOOL:
-          self.grantOption = iprot.readBool();
+          self.grantOption = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.I32:
-          self.grantTime = iprot.readI32();
+          self.grantTime = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.STRING:
-          self.grantorName = iprot.readString();
+          self.grantorName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 7:
         if ftype == TType.I32:
-          self.grantorPrincipalType = iprot.readI32();
+          self.grantorPrincipalType = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -1496,12 +1496,12 @@ class GetRoleGrantsForPrincipalRequest:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.principal_name = iprot.readString();
+          self.principal_name = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.principal_type = iprot.readI32();
+          self.principal_type = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -1651,7 +1651,7 @@ class GetPrincipalsInRoleRequest:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.roleName = iprot.readString();
+          self.roleName = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -1812,37 +1812,37 @@ class GrantRevokeRoleRequest:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.requestType = iprot.readI32();
+          self.requestType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.roleName = iprot.readString();
+          self.roleName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.principalName = iprot.readString();
+          self.principalName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I32:
-          self.principalType = iprot.readI32();
+          self.principalType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.STRING:
-          self.grantor = iprot.readString();
+          self.grantor = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.I32:
-          self.grantorType = iprot.readI32();
+          self.grantorType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 7:
         if ftype == TType.BOOL:
-          self.grantOption = iprot.readBool();
+          self.grantOption = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -1937,7 +1937,7 @@ class GrantRevokeRoleResponse:
         break
       if fid == 1:
         if ftype == TType.BOOL:
-          self.success = iprot.readBool();
+          self.success = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -2020,17 +2020,17 @@ class Database:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.name = iprot.readString();
+          self.name = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.description = iprot.readString();
+          self.description = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.locationUri = iprot.readString();
+          self.locationUri = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
@@ -2038,8 +2038,8 @@ class Database:
           self.parameters = {}
           (_ktype84, _vtype85, _size83 ) = iprot.readMapBegin()
           for _i87 in xrange(_size83):
-            _key88 = iprot.readString();
-            _val89 = iprot.readString();
+            _key88 = iprot.readString()
+            _val89 = iprot.readString()
             self.parameters[_key88] = _val89
           iprot.readMapEnd()
         else:
@@ -2052,12 +2052,12 @@ class Database:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.STRING:
-          self.ownerName = iprot.readString();
+          self.ownerName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 7:
         if ftype == TType.I32:
-          self.ownerType = iprot.readI32();
+          self.ownerType = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -2162,12 +2162,12 @@ class SerDeInfo:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.name = iprot.readString();
+          self.name = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.serializationLib = iprot.readString();
+          self.serializationLib = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -2175,8 +2175,8 @@ class SerDeInfo:
           self.parameters = {}
           (_ktype93, _vtype94, _size92 ) = iprot.readMapBegin()
           for _i96 in xrange(_size92):
-            _key97 = iprot.readString();
-            _val98 = iprot.readString();
+            _key97 = iprot.readString()
+            _val98 = iprot.readString()
             self.parameters[_key97] = _val98
           iprot.readMapEnd()
         else:
@@ -2260,12 +2260,12 @@ class Order:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.col = iprot.readString();
+          self.col = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.order = iprot.readI32();
+          self.order = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -2344,7 +2344,7 @@ class SkewedInfo:
           self.skewedColNames = []
           (_etype104, _size101) = iprot.readListBegin()
           for _i105 in xrange(_size101):
-            _elem106 = iprot.readString();
+            _elem106 = iprot.readString()
             self.skewedColNames.append(_elem106)
           iprot.readListEnd()
         else:
@@ -2357,7 +2357,7 @@ class SkewedInfo:
             _elem112 = []
             (_etype116, _size113) = iprot.readListBegin()
             for _i117 in xrange(_size113):
-              _elem118 = iprot.readString();
+              _elem118 = iprot.readString()
               _elem112.append(_elem118)
             iprot.readListEnd()
             self.skewedColValues.append(_elem112)
@@ -2372,10 +2372,10 @@ class SkewedInfo:
             _key124 = []
             (_etype129, _size126) = iprot.readListBegin()
             for _i130 in xrange(_size126):
-              _elem131 = iprot.readString();
+              _elem131 = iprot.readString()
               _key124.append(_elem131)
             iprot.readListEnd()
-            _val125 = iprot.readString();
+            _val125 = iprot.readString()
             self.skewedColValueLocationMaps[_key124] = _val125
           iprot.readMapEnd()
         else:
@@ -2512,27 +2512,27 @@ class StorageDescriptor:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.location = iprot.readString();
+          self.location = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.inputFormat = iprot.readString();
+          self.inputFormat = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.outputFormat = iprot.readString();
+          self.outputFormat = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.BOOL:
-          self.compressed = iprot.readBool();
+          self.compressed = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.I32:
-          self.numBuckets = iprot.readI32();
+          self.numBuckets = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 7:
@@ -2546,7 +2546,7 @@ class StorageDescriptor:
           self.bucketCols = []
           (_etype147, _size144) = iprot.readListBegin()
           for _i148 in xrange(_size144):
-            _elem149 = iprot.readString();
+            _elem149 = iprot.readString()
             self.bucketCols.append(_elem149)
           iprot.readListEnd()
         else:
@@ -2567,8 +2567,8 @@ class StorageDescriptor:
           self.parameters = {}
           (_ktype157, _vtype158, _size156 ) = iprot.readMapBegin()
           for _i160 in xrange(_size156):
-            _key161 = iprot.readString();
-            _val162 = iprot.readString();
+            _key161 = iprot.readString()
+            _val162 = iprot.readString()
             self.parameters[_key161] = _val162
           iprot.readMapEnd()
         else:
@@ -2581,7 +2581,7 @@ class StorageDescriptor:
           iprot.skip(ftype)
       elif fid == 12:
         if ftype == TType.BOOL:
-          self.storedAsSubDirectories = iprot.readBool();
+          self.storedAsSubDirectories = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -2753,32 +2753,32 @@ class Table:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.tableName = iprot.readString();
+          self.tableName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.owner = iprot.readString();
+          self.owner = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I32:
-          self.createTime = iprot.readI32();
+          self.createTime = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.I32:
-          self.lastAccessTime = iprot.readI32();
+          self.lastAccessTime = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.I32:
-          self.retention = iprot.readI32();
+          self.retention = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 7:
@@ -2803,25 +2803,25 @@ class Table:
           self.parameters = {}
           (_ktype175, _vtype176, _size174 ) = iprot.readMapBegin()
           for _i178 in xrange(_size174):
-            _key179 = iprot.readString();
-            _val180 = iprot.readString();
+            _key179 = iprot.readString()
+            _val180 = iprot.readString()
             self.parameters[_key179] = _val180
           iprot.readMapEnd()
         else:
           iprot.skip(ftype)
       elif fid == 10:
         if ftype == TType.STRING:
-          self.viewOriginalText = iprot.readString();
+          self.viewOriginalText = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 11:
         if ftype == TType.STRING:
-          self.viewExpandedText = iprot.readString();
+          self.viewExpandedText = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 12:
         if ftype == TType.STRING:
-          self.tableType = iprot.readString();
+          self.tableType = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 13:
@@ -2832,7 +2832,7 @@ class Table:
           iprot.skip(ftype)
       elif fid == 14:
         if ftype == TType.BOOL:
-          self.temporary = iprot.readBool();
+          self.temporary = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -2993,29 +2993,29 @@ class Partition:
           self.values = []
           (_etype187, _size184) = iprot.readListBegin()
           for _i188 in xrange(_size184):
-            _elem189 = iprot.readString();
+            _elem189 = iprot.readString()
             self.values.append(_elem189)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.tableName = iprot.readString();
+          self.tableName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I32:
-          self.createTime = iprot.readI32();
+          self.createTime = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.I32:
-          self.lastAccessTime = iprot.readI32();
+          self.lastAccessTime = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 6:
@@ -3029,8 +3029,8 @@ class Partition:
           self.parameters = {}
           (_ktype191, _vtype192, _size190 ) = iprot.readMapBegin()
           for _i194 in xrange(_size190):
-            _key195 = iprot.readString();
-            _val196 = iprot.readString();
+            _key195 = iprot.readString()
+            _val196 = iprot.readString()
             self.parameters[_key195] = _val196
           iprot.readMapEnd()
         else:
@@ -3163,24 +3163,24 @@ class PartitionWithoutSD:
           self.values = []
           (_etype203, _size200) = iprot.readListBegin()
           for _i204 in xrange(_size200):
-            _elem205 = iprot.readString();
+            _elem205 = iprot.readString()
             self.values.append(_elem205)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.createTime = iprot.readI32();
+          self.createTime = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I32:
-          self.lastAccessTime = iprot.readI32();
+          self.lastAccessTime = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.relativePath = iprot.readString();
+          self.relativePath = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 5:
@@ -3188,8 +3188,8 @@ class PartitionWithoutSD:
           self.parameters = {}
           (_ktype207, _vtype208, _size206 ) = iprot.readMapBegin()
           for _i210 in xrange(_size206):
-            _key211 = iprot.readString();
-            _val212 = iprot.readString();
+            _key211 = iprot.readString()
+            _val212 = iprot.readString()
             self.parameters[_key211] = _val212
           iprot.readMapEnd()
         else:
@@ -3468,17 +3468,17 @@ class PartitionSpec:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.tableName = iprot.readString();
+          self.tableName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.rootPath = iprot.readString();
+          self.rootPath = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
@@ -3602,37 +3602,37 @@ class Index:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.indexName = iprot.readString();
+          self.indexName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.indexHandlerClass = iprot.readString();
+          self.indexHandlerClass = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.origTableName = iprot.readString();
+          self.origTableName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.I32:
-          self.createTime = iprot.readI32();
+          self.createTime = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.I32:
-          self.lastAccessTime = iprot.readI32();
+          self.lastAccessTime = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 7:
         if ftype == TType.STRING:
-          self.indexTableName = iprot.readString();
+          self.indexTableName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 8:
@@ -3646,15 +3646,15 @@ class Index:
           self.parameters = {}
           (_ktype231, _vtype232, _size230 ) = iprot.readMapBegin()
           for _i234 in xrange(_size230):
-            _key235 = iprot.readString();
-            _val236 = iprot.readString();
+            _key235 = iprot.readString()
+            _val236 = iprot.readString()
             self.parameters[_key235] = _val236
           iprot.readMapEnd()
         else:
           iprot.skip(ftype)
       elif fid == 10:
         if ftype == TType.BOOL:
-          self.deferredRebuild = iprot.readBool();
+          self.deferredRebuild = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -3774,17 +3774,17 @@ class BooleanColumnStatsData:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.numTrues = iprot.readI64();
+          self.numTrues = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I64:
-          self.numFalses = iprot.readI64();
+          self.numFalses = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I64:
-          self.numNulls = iprot.readI64();
+          self.numNulls = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -3874,22 +3874,22 @@ class DoubleColumnStatsData:
         break
       if fid == 1:
         if ftype == TType.DOUBLE:
-          self.lowValue = iprot.readDouble();
+          self.lowValue = iprot.readDouble()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.DOUBLE:
-          self.highValue = iprot.readDouble();
+          self.highValue = iprot.readDouble()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I64:
-          self.numNulls = iprot.readI64();
+          self.numNulls = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I64:
-          self.numDVs = iprot.readI64();
+          self.numDVs = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -3982,22 +3982,22 @@ class LongColumnStatsData:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.lowValue = iprot.readI64();
+          self.lowValue = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I64:
-          self.highValue = iprot.readI64();
+          self.highValue = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I64:
-          self.numNulls = iprot.readI64();
+          self.numNulls = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I64:
-          self.numDVs = iprot.readI64();
+          self.numDVs = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -4090,22 +4090,22 @@ class StringColumnStatsData:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.maxColLen = iprot.readI64();
+          self.maxColLen = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.DOUBLE:
-          self.avgColLen = iprot.readDouble();
+          self.avgColLen = iprot.readDouble()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I64:
-          self.numNulls = iprot.readI64();
+          self.numNulls = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I64:
-          self.numDVs = iprot.readI64();
+          self.numDVs = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -4199,17 +4199,17 @@ class BinaryColumnStatsData:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.maxColLen = iprot.readI64();
+          self.maxColLen = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.DOUBLE:
-          self.avgColLen = iprot.readDouble();
+          self.avgColLen = iprot.readDouble()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I64:
-          self.numNulls = iprot.readI64();
+          self.numNulls = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -4294,12 +4294,12 @@ class Decimal:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.unscaled = iprot.readString();
+          self.unscaled = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I16:
-          self.scale = iprot.readI16();
+          self.scale = iprot.readI16()
         else:
           iprot.skip(ftype)
       else:
@@ -4394,12 +4394,12 @@ class DecimalColumnStatsData:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I64:
-          self.numNulls = iprot.readI64();
+          self.numNulls = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I64:
-          self.numDVs = iprot.readI64();
+          self.numDVs = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -4483,7 +4483,7 @@ class Date:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.daysSinceEpoch = iprot.readI64();
+          self.daysSinceEpoch = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -4571,12 +4571,12 @@ class DateColumnStatsData:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I64:
-          self.numNulls = iprot.readI64();
+          self.numNulls = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I64:
-          self.numDVs = iprot.readI64();
+          self.numDVs = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -4816,12 +4816,12 @@ class ColumnStatisticsObj:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.colName = iprot.readString();
+          self.colName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.colType = iprot.readString();
+          self.colType = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -4920,27 +4920,27 @@ class ColumnStatisticsDesc:
         break
       if fid == 1:
         if ftype == TType.BOOL:
-          self.isTblLevel = iprot.readBool();
+          self.isTblLevel = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.tableName = iprot.readString();
+          self.tableName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.partName = iprot.readString();
+          self.partName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.I64:
-          self.lastAnalyzed = iprot.readI64();
+          self.lastAnalyzed = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -5137,7 +5137,7 @@ class AggrStats:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I64:
-          self.partsFound = iprot.readI64();
+          self.partsFound = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -5307,8 +5307,8 @@ class Schema:
           self.properties = {}
           (_ktype267, _vtype268, _size266 ) = iprot.readMapBegin()
           for _i270 in xrange(_size266):
-            _key271 = iprot.readString();
-            _val272 = iprot.readString();
+            _key271 = iprot.readString()
+            _val272 = iprot.readString()
             self.properties[_key271] = _val272
           iprot.readMapEnd()
         else:
@@ -5390,8 +5390,8 @@ class EnvironmentContext:
           self.properties = {}
           (_ktype277, _vtype278, _size276 ) = iprot.readMapBegin()
           for _i280 in xrange(_size276):
-            _key281 = iprot.readString();
-            _val282 = iprot.readString();
+            _key281 = iprot.readString()
+            _val282 = iprot.readString()
             self.properties[_key281] = _val282
           iprot.readMapEnd()
         else:
@@ -5476,7 +5476,7 @@ class PartitionsByExprResult:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.BOOL:
-          self.hasUnknownPartitions = iprot.readBool();
+          self.hasUnknownPartitions = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -5565,27 +5565,27 @@ class PartitionsByExprRequest:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.tblName = iprot.readString();
+          self.tblName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.expr = iprot.readString();
+          self.expr = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.defaultPartitionName = iprot.readString();
+          self.defaultPartitionName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.I16:
-          self.maxParts = iprot.readI16();
+          self.maxParts = iprot.readI16()
         else:
           iprot.skip(ftype)
       else:
@@ -5755,7 +5755,7 @@ class PartitionsStatsResult:
           self.partStats = {}
           (_ktype300, _vtype301, _size299 ) = iprot.readMapBegin()
           for _i303 in xrange(_size299):
-            _key304 = iprot.readString();
+            _key304 = iprot.readString()
             _val305 = []
             (_etype309, _size306) = iprot.readListBegin()
             for _i310 in xrange(_size306):
@@ -5844,12 +5844,12 @@ class TableStatsRequest:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.tblName = iprot.readString();
+          self.tblName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -5857,7 +5857,7 @@ class TableStatsRequest:
           self.colNames = []
           (_etype318, _size315) = iprot.readListBegin()
           for _i319 in xrange(_size315):
-            _elem320 = iprot.readString();
+            _elem320 = iprot.readString()
             self.colNames.append(_elem320)
           iprot.readListEnd()
         else:
@@ -5952,12 +5952,12 @@ class PartitionsStatsRequest:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.tblName = iprot.readString();
+          self.tblName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -5965,7 +5965,7 @@ class PartitionsStatsRequest:
           self.colNames = []
           (_etype325, _size322) = iprot.readListBegin()
           for _i326 in xrange(_size322):
-            _elem327 = iprot.readString();
+            _elem327 = iprot.readString()
             self.colNames.append(_elem327)
           iprot.readListEnd()
         else:
@@ -5975,7 +5975,7 @@ class PartitionsStatsRequest:
           self.partNames = []
           (_etype331, _size328) = iprot.readListBegin()
           for _i332 in xrange(_size328):
-            _elem333 = iprot.readString();
+            _elem333 = iprot.readString()
             self.partNames.append(_elem333)
           iprot.readListEnd()
         else:
@@ -6157,12 +6157,12 @@ class AddPartitionsRequest:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.tblName = iprot.readString();
+          self.tblName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -6178,12 +6178,12 @@ class AddPartitionsRequest:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.BOOL:
-          self.ifNotExists = iprot.readBool();
+          self.ifNotExists = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.BOOL:
-          self.needResult = iprot.readBool();
+          self.needResult = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -6356,12 +6356,12 @@ class DropPartitionsExpr:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.expr = iprot.readString();
+          self.expr = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.partArchiveLevel = iprot.readI32();
+          self.partArchiveLevel = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -6439,7 +6439,7 @@ class RequestPartsSpec:
           self.names = []
           (_etype360, _size357) = iprot.readListBegin()
           for _i361 in xrange(_size357):
-            _elem362 = iprot.readString();
+            _elem362 = iprot.readString()
             self.names.append(_elem362)
           iprot.readListEnd()
         else:
@@ -6549,12 +6549,12 @@ class DropPartitionsRequest:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.tblName = iprot.readString();
+          self.tblName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -6565,17 +6565,17 @@ class DropPartitionsRequest:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.BOOL:
-          self.deleteData = iprot.readBool();
+          self.deleteData = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.BOOL:
-          self.ifExists = iprot.readBool();
+          self.ifExists = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.BOOL:
-          self.ignoreProtection = iprot.readBool();
+          self.ignoreProtection = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 7:
@@ -6586,7 +6586,7 @@ class DropPartitionsRequest:
           iprot.skip(ftype)
       elif fid == 8:
         if ftype == TType.BOOL:
-          self.needResult = iprot.readBool();
+          self.needResult = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -6695,12 +6695,12 @@ class ResourceUri:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.resourceType = iprot.readI32();
+          self.resourceType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.uri = iprot.readString();
+          self.uri = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -6791,37 +6791,37 @@ class Function:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.functionName = iprot.readString();
+          self.functionName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.className = iprot.readString();
+          self.className = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.ownerName = iprot.readString();
+          self.ownerName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.I32:
-          self.ownerType = iprot.readI32();
+          self.ownerType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.I32:
-          self.createTime = iprot.readI32();
+          self.createTime = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 7:
         if ftype == TType.I32:
-          self.functionType = iprot.readI32();
+          self.functionType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 8:
@@ -6944,22 +6944,22 @@ class TxnInfo:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.id = iprot.readI64();
+          self.id = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.state = iprot.readI32();
+          self.state = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.user = iprot.readString();
+          self.user = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.hostname = iprot.readString();
+          self.hostname = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -7050,7 +7050,7 @@ class GetOpenTxnsInfoResponse:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.txn_high_water_mark = iprot.readI64();
+          self.txn_high_water_mark = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
@@ -7141,7 +7141,7 @@ class GetOpenTxnsResponse:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.txn_high_water_mark = iprot.readI64();
+          self.txn_high_water_mark = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
@@ -7149,7 +7149,7 @@ class GetOpenTxnsResponse:
           self.open_txns = set()
           (_etype388, _size385) = iprot.readSetBegin()
           for _i389 in xrange(_size385):
-            _elem390 = iprot.readI64();
+            _elem390 = iprot.readI64()
             self.open_txns.add(_elem390)
           iprot.readSetEnd()
         else:
@@ -7234,17 +7234,17 @@ class OpenTxnRequest:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.num_txns = iprot.readI32();
+          self.num_txns = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.user = iprot.readString();
+          self.user = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.hostname = iprot.readString();
+          self.hostname = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -7328,7 +7328,7 @@ class OpenTxnsResponse:
           self.txn_ids = []
           (_etype395, _size392) = iprot.readListBegin()
           for _i396 in xrange(_size392):
-            _elem397 = iprot.readI64();
+            _elem397 = iprot.readI64()
             self.txn_ids.append(_elem397)
           iprot.readListEnd()
         else:
@@ -7400,7 +7400,7 @@ class AbortTxnRequest:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.txnid = iprot.readI64();
+          self.txnid = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -7467,7 +7467,7 @@ class CommitTxnRequest:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.txnid = iprot.readI64();
+          self.txnid = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -7546,27 +7546,27 @@ class LockComponent:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.type = iprot.readI32();
+          self.type = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.level = iprot.readI32();
+          self.level = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.dbname = iprot.readString();
+          self.dbname = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.tablename = iprot.readString();
+          self.tablename = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.STRING:
-          self.partitionname = iprot.readString();
+          self.partitionname = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -7677,17 +7677,17 @@ class LockRequest:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I64:
-          self.txnid = iprot.readI64();
+          self.txnid = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.user = iprot.readString();
+          self.user = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.hostname = iprot.readString();
+          self.hostname = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -7779,12 +7779,12 @@ class LockResponse:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.lockid = iprot.readI64();
+          self.lockid = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.state = iprot.readI32();
+          self.state = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -7858,7 +7858,7 @@ class CheckLockRequest:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.lockid = iprot.readI64();
+          self.lockid = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -7925,7 +7925,7 @@ class UnlockRequest:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.lockid = iprot.readI64();
+          self.lockid = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -8068,57 +8068,57 @@ class ShowLocksResponseElement:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.lockid = iprot.readI64();
+          self.lockid = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.dbname = iprot.readString();
+          self.dbname = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.tablename = iprot.readString();
+          self.tablename = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.partname = iprot.readString();
+          self.partname = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.I32:
-          self.state = iprot.readI32();
+          self.state = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.I32:
-          self.type = iprot.readI32();
+          self.type = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 7:
         if ftype == TType.I64:
-          self.txnid = iprot.readI64();
+          self.txnid = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 8:
         if ftype == TType.I64:
-          self.lastheartbeat = iprot.readI64();
+          self.lastheartbeat = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 9:
         if ftype == TType.I64:
-          self.acquiredat = iprot.readI64();
+          self.acquiredat = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 10:
         if ftype == TType.STRING:
-          self.user = iprot.readString();
+          self.user = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 11:
         if ftype == TType.STRING:
-          self.hostname = iprot.readString();
+          self.hostname = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -8324,12 +8324,12 @@ class HeartbeatRequest:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.lockid = iprot.readI64();
+          self.lockid = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I64:
-          self.txnid = iprot.readI64();
+          self.txnid = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -8402,12 +8402,12 @@ class HeartbeatTxnRangeRequest:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.min = iprot.readI64();
+          self.min = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I64:
-          self.max = iprot.readI64();
+          self.max = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -8487,7 +8487,7 @@ class HeartbeatTxnRangeResponse:
           self.aborted = set()
           (_etype416, _size413) = iprot.readSetBegin()
           for _i417 in xrange(_size413):
-            _elem418 = iprot.readI64();
+            _elem418 = iprot.readI64()
             self.aborted.add(_elem418)
           iprot.readSetEnd()
         else:
@@ -8497,7 +8497,7 @@ class HeartbeatTxnRangeResponse:
           self.nosuch = set()
           (_etype422, _size419) = iprot.readSetBegin()
           for _i423 in xrange(_size419):
-            _elem424 = iprot.readI64();
+            _elem424 = iprot.readI64()
             self.nosuch.add(_elem424)
           iprot.readSetEnd()
         else:
@@ -8591,27 +8591,27 @@ class CompactionRequest:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.dbname = iprot.readString();
+          self.dbname = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.tablename = iprot.readString();
+          self.tablename = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.partitionname = iprot.readString();
+          self.partitionname = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I32:
-          self.type = iprot.readI32();
+          self.type = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.STRING:
-          self.runas = iprot.readString();
+          self.runas = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -8769,42 +8769,42 @@ class ShowCompactResponseElement:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.dbname = iprot.readString();
+          self.dbname = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.tablename = iprot.readString();
+          self.tablename = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.partitionname = iprot.readString();
+          self.partitionname = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I32:
-          self.type = iprot.readI32();
+          self.type = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.STRING:
-          self.state = iprot.readString();
+          self.state = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.STRING:
-          self.workerid = iprot.readString();
+          self.workerid = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 7:
         if ftype == TType.I64:
-          self.start = iprot.readI64();
+          self.start = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 8:
         if ftype == TType.STRING:
-          self.runAs = iprot.readString();
+          self.runAs = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -8997,17 +8997,17 @@ class AddDynamicPartitions:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.txnid = iprot.readI64();
+          self.txnid = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.dbname = iprot.readString();
+          self.dbname = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.tablename = iprot.readString();
+          self.tablename = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
@@ -9015,7 +9015,7 @@ class AddDynamicPartitions:
           self.partitionnames = []
           (_etype437, _size434) = iprot.readListBegin()
           for _i438 in xrange(_size434):
-            _elem439 = iprot.readString();
+            _elem439 = iprot.readString()
             self.partitionnames.append(_elem439)
           iprot.readListEnd()
         else:
@@ -9111,12 +9111,12 @@ class NotificationEventRequest:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.lastEvent = iprot.readI64();
+          self.lastEvent = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.maxEvents = iprot.readI32();
+          self.maxEvents = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -9203,32 +9203,32 @@ class NotificationEvent:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.eventId = iprot.readI64();
+          self.eventId = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.eventTime = iprot.readI32();
+          self.eventTime = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.eventType = iprot.readString();
+          self.eventType = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.STRING:
-          self.tableName = iprot.readString();
+          self.tableName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -9402,7 +9402,7 @@ class CurrentNotificationEventId:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.eventId = iprot.readI64();
+          self.eventId = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -9472,7 +9472,7 @@ class InsertEventRequestData:
           self.filesAdded = []
           (_etype451, _size448) = iprot.readListBegin()
           for _i452 in xrange(_size448):
-            _elem453 = iprot.readString();
+            _elem453 = iprot.readString()
             self.filesAdded.append(_elem453)
           iprot.readListEnd()
         else:
@@ -9622,7 +9622,7 @@ class FireEventRequest:
         break
       if fid == 1:
         if ftype == TType.BOOL:
-          self.successful = iprot.readBool();
+          self.successful = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 2:
@@ -9633,12 +9633,12 @@ class FireEventRequest:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.dbName = iprot.readString();
+          self.dbName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.tableName = iprot.readString();
+          self.tableName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 5:
@@ -9646,7 +9646,7 @@ class FireEventRequest:
           self.partitionVals = []
           (_etype458, _size455) = iprot.readListBegin()
           for _i459 in xrange(_size455):
-            _elem460 = iprot.readString();
+            _elem460 = iprot.readString()
             self.partitionVals.append(_elem460)
           iprot.readListEnd()
         else:
@@ -9789,12 +9789,12 @@ class MetadataPpdResult:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.metadata = iprot.readString();
+          self.metadata = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.includeBitset = iprot.readString();
+          self.includeBitset = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -9870,7 +9870,7 @@ class GetFileMetadataByExprResult:
           self.metadata = {}
           (_ktype463, _vtype464, _size462 ) = iprot.readMapBegin()
           for _i466 in xrange(_size462):
-            _key467 = iprot.readI64();
+            _key467 = iprot.readI64()
             _val468 = MetadataPpdResult()
             _val468.read(iprot)
             self.metadata[_key467] = _val468
@@ -9879,7 +9879,7 @@ class GetFileMetadataByExprResult:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.BOOL:
-          self.isSupported = iprot.readBool();
+          self.isSupported = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -9969,24 +9969,24 @@ class GetFileMetadataByExprRequest:
           self.fileIds = []
           (_etype474, _size471) = iprot.readListBegin()
           for _i475 in xrange(_size471):
-            _elem476 = iprot.readI64();
+            _elem476 = iprot.readI64()
             self.fileIds.append(_elem476)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.expr = iprot.readString();
+          self.expr = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.BOOL:
-          self.doGetFooters = iprot.readBool();
+          self.doGetFooters = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I32:
-          self.type = iprot.readI32();
+          self.type = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -10079,15 +10079,15 @@ class GetFileMetadataResult:
           self.metadata = {}
           (_ktype479, _vtype480, _size478 ) = iprot.readMapBegin()
           for _i482 in xrange(_size478):
-            _key483 = iprot.readI64();
-            _val484 = iprot.readString();
+            _key483 = iprot.readI64()
+            _val484 = iprot.readString()
             self.metadata[_key483] = _val484
           iprot.readMapEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.BOOL:
-          self.isSupported = iprot.readBool();
+          self.isSupported = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -10168,7 +10168,7 @@ class GetFileMetadataRequest:
           self.fileIds = []
           (_etype490, _size487) = iprot.readListBegin()
           for _i491 in xrange(_size487):
-            _elem492 = iprot.readI64();
+            _elem492 = iprot.readI64()
             self.fileIds.append(_elem492)
           iprot.readListEnd()
         else:
@@ -10292,7 +10292,7 @@ class PutFileMetadataRequest:
           self.fileIds = []
           (_etype497, _size494) = iprot.readListBegin()
           for _i498 in xrange(_size494):
-            _elem499 = iprot.readI64();
+            _elem499 = iprot.readI64()
             self.fileIds.append(_elem499)
           iprot.readListEnd()
         else:
@@ -10302,7 +10302,7 @@ class PutFileMetadataRequest:
           self.metadata = []
           (_etype503, _size500) = iprot.readListBegin()
           for _i504 in xrange(_size500):
-            _elem505 = iprot.readString();
+            _elem505 = iprot.readString()
             self.metadata.append(_elem505)
           iprot.readListEnd()
         else:
@@ -10433,7 +10433,7 @@ class ClearFileMetadataRequest:
           self.fileIds = []
           (_etype511, _size508) = iprot.readListBegin()
           for _i512 in xrange(_size508):
-            _elem513 = iprot.readI64();
+            _elem513 = iprot.readI64()
             self.fileIds.append(_elem513)
           iprot.readListEnd()
         else:
@@ -10579,7 +10579,7 @@ class MetaException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -10647,7 +10647,7 @@ class UnknownTableException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -10715,7 +10715,7 @@ class UnknownDBException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -10783,7 +10783,7 @@ class AlreadyExistsException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -10851,7 +10851,7 @@ class InvalidPartitionException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -10919,7 +10919,7 @@ class UnknownPartitionException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -10987,7 +10987,7 @@ class InvalidObjectException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -11055,7 +11055,7 @@ class NoSuchObjectException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -11123,7 +11123,7 @@ class IndexAlreadyExistsException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -11191,7 +11191,7 @@ class InvalidOperationException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -11259,7 +11259,7 @@ class ConfigValSecurityException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -11327,7 +11327,7 @@ class InvalidInputException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -11395,7 +11395,7 @@ class NoSuchTxnException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -11463,7 +11463,7 @@ class TxnAbortedException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -11531,7 +11531,7 @@ class TxnOpenException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -11599,7 +11599,7 @@ class NoSuchLockException(TException):
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.message = iprot.readString();
+          self.message = iprot.readString()
         else:
           iprot.skip(ftype)
       else:

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-rb/hive_metastore_constants.rb
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-rb/hive_metastore_constants.rb b/metastore/src/gen/thrift/gen-rb/hive_metastore_constants.rb
index 7ef6f43..eeccc84 100644
--- a/metastore/src/gen/thrift/gen-rb/hive_metastore_constants.rb
+++ b/metastore/src/gen/thrift/gen-rb/hive_metastore_constants.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
index c86bb43..08b9b06 100644
--- a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
+++ b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb b/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
index 8625c7b..7b93158 100644
--- a/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
+++ b/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 6a96391..a400f78 100644
--- a/pom.xml
+++ b/pom.xml
@@ -147,8 +147,8 @@
     <json.version>20090211</json.version>
     <junit.version>4.11</junit.version>
     <kryo.version>2.22</kryo.version>
-    <libfb303.version>0.9.2</libfb303.version>
-    <libthrift.version>0.9.2</libthrift.version>
+    <libfb303.version>0.9.3</libfb303.version>
+    <libthrift.version>0.9.3</libthrift.version>
     <log4j2.version>2.4</log4j2.version>
     <opencsv.version>2.3</opencsv.version>
     <mockito-all.version>1.9.5</mockito-all.version>
@@ -956,7 +956,7 @@
                   <target>
                     <taskdef name="for" classname="net.sf.antcontrib.logic.ForTask"
                       classpathref="maven.plugin.classpath" />
-                    <property name="thrift.args" value="-I ${thrift.home} --gen java:beans,hashcode --gen cpp --gen php --gen py --gen rb"/>
+                    <property name="thrift.args" value="-I ${thrift.home} --gen java:beans,hashcode,generated_annotations=undated --gen cpp --gen php --gen py --gen rb"/>
                     <property name="thrift.gen.dir" value="${basedir}/src/gen/thrift"/>
                     <delete dir="${thrift.gen.dir}"/>
                     <mkdir dir="${thrift.gen.dir}"/>

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-cpp/queryplan_constants.cpp
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-cpp/queryplan_constants.cpp b/ql/src/gen/thrift/gen-cpp/queryplan_constants.cpp
index 280edbe..9186bad 100644
--- a/ql/src/gen/thrift/gen-cpp/queryplan_constants.cpp
+++ b/ql/src/gen/thrift/gen-cpp/queryplan_constants.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-cpp/queryplan_constants.h
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-cpp/queryplan_constants.h b/ql/src/gen/thrift/gen-cpp/queryplan_constants.h
index 6bee48d..6cc8af0 100644
--- a/ql/src/gen/thrift/gen-cpp/queryplan_constants.h
+++ b/ql/src/gen/thrift/gen-cpp/queryplan_constants.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated


[09/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/testthrift_types.h
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/testthrift_types.h b/serde/src/gen/thrift/gen-cpp/testthrift_types.h
index 8c57e48..a59346a 100644
--- a/serde/src/gen/thrift/gen-cpp/testthrift_types.h
+++ b/serde/src/gen/thrift/gen-cpp/testthrift_types.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -31,9 +31,6 @@ typedef struct _InnerStruct__isset {
 class InnerStruct {
  public:
 
-  static const char* ascii_fingerprint; // = "E86CACEB22240450EDCBEFC3A83970E4";
-  static const uint8_t binary_fingerprint[16]; // = {0xE8,0x6C,0xAC,0xEB,0x22,0x24,0x04,0x50,0xED,0xCB,0xEF,0xC3,0xA8,0x39,0x70,0xE4};
-
   InnerStruct(const InnerStruct&);
   InnerStruct& operator=(const InnerStruct&);
   InnerStruct() : field0(0) {
@@ -61,11 +58,17 @@ class InnerStruct {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const InnerStruct& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(InnerStruct &a, InnerStruct &b);
 
+inline std::ostream& operator<<(std::ostream& out, const InnerStruct& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _ThriftTestObj__isset {
   _ThriftTestObj__isset() : field1(false), field2(false), field3(false) {}
   bool field1 :1;
@@ -76,9 +79,6 @@ typedef struct _ThriftTestObj__isset {
 class ThriftTestObj {
  public:
 
-  static const char* ascii_fingerprint; // = "2BA5D8DAACFBBE6599779830A6185706";
-  static const uint8_t binary_fingerprint[16]; // = {0x2B,0xA5,0xD8,0xDA,0xAC,0xFB,0xBE,0x65,0x99,0x77,0x98,0x30,0xA6,0x18,0x57,0x06};
-
   ThriftTestObj(const ThriftTestObj&);
   ThriftTestObj& operator=(const ThriftTestObj&);
   ThriftTestObj() : field1(0), field2() {
@@ -116,11 +116,17 @@ class ThriftTestObj {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftTestObj& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(ThriftTestObj &a, ThriftTestObj &b);
 
+inline std::ostream& operator<<(std::ostream& out, const ThriftTestObj& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 
 #endif

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
index 7902849..8b3eeb7 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java
index 3fdd0d9..01a84fe 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class InnerStruct implements org.apache.thrift.TBase<InnerStruct, InnerStruct._Fields>, java.io.Serializable, Cloneable, Comparable<InnerStruct> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InnerStruct");
 
@@ -185,7 +185,7 @@ public class InnerStruct implements org.apache.thrift.TBase<InnerStruct, InnerSt
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case FIELD0:
-      return Integer.valueOf(getField0());
+      return getField0();
 
     }
     throw new IllegalStateException();
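
For context on the getFieldValue() hunks in these regenerated beans: the 0.9.3 compiler drops the explicit Integer.valueOf()/Boolean.valueOf() wrappers and returns the primitive directly, relying on Java autoboxing to the declared Object return type. A minimal, self-contained sketch of that equivalence follows; the class and method names are illustrative only, not Hive or Thrift API:

    // 0.9.2-style explicit boxing vs. 0.9.3-style autoboxing; both yield equal Integers.
    public class AutoboxSketch {
      static int getField0() { return 42; }                               // stand-in for a generated getter
      static Object valueOf092() { return Integer.valueOf(getField0()); } // old generated form
      static Object autobox093() { return getField0(); }                  // new generated form
      public static void main(String[] args) {
        System.out.println(valueOf092().equals(autobox093()));            // prints: true
      }
    }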

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
index b72454f..d2f78f5 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ThriftTestObj implements org.apache.thrift.TBase<ThriftTestObj, ThriftTestObj._Fields>, java.io.Serializable, Cloneable, Comparable<ThriftTestObj> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ThriftTestObj");
 
@@ -293,7 +293,7 @@ public class ThriftTestObj implements org.apache.thrift.TBase<ThriftTestObj, Thr
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case FIELD1:
-      return Integer.valueOf(getField1());
+      return getField1();
 
     case FIELD2:
       return getField2();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
index af5e2cb..6f0c721 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields>, java.io.Serializable, Cloneable, Comparable<Complex> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Complex");
 
@@ -700,7 +700,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case AINT:
-      return Integer.valueOf(getAint());
+      return getAint();
 
     case A_STRING:
       return getAString();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/IntString.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/IntString.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/IntString.java
index 14a524c..ea7747f 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/IntString.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/IntString.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class IntString implements org.apache.thrift.TBase<IntString, IntString._Fields>, java.io.Serializable, Cloneable, Comparable<IntString> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("IntString");
 
@@ -273,13 +273,13 @@ public class IntString implements org.apache.thrift.TBase<IntString, IntString._
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case MYINT:
-      return Integer.valueOf(getMyint());
+      return getMyint();
 
     case MY_STRING:
       return getMyString();
 
     case UNDERSCORE_INT:
-      return Integer.valueOf(getUnderscore_int());
+      return getUnderscore_int();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
index 11e55bd..b8f8dfd 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class MegaStruct implements org.apache.thrift.TBase<MegaStruct, MegaStruct._Fields>, java.io.Serializable, Cloneable, Comparable<MegaStruct> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("MegaStruct");
 
@@ -1238,22 +1238,22 @@ public class MegaStruct implements org.apache.thrift.TBase<MegaStruct, MegaStruc
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case MY_BOOL:
-      return Boolean.valueOf(isMy_bool());
+      return isMy_bool();
 
     case MY_BYTE:
-      return Byte.valueOf(getMy_byte());
+      return getMy_byte();
 
     case MY_16BIT_INT:
-      return Short.valueOf(getMy_16bit_int());
+      return getMy_16bit_int();
 
     case MY_32BIT_INT:
-      return Integer.valueOf(getMy_32bit_int());
+      return getMy_32bit_int();
 
     case MY_64BIT_INT:
-      return Long.valueOf(getMy_64bit_int());
+      return getMy_64bit_int();
 
     case MY_DOUBLE:
-      return Double.valueOf(getMy_double());
+      return getMy_double();
 
     case MY_STRING:
       return getMy_string();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MiniStruct.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MiniStruct.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MiniStruct.java
index affc952..10f10d2 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MiniStruct.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MiniStruct.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class MiniStruct implements org.apache.thrift.TBase<MiniStruct, MiniStruct._Fields>, java.io.Serializable, Cloneable, Comparable<MiniStruct> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("MiniStruct");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MyEnum.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MyEnum.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MyEnum.java
index 03f88c5..e5baf36 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MyEnum.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MyEnum.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
index 2a70482..92d5a0a 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
index 6c2bdec..a331638 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class SetIntString implements org.apache.thrift.TBase<SetIntString, SetIntString._Fields>, java.io.Serializable, Cloneable, Comparable<SetIntString> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SetIntString");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-php/Types.php
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-php/Types.php b/serde/src/gen/thrift/gen-php/Types.php
index 8bb2fe3..27c5b25 100644
--- a/serde/src/gen/thrift/gen-php/Types.php
+++ b/serde/src/gen/thrift/gen-php/Types.php
@@ -1,8 +1,6 @@
 <?php
-namespace ;
-
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
index 94dc455..8370698 100644
--- a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
+++ b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
@@ -2,7 +2,7 @@
 namespace org\apache\hadoop\hive\serde;
 
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-py/complex/constants.py
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-py/complex/constants.py b/serde/src/gen/thrift/gen-py/complex/constants.py
index 99717a9..4a6492b 100644
--- a/serde/src/gen/thrift/gen-py/complex/constants.py
+++ b/serde/src/gen/thrift/gen-py/complex/constants.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-py/complex/ttypes.py
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-py/complex/ttypes.py b/serde/src/gen/thrift/gen-py/complex/ttypes.py
index 7f757da..d39de75 100644
--- a/serde/src/gen/thrift/gen-py/complex/ttypes.py
+++ b/serde/src/gen/thrift/gen-py/complex/ttypes.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #
@@ -60,27 +60,27 @@ class PropValueUnion:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.intValue = iprot.readI32();
+          self.intValue = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I64:
-          self.longValue = iprot.readI64();
+          self.longValue = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.stringValue = iprot.readString();
+          self.stringValue = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.DOUBLE:
-          self.doubleValue = iprot.readDouble();
+          self.doubleValue = iprot.readDouble()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.BOOL:
-          self.flag = iprot.readBool();
+          self.flag = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 6:
@@ -88,7 +88,7 @@ class PropValueUnion:
           self.lString = []
           (_etype3, _size0) = iprot.readListBegin()
           for _i4 in xrange(_size0):
-            _elem5 = iprot.readString();
+            _elem5 = iprot.readString()
             self.lString.append(_elem5)
           iprot.readListEnd()
         else:
@@ -98,8 +98,8 @@ class PropValueUnion:
           self.unionMStringString = {}
           (_ktype7, _vtype8, _size6 ) = iprot.readMapBegin()
           for _i10 in xrange(_size6):
-            _key11 = iprot.readString();
-            _val12 = iprot.readString();
+            _key11 = iprot.readString()
+            _val12 = iprot.readString()
             self.unionMStringString[_key11] = _val12
           iprot.readMapEnd()
         else:
@@ -209,17 +209,17 @@ class IntString:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.myint = iprot.readI32();
+          self.myint = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.myString = iprot.readString();
+          self.myString = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I32:
-          self.underscore_int = iprot.readI32();
+          self.underscore_int = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -321,12 +321,12 @@ class Complex:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.aint = iprot.readI32();
+          self.aint = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.aString = iprot.readString();
+          self.aString = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -334,7 +334,7 @@ class Complex:
           self.lint = []
           (_etype19, _size16) = iprot.readListBegin()
           for _i20 in xrange(_size16):
-            _elem21 = iprot.readI32();
+            _elem21 = iprot.readI32()
             self.lint.append(_elem21)
           iprot.readListEnd()
         else:
@@ -344,7 +344,7 @@ class Complex:
           self.lString = []
           (_etype25, _size22) = iprot.readListBegin()
           for _i26 in xrange(_size22):
-            _elem27 = iprot.readString();
+            _elem27 = iprot.readString()
             self.lString.append(_elem27)
           iprot.readListEnd()
         else:
@@ -365,8 +365,8 @@ class Complex:
           self.mStringString = {}
           (_ktype35, _vtype36, _size34 ) = iprot.readMapBegin()
           for _i38 in xrange(_size34):
-            _key39 = iprot.readString();
-            _val40 = iprot.readString();
+            _key39 = iprot.readString()
+            _val40 = iprot.readString()
             self.mStringString[_key39] = _val40
           iprot.readMapEnd()
         else:
@@ -376,15 +376,15 @@ class Complex:
           self.attributes = {}
           (_ktype42, _vtype43, _size41 ) = iprot.readMapBegin()
           for _i45 in xrange(_size41):
-            _key46 = iprot.readString();
+            _key46 = iprot.readString()
             _val47 = {}
             (_ktype49, _vtype50, _size48 ) = iprot.readMapBegin()
             for _i52 in xrange(_size48):
-              _key53 = iprot.readString();
+              _key53 = iprot.readString()
               _val54 = {}
               (_ktype56, _vtype57, _size55 ) = iprot.readMapBegin()
               for _i59 in xrange(_size55):
-                _key60 = iprot.readString();
+                _key60 = iprot.readString()
                 _val61 = PropValueUnion()
                 _val61.read(iprot)
                 _val54[_key60] = _val61
@@ -559,7 +559,7 @@ class SetIntString:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.aString = iprot.readString();
+          self.aString = iprot.readString()
         else:
           iprot.skip(ftype)
       else:

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-py/megastruct/constants.py
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-py/megastruct/constants.py b/serde/src/gen/thrift/gen-py/megastruct/constants.py
index 99717a9..4a6492b 100644
--- a/serde/src/gen/thrift/gen-py/megastruct/constants.py
+++ b/serde/src/gen/thrift/gen-py/megastruct/constants.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-py/megastruct/ttypes.py
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-py/megastruct/ttypes.py b/serde/src/gen/thrift/gen-py/megastruct/ttypes.py
index c4c5a7f..93c9250 100644
--- a/serde/src/gen/thrift/gen-py/megastruct/ttypes.py
+++ b/serde/src/gen/thrift/gen-py/megastruct/ttypes.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #
@@ -59,12 +59,12 @@ class MiniStruct:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.my_string = iprot.readString();
+          self.my_string = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.my_enum = iprot.readI32();
+          self.my_enum = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -191,42 +191,42 @@ class MegaStruct:
         break
       if fid == 1:
         if ftype == TType.BOOL:
-          self.my_bool = iprot.readBool();
+          self.my_bool = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.BYTE:
-          self.my_byte = iprot.readByte();
+          self.my_byte = iprot.readByte()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I16:
-          self.my_16bit_int = iprot.readI16();
+          self.my_16bit_int = iprot.readI16()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I32:
-          self.my_32bit_int = iprot.readI32();
+          self.my_32bit_int = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.I64:
-          self.my_64bit_int = iprot.readI64();
+          self.my_64bit_int = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.DOUBLE:
-          self.my_double = iprot.readDouble();
+          self.my_double = iprot.readDouble()
         else:
           iprot.skip(ftype)
       elif fid == 7:
         if ftype == TType.STRING:
-          self.my_string = iprot.readString();
+          self.my_string = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 8:
         if ftype == TType.STRING:
-          self.my_binary = iprot.readString();
+          self.my_binary = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 9:
@@ -234,8 +234,8 @@ class MegaStruct:
           self.my_string_string_map = {}
           (_ktype1, _vtype2, _size0 ) = iprot.readMapBegin()
           for _i4 in xrange(_size0):
-            _key5 = iprot.readString();
-            _val6 = iprot.readString();
+            _key5 = iprot.readString()
+            _val6 = iprot.readString()
             self.my_string_string_map[_key5] = _val6
           iprot.readMapEnd()
         else:
@@ -245,8 +245,8 @@ class MegaStruct:
           self.my_string_enum_map = {}
           (_ktype8, _vtype9, _size7 ) = iprot.readMapBegin()
           for _i11 in xrange(_size7):
-            _key12 = iprot.readString();
-            _val13 = iprot.readI32();
+            _key12 = iprot.readString()
+            _val13 = iprot.readI32()
             self.my_string_enum_map[_key12] = _val13
           iprot.readMapEnd()
         else:
@@ -256,8 +256,8 @@ class MegaStruct:
           self.my_enum_string_map = {}
           (_ktype15, _vtype16, _size14 ) = iprot.readMapBegin()
           for _i18 in xrange(_size14):
-            _key19 = iprot.readI32();
-            _val20 = iprot.readString();
+            _key19 = iprot.readI32()
+            _val20 = iprot.readString()
             self.my_enum_string_map[_key19] = _val20
           iprot.readMapEnd()
         else:
@@ -267,7 +267,7 @@ class MegaStruct:
           self.my_enum_struct_map = {}
           (_ktype22, _vtype23, _size21 ) = iprot.readMapBegin()
           for _i25 in xrange(_size21):
-            _key26 = iprot.readI32();
+            _key26 = iprot.readI32()
             _val27 = MiniStruct()
             _val27.read(iprot)
             self.my_enum_struct_map[_key26] = _val27
@@ -279,11 +279,11 @@ class MegaStruct:
           self.my_enum_stringlist_map = {}
           (_ktype29, _vtype30, _size28 ) = iprot.readMapBegin()
           for _i32 in xrange(_size28):
-            _key33 = iprot.readI32();
+            _key33 = iprot.readI32()
             _val34 = []
             (_etype38, _size35) = iprot.readListBegin()
             for _i39 in xrange(_size35):
-              _elem40 = iprot.readString();
+              _elem40 = iprot.readString()
               _val34.append(_elem40)
             iprot.readListEnd()
             self.my_enum_stringlist_map[_key33] = _val34
@@ -295,7 +295,7 @@ class MegaStruct:
           self.my_enum_structlist_map = {}
           (_ktype42, _vtype43, _size41 ) = iprot.readMapBegin()
           for _i45 in xrange(_size41):
-            _key46 = iprot.readI32();
+            _key46 = iprot.readI32()
             _val47 = []
             (_etype51, _size48) = iprot.readListBegin()
             for _i52 in xrange(_size48):
@@ -312,7 +312,7 @@ class MegaStruct:
           self.my_stringlist = []
           (_etype57, _size54) = iprot.readListBegin()
           for _i58 in xrange(_size54):
-            _elem59 = iprot.readString();
+            _elem59 = iprot.readString()
             self.my_stringlist.append(_elem59)
           iprot.readListEnd()
         else:
@@ -333,7 +333,7 @@ class MegaStruct:
           self.my_enumlist = []
           (_etype69, _size66) = iprot.readListBegin()
           for _i70 in xrange(_size66):
-            _elem71 = iprot.readI32();
+            _elem71 = iprot.readI32()
             self.my_enumlist.append(_elem71)
           iprot.readListEnd()
         else:
@@ -343,7 +343,7 @@ class MegaStruct:
           self.my_stringset = set()
           (_etype75, _size72) = iprot.readSetBegin()
           for _i76 in xrange(_size72):
-            _elem77 = iprot.readString();
+            _elem77 = iprot.readString()
             self.my_stringset.add(_elem77)
           iprot.readSetEnd()
         else:
@@ -353,7 +353,7 @@ class MegaStruct:
           self.my_enumset = set()
           (_etype81, _size78) = iprot.readSetBegin()
           for _i82 in xrange(_size78):
-            _elem83 = iprot.readI32();
+            _elem83 = iprot.readI32()
             self.my_enumset.add(_elem83)
           iprot.readSetEnd()
         else:

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
index 1673073..6ef3bcf 100644
--- a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
+++ b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/ttypes.py
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/ttypes.py b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/ttypes.py
index daa619e..8621257 100644
--- a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/ttypes.py
+++ b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/ttypes.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-py/testthrift/constants.py
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-py/testthrift/constants.py b/serde/src/gen/thrift/gen-py/testthrift/constants.py
index 99717a9..4a6492b 100644
--- a/serde/src/gen/thrift/gen-py/testthrift/constants.py
+++ b/serde/src/gen/thrift/gen-py/testthrift/constants.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-py/testthrift/ttypes.py
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-py/testthrift/ttypes.py b/serde/src/gen/thrift/gen-py/testthrift/ttypes.py
index 1b64a7d..133a602 100644
--- a/serde/src/gen/thrift/gen-py/testthrift/ttypes.py
+++ b/serde/src/gen/thrift/gen-py/testthrift/ttypes.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #
@@ -42,7 +42,7 @@ class InnerStruct:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.field0 = iprot.readI32();
+          self.field0 = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -113,12 +113,12 @@ class ThriftTestObj:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.field1 = iprot.readI32();
+          self.field1 = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.field2 = iprot.readString();
+          self.field2 = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-rb/complex_constants.rb
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-rb/complex_constants.rb b/serde/src/gen/thrift/gen-rb/complex_constants.rb
index 3df0b7a..8b75421 100644
--- a/serde/src/gen/thrift/gen-rb/complex_constants.rb
+++ b/serde/src/gen/thrift/gen-rb/complex_constants.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-rb/complex_types.rb
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-rb/complex_types.rb b/serde/src/gen/thrift/gen-rb/complex_types.rb
index 13d981f..8a03efd 100644
--- a/serde/src/gen/thrift/gen-rb/complex_types.rb
+++ b/serde/src/gen/thrift/gen-rb/complex_types.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-rb/megastruct_constants.rb
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-rb/megastruct_constants.rb b/serde/src/gen/thrift/gen-rb/megastruct_constants.rb
index 6eb3f07..ab01193 100644
--- a/serde/src/gen/thrift/gen-rb/megastruct_constants.rb
+++ b/serde/src/gen/thrift/gen-rb/megastruct_constants.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-rb/megastruct_types.rb
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-rb/megastruct_types.rb b/serde/src/gen/thrift/gen-rb/megastruct_types.rb
index 7d425af..fb8ccdf 100644
--- a/serde/src/gen/thrift/gen-rb/megastruct_types.rb
+++ b/serde/src/gen/thrift/gen-rb/megastruct_types.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-rb/serde_constants.rb
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-rb/serde_constants.rb b/serde/src/gen/thrift/gen-rb/serde_constants.rb
index 56d9e76..f98441b 100644
--- a/serde/src/gen/thrift/gen-rb/serde_constants.rb
+++ b/serde/src/gen/thrift/gen-rb/serde_constants.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-rb/serde_types.rb
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-rb/serde_types.rb b/serde/src/gen/thrift/gen-rb/serde_types.rb
index b3143d1..3bf85ab 100644
--- a/serde/src/gen/thrift/gen-rb/serde_types.rb
+++ b/serde/src/gen/thrift/gen-rb/serde_types.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-rb/testthrift_constants.rb
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-rb/testthrift_constants.rb b/serde/src/gen/thrift/gen-rb/testthrift_constants.rb
index 1f9473e..a793bf2 100644
--- a/serde/src/gen/thrift/gen-rb/testthrift_constants.rb
+++ b/serde/src/gen/thrift/gen-rb/testthrift_constants.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-rb/testthrift_types.rb
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-rb/testthrift_types.rb b/serde/src/gen/thrift/gen-rb/testthrift_types.rb
index 80d5572..f265d42 100644
--- a/serde/src/gen/thrift/gen-rb/testthrift_types.rb
+++ b/serde/src/gen/thrift/gen-rb/testthrift_types.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #


[46/55] [abbrv] hive git commit: HIVE-11973: IN operator fails when the column type is DATE (Yongzhi via Jimmy)

Posted by xu...@apache.org.
HIVE-11973: IN operator fails when the column type is DATE (Yongzhi via Jimmy)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/601a4812
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/601a4812
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/601a4812

Branch: refs/heads/spark
Commit: 601a48122992337ceca8e4042948b3a16334fa24
Parents: ccdd174
Author: Jimmy Xiang <jx...@cloudera.com>
Authored: Tue Oct 27 09:09:55 2015 -0700
Committer: Jimmy Xiang <jx...@cloudera.com>
Committed: Tue Oct 27 09:27:46 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |  6 ++
 .../test/queries/clientpositive/selectindate.q  |  9 +++
 .../results/clientpositive/selectindate.q.out   | 70 ++++++++++++++++++++
 3 files changed, 85 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/601a4812/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index d0e6122..9316600 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -784,6 +784,12 @@ public final class FunctionRegistry {
       return PrimitiveCategory.STRING;
     }
 
+    if (pgA == PrimitiveGrouping.DATE_GROUP && pgB == PrimitiveGrouping.STRING_GROUP) {
+      return PrimitiveCategory.STRING;
+    }
+    if (pgB == PrimitiveGrouping.DATE_GROUP && pgA == PrimitiveGrouping.STRING_GROUP) {
+      return PrimitiveCategory.STRING;
+    }
     Integer ai = numericTypes.get(pcA);
     Integer bi = numericTypes.get(pcB);
     if (ai == null || bi == null) {

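(Editor's note, not part of the commit: a minimal sketch of the symmetric group check the FunctionRegistry.java hunk above introduces. When one operand falls in the date grouping and the other in the string grouping, the common comparison category falls back to STRING, which is what allows the DATE column in the new selectindate.q test below to be compared against string literals in an IN list. The enum and method names here are simplified, hypothetical stand-ins for Hive's actual PrimitiveGrouping/PrimitiveCategory types, not the real API.)

// Simplified stand-ins for Hive's PrimitiveGrouping / PrimitiveCategory (assumption, for illustration only).
enum Grouping { DATE_GROUP, STRING_GROUP, NUMERIC_GROUP }
enum Category { STRING, DATE, INT }

final class CommonCategorySketch {
  // Mirrors the symmetric check added in the hunk above: a DATE operand compared
  // with a STRING operand resolves to STRING before the IN comparison is evaluated.
  static Category commonCategory(Grouping a, Grouping b) {
    if ((a == Grouping.DATE_GROUP && b == Grouping.STRING_GROUP)
        || (b == Grouping.DATE_GROUP && a == Grouping.STRING_GROUP)) {
      return Category.STRING;
    }
    return null; // other combinations fall through to the existing numeric handling
  }

  public static void main(String[] args) {
    // DATE column vs. string literal, as in:
    //   SELECT * FROM datetest WHERE dValue IN ('2000-03-22','2001-03-22')
    System.out.println(commonCategory(Grouping.DATE_GROUP, Grouping.STRING_GROUP)); // STRING
  }
}
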
http://git-wip-us.apache.org/repos/asf/hive/blob/601a4812/ql/src/test/queries/clientpositive/selectindate.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/selectindate.q b/ql/src/test/queries/clientpositive/selectindate.q
new file mode 100644
index 0000000..00f6a5a
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/selectindate.q
@@ -0,0 +1,9 @@
+drop table if exists datetest;
+create table datetest(dValue date, iValue int);
+insert into datetest values('2000-03-22', 1);
+insert into datetest values('2001-03-22', 2);
+insert into datetest values('2002-03-22', 3);
+insert into datetest values('2003-03-22', 4);
+SELECT * FROM datetest WHERE dValue IN ('2000-03-22','2001-03-22');
+drop table datetest;
+

http://git-wip-us.apache.org/repos/asf/hive/blob/601a4812/ql/src/test/results/clientpositive/selectindate.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/selectindate.q.out b/ql/src/test/results/clientpositive/selectindate.q.out
new file mode 100644
index 0000000..defebe3
--- /dev/null
+++ b/ql/src/test/results/clientpositive/selectindate.q.out
@@ -0,0 +1,70 @@
+PREHOOK: query: drop table if exists datetest
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists datetest
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table datetest(dValue date, iValue int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@datetest
+POSTHOOK: query: create table datetest(dValue date, iValue int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@datetest
+PREHOOK: query: insert into datetest values('2000-03-22', 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@datetest
+POSTHOOK: query: insert into datetest values('2000-03-22', 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@datetest
+POSTHOOK: Lineage: datetest.dvalue EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: datetest.ivalue EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: insert into datetest values('2001-03-22', 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@datetest
+POSTHOOK: query: insert into datetest values('2001-03-22', 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@datetest
+POSTHOOK: Lineage: datetest.dvalue EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: datetest.ivalue EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: insert into datetest values('2002-03-22', 3)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__3
+PREHOOK: Output: default@datetest
+POSTHOOK: query: insert into datetest values('2002-03-22', 3)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__3
+POSTHOOK: Output: default@datetest
+POSTHOOK: Lineage: datetest.dvalue EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: datetest.ivalue EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: insert into datetest values('2003-03-22', 4)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__4
+PREHOOK: Output: default@datetest
+POSTHOOK: query: insert into datetest values('2003-03-22', 4)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__4
+POSTHOOK: Output: default@datetest
+POSTHOOK: Lineage: datetest.dvalue EXPRESSION [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: datetest.ivalue EXPRESSION [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: SELECT * FROM datetest WHERE dValue IN ('2000-03-22','2001-03-22')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@datetest
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM datetest WHERE dValue IN ('2000-03-22','2001-03-22')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@datetest
+#### A masked pattern was here ####
+2000-03-22	1
+2001-03-22	2
+PREHOOK: query: drop table datetest
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@datetest
+PREHOOK: Output: default@datetest
+POSTHOOK: query: drop table datetest
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@datetest
+POSTHOOK: Output: default@datetest


[13/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java
index 4046e95..b9b7f3c 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ShowLocksResponse implements org.apache.thrift.TBase<ShowLocksResponse, ShowLocksResponse._Fields>, java.io.Serializable, Cloneable, Comparable<ShowLocksResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ShowLocksResponse");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponseElement.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponseElement.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponseElement.java
index e0597a6..037a383 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponseElement.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponseElement.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ShowLocksResponseElement implements org.apache.thrift.TBase<ShowLocksResponseElement, ShowLocksResponseElement._Fields>, java.io.Serializable, Cloneable, Comparable<ShowLocksResponseElement> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ShowLocksResponseElement");
 
@@ -640,7 +640,7 @@ public class ShowLocksResponseElement implements org.apache.thrift.TBase<ShowLoc
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case LOCKID:
-      return Long.valueOf(getLockid());
+      return getLockid();
 
     case DBNAME:
       return getDbname();
@@ -658,13 +658,13 @@ public class ShowLocksResponseElement implements org.apache.thrift.TBase<ShowLoc
       return getType();
 
     case TXNID:
-      return Long.valueOf(getTxnid());
+      return getTxnid();
 
     case LASTHEARTBEAT:
-      return Long.valueOf(getLastheartbeat());
+      return getLastheartbeat();
 
     case ACQUIREDAT:
-      return Long.valueOf(getAcquiredat());
+      return getAcquiredat();
 
     case USER:
       return getUser();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java
index 4b4ee50..c32f50c 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class SkewedInfo implements org.apache.thrift.TBase<SkewedInfo, SkewedInfo._Fields>, java.io.Serializable, Cloneable, Comparable<SkewedInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SkewedInfo");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
index eb95e42..938f06b 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class StorageDescriptor implements org.apache.thrift.TBase<StorageDescriptor, StorageDescriptor._Fields>, java.io.Serializable, Cloneable, Comparable<StorageDescriptor> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StorageDescriptor");
 
@@ -748,10 +748,10 @@ public class StorageDescriptor implements org.apache.thrift.TBase<StorageDescrip
       return getOutputFormat();
 
     case COMPRESSED:
-      return Boolean.valueOf(isCompressed());
+      return isCompressed();
 
     case NUM_BUCKETS:
-      return Integer.valueOf(getNumBuckets());
+      return getNumBuckets();
 
     case SERDE_INFO:
       return getSerdeInfo();
@@ -769,7 +769,7 @@ public class StorageDescriptor implements org.apache.thrift.TBase<StorageDescrip
       return getSkewedInfo();
 
     case STORED_AS_SUB_DIRECTORIES:
-      return Boolean.valueOf(isStoredAsSubDirectories());
+      return isStoredAsSubDirectories();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StringColumnStatsData.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StringColumnStatsData.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StringColumnStatsData.java
index 5ec2ee2..bd8a922 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StringColumnStatsData.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StringColumnStatsData.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class StringColumnStatsData implements org.apache.thrift.TBase<StringColumnStatsData, StringColumnStatsData._Fields>, java.io.Serializable, Cloneable, Comparable<StringColumnStatsData> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StringColumnStatsData");
 
@@ -317,16 +317,16 @@ public class StringColumnStatsData implements org.apache.thrift.TBase<StringColu
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case MAX_COL_LEN:
-      return Long.valueOf(getMaxColLen());
+      return getMaxColLen();
 
     case AVG_COL_LEN:
-      return Double.valueOf(getAvgColLen());
+      return getAvgColLen();
 
     case NUM_NULLS:
-      return Long.valueOf(getNumNulls());
+      return getNumNulls();
 
     case NUM_DVS:
-      return Long.valueOf(getNumDVs());
+      return getNumDVs();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java
index 1f1fa82..5d683fb 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Table implements org.apache.thrift.TBase<Table, Table._Fields>, java.io.Serializable, Cloneable, Comparable<Table> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Table");
 
@@ -798,13 +798,13 @@ public class Table implements org.apache.thrift.TBase<Table, Table._Fields>, jav
       return getOwner();
 
     case CREATE_TIME:
-      return Integer.valueOf(getCreateTime());
+      return getCreateTime();
 
     case LAST_ACCESS_TIME:
-      return Integer.valueOf(getLastAccessTime());
+      return getLastAccessTime();
 
     case RETENTION:
-      return Integer.valueOf(getRetention());
+      return getRetention();
 
     case SD:
       return getSd();
@@ -828,7 +828,7 @@ public class Table implements org.apache.thrift.TBase<Table, Table._Fields>, jav
       return getPrivileges();
 
     case TEMPORARY:
-      return Boolean.valueOf(isTemporary());
+      return isTemporary();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsRequest.java
index 36edc93..d0daee5 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TableStatsRequest implements org.apache.thrift.TBase<TableStatsRequest, TableStatsRequest._Fields>, java.io.Serializable, Cloneable, Comparable<TableStatsRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TableStatsRequest");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsResult.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsResult.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsResult.java
index 8d28168..78d4250 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsResult.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsResult.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TableStatsResult implements org.apache.thrift.TBase<TableStatsResult, TableStatsResult._Fields>, java.io.Serializable, Cloneable, Comparable<TableStatsResult> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TableStatsResult");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
index b54c5c4..0c67416 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ThriftHiveMetastore {
 
   /**
@@ -24219,10 +24219,10 @@ public class ThriftHiveMetastore {
         return getName();
 
       case DELETE_DATA:
-        return Boolean.valueOf(isDeleteData());
+        return isDeleteData();
 
       case CASCADE:
-        return Boolean.valueOf(isCascade());
+        return isCascade();
 
       }
       throw new IllegalStateException();
@@ -29249,7 +29249,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -30239,7 +30239,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -38879,7 +38879,7 @@ public class ThriftHiveMetastore {
         return getName();
 
       case DELETE_DATA:
-        return Boolean.valueOf(isDeleteData());
+        return isDeleteData();
 
       }
       throw new IllegalStateException();
@@ -39953,7 +39953,7 @@ public class ThriftHiveMetastore {
         return getName();
 
       case DELETE_DATA:
-        return Boolean.valueOf(isDeleteData());
+        return isDeleteData();
 
       case ENVIRONMENT_CONTEXT:
         return getEnvironment_context();
@@ -45174,7 +45174,7 @@ public class ThriftHiveMetastore {
         return getFilter();
 
       case MAX_TABLES:
-        return Short.valueOf(getMax_tables());
+        return getMax_tables();
 
       }
       throw new IllegalStateException();
@@ -48693,7 +48693,7 @@ public class ThriftHiveMetastore {
         return getNew_tbl();
 
       case CASCADE:
-        return Boolean.valueOf(isCascade());
+        return isCascade();
 
       }
       throw new IllegalStateException();
@@ -52432,7 +52432,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Integer.valueOf(getSuccess());
+        return getSuccess();
 
       case O1:
         return getO1();
@@ -53520,7 +53520,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Integer.valueOf(getSuccess());
+        return getSuccess();
 
       case O1:
         return getO1();
@@ -60570,7 +60570,7 @@ public class ThriftHiveMetastore {
         return getPart_vals();
 
       case DELETE_DATA:
-        return Boolean.valueOf(isDeleteData());
+        return isDeleteData();
 
       }
       throw new IllegalStateException();
@@ -61217,7 +61217,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -61901,7 +61901,7 @@ public class ThriftHiveMetastore {
         return getPart_vals();
 
       case DELETE_DATA:
-        return Boolean.valueOf(isDeleteData());
+        return isDeleteData();
 
       case ENVIRONMENT_CONTEXT:
         return getEnvironment_context();
@@ -62613,7 +62613,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -63236,7 +63236,7 @@ public class ThriftHiveMetastore {
         return getPart_name();
 
       case DELETE_DATA:
-        return Boolean.valueOf(isDeleteData());
+        return isDeleteData();
 
       }
       throw new IllegalStateException();
@@ -63851,7 +63851,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -64518,7 +64518,7 @@ public class ThriftHiveMetastore {
         return getPart_name();
 
       case DELETE_DATA:
-        return Boolean.valueOf(isDeleteData());
+        return isDeleteData();
 
       case ENVIRONMENT_CONTEXT:
         return getEnvironment_context();
@@ -65198,7 +65198,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -72105,7 +72105,7 @@ public class ThriftHiveMetastore {
         return getTbl_name();
 
       case MAX_PARTS:
-        return Short.valueOf(getMax_parts());
+        return getMax_parts();
 
       }
       throw new IllegalStateException();
@@ -73400,7 +73400,7 @@ public class ThriftHiveMetastore {
         return getTbl_name();
 
       case MAX_PARTS:
-        return Short.valueOf(getMax_parts());
+        return getMax_parts();
 
       case USER_NAME:
         return getUser_name();
@@ -74742,7 +74742,7 @@ public class ThriftHiveMetastore {
         return getTbl_name();
 
       case MAX_PARTS:
-        return Integer.valueOf(getMax_parts());
+        return getMax_parts();
 
       }
       throw new IllegalStateException();
@@ -75932,7 +75932,7 @@ public class ThriftHiveMetastore {
         return getTbl_name();
 
       case MAX_PARTS:
-        return Short.valueOf(getMax_parts());
+        return getMax_parts();
 
       }
       throw new IllegalStateException();
@@ -77075,7 +77075,7 @@ public class ThriftHiveMetastore {
         return getPart_vals();
 
       case MAX_PARTS:
-        return Short.valueOf(getMax_parts());
+        return getMax_parts();
 
       }
       throw new IllegalStateException();
@@ -78523,7 +78523,7 @@ public class ThriftHiveMetastore {
         return getPart_vals();
 
       case MAX_PARTS:
-        return Short.valueOf(getMax_parts());
+        return getMax_parts();
 
       case USER_NAME:
         return getUser_name();
@@ -80018,7 +80018,7 @@ public class ThriftHiveMetastore {
         return getPart_vals();
 
       case MAX_PARTS:
-        return Short.valueOf(getMax_parts());
+        return getMax_parts();
 
       }
       throw new IllegalStateException();
@@ -81339,7 +81339,7 @@ public class ThriftHiveMetastore {
         return getFilter();
 
       case MAX_PARTS:
-        return Short.valueOf(getMax_parts());
+        return getMax_parts();
 
       }
       throw new IllegalStateException();
@@ -82633,7 +82633,7 @@ public class ThriftHiveMetastore {
         return getFilter();
 
       case MAX_PARTS:
-        return Integer.valueOf(getMax_parts());
+        return getMax_parts();
 
       }
       throw new IllegalStateException();
@@ -90484,7 +90484,7 @@ public class ThriftHiveMetastore {
         return getPart_vals();
 
       case THROW_EXCEPTION:
-        return Boolean.valueOf(isThrow_exception());
+        return isThrow_exception();
 
       }
       throw new IllegalStateException();
@@ -90973,7 +90973,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -96686,7 +96686,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -99851,7 +99851,7 @@ public class ThriftHiveMetastore {
         return getIndex_name();
 
       case DELETE_DATA:
-        return Boolean.valueOf(isDeleteData());
+        return isDeleteData();
 
       }
       throw new IllegalStateException();
@@ -100466,7 +100466,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -102185,7 +102185,7 @@ public class ThriftHiveMetastore {
         return getTbl_name();
 
       case MAX_INDEXES:
-        return Short.valueOf(getMax_indexes());
+        return getMax_indexes();
 
       }
       throw new IllegalStateException();
@@ -103375,7 +103375,7 @@ public class ThriftHiveMetastore {
         return getTbl_name();
 
       case MAX_INDEXES:
-        return Short.valueOf(getMax_indexes());
+        return getMax_indexes();
 
       }
       throw new IllegalStateException();
@@ -104897,7 +104897,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -106042,7 +106042,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -112811,7 +112811,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -114263,7 +114263,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -115611,7 +115611,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -122357,7 +122357,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -123179,7 +123179,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -124617,7 +124617,7 @@ public class ThriftHiveMetastore {
         return getGrantorType();
 
       case GRANT_OPTION:
-        return Boolean.valueOf(isGrant_option());
+        return isGrant_option();
 
       }
       throw new IllegalStateException();
@@ -125302,7 +125302,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -126344,7 +126344,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -132849,7 +132849,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -133676,7 +133676,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Boolean.valueOf(isSuccess());
+        return isSuccess();
 
       case O1:
         return getO1();
@@ -137280,7 +137280,7 @@ public class ThriftHiveMetastore {
     public Object getFieldValue(_Fields field) {
       switch (field) {
       case SUCCESS:
-        return Long.valueOf(getSuccess());
+        return getSuccess();
 
       case O1:
         return getO1();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnAbortedException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnAbortedException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnAbortedException.java
index c0a60b7..689f646 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnAbortedException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnAbortedException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TxnAbortedException extends TException implements org.apache.thrift.TBase<TxnAbortedException, TxnAbortedException._Fields>, java.io.Serializable, Cloneable, Comparable<TxnAbortedException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TxnAbortedException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnInfo.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnInfo.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnInfo.java
index 0a9acf6..47cdaf4 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnInfo.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnInfo.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TxnInfo implements org.apache.thrift.TBase<TxnInfo, TxnInfo._Fields>, java.io.Serializable, Cloneable, Comparable<TxnInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TxnInfo");
 
@@ -329,7 +329,7 @@ public class TxnInfo implements org.apache.thrift.TBase<TxnInfo, TxnInfo._Fields
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case ID:
-      return Long.valueOf(getId());
+      return getId();
 
     case STATE:
       return getState();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnOpenException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnOpenException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnOpenException.java
index 74f50bd..10b5f9b 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnOpenException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnOpenException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TxnOpenException extends TException implements org.apache.thrift.TBase<TxnOpenException, TxnOpenException._Fields>, java.io.Serializable, Cloneable, Comparable<TxnOpenException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TxnOpenException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnState.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnState.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnState.java
index 69d6239..de59755 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnState.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnState.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java
index 55a3a23..14cd42e 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Type implements org.apache.thrift.TBase<Type, Type._Fields>, java.io.Serializable, Cloneable, Comparable<Type> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Type");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownDBException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownDBException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownDBException.java
index f24c35d..a5ea86f 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownDBException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownDBException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class UnknownDBException extends TException implements org.apache.thrift.TBase<UnknownDBException, UnknownDBException._Fields>, java.io.Serializable, Cloneable, Comparable<UnknownDBException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("UnknownDBException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownPartitionException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownPartitionException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownPartitionException.java
index 25a33b1..538ce29 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownPartitionException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownPartitionException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class UnknownPartitionException extends TException implements org.apache.thrift.TBase<UnknownPartitionException, UnknownPartitionException._Fields>, java.io.Serializable, Cloneable, Comparable<UnknownPartitionException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("UnknownPartitionException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownTableException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownTableException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownTableException.java
index 8bab3c3..95bd0dd 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownTableException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownTableException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class UnknownTableException extends TException implements org.apache.thrift.TBase<UnknownTableException, UnknownTableException._Fields>, java.io.Serializable, Cloneable, Comparable<UnknownTableException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("UnknownTableException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnlockRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnlockRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnlockRequest.java
index 5686c4e..f28e808 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnlockRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnlockRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class UnlockRequest implements org.apache.thrift.TBase<UnlockRequest, UnlockRequest._Fields>, java.io.Serializable, Cloneable, Comparable<UnlockRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("UnlockRequest");
 
@@ -185,7 +185,7 @@ public class UnlockRequest implements org.apache.thrift.TBase<UnlockRequest, Unl
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case LOCKID:
-      return Long.valueOf(getLockid());
+      return getLockid();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Version.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Version.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Version.java
index 9d0e98e..12a2bce 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Version.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Version.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Version implements org.apache.thrift.TBase<Version, Version._Fields>, java.io.Serializable, Cloneable, Comparable<Version> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Version");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/hive_metastoreConstants.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/hive_metastoreConstants.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/hive_metastoreConstants.java
index b6e0a29..5a666f2 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/hive_metastoreConstants.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/hive_metastoreConstants.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php b/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
index 2abd9fe..e922d7d 100644
--- a/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
+++ b/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
@@ -1,7 +1,7 @@
 <?php
 namespace metastore;
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-php/metastore/Types.php
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-php/metastore/Types.php b/metastore/src/gen/thrift/gen-php/metastore/Types.php
index e7c530c..e63213d 100644
--- a/metastore/src/gen/thrift/gen-php/metastore/Types.php
+++ b/metastore/src/gen/thrift/gen-php/metastore/Types.php
@@ -2,7 +2,7 @@
 namespace metastore;
 
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote b/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote
index 466063e..8dba17b 100755
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #


[42/55] [abbrv] hive git commit: HIVE-11378 Remove hadoop-1 support from master branch (gates, reviewed by Ashutosh Chauhan and Sergey Shelukhin)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/hive-unit/pom.xml
----------------------------------------------------------------------
diff --git a/itests/hive-unit/pom.xml b/itests/hive-unit/pom.xml
index 5295840..326d646 100644
--- a/itests/hive-unit/pom.xml
+++ b/itests/hive-unit/pom.xml
@@ -64,7 +64,23 @@
       <artifactId>hive-hcatalog-streaming</artifactId>
       <version>${project.version}</version>
     </dependency>
-
+    <!-- inter-project -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-tests</artifactId>
+      <version>${tez.version}</version>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>${commons-logging.version}</version>
+    </dependency>
     <!-- dependencies are always listed in sorted order by groupId, artifectId -->
     <!-- test intra-project -->
     <dependency>
@@ -122,6 +138,119 @@
       <version>${mockito-all.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <version>${hbase.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop2-compat</artifactId>
+      <version>${hbase.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-servlet</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-archives</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-hs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-api</artifactId>
+      <version>${tez.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-runtime-library</artifactId>
+      <version>${tez.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-mapreduce</artifactId>
+      <version>${tez.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-dag</artifactId>
+      <version>${tez.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-tests</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-client</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <profiles>
@@ -171,233 +300,6 @@
         </plugins>
       </build>
     </profile>
-    <profile>
-      <id>hadoop-1</id>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-compiler-plugin</artifactId>
-            <version>2.3.2</version>
-            <configuration>
-              <testExcludes>
-                <exclude>**/metastore/hbase/**</exclude>
-              </testExcludes>
-            </configuration>
-          </plugin>
-        </plugins>
-      </build>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-test</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop1-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop1-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-tools</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>com.sun.jersey</groupId>
-          <artifactId>jersey-servlet</artifactId>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop2-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minicluster</artifactId>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>com.sun.jersey</groupId>
-          <artifactId>jersey-servlet</artifactId>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-archives</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-hs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>commons-logging</groupId>
-          <artifactId>commons-logging</artifactId>
-          <version>${commons-logging.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-server-tests</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-client</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-tests</artifactId>
-          <version>${tez.version}</version>
-          <type>test-jar</type>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-api</artifactId>
-          <version>${tez.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-runtime-library</artifactId>
-          <version>${tez.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-mapreduce</artifactId>
-          <version>${tez.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-dag</artifactId>
-          <version>${tez.version}</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
   </profiles>
 
   <build>

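[Editor's illustration] The dependency blocks above resolve their versions through the single ${hadoop.version}, ${hbase.version} and ${tez.version} properties. As orientation only, here is a minimal sketch of how such properties are conventionally declared once in a parent pom's <properties> section; the property names are taken from the diff above, while the location and the placeholder values are assumptions and not part of this patch:

    <properties>
      <!-- placeholder values; the real versions are defined elsewhere in the build -->
      <hadoop.version>2.x.y</hadoop.version>
      <hbase.version>1.x.y</hbase.version>
      <tez.version>0.x.y</tez.version>
    </properties>

With the versions centralized this way, every module-level <dependency> can reference ${hadoop.version} and friends directly, as the blocks above do.
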
http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/pom.xml
----------------------------------------------------------------------
diff --git a/itests/pom.xml b/itests/pom.xml
index acce713..0686f1f 100644
--- a/itests/pom.xml
+++ b/itests/pom.xml
@@ -40,17 +40,12 @@
    <module>qtest</module>
    <module>qtest-accumulo</module>
    <module>hive-jmh</module>
+   <module>hive-unit-hadoop2</module>
+   <module>hive-minikdc</module>
   </modules>
 
   <profiles>
     <profile>
-      <id>hadoop-2</id>
-      <modules>
-        <module>hive-unit-hadoop2</module>
-        <module>hive-minikdc</module>
-      </modules>
-    </profile>
-    <profile>
       <id>spark-test</id>
       <activation>
 	<property>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/qtest-accumulo/pom.xml
----------------------------------------------------------------------
diff --git a/itests/qtest-accumulo/pom.xml b/itests/qtest-accumulo/pom.xml
index 09ae2a2..aafa034 100644
--- a/itests/qtest-accumulo/pom.xml
+++ b/itests/qtest-accumulo/pom.xml
@@ -39,6 +39,7 @@
          as long as -DskipAccumuloTests is not specified -->
     <skip.accumulo.tests>true</skip.accumulo.tests>
     <accumulo-thrift.version>0.9.0</accumulo-thrift.version>
+    <test.dfs.mkdir>-mkdir -p</test.dfs.mkdir>
   </properties>
 
   <dependencies>
@@ -112,6 +113,12 @@
       <scope>test</scope>
       <classifier>core</classifier>
     </dependency>
+    <!-- inter-project -->
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>${commons-logging.version}</version>
+    </dependency>
     <!-- test inter-project -->
     <dependency>
       <groupId>junit</groupId>
@@ -139,6 +146,158 @@
       <version>${javolution.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-servlet</artifactId>
+      <version>${jersey.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-archives</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-hs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-tests</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-client</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop2-compat</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop2-compat</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-tests</artifactId>
+      <version>${tez.version}</version>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-api</artifactId>
+      <version>${tez.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-runtime-library</artifactId>
+      <version>${tez.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-mapreduce</artifactId>
+      <version>${tez.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-dag</artifactId>
+      <version>${tez.version}</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
   <profiles>
     <profile>
@@ -149,251 +308,6 @@
       </properties>
     </profile>
     <profile>
-      <id>hadoop-1</id>
-      <properties>
-        <active.hadoop.version>${hadoop-20S.version}</active.hadoop.version>
-        <test.dfs.mkdir>-mkdir</test.dfs.mkdir>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-test</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-tools</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop1-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop1-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>hadoop-2</id>
-      <properties>
-        <active.hadoop.version>${hadoop-23.version}</active.hadoop.version>
-        <test.dfs.mkdir>-mkdir -p</test.dfs.mkdir>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>com.sun.jersey</groupId>
-          <artifactId>jersey-servlet</artifactId>
-          <version>${jersey.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-archives</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-hs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>commons-logging</groupId>
-          <artifactId>commons-logging</artifactId>
-          <version>${commons-logging.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-server-tests</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-client</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop2-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop2-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency> 
-          <groupId>org.apache.tez</groupId> 
-          <artifactId>tez-tests</artifactId>
-          <version>${tez.version}</version>
-          <type>test-jar</type>
-        </dependency>
-        <dependency> 
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-api</artifactId>
-          <version>${tez.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency> 
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-runtime-library</artifactId>
-          <version>${tez.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency> 
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-mapreduce</artifactId>
-          <version>${tez.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency> 
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-dag</artifactId>
-          <version>${tez.version}</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
       <id>accumulo-tests</id>
       <activation>
         <property>

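[Editor's illustration] The dependency lists above pull HBase and Hadoop test artifacts in two forms that both appear in this patch: <type>test-jar</type> and <classifier>tests</classifier>. A minimal side-by-side sketch, assuming a standard Maven setup where the upstream project attaches its test jar under the default "tests" classifier; the two forms usually resolve to the same artifact, with <type>test-jar</type> being the form the Maven documentation describes for consuming test jars:

    <!-- consuming a test jar by dependency type -->
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-server</artifactId>
      <version>${hbase.version}</version>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>

    <!-- consuming the same artifact by classifier -->
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-server</artifactId>
      <version>${hbase.version}</version>
      <classifier>tests</classifier>
      <scope>test</scope>
    </dependency>
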
http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/qtest-spark/pom.xml
----------------------------------------------------------------------
diff --git a/itests/qtest-spark/pom.xml b/itests/qtest-spark/pom.xml
index 05a3c9f..e06871a 100644
--- a/itests/qtest-spark/pom.xml
+++ b/itests/qtest-spark/pom.xml
@@ -38,7 +38,7 @@
     <qfile_regex></qfile_regex>
     <run_disabled>false</run_disabled>
     <execute.beeline.tests>false</execute.beeline.tests>
-    <active.hadoop.version>${hadoop-23.version}</active.hadoop.version>
+    <active.hadoop.version>${hadoop.version}</active.hadoop.version>
     <test.dfs.mkdir>-mkdir -p</test.dfs.mkdir>
     <spark.home>${basedir}/${hive.path.to.root}/itests/qtest-spark/target/spark</spark.home>
   </properties>
@@ -156,52 +156,52 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-archives</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <scope>test</scope>
     </dependency>
       <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <classifier>tests</classifier>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <classifier>tests</classifier>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <classifier>tests</classifier>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-hs</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-core</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -212,65 +212,65 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-server-tests</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <scope>test</scope>
       <classifier>tests</classifier>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-client</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
-      <version>${hbase.hadoop2.version}</version>
+      <version>${hbase.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
-      <version>${hbase.hadoop2.version}</version>
+      <version>${hbase.version}</version>
       <scope>test</scope>
       <classifier>tests</classifier>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop-compat</artifactId>
-      <version>${hbase.hadoop2.version}</version>
+      <version>${hbase.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop-compat</artifactId>
-      <version>${hbase.hadoop2.version}</version>
+      <version>${hbase.version}</version>
       <scope>test</scope>
       <classifier>tests</classifier>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop2-compat</artifactId>
-      <version>${hbase.hadoop2.version}</version>
+      <version>${hbase.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop2-compat</artifactId>
-      <version>${hbase.hadoop2.version}</version>
+      <version>${hbase.version}</version>
       <scope>test</scope>
       <classifier>tests</classifier>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
-      <version>${hbase.hadoop2.version}</version>
+      <version>${hbase.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
-      <version>${hbase.hadoop2.version}</version>
+      <version>${hbase.version}</version>
       <classifier>tests</classifier>
       <scope>test</scope>
     </dependency>

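[Editor's illustration] In the qtest-spark properties above, active.hadoop.version is defined as ${hadoop.version}, i.e. one property aliases another. A small sketch of this property indirection, assuming nothing beyond standard Maven interpolation; the placeholder value and the consuming dependency below are illustrative, not taken from the patch:

    <properties>
      <hadoop.version>2.x.y</hadoop.version>                            <!-- placeholder -->
      <active.hadoop.version>${hadoop.version}</active.hadoop.version>  <!-- alias -->
    </properties>

    <!-- any later use of the alias resolves to the aliased value -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${active.hadoop.version}</version>
    </dependency>
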
http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/qtest/pom.xml
----------------------------------------------------------------------
diff --git a/itests/qtest/pom.xml b/itests/qtest/pom.xml
index d5c3009..9504813 100644
--- a/itests/qtest/pom.xml
+++ b/itests/qtest/pom.xml
@@ -36,10 +36,16 @@
     <run_disabled>false</run_disabled>
     <clustermode></clustermode>
     <execute.beeline.tests>false</execute.beeline.tests>
+    <test.dfs.mkdir>-mkdir -p</test.dfs.mkdir>
   </properties>
 
   <dependencies>
     <!-- dependencies are always listed in sorted order by groupId, artifectId -->
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>${commons-logging.version}</version>
+    </dependency>
     <!-- test intra-project -->
     <dependency>
       <groupId>org.apache.hive</groupId>
@@ -106,7 +112,7 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-registry</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <optional>true</optional>
     </dependency>
 
@@ -117,6 +123,171 @@
       <version>${junit.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-servlet</artifactId>
+      <version>${jersey.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-archives</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-hs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-llap-server</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-llap-server</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-tests</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-client</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop2-compat</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop2-compat</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-tests</artifactId>
+      <version>${tez.version}</version>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-api</artifactId>
+      <version>${tez.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-runtime-library</artifactId>
+      <version>${tez.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-mapreduce</artifactId>
+      <version>${tez.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-dag</artifactId>
+      <version>${tez.version}</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
   <profiles>
     <profile>
@@ -126,261 +297,9 @@
         <test.warehouse.scheme></test.warehouse.scheme>
       </properties>
     </profile>
-    <profile>
-      <id>hadoop-1</id>
-      <properties>
-        <active.hadoop.version>${hadoop-20S.version}</active.hadoop.version>
-        <test.dfs.mkdir>-mkdir</test.dfs.mkdir>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-test</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-tools</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop1-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop1-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>hadoop-2</id>
-      <properties>
-        <active.hadoop.version>${hadoop-23.version}</active.hadoop.version>
-        <test.dfs.mkdir>-mkdir -p</test.dfs.mkdir>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>com.sun.jersey</groupId>
-          <artifactId>jersey-servlet</artifactId>
-          <version>${jersey.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-archives</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-hs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hive</groupId>
-          <artifactId>hive-llap-server</artifactId>
-          <version>${project.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hive</groupId>
-          <artifactId>hive-llap-server</artifactId>
-          <version>${project.version}</version>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>commons-logging</groupId>
-          <artifactId>commons-logging</artifactId>
-          <version>${commons-logging.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-server-tests</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-client</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop2-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop2-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency> 
-          <groupId>org.apache.tez</groupId> 
-          <artifactId>tez-tests</artifactId>
-          <version>${tez.version}</version>
-          <type>test-jar</type>
-        </dependency>
-        <dependency> 
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-api</artifactId>
-          <version>${tez.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency> 
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-runtime-library</artifactId>
-          <version>${tez.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency> 
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-mapreduce</artifactId>
-          <version>${tez.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency> 
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-dag</artifactId>
-          <version>${tez.version}</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
   </profiles>
 
-  <build>
+ <build>
     <plugins>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
@@ -439,7 +358,7 @@
                   runDisabled="${run_disabled}"
                   resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/compiler/errors/" className="TestParseNegative"
                   logFile="${project.build.directory}/testparseneggen.log"
-                  hadoopVersion="${active.hadoop.version}"
+                  hadoopVersion="${hadoop.version}"
                   logDirectory="${project.build.directory}/qfile-results/negative/"
                   initScript="${initScript}"
                   cleanupScript="q_test_cleanup.sql"/>
@@ -457,7 +376,7 @@
                   resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/" className="TestCliDriver"
                   logFile="${project.build.directory}/testclidrivergen.log"
                   logDirectory="${project.build.directory}/qfile-results/clientpositive/"
-                  hadoopVersion="${active.hadoop.version}"
+                  hadoopVersion="${hadoop.version}"
                   initScript="${initScript}"
                   cleanupScript="q_test_cleanup.sql"/>
 
@@ -474,7 +393,7 @@
                   resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientnegative/" className="TestNegativeCliDriver"
                   logFile="${project.build.directory}/testnegativeclidrivergen.log"
                   logDirectory="${project.build.directory}/qfile-results/clientnegative/"
-                  hadoopVersion="${active.hadoop.version}"
+                  hadoopVersion="${hadoop.version}"
                   initScript="${initScript}"
                   cleanupScript="q_test_cleanup.sql"/>
 
@@ -490,7 +409,7 @@
                   className="TestCompareCliDriver"
                   logFile="${project.build.directory}/testcompareclidrivergen.log"
                   logDirectory="${project.build.directory}/qfile-results/clientcompare/"
-                  hadoopVersion="${active.hadoop.version}"
+                  hadoopVersion="${hadoop.version}"
                   initScript="${initScript}"
                   cleanupScript="q_test_cleanup.sql"/>
 
@@ -507,72 +426,65 @@
                   resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/" className="TestMinimrCliDriver"
                   logFile="${project.build.directory}/testminimrclidrivergen.log"
                   logDirectory="${project.build.directory}/qfile-results/clientpositive/"
-                  hadoopVersion="${active.hadoop.version}"
+                  hadoopVersion="${hadoop.version}"
                   initScript="${initScript}"
                   cleanupScript="q_test_cleanup.sql"/>
 
-                <if>
-                  <equals arg1="${active.hadoop.version}" arg2="${hadoop-23.version}"/>
-                  <then>
-                    <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
-                              outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
-                              templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestCliDriver.vm"
-                              queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientpositive/"
-                              queryFile="${qfile}"
-                              includeQueryFile="${minitez.query.files},${minitez.query.files.shared}"
-                              queryFileRegex="${qfile_regex}"
-                              clusterMode="tez"
-                              runDisabled="${run_disabled}"
-                              hiveConfDir="${basedir}/${hive.path.to.root}/data/conf/tez"
-                              resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/tez"
-                              className="TestMiniTezCliDriver"
-                              logFile="${project.build.directory}/testminitezclidrivergen.log"
-                              logDirectory="${project.build.directory}/qfile-results/clientpositive/"
-                              hadoopVersion="${active.hadoop.version}"
-                              initScript="${initScript}"
-                              cleanupScript="q_test_cleanup.sql"
-                              useHBaseMetastore="true"/>
+                <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+                  outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
+                  templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestCliDriver.vm"
+                  queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientpositive/"
+                  queryFile="${qfile}"
+                  includeQueryFile="${minitez.query.files},${minitez.query.files.shared}"
+                  queryFileRegex="${qfile_regex}"
+                  clusterMode="tez"
+                  runDisabled="${run_disabled}"
+                  hiveConfDir="${basedir}/${hive.path.to.root}/data/conf/tez"
+                  resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/tez"
+                  className="TestMiniTezCliDriver"
+                  logFile="${project.build.directory}/testminitezclidrivergen.log"
+                  logDirectory="${project.build.directory}/qfile-results/clientpositive/"
+                  hadoopVersion="${hadoop.version}"
+                  initScript="${initScript}"
+                  cleanupScript="q_test_cleanup.sql"
+                  useHBaseMetastore="true"/>
 
-                    <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
-                              outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
-                              templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestCliDriver.vm"
-                              queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientpositive/"
-                              queryFile="${qfile}"
-                              includeQueryFile="${minillap.query.files}"
-                              queryFileRegex="${qfile_regex}"
-                              clusterMode="llap"
-                              runDisabled="${run_disabled}"
-                              hiveConfDir="${basedir}/${hive.path.to.root}/data/conf/llap"
-                              resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/llap"
-                              className="TestMiniLlapCliDriver"
-                              logFile="${project.build.directory}/testminitezclidrivergen.log"
-                              logDirectory="${project.build.directory}/qfile-results/clientpositive/"
-                              hadoopVersion="${active.hadoop.version}"
-                              initScript="q_test_init.sql"
-                              cleanupScript="q_test_cleanup.sql"/>
+                <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+                  outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
+                  templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestCliDriver.vm"
+                  queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientpositive/"
+                  queryFile="${qfile}"
+                  includeQueryFile="${minillap.query.files}"
+                  queryFileRegex="${qfile_regex}"
+                  clusterMode="llap"
+                  runDisabled="${run_disabled}"
+                  hiveConfDir="${basedir}/${hive.path.to.root}/data/conf/llap"
+                  resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/llap"
+                  className="TestMiniLlapCliDriver"
+                  logFile="${project.build.directory}/testminitezclidrivergen.log"
+                  logDirectory="${project.build.directory}/qfile-results/clientpositive/"
+                  hadoopVersion="${hadoop.version}"
+                  initScript="q_test_init.sql"
+                  cleanupScript="q_test_cleanup.sql"/>
 
-                    <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
-                              outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
-                              templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestCliDriver.vm"
-                              queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientpositive/"
-                              queryFile="${qfile}"
-                              includeQueryFile="${encrypted.query.files}"
-                              queryFileRegex="${qfile_regex}"
-                              clusterMode="encrypted"
-                              runDisabled="${run_disabled}"
-                              hiveConfDir="${basedir}/${hive.path.to.root}/data/conf"
-                              resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/encrypted"
-                              className="TestEncryptedHDFSCliDriver"
-                              logFile="${project.build.directory}/testencryptedhdfsclidrivergen.log"
-                              logDirectory="${project.build.directory}/qfile-results/clientpositive/"
-                              hadoopVersion="${active.hadoop.version}"
-                              initScript="q_test_init_for_encryption.sql"
-                              cleanupScript="q_test_cleanup_for_encryption.sql"/>
+                <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+                  outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
+                  templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestCliDriver.vm"
+                  queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientpositive/"
+                  queryFile="${qfile}"
+                  includeQueryFile="${encrypted.query.files}"
+                  queryFileRegex="${qfile_regex}"
+                  clusterMode="encrypted"
+                  runDisabled="${run_disabled}"
+                  hiveConfDir="${basedir}/${hive.path.to.root}/data/conf"
+                  resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/encrypted"
+                  className="TestEncryptedHDFSCliDriver"
+                  logFile="${project.build.directory}/testencryptedhdfsclidrivergen.log"
+                  logDirectory="${project.build.directory}/qfile-results/clientpositive/"
+                  hadoopVersion="${hadoop.version}"
+                  initScript="q_test_init_for_encryption.sql"
+                  cleanupScript="q_test_cleanup_for_encryption.sql"/>
 
-                  </then>
-                  <else>
-                  </else>
-                </if>
 
                 <!-- Negative Minimr -->
                 <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
@@ -587,7 +499,7 @@
                   resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientnegative/" className="TestNegativeMinimrCliDriver"
                   logFile="${project.build.directory}/testnegativeminimrclidrivergen.log"
                   logDirectory="${project.build.directory}/qfile-results/clientnegative/"
-                  hadoopVersion="${hadoopVersion}"
+                  hadoopVersion="${hadoop.version}"
                   initScript="${initScript}"
                   cleanupScript="q_test_cleanup.sql"/>
 
@@ -649,7 +561,7 @@
                         resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/" className="TestBeeLineDriver"
                         logFile="${project.build.directory}/testbeelinedrivergen.log"
                         logDirectory="${project.build.directory}/qfile-results/beelinepositive/"
-                        hadoopVersion="${hadoopVersion}"/>
+                        hadoopVersion="${hadoop.version}"/>
                     </then>
                 </if>
 
@@ -666,7 +578,7 @@
                   resultsDirectory="${basedir}/${hive.path.to.root}/contrib/src/test/results/clientpositive/" className="TestContribCliDriver"
                   logFile="${project.build.directory}/testcontribclidrivergen.log"
                   logDirectory="${project.build.directory}/qfile-results/contribclientpositive"
-                  hadoopVersion="${hadoopVersion}"
+                  hadoopVersion="${hadoop.version}"
                   initScript="${initScript}"
                   cleanupScript="q_test_cleanup.sql"/>
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/test-serde/pom.xml
----------------------------------------------------------------------
diff --git a/itests/test-serde/pom.xml b/itests/test-serde/pom.xml
index 7a1c622..81b7293 100644
--- a/itests/test-serde/pom.xml
+++ b/itests/test-serde/pom.xml
@@ -40,31 +40,13 @@
       <version>${project.version}</version>
       <optional>true</optional>
     </dependency>
+    <!-- inter-project -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
 </project>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/util/pom.xml
----------------------------------------------------------------------
diff --git a/itests/util/pom.xml b/itests/util/pom.xml
index fdab72c..67e8e86 100644
--- a/itests/util/pom.xml
+++ b/itests/util/pom.xml
@@ -86,131 +86,54 @@
       <version>${project.version}</version>
       <classifier>tests</classifier>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
     <!-- test inter-project -->
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+      <classifier>tests</classifier>
+    </dependency>
   </dependencies>
-
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-        <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-compiler-plugin</artifactId>
-            <version>2.3.2</version>
-           <configuration>
-              <excludes>
-                <exclude>**/metastore/hbase/**</exclude>
-              </excludes>
-            </configuration>
-          </plugin>
-        </plugins>
-        </build>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-test</artifactId>
-          <version>${hadoop-20S.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-client</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <classifier>tests</classifier>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-client</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <type>test-jar</type>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <type>test-jar</type>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <classifier>tests</classifier>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
 </project>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/jdbc/pom.xml
----------------------------------------------------------------------
diff --git a/jdbc/pom.xml b/jdbc/pom.xml
index 371d709..012908f 100644
--- a/jdbc/pom.xml
+++ b/jdbc/pom.xml
@@ -103,32 +103,16 @@
       <artifactId>curator-framework</artifactId>
       <version>${curator.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
   </dependencies>
 
   <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
    <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
       <id>dist</id>
       <properties>
         <packaging.minimizeJar>true</packaging.minimizeJar>
@@ -136,7 +120,6 @@
     </profile>
   </profiles>
 
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <resources>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/llap-client/pom.xml
----------------------------------------------------------------------
diff --git a/llap-client/pom.xml b/llap-client/pom.xml
index b7b5803..ff7c82c 100644
--- a/llap-client/pom.xml
+++ b/llap-client/pom.xml
@@ -65,6 +65,18 @@
       <artifactId>libthrift</artifactId>
       <version>${libthrift.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
     <!-- test inter-project -->
     <dependency>
       <groupId>junit</groupId>
@@ -84,66 +96,28 @@
       <version>${jersey.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-test</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/llap-server/pom.xml
----------------------------------------------------------------------
diff --git a/llap-server/pom.xml b/llap-server/pom.xml
index dd8dd7b..42e53b6 100644
--- a/llap-server/pom.xml
+++ b/llap-server/pom.xml
@@ -90,8 +90,70 @@
       <artifactId>json</artifactId>
       <version>${json.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-registry</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-runtime-internals</artifactId>
+      <version>${tez.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-runtime-library</artifactId>
+      <version>${tez.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-mapreduce</artifactId>
+      <version>${tez.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.tez</groupId>
+      <artifactId>tez-dag</artifactId>
+      <version>${tez.version}</version>
+      <optional>true</optional>
+    </dependency>
     <!-- test inter-project -->
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>
@@ -113,90 +175,6 @@
 
   <profiles>
     <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-test</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-registry</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-runtime-internals</artifactId>
-          <version>${tez.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-runtime-library</artifactId>
-          <version>${tez.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-mapreduce</artifactId>
-          <version>${tez.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.tez</groupId>
-          <artifactId>tez-dag</artifactId>
-          <version>${tez.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
       <id>protobuf</id>
       <build>
         <plugins>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/metastore/pom.xml
----------------------------------------------------------------------
diff --git a/metastore/pom.xml b/metastore/pom.xml
index f209d50..4cd1e6d 100644
--- a/metastore/pom.xml
+++ b/metastore/pom.xml
@@ -56,6 +56,11 @@
       <version>${protobuf.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
       <groupId>com.jolbox</groupId>
       <artifactId>bonecp</artifactId>
       <version>${bonecp.version}</version>
@@ -117,6 +122,18 @@
       <version>${antlr.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
       <groupId>org.apache.thrift</groupId>
       <artifactId>libfb303</artifactId>
       <version>${libfb303.version}</version>
@@ -164,56 +181,6 @@
 
   <profiles>
     <profile>
-      <id>hadoop-1</id>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-compiler-plugin</artifactId>
-            <version>2.3.2</version>
-            <configuration>
-              <excludes>
-                <exclude>**/hbase/**</exclude>
-              </excludes>
-              <testExcludes>
-                <exclude>**/hbase/**</exclude>
-              </testExcludes>
-            </configuration>
-          </plugin>
-        </plugins>
-      </build>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-client</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
       <id>thriftif</id>
       <build>
         <plugins>


[14/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java
index 82dd057..f15d7a9 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class HeartbeatTxnRangeRequest implements org.apache.thrift.TBase<HeartbeatTxnRangeRequest, HeartbeatTxnRangeRequest._Fields>, java.io.Serializable, Cloneable, Comparable<HeartbeatTxnRangeRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HeartbeatTxnRangeRequest");
 
@@ -229,10 +229,10 @@ public class HeartbeatTxnRangeRequest implements org.apache.thrift.TBase<Heartbe
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case MIN:
-      return Long.valueOf(getMin());
+      return getMin();
 
     case MAX:
-      return Long.valueOf(getMax());
+      return getMax();
 
     }
     throw new IllegalStateException();
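
Note on the hunk above: it also shows the "undated annotations" part of this change — the 0.9.3 generator emits @Generated without the per-build date attribute, so regenerated sources stay textually identical between runs instead of churning on every regeneration. A minimal standalone sketch of the new annotation form (illustrative only, not Hive source; the class name is invented):

    import javax.annotation.Generated;

    // Same shape as the 0.9.3 output: no "date" attribute, so the annotation
    // text does not change from one generation run to the next.
    @Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
    public class UndatedAnnotationSketch {
      public static void main(String[] args) {
        // @Generated has SOURCE retention, so it is compile-time metadata only
        // and is not visible through reflection; this prints "null".
        System.out.println(UndatedAnnotationSketch.class.getAnnotation(Generated.class));
      }
    }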

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java
index feb550a..b00fb9c 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class HeartbeatTxnRangeResponse implements org.apache.thrift.TBase<HeartbeatTxnRangeResponse, HeartbeatTxnRangeResponse._Fields>, java.io.Serializable, Cloneable, Comparable<HeartbeatTxnRangeResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HeartbeatTxnRangeResponse");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectPrivilege.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectPrivilege.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectPrivilege.java
index 30665de..3b818ab 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectPrivilege.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectPrivilege.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class HiveObjectPrivilege implements org.apache.thrift.TBase<HiveObjectPrivilege, HiveObjectPrivilege._Fields>, java.io.Serializable, Cloneable, Comparable<HiveObjectPrivilege> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HiveObjectPrivilege");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java
index 8a06eb1..d7dd8ec 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class HiveObjectRef implements org.apache.thrift.TBase<HiveObjectRef, HiveObjectRef._Fields>, java.io.Serializable, Cloneable, Comparable<HiveObjectRef> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HiveObjectRef");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectType.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectType.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectType.java
index 427930f..f5b1d0e 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectType.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java
index 592c37a..44d99c7 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Index implements org.apache.thrift.TBase<Index, Index._Fields>, java.io.Serializable, Cloneable, Comparable<Index> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Index");
 
@@ -607,10 +607,10 @@ public class Index implements org.apache.thrift.TBase<Index, Index._Fields>, jav
       return getOrigTableName();
 
     case CREATE_TIME:
-      return Integer.valueOf(getCreateTime());
+      return getCreateTime();
 
     case LAST_ACCESS_TIME:
-      return Integer.valueOf(getLastAccessTime());
+      return getLastAccessTime();
 
     case INDEX_TABLE_NAME:
       return getIndexTableName();
@@ -622,7 +622,7 @@ public class Index implements org.apache.thrift.TBase<Index, Index._Fields>, jav
       return getParameters();
 
     case DEFERRED_REBUILD:
-      return Boolean.valueOf(isDeferredRebuild());
+      return isDeferredRebuild();
 
     }
     throw new IllegalStateException();
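
The other recurring change in these generated beans is visible in the getFieldValue hunks just above: 0.9.3 returns the primitive getter result directly and lets autoboxing produce the Integer/Long/Boolean, where 0.9.2 spelled out Integer.valueOf(...)/Boolean.valueOf(...). The behaviour is the same. A compact standalone sketch of that pattern (not Hive source; field values and the String-keyed switch are invented for illustration):

    // Mirrors the shape of the generated getFieldValue(_Fields) methods.
    public class AutoboxingSketch {
      private final int createTime = 1445385600;      // arbitrary example value
      private final boolean deferredRebuild = true;   // arbitrary example value

      public int getCreateTime() { return createTime; }
      public boolean isDeferredRebuild() { return deferredRebuild; }

      public Object getFieldValue(String field) {
        switch (field) {
          case "CREATE_TIME":
            return getCreateTime();       // autoboxed to java.lang.Integer
          case "DEFERRED_REBUILD":
            return isDeferredRebuild();   // autoboxed to java.lang.Boolean
          default:
            throw new IllegalStateException();
        }
      }

      public static void main(String[] args) {
        AutoboxingSketch s = new AutoboxingSketch();
        System.out.println(s.getFieldValue("CREATE_TIME").getClass().getName());      // java.lang.Integer
        System.out.println(s.getFieldValue("DEFERRED_REBUILD").getClass().getName()); // java.lang.Boolean
      }
    }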

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/IndexAlreadyExistsException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/IndexAlreadyExistsException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/IndexAlreadyExistsException.java
index 565df3e..2588524 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/IndexAlreadyExistsException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/IndexAlreadyExistsException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class IndexAlreadyExistsException extends TException implements org.apache.thrift.TBase<IndexAlreadyExistsException, IndexAlreadyExistsException._Fields>, java.io.Serializable, Cloneable, Comparable<IndexAlreadyExistsException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("IndexAlreadyExistsException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InsertEventRequestData.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InsertEventRequestData.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InsertEventRequestData.java
index e3716fd..488d3a0 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InsertEventRequestData.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InsertEventRequestData.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class InsertEventRequestData implements org.apache.thrift.TBase<InsertEventRequestData, InsertEventRequestData._Fields>, java.io.Serializable, Cloneable, Comparable<InsertEventRequestData> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InsertEventRequestData");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidInputException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidInputException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidInputException.java
index f3f3d79..7c7e000 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidInputException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidInputException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class InvalidInputException extends TException implements org.apache.thrift.TBase<InvalidInputException, InvalidInputException._Fields>, java.io.Serializable, Cloneable, Comparable<InvalidInputException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InvalidInputException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidObjectException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidObjectException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidObjectException.java
index a43f55a..957aca0 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidObjectException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidObjectException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class InvalidObjectException extends TException implements org.apache.thrift.TBase<InvalidObjectException, InvalidObjectException._Fields>, java.io.Serializable, Cloneable, Comparable<InvalidObjectException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InvalidObjectException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidOperationException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidOperationException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidOperationException.java
index 4c6e4eb..6217bb8 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidOperationException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidOperationException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class InvalidOperationException extends TException implements org.apache.thrift.TBase<InvalidOperationException, InvalidOperationException._Fields>, java.io.Serializable, Cloneable, Comparable<InvalidOperationException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InvalidOperationException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidPartitionException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidPartitionException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidPartitionException.java
index 86a0ec1..ef71511 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidPartitionException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidPartitionException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class InvalidPartitionException extends TException implements org.apache.thrift.TBase<InvalidPartitionException, InvalidPartitionException._Fields>, java.io.Serializable, Cloneable, Comparable<InvalidPartitionException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InvalidPartitionException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockComponent.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockComponent.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockComponent.java
index 9da4a36..adb0c44 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockComponent.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockComponent.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class LockComponent implements org.apache.thrift.TBase<LockComponent, LockComponent._Fields>, java.io.Serializable, Cloneable, Comparable<LockComponent> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LockComponent");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockLevel.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockLevel.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockLevel.java
index 08840f1..e58ea46 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockLevel.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockLevel.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockRequest.java
index 5cc9b08..eada4ee 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class LockRequest implements org.apache.thrift.TBase<LockRequest, LockRequest._Fields>, java.io.Serializable, Cloneable, Comparable<LockRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LockRequest");
 
@@ -338,7 +338,7 @@ public class LockRequest implements org.apache.thrift.TBase<LockRequest, LockReq
       return getComponent();
 
     case TXNID:
-      return Long.valueOf(getTxnid());
+      return getTxnid();
 
     case USER:
       return getUser();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockResponse.java
index cb6ca5b..862c67f 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class LockResponse implements org.apache.thrift.TBase<LockResponse, LockResponse._Fields>, java.io.Serializable, Cloneable, Comparable<LockResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LockResponse");
 
@@ -241,7 +241,7 @@ public class LockResponse implements org.apache.thrift.TBase<LockResponse, LockR
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case LOCKID:
-      return Long.valueOf(getLockid());
+      return getLockid();
 
     case STATE:
       return getState();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockState.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockState.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockState.java
index bce2b5a..48a0bbd 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockState.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockState.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockType.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockType.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockType.java
index 9d60531..8ae4351 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockType.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LongColumnStatsData.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LongColumnStatsData.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LongColumnStatsData.java
index d02b643..2f41c5a 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LongColumnStatsData.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LongColumnStatsData.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class LongColumnStatsData implements org.apache.thrift.TBase<LongColumnStatsData, LongColumnStatsData._Fields>, java.io.Serializable, Cloneable, Comparable<LongColumnStatsData> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LongColumnStatsData");
 
@@ -312,16 +312,16 @@ public class LongColumnStatsData implements org.apache.thrift.TBase<LongColumnSt
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case LOW_VALUE:
-      return Long.valueOf(getLowValue());
+      return getLowValue();
 
     case HIGH_VALUE:
-      return Long.valueOf(getHighValue());
+      return getHighValue();
 
     case NUM_NULLS:
-      return Long.valueOf(getNumNulls());
+      return getNumNulls();
 
     case NUM_DVS:
-      return Long.valueOf(getNumDVs());
+      return getNumDVs();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/MetaException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/MetaException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/MetaException.java
index 3dd693c..21be66b 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/MetaException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/MetaException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class MetaException extends TException implements org.apache.thrift.TBase<MetaException, MetaException._Fields>, java.io.Serializable, Cloneable, Comparable<MetaException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("MetaException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/MetadataPpdResult.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/MetadataPpdResult.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/MetadataPpdResult.java
index 652a4e7..87fad1a 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/MetadataPpdResult.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/MetadataPpdResult.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class MetadataPpdResult implements org.apache.thrift.TBase<MetadataPpdResult, MetadataPpdResult._Fields>, java.io.Serializable, Cloneable, Comparable<MetadataPpdResult> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("MetadataPpdResult");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchLockException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchLockException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchLockException.java
index df6c2b6..aefbe4a 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchLockException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchLockException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class NoSuchLockException extends TException implements org.apache.thrift.TBase<NoSuchLockException, NoSuchLockException._Fields>, java.io.Serializable, Cloneable, Comparable<NoSuchLockException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NoSuchLockException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchObjectException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchObjectException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchObjectException.java
index 7f42b7f..efa5326 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchObjectException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchObjectException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class NoSuchObjectException extends TException implements org.apache.thrift.TBase<NoSuchObjectException, NoSuchObjectException._Fields>, java.io.Serializable, Cloneable, Comparable<NoSuchObjectException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NoSuchObjectException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchTxnException.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchTxnException.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchTxnException.java
index 3146fbd..8149d9c 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchTxnException.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchTxnException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class NoSuchTxnException extends TException implements org.apache.thrift.TBase<NoSuchTxnException, NoSuchTxnException._Fields>, java.io.Serializable, Cloneable, Comparable<NoSuchTxnException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NoSuchTxnException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEvent.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEvent.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEvent.java
index 7498fc9..c40bb4b 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEvent.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEvent.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class NotificationEvent implements org.apache.thrift.TBase<NotificationEvent, NotificationEvent._Fields>, java.io.Serializable, Cloneable, Comparable<NotificationEvent> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NotificationEvent");
 
@@ -402,10 +402,10 @@ public class NotificationEvent implements org.apache.thrift.TBase<NotificationEv
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case EVENT_ID:
-      return Long.valueOf(getEventId());
+      return getEventId();
 
     case EVENT_TIME:
-      return Integer.valueOf(getEventTime());
+      return getEventTime();
 
     case EVENT_TYPE:
       return getEventType();

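The other recurring hunk, seen here in NotificationEvent.getFieldValue, drops the explicit Long.valueOf/Integer.valueOf wrappers. Because getFieldValue returns Object, the Java compiler autoboxes the primitive return value anyway, so the 0.9.3 output is terser but behaviorally identical. A small self-contained sketch (class and field names are hypothetical) showing the equivalence:

    // Hypothetical names; the Object return type is what makes both forms equal.
    public class AutoboxingSketch {
      private final long eventId = 42L;

      public long getEventId() {
        return eventId;
      }

      // 0.9.2-style generated code: explicit boxing before returning as Object.
      public Object fieldValueExplicit() {
        return Long.valueOf(getEventId());
      }

      // 0.9.3-style generated code: the compiler autoboxes the long to Long,
      // emitting the same boxing call under the hood.
      public Object fieldValueAutoboxed() {
        return getEventId();
      }

      public static void main(String[] args) {
        AutoboxingSketch s = new AutoboxingSketch();
        // Both paths yield an equal Long value.
        System.out.println(s.fieldValueExplicit().equals(s.fieldValueAutoboxed())); // true
      }
    }
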
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventRequest.java
index 6f92254..bafb9b6 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class NotificationEventRequest implements org.apache.thrift.TBase<NotificationEventRequest, NotificationEventRequest._Fields>, java.io.Serializable, Cloneable, Comparable<NotificationEventRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NotificationEventRequest");
 
@@ -227,10 +227,10 @@ public class NotificationEventRequest implements org.apache.thrift.TBase<Notific
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case LAST_EVENT:
-      return Long.valueOf(getLastEvent());
+      return getLastEvent();
 
     case MAX_EVENTS:
-      return Integer.valueOf(getMaxEvents());
+      return getMaxEvents();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventResponse.java
index 54e746c..fcbbd18 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class NotificationEventResponse implements org.apache.thrift.TBase<NotificationEventResponse, NotificationEventResponse._Fields>, java.io.Serializable, Cloneable, Comparable<NotificationEventResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NotificationEventResponse");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnRequest.java
index 46ce7f0..01371ec 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class OpenTxnRequest implements org.apache.thrift.TBase<OpenTxnRequest, OpenTxnRequest._Fields>, java.io.Serializable, Cloneable, Comparable<OpenTxnRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("OpenTxnRequest");
 
@@ -273,7 +273,7 @@ public class OpenTxnRequest implements org.apache.thrift.TBase<OpenTxnRequest, O
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case NUM_TXNS:
-      return Integer.valueOf(getNum_txns());
+      return getNum_txns();
 
     case USER:
       return getUser();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnsResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnsResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnsResponse.java
index 3d108eb..e11a2b3 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnsResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnsResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class OpenTxnsResponse implements org.apache.thrift.TBase<OpenTxnsResponse, OpenTxnsResponse._Fields>, java.io.Serializable, Cloneable, Comparable<OpenTxnsResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("OpenTxnsResponse");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Order.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Order.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Order.java
index 596a5aa..cc0e2dd 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Order.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Order.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Order implements org.apache.thrift.TBase<Order, Order._Fields>, java.io.Serializable, Cloneable, Comparable<Order> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Order");
 
@@ -232,7 +232,7 @@ public class Order implements org.apache.thrift.TBase<Order, Order._Fields>, jav
       return getCol();
 
     case ORDER:
-      return Integer.valueOf(getOrder());
+      return getOrder();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
index 5a0dec0..a247221 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Partition implements org.apache.thrift.TBase<Partition, Partition._Fields>, java.io.Serializable, Cloneable, Comparable<Partition> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Partition");
 
@@ -532,10 +532,10 @@ public class Partition implements org.apache.thrift.TBase<Partition, Partition._
       return getTableName();
 
     case CREATE_TIME:
-      return Integer.valueOf(getCreateTime());
+      return getCreateTime();
 
     case LAST_ACCESS_TIME:
-      return Integer.valueOf(getLastAccessTime());
+      return getLastAccessTime();
 
     case SD:
       return getSd();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionEventType.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionEventType.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionEventType.java
index 40bdb56..b515401 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionEventType.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionEventType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionListComposingSpec.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionListComposingSpec.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionListComposingSpec.java
index a16fa2c..e8232b9 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionListComposingSpec.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionListComposingSpec.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class PartitionListComposingSpec implements org.apache.thrift.TBase<PartitionListComposingSpec, PartitionListComposingSpec._Fields>, java.io.Serializable, Cloneable, Comparable<PartitionListComposingSpec> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PartitionListComposingSpec");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpec.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpec.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpec.java
index 5938c47..e8188a9 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpec.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpec.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class PartitionSpec implements org.apache.thrift.TBase<PartitionSpec, PartitionSpec._Fields>, java.io.Serializable, Cloneable, Comparable<PartitionSpec> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PartitionSpec");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpecWithSharedSD.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpecWithSharedSD.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpecWithSharedSD.java
index 58098d1..2c41912 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpecWithSharedSD.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpecWithSharedSD.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class PartitionSpecWithSharedSD implements org.apache.thrift.TBase<PartitionSpecWithSharedSD, PartitionSpecWithSharedSD._Fields>, java.io.Serializable, Cloneable, Comparable<PartitionSpecWithSharedSD> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PartitionSpecWithSharedSD");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionWithoutSD.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionWithoutSD.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionWithoutSD.java
index 0ff3930..f0c5ce1 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionWithoutSD.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionWithoutSD.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class PartitionWithoutSD implements org.apache.thrift.TBase<PartitionWithoutSD, PartitionWithoutSD._Fields>, java.io.Serializable, Cloneable, Comparable<PartitionWithoutSD> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PartitionWithoutSD");
 
@@ -438,10 +438,10 @@ public class PartitionWithoutSD implements org.apache.thrift.TBase<PartitionWith
       return getValues();
 
     case CREATE_TIME:
-      return Integer.valueOf(getCreateTime());
+      return getCreateTime();
 
     case LAST_ACCESS_TIME:
-      return Integer.valueOf(getLastAccessTime());
+      return getLastAccessTime();
 
     case RELATIVE_PATH:
       return getRelativePath();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprRequest.java
index 246e3ec..59972af 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class PartitionsByExprRequest implements org.apache.thrift.TBase<PartitionsByExprRequest, PartitionsByExprRequest._Fields>, java.io.Serializable, Cloneable, Comparable<PartitionsByExprRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PartitionsByExprRequest");
 
@@ -380,7 +380,7 @@ public class PartitionsByExprRequest implements org.apache.thrift.TBase<Partitio
       return getDefaultPartitionName();
 
     case MAX_PARTS:
-      return Short.valueOf(getMaxParts());
+      return getMaxParts();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprResult.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprResult.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprResult.java
index bccfc8a..12ae66d 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprResult.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprResult.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class PartitionsByExprResult implements org.apache.thrift.TBase<PartitionsByExprResult, PartitionsByExprResult._Fields>, java.io.Serializable, Cloneable, Comparable<PartitionsByExprResult> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PartitionsByExprResult");
 
@@ -252,7 +252,7 @@ public class PartitionsByExprResult implements org.apache.thrift.TBase<Partition
       return getPartitions();
 
     case HAS_UNKNOWN_PARTITIONS:
-      return Boolean.valueOf(isHasUnknownPartitions());
+      return isHasUnknownPartitions();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsRequest.java
index fe71919..8416369 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class PartitionsStatsRequest implements org.apache.thrift.TBase<PartitionsStatsRequest, PartitionsStatsRequest._Fields>, java.io.Serializable, Cloneable, Comparable<PartitionsStatsRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PartitionsStatsRequest");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsResult.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsResult.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsResult.java
index e20dbff..2e903f1 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsResult.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsResult.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class PartitionsStatsResult implements org.apache.thrift.TBase<PartitionsStatsResult, PartitionsStatsResult._Fields>, java.io.Serializable, Cloneable, Comparable<PartitionsStatsResult> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PartitionsStatsResult");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java
index 899d28e..e1792bd 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class PrincipalPrivilegeSet implements org.apache.thrift.TBase<PrincipalPrivilegeSet, PrincipalPrivilegeSet._Fields>, java.io.Serializable, Cloneable, Comparable<PrincipalPrivilegeSet> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PrincipalPrivilegeSet");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalType.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalType.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalType.java
index 65e937e..82eb8fd 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalType.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeBag.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeBag.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeBag.java
index a7a6e63..bba2d40 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeBag.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeBag.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class PrivilegeBag implements org.apache.thrift.TBase<PrivilegeBag, PrivilegeBag._Fields>, java.io.Serializable, Cloneable, Comparable<PrivilegeBag> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PrivilegeBag");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeGrantInfo.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeGrantInfo.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeGrantInfo.java
index 5a1194a..ba08d3a 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeGrantInfo.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeGrantInfo.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class PrivilegeGrantInfo implements org.apache.thrift.TBase<PrivilegeGrantInfo, PrivilegeGrantInfo._Fields>, java.io.Serializable, Cloneable, Comparable<PrivilegeGrantInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PrivilegeGrantInfo");
 
@@ -376,7 +376,7 @@ public class PrivilegeGrantInfo implements org.apache.thrift.TBase<PrivilegeGran
       return getPrivilege();
 
     case CREATE_TIME:
-      return Integer.valueOf(getCreateTime());
+      return getCreateTime();
 
     case GRANTOR:
       return getGrantor();
@@ -385,7 +385,7 @@ public class PrivilegeGrantInfo implements org.apache.thrift.TBase<PrivilegeGran
       return getGrantorType();
 
     case GRANT_OPTION:
-      return Boolean.valueOf(isGrantOption());
+      return isGrantOption();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java
index bb9620c..a5fef3d 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class PutFileMetadataRequest implements org.apache.thrift.TBase<PutFileMetadataRequest, PutFileMetadataRequest._Fields>, java.io.Serializable, Cloneable, Comparable<PutFileMetadataRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PutFileMetadataRequest");
 
@@ -396,7 +396,7 @@ public class PutFileMetadataRequest implements org.apache.thrift.TBase<PutFileMe
     if (this.metadata == null) {
       sb.append("null");
     } else {
-      sb.append(this.metadata);
+      org.apache.thrift.TBaseHelper.toString(this.metadata, sb);
     }
     first = false;
     sb.append(")");

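Besides the annotation and boxing changes, binary fields get a more useful toString in the regenerated code: instead of appending the metadata field directly (which prints ByteBuffer bookkeeping rather than content), it delegates to org.apache.thrift.TBaseHelper.toString. The hunk above passes the whole metadata field to a matching overload; the hedged sketch below uses the single-ByteBuffer overload, which is the simplest to demonstrate (class name and payload are made up, and libthrift must be on the classpath):

    import java.nio.ByteBuffer;
    import org.apache.thrift.TBaseHelper;

    public class BinaryToStringSketch {
      public static void main(String[] args) {
        ByteBuffer payload = ByteBuffer.wrap(new byte[] {0x0A, 0x0B, 0x0C});

        // Plain ByteBuffer.toString() reports position/limit/capacity, not the bytes,
        // e.g. java.nio.HeapByteBuffer[pos=0 lim=3 cap=3]
        System.out.println(payload);

        // TBaseHelper renders the byte content itself, which is what the
        // regenerated toString() delegates to for binary fields.
        StringBuilder sb = new StringBuilder();
        TBaseHelper.toString(payload, sb);
        System.out.println(sb);
      }
    }
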
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataResult.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataResult.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataResult.java
index 2eb8558..d807671 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataResult.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataResult.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class PutFileMetadataResult implements org.apache.thrift.TBase<PutFileMetadataResult, PutFileMetadataResult._Fields>, java.io.Serializable, Cloneable, Comparable<PutFileMetadataResult> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PutFileMetadataResult");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RequestPartsSpec.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RequestPartsSpec.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RequestPartsSpec.java
index 9207934..7f8a044 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RequestPartsSpec.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RequestPartsSpec.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ResourceType.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ResourceType.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ResourceType.java
index 861a6db..a15c1ee 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ResourceType.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ResourceType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ResourceUri.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ResourceUri.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ResourceUri.java
index c3cc482..994a498 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ResourceUri.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ResourceUri.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ResourceUri implements org.apache.thrift.TBase<ResourceUri, ResourceUri._Fields>, java.io.Serializable, Cloneable, Comparable<ResourceUri> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ResourceUri");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Role.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Role.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Role.java
index ba150fc..612c64d 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Role.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Role.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Role implements org.apache.thrift.TBase<Role, Role._Fields>, java.io.Serializable, Cloneable, Comparable<Role> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Role");
 
@@ -276,7 +276,7 @@ public class Role implements org.apache.thrift.TBase<Role, Role._Fields>, java.i
       return getRoleName();
 
     case CREATE_TIME:
-      return Integer.valueOf(getCreateTime());
+      return getCreateTime();
 
     case OWNER_NAME:
       return getOwnerName();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RolePrincipalGrant.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RolePrincipalGrant.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RolePrincipalGrant.java
index f7d9c01..81e8d16 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RolePrincipalGrant.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RolePrincipalGrant.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class RolePrincipalGrant implements org.apache.thrift.TBase<RolePrincipalGrant, RolePrincipalGrant._Fields>, java.io.Serializable, Cloneable, Comparable<RolePrincipalGrant> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RolePrincipalGrant");
 
@@ -482,10 +482,10 @@ public class RolePrincipalGrant implements org.apache.thrift.TBase<RolePrincipal
       return getPrincipalType();
 
     case GRANT_OPTION:
-      return Boolean.valueOf(isGrantOption());
+      return isGrantOption();
 
     case GRANT_TIME:
-      return Integer.valueOf(getGrantTime());
+      return getGrantTime();
 
     case GRANTOR_NAME:
       return getGrantorName();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java
index 748bf32..75d0ebb 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Schema implements org.apache.thrift.TBase<Schema, Schema._Fields>, java.io.Serializable, Cloneable, Comparable<Schema> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Schema");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
index a7c9f64..d5a8d99 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class SerDeInfo implements org.apache.thrift.TBase<SerDeInfo, SerDeInfo._Fields>, java.io.Serializable, Cloneable, Comparable<SerDeInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SerDeInfo");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest.java
index 0ac3c5c..6e334f6 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class SetPartitionsStatsRequest implements org.apache.thrift.TBase<SetPartitionsStatsRequest, SetPartitionsStatsRequest._Fields>, java.io.Serializable, Cloneable, Comparable<SetPartitionsStatsRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SetPartitionsStatsRequest");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactRequest.java
index 2d0d078..ad1c57c 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ShowCompactRequest implements org.apache.thrift.TBase<ShowCompactRequest, ShowCompactRequest._Fields>, java.io.Serializable, Cloneable, Comparable<ShowCompactRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ShowCompactRequest");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponse.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponse.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponse.java
index 5c28364..afa832c 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponse.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponse.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ShowCompactResponse implements org.apache.thrift.TBase<ShowCompactResponse, ShowCompactResponse._Fields>, java.io.Serializable, Cloneable, Comparable<ShowCompactResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ShowCompactResponse");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponseElement.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponseElement.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponseElement.java
index 7a3aaa8..dd7dd8e 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponseElement.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponseElement.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ShowCompactResponseElement implements org.apache.thrift.TBase<ShowCompactResponseElement, ShowCompactResponseElement._Fields>, java.io.Serializable, Cloneable, Comparable<ShowCompactResponseElement> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ShowCompactResponseElement");
 
@@ -515,7 +515,7 @@ public class ShowCompactResponseElement implements org.apache.thrift.TBase<ShowC
       return getWorkerid();
 
     case START:
-      return Long.valueOf(getStart());
+      return getStart();
 
     case RUN_AS:
       return getRunAs();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksRequest.java
index 35c50e5..e89b93b 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksRequest.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class ShowLocksRequest implements org.apache.thrift.TBase<ShowLocksRequest, ShowLocksRequest._Fields>, java.io.Serializable, Cloneable, Comparable<ShowLocksRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ShowLocksRequest");
 


[54/55] [abbrv] hive git commit: HIVE-12284: Merge branch 'master' into spark

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/c9073aad/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/c9073aad/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --cc itests/src/test/resources/testconfiguration.properties
index 72dbcec,13efc58..e927955
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@@ -1167,20 -1227,7 +1227,18 @@@ miniSparkOnYarn.query.files=auto_sortme
    load_fs2.q,\
    load_hdfs_file_with_space_in_the_name.q,\
    optrstat_groupby.q,\
 +  orc_merge1.q,\
 +  orc_merge2.q,\
 +  orc_merge3.q,\
 +  orc_merge4.q,\
 +  orc_merge5.q,\
 +  orc_merge6.q,\
 +  orc_merge7.q,\
 +  orc_merge8.q,\
 +  orc_merge9.q,\
 +  orc_merge_incompat1.q,\
 +  orc_merge_incompat2.q,\
    parallel_orderby.q,\
-   ql_rewrite_gbtoidx.q,\
-   ql_rewrite_gbtoidx_cbo_1.q,\
    quotedid_smb.q,\
    reduce_deduplicate.q,\
    remote_script.q,\
@@@ -1194,15 -1240,14 +1251,21 @@@
    stats_counter_partitioned.q,\
    temp_table_external.q,\
    truncate_column_buckets.q,\
 -  uber_reduce.q
 +  uber_reduce.q,\
 +  vector_inner_join.q,\
 +  vector_outer_join0.q,\
 +  vector_outer_join1.q,\
 +  vector_outer_join2.q,\
 +  vector_outer_join3.q,\
 +  vector_outer_join4.q,\
 +  vector_outer_join5.q
  
+ # These tests are removed from miniSparkOnYarn.query.files
+ #  ql_rewrite_gbtoidx.q,\
+ #  ql_rewrite_gbtoidx_cbo_1.q,\
+ #  smb_mapjoin_8.q,\
+ 
+ 
  spark.query.negative.files=groupby2_map_skew_multi_distinct.q,\
    groupby2_multi_distinct.q,\
    groupby3_map_skew_multi_distinct.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/c9073aad/pom.xml
----------------------------------------------------------------------
diff --cc pom.xml
index 0cd4238,3b3303c..0d9c9a3
--- a/pom.xml
+++ b/pom.xml
@@@ -159,9 -158,9 +158,9 @@@
      <stax.version>1.0.1</stax.version>
      <slf4j.version>1.7.5</slf4j.version>
      <ST4.version>4.0.4</ST4.version>
-     <tez.version>0.5.2</tez.version>
+     <tez.version>0.8.1-alpha</tez.version>
      <super-csv.version>2.2.0</super-csv.version>
 -    <spark.version>1.4.0</spark.version>
 +    <spark.version>1.5.0</spark.version>
      <scala.binary.version>2.10</scala.binary.version>
      <scala.version>2.10.4</scala.version>
      <tempus-fugit.version>1.1</tempus-fugit.version>
@@@ -222,9 -222,18 +222,8 @@@
           <enabled>false</enabled>
         </snapshots>
      </repository>
 -     <repository>
 -       <id>spark-1.3</id>
 -       <url>https://s3-us-west-1.amazonaws.com/hive-spark/maven2/spark_2.10-1.3-rc1/</url>
 -       <releases>
 -         <enabled>true</enabled>
 -       </releases>
 -       <snapshots>
 -         <enabled>false</enabled>
 -       </snapshots>
 -    </repository>
    </repositories>
  
-   <!-- Hadoop dependency management is done at the bottom under profiles -->
    <dependencyManagement>
      <dependencies>
        <!-- dependencies are always listed in sorted order by groupId, artifectId -->

http://git-wip-us.apache.org/repos/asf/hive/blob/c9073aad/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
----------------------------------------------------------------------
diff --cc ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
index d2c5245,085ad9e..2ab9c2d
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
@@@ -24,11 -23,8 +24,9 @@@ import java.util.List
  import java.util.Map;
  import java.util.Set;
  
- import com.google.common.base.Preconditions;
- 
  import org.apache.commons.logging.Log;
  import org.apache.commons.logging.LogFactory;
 +import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hive.common.JavaUtils;
  import org.apache.hadoop.hive.ql.io.merge.MergeFileMapper;

http://git-wip-us.apache.org/repos/asf/hive/blob/c9073aad/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/c9073aad/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
----------------------------------------------------------------------


[31/55] [abbrv] hive git commit: HIVE-12262: Session log dir cannot be created in some cases (Daniel Dai, reviewed by Thejas Nair)

Posted by xu...@apache.org.
HIVE-12262: Session log dir cannot be created in some cases (Daniel Dai, reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/48a1e1f7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/48a1e1f7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/48a1e1f7

Branch: refs/heads/spark
Commit: 48a1e1f7b051e6ca2a5bbb4a55eda16f5349d369
Parents: e3ef96f
Author: Daniel Dai <da...@hortonworks.com>
Authored: Sun Oct 25 09:46:27 2015 -0700
Committer: Daniel Dai <da...@hortonworks.com>
Committed: Sun Oct 25 09:48:00 2015 -0700

----------------------------------------------------------------------
 .../hive/service/cli/session/HiveSessionImpl.java       | 12 ++++++++++++
 1 file changed, 12 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/48a1e1f7/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index a600309..3eaab9a 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -241,6 +241,18 @@ public class HiveSessionImpl implements HiveSession {
 
   @Override
   public void setOperationLogSessionDir(File operationLogRootDir) {
+    if (!operationLogRootDir.exists()) {
+      LOG.warn("The operation log root directory is removed, recreating:" +
+          operationLogRootDir.getAbsolutePath());
+      if (!operationLogRootDir.mkdirs()) {
+        LOG.warn("Unable to create operation log root directory: " +
+            operationLogRootDir.getAbsolutePath());
+      }
+    }
+    if (!operationLogRootDir.canWrite()) {
+      LOG.warn("The operation log root directory is not writable: " +
+          operationLogRootDir.getAbsolutePath());
+    }
     sessionLogDir = new File(operationLogRootDir, sessionHandle.getHandleIdentifier().toString());
     isOperationLogEnabled = true;
     if (!sessionLogDir.exists()) {
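
The HIVE-12262 hunk above makes HiveSessionImpl defensive about the operation log root directory disappearing under a running HiveServer2: it recreates the root (warning if it cannot) and warns if it is not writable before deriving the per-session log dir. A minimal standalone sketch of the same pattern follows; the class and method names (OperationLogDirUtil, ensureSessionLogDir) are hypothetical and used only for illustration, not part of the actual patch.

  import java.io.File;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class OperationLogDirUtil {
    private static final Logger LOG = LoggerFactory.getLogger(OperationLogDirUtil.class);

    /** Recreate the operation log root if needed, then derive the per-session dir. */
    public static File ensureSessionLogDir(File operationLogRootDir, String sessionId) {
      if (!operationLogRootDir.exists()) {
        LOG.warn("The operation log root directory is missing, recreating: "
            + operationLogRootDir.getAbsolutePath());
        if (!operationLogRootDir.mkdirs()) {
          LOG.warn("Unable to create operation log root directory: "
              + operationLogRootDir.getAbsolutePath());
        }
      }
      if (!operationLogRootDir.canWrite()) {
        LOG.warn("The operation log root directory is not writable: "
            + operationLogRootDir.getAbsolutePath());
      }
      // The per-session directory itself is still created lazily, as in HiveSessionImpl.
      return new File(operationLogRootDir, sessionId);
    }
  }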


[44/55] [abbrv] hive git commit: HIVE-10807 : Invalidate basic stats for insert queries if autogather=false (Ashutosh Chauhan via Gopal V)

Posted by xu...@apache.org.
HIVE-10807 : Invalidate basic stats for insert queries if autogather=false (Ashutosh Chauhan via Gopal V)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/86346fb1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/86346fb1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/86346fb1

Branch: refs/heads/spark
Commit: 86346fb150f0358e40b6435077eccda3e07d17e2
Parents: f9517ef
Author: Ashutosh Chauhan <ha...@apache.org>
Authored: Mon Oct 26 17:45:59 2015 -0700
Committer: Ashutosh Chauhan <ha...@apache.org>
Committed: Mon Oct 26 17:45:59 2015 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/ql/QueryProperties.java  |  10 --
 .../apache/hadoop/hive/ql/metadata/Hive.java    |  13 +-
 .../hive/ql/optimizer/GenMapRedUtils.java       |   3 +-
 .../hive/ql/optimizer/StatsOptimizer.java       |  38 +++--
 .../hadoop/hive/ql/parse/QBParseInfo.java       |   9 --
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |   6 -
 .../test/queries/clientpositive/insert_into1.q  |  10 +-
 .../test/queries/clientpositive/insert_into2.q  |   8 +
 .../clientpositive/bucket_map_join_1.q.out      |   4 -
 .../clientpositive/bucket_map_join_2.q.out      |   4 -
 .../encryption_insert_partition_dynamic.q.out   |   4 -
 .../encryption_join_unencrypted_tbl.q.out       |   4 -
 .../results/clientpositive/insert_into1.q.out   | 151 +++++++++++++++++++
 .../results/clientpositive/insert_into2.q.out   |  69 +++++++++
 .../spark/bucket_map_join_1.q.out               |   8 -
 .../spark/bucket_map_join_2.q.out               |   8 -
 .../clientpositive/spark/insert_into1.q.out     | 116 ++++++++++++++
 .../clientpositive/spark/insert_into2.q.out     |  75 +++++++++
 .../results/clientpositive/spark/stats3.q.out   |   2 -
 ql/src/test/results/clientpositive/stats3.q.out |   2 -
 .../clientpositive/tez/insert_into1.q.out       | 120 +++++++++++++++
 .../clientpositive/tez/insert_into2.q.out       |  75 +++++++++
 22 files changed, 661 insertions(+), 78 deletions(-)
----------------------------------------------------------------------
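
In outline, this change has two halves: Hive.loadTable/loadPartition now flag the destination's basic stats as stale whenever hive.stats.autogather is off, and StatsOptimizer now refuses to answer metadata-only queries (such as SELECT COUNT(*)) from stats that are not marked up to date. A rough sketch of that pairing is shown below; it reuses the real StatsSetupConst and HiveConf constants that appear in the diffs, but the class and helper names (StatsInvalidationSketch, markStatsAfterLoad, rowCountFromStats) are hypothetical, not the actual patch.

  import java.util.Map;
  import org.apache.hadoop.hive.common.StatsSetupConst;
  import org.apache.hadoop.hive.conf.HiveConf;

  public class StatsInvalidationSketch {

    /** Load side: mark basic stats stale when stats autogathering is disabled. */
    public static void markStatsAfterLoad(HiveConf conf, Map<String, String> tableParams) {
      if (!conf.getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
        // No stats task runs for this insert, so any numRows/rawDataSize already
        // recorded in the metastore can no longer be trusted.
        tableParams.put(StatsSetupConst.COLUMN_STATS_ACCURATE, "false");
      } else {
        tableParams.put(StatsSetupConst.STATS_GENERATED_VIA_STATS_TASK, "true");
      }
    }

    /** Optimizer side: only answer from stats that are still marked accurate. */
    public static Long rowCountFromStats(Map<String, String> tableParams) {
      if (!StatsSetupConst.areStatsUptoDate(tableParams)) {
        return null; // fall back to scanning the data
      }
      String rowCount = tableParams.get(StatsSetupConst.ROW_COUNT);
      return rowCount == null ? null : Long.parseLong(rowCount);
    }
  }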


http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java
index e8f7fba..3bc9432 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java
@@ -39,7 +39,6 @@ public class QueryProperties {
   boolean noScanAnalyzeCommand;
   boolean analyzeRewrite;
   boolean ctas;
-  boolean insertToTable;
   int outerQueryLimit;
 
   boolean hasJoin = false;
@@ -115,14 +114,6 @@ public class QueryProperties {
     this.ctas = ctas;
   }
 
-  public boolean isInsertToTable() {
-    return insertToTable;
-  }
-
-  public void setInsertToTable(boolean insertToTable) {
-    this.insertToTable = insertToTable;
-  }
-
   public int getOuterQueryLimit() {
     return outerQueryLimit;
   }
@@ -276,7 +267,6 @@ public class QueryProperties {
     noScanAnalyzeCommand = false;
     analyzeRewrite = false;
     ctas = false;
-    insertToTable = false;
     outerQueryLimit = -1;
 
     hasJoin = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index c64d8d1..a2dea67 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1464,6 +1464,7 @@ public class Hive {
 
       newTPart = getPartition(tbl, partSpec, true, newPartPath.toString(),
           inheritTableSpecs, newFiles);
+
       // recreate the partition if it existed before
       if (isSkewedStoreAsSubdir) {
         org.apache.hadoop.hive.metastore.api.Partition newCreatedTpart = newTPart.getTPartition();
@@ -1474,12 +1475,18 @@ public class Hive {
         /* Add list bucketing location mappings. */
         skewedInfo.setSkewedColValueLocationMaps(skewedColValueLocationMaps);
         newCreatedTpart.getSd().setSkewedInfo(skewedInfo);
+        if(!this.getConf().getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
+          newTPart.getParameters().put(StatsSetupConst.COLUMN_STATS_ACCURATE, "false");
+        }
         alterPartition(tbl.getDbName(), tbl.getTableName(), new Partition(tbl, newCreatedTpart));
         newTPart = getPartition(tbl, partSpec, true, newPartPath.toString(), inheritTableSpecs,
             newFiles);
         return new Partition(tbl, newCreatedTpart);
       }
-
+      if(!this.getConf().getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
+        newTPart.getParameters().put(StatsSetupConst.COLUMN_STATS_ACCURATE, "false");
+        alterPartition(tbl.getDbName(), tbl.getTableName(), new Partition(tbl, newTPart.getTPartition()));
+      }
     } catch (IOException e) {
       LOG.error(StringUtils.stringifyException(e));
       throw new HiveException(e);
@@ -1714,6 +1721,10 @@ private void constructOneLBLocationMap(FileStatus fSta,
       } catch (IOException e) {
         throw new HiveException("addFiles: filesystem error in check phase", e);
       }
+    }
+    if(!this.getConf().getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
+      tbl.getParameters().put(StatsSetupConst.COLUMN_STATS_ACCURATE, "false");
+    }  else {
       tbl.getParameters().put(StatsSetupConst.STATS_GENERATED_VIA_STATS_TASK, "true");
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
index 109b938..c22c35f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
@@ -1500,8 +1500,7 @@ public final class GenMapRedUtils {
    * @return
    */
   public static boolean isInsertInto(ParseContext parseCtx, FileSinkOperator fsOp) {
-    return fsOp.getConf().getTableInfo().getTableName() != null &&
-        parseCtx.getQueryProperties().isInsertToTable();
+    return fsOp.getConf().getTableInfo().getTableName() != null;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
index 5a21e6b..aa204c7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
@@ -144,17 +144,23 @@ public class StatsOptimizer implements Transform {
     }
 
     enum LongSubType {
-      BIGINT { Object cast(long longValue) { return longValue; } }, 
-      INT { Object cast(long longValue) { return (int)longValue; } },
-      SMALLINT { Object cast(long longValue) { return (short)longValue; } },
-      TINYINT { Object cast(long longValue) { return (byte)longValue; } };
+      BIGINT { @Override
+      Object cast(long longValue) { return longValue; } },
+      INT { @Override
+      Object cast(long longValue) { return (int)longValue; } },
+      SMALLINT { @Override
+      Object cast(long longValue) { return (short)longValue; } },
+      TINYINT { @Override
+      Object cast(long longValue) { return (byte)longValue; } };
 
       abstract Object cast(long longValue);
     }
 
     enum DoubleSubType {
-      DOUBLE { Object cast(double doubleValue) { return doubleValue; } },
-      FLOAT { Object cast(double doubleValue) { return (float) doubleValue; } };
+      DOUBLE { @Override
+      Object cast(double doubleValue) { return doubleValue; } },
+      FLOAT { @Override
+      Object cast(double doubleValue) { return (float) doubleValue; } };
 
       abstract Object cast(double doubleValue);
     }
@@ -221,7 +227,7 @@ public class StatsOptimizer implements Transform {
         // Since we have done an exact match on TS-SEL-GBY-RS-GBY-(SEL)-FS
         // we need not to do any instanceof checks for following.
         GroupByOperator pgbyOp = (GroupByOperator)stack.get(2);
-        if (pgbyOp.getConf().getOutputColumnNames().size() != 
+        if (pgbyOp.getConf().getOutputColumnNames().size() !=
             pgbyOp.getConf().getAggregators().size()) {
           return null;
         }
@@ -260,7 +266,7 @@ public class StatsOptimizer implements Transform {
         FileSinkOperator fsOp = (FileSinkOperator)last;
         if (fsOp.getNumChild() > 0) {
           // looks like a subq plan.
-          return null;  // todo we can collapse this part of tree into single TS 
+          return null;  // todo we can collapse this part of tree into single TS
         }
 
         Table tbl = tsOp.getConf().getTableMetadata();
@@ -296,7 +302,7 @@ public class StatsOptimizer implements Transform {
               return null;
             }
             switch (category) {
-              case LONG: 
+              case LONG:
                 oneRow.add(Long.valueOf(constant) * rowCnt);
                 break;
               case DOUBLE:
@@ -436,7 +442,7 @@ public class StatsOptimizer implements Transform {
               switch (type) {
                 case Integeral: {
                   LongSubType subType = LongSubType.valueOf(name);
-                  
+
                   Long maxVal = null;
                   Collection<List<ColumnStatisticsObj>> result =
                       verifyAndGetPartStats(hive, tbl, colName, parts);
@@ -462,7 +468,7 @@ public class StatsOptimizer implements Transform {
                 }
                 case Double: {
                   DoubleSubType subType = DoubleSubType.valueOf(name);
-                  
+
                   Double maxVal = null;
                   Collection<List<ColumnStatisticsObj>> result =
                       verifyAndGetPartStats(hive, tbl, colName, parts);
@@ -537,7 +543,7 @@ public class StatsOptimizer implements Transform {
               switch(type) {
                 case Integeral: {
                   LongSubType subType = LongSubType.valueOf(name);
-                  
+
                   Long minVal = null;
                   Collection<List<ColumnStatisticsObj>> result =
                       verifyAndGetPartStats(hive, tbl, colName, parts);
@@ -563,7 +569,7 @@ public class StatsOptimizer implements Transform {
                 }
                 case Double: {
                   DoubleSubType subType = DoubleSubType.valueOf(name);
-                  
+
                   Double minVal = null;
                   Collection<List<ColumnStatisticsObj>> result =
                       verifyAndGetPartStats(hive, tbl, colName, parts);
@@ -680,6 +686,9 @@ public class StatsOptimizer implements Transform {
       if (tbl.isPartitioned()) {
         for (Partition part : pctx.getPrunedPartitions(
             tsOp.getConf().getAlias(), tsOp).getPartitions()) {
+          if (!StatsSetupConst.areStatsUptoDate(part.getParameters())) {
+            return null;
+          }
           long partRowCnt = Long.parseLong(part.getParameters().get(StatsSetupConst.ROW_COUNT));
           if (partRowCnt < 1) {
             Log.debug("Partition doesn't have upto date stats " + part.getSpec());
@@ -688,6 +697,9 @@ public class StatsOptimizer implements Transform {
           rowCnt += partRowCnt;
         }
       } else { // unpartitioned table
+        if (!StatsSetupConst.areStatsUptoDate(tbl.getParameters())) {
+          return null;
+        }
         rowCnt = Long.parseLong(tbl.getProperty(StatsSetupConst.ROW_COUNT));
         if (rowCnt < 1) {
           // if rowCnt < 1 than its either empty table or table on which stats are not

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
index 14a7e9c..9072d7f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
@@ -65,7 +65,6 @@ public class QBParseInfo {
   private final HashSet<String> insertIntoTables;
 
   private boolean isAnalyzeCommand; // used for the analyze command (statistics)
-  private boolean isInsertToTable;  // used for insert overwrite command (statistics)
   private boolean isNoScanAnalyzeCommand; // used for the analyze command (statistics) (noscan)
   private boolean isPartialScanAnalyzeCommand; // used for the analyze command (statistics)
                                                // (partialscan)
@@ -550,14 +549,6 @@ public class QBParseInfo {
     return isAnalyzeCommand;
   }
 
-  public void setIsInsertToTable(boolean isInsertToTable) {
-    this.isInsertToTable = isInsertToTable;
-  }
-
-  public boolean isInsertToTable() {
-    return isInsertToTable;
-  }
-
   public void addTableSpec(String tName, TableSpec tSpec) {
     tableSpecs.put(tName, tSpec);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index f47428c..8927800 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -1735,8 +1735,6 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
             qb.getMetaData().setDestForAlias(name, ts.partHandle);
           }
           if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
-            // Set that variable to automatically collect stats during the MapReduce job
-            qb.getParseInfo().setIsInsertToTable(true);
             // Add the table spec for the destination table.
             qb.getParseInfo().addTableSpec(ts.tableName.toLowerCase(), ts);
           }
@@ -1773,8 +1771,6 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
               }
               if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
                 TableSpec ts = new TableSpec(db, conf, this.ast);
-                // Set that variable to automatically collect stats during the MapReduce job
-                qb.getParseInfo().setIsInsertToTable(true);
                 // Add the table spec for the destination table.
                 qb.getParseInfo().addTableSpec(ts.tableName.toLowerCase(), ts);
               }
@@ -6328,7 +6324,6 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       // verify that our destination is empty before proceeding
       if (dest_tab.isImmutable() &&
           qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),dest_tab.getTableName())){
-        qb.getParseInfo().isInsertToTable();
         try {
           FileSystem fs = partPath.getFileSystem(conf);
           if (! MetaStoreUtils.isDirEmpty(fs,partPath)){
@@ -12208,7 +12203,6 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       queryProperties.setNoScanAnalyzeCommand(qb.getParseInfo().isNoScanAnalyzeCommand());
       queryProperties.setAnalyzeRewrite(qb.isAnalyzeRewrite());
       queryProperties.setCTAS(qb.getTableDesc() != null);
-      queryProperties.setInsertToTable(qb.getParseInfo().isInsertToTable());
       queryProperties.setHasOuterOrderBy(!qb.getParseInfo().getIsSubQ() &&
               !qb.getParseInfo().getDestToOrderBy().isEmpty());
       queryProperties.setOuterQueryLimit(qb.getParseInfo().getOuterQueryLimit());

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/queries/clientpositive/insert_into1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/insert_into1.q b/ql/src/test/queries/clientpositive/insert_into1.q
index af82e3a..f00d06e 100644
--- a/ql/src/test/queries/clientpositive/insert_into1.q
+++ b/ql/src/test/queries/clientpositive/insert_into1.q
@@ -43,6 +43,14 @@ insert into insert_into1 select 2, 'b';
 
 select * from insert_into1;
 
-DROP TABLE insert_into1;
+set hive.stats.autogather=false;                                                                                                                                    
+explain
+insert into table insert_into1 values(1, 'abc');                                                                                                                    
+insert into table insert_into1 values(1, 'abc');                                                                                                                    
+explain
+SELECT COUNT(*) FROM insert_into1;                                                                                                                                  
+select count(*) from insert_into1;
 
+DROP TABLE insert_into1;
+set hive.stats.autogather=true;
 set hive.compute.query.using.stats=false;

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/queries/clientpositive/insert_into2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/insert_into2.q b/ql/src/test/queries/clientpositive/insert_into2.q
index 7183c75..d127c04 100644
--- a/ql/src/test/queries/clientpositive/insert_into2.q
+++ b/ql/src/test/queries/clientpositive/insert_into2.q
@@ -41,7 +41,15 @@ explain
 SELECT COUNT(*) FROM insert_into2 WHERE ds='2';
 SELECT COUNT(*) FROM insert_into2 WHERE ds='2';
 
+set hive.stats.autogather=false;                                                                     
+
+insert into table insert_into2 partition (ds='2') values(1, 'abc');                                                                                                                    
+explain
+SELECT COUNT(*) FROM insert_into2 where ds='2';                                                                                                                                  
+select count(*) from insert_into2 where ds='2';
+
 
 DROP TABLE insert_into2;
 
+set hive.stats.autogather=true;                                                                                                                                    
 set hive.compute.query.using.stats=false;
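
Taken together with the query changes above, the golden-file updates that follow show the intended behaviour: with hive.stats.autogather=false the follow-up COUNT(*) queries are planned as full MapReduce/Spark/Tez jobs over the table (the insert_into1.q.out and insert_into2.q.out files each gain a complete Stage-1 scan-and-aggregate plan) instead of being short-circuited from metastore row counts, and they return the correct values (3 and 51).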

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_1.q.out b/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
index ce0df01..c7a8a20 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
@@ -182,8 +182,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.table1
               numFiles 1
-              numRows 0
-              rawDataSize 0
               serialization.ddl struct table1 { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -204,8 +202,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.table1
                 numFiles 1
-                numRows 0
-                rawDataSize 0
                 serialization.ddl struct table1 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_2.q.out b/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
index 791e606..3c3793f 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
@@ -182,8 +182,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.table1
               numFiles 1
-              numRows 0
-              rawDataSize 0
               serialization.ddl struct table1 { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -204,8 +202,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.table1
                 numFiles 1
-                numRows 0
-                rawDataSize 0
                 serialization.ddl struct table1 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_dynamic.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_dynamic.q.out b/ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_dynamic.q.out
index 3ed1fdb..13fae42 100644
--- a/ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_dynamic.q.out
+++ b/ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_dynamic.q.out
@@ -309,8 +309,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.src
               numFiles 1
-              numRows 0
-              rawDataSize 0
               serialization.ddl struct src { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -329,8 +327,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.src
                 numFiles 1
-                numRows 0
-                rawDataSize 0
                 serialization.ddl struct src { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out b/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out
index 7997fcb..5dd927d 100644
--- a/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out
+++ b/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out
@@ -673,8 +673,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.src
               numFiles 1
-              numRows 0
-              rawDataSize 0
               serialization.ddl struct src { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -693,8 +691,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.src
                 numFiles 1
-                numRows 0
-                rawDataSize 0
                 serialization.ddl struct src { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/insert_into1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/insert_into1.q.out b/ql/src/test/results/clientpositive/insert_into1.q.out
index cf627a6..7f3112c 100644
--- a/ql/src/test/results/clientpositive/insert_into1.q.out
+++ b/ql/src/test/results/clientpositive/insert_into1.q.out
@@ -539,6 +539,157 @@ POSTHOOK: Input: default@insert_into1
 #### A masked pattern was here ####
 1	a
 2	b
+PREHOOK: query: explain
+insert into table insert_into1 values(1, 'abc')
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+insert into table insert_into1 values(1, 'abc')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-6 depends on stages: Stage-1 , consists of Stage-3, Stage-2, Stage-4
+  Stage-3
+  Stage-0 depends on stages: Stage-3, Stage-2, Stage-5
+  Stage-2
+  Stage-4
+  Stage-5 depends on stages: Stage-4
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: values__tmp__table__1
+            Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: UDFToInteger(tmp_values_col1) (type: int), tmp_values_col2 (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.insert_into1
+
+  Stage: Stage-6
+    Conditional Operator
+
+  Stage: Stage-3
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into1
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.insert_into1
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.insert_into1
+
+  Stage: Stage-5
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: insert into table insert_into1 values(1, 'abc')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@insert_into1
+POSTHOOK: query: insert into table insert_into1 values(1, 'abc')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@insert_into1
+POSTHOOK: Lineage: insert_into1.key EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: insert_into1.value SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: explain
+SELECT COUNT(*) FROM insert_into1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+SELECT COUNT(*) FROM insert_into1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: insert_into1
+            Statistics: Num rows: -1 Data size: 14 Basic stats: PARTIAL Column stats: COMPLETE
+            Select Operator
+              Statistics: Num rows: -1 Data size: 14 Basic stats: PARTIAL Column stats: COMPLETE
+              Group By Operator
+                aggregations: count()
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select count(*) from insert_into1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into1
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from insert_into1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into1
+#### A masked pattern was here ####
+3
 PREHOOK: query: DROP TABLE insert_into1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@insert_into1

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/insert_into2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/insert_into2.q.out b/ql/src/test/results/clientpositive/insert_into2.q.out
index cc7e135..737e576 100644
--- a/ql/src/test/results/clientpositive/insert_into2.q.out
+++ b/ql/src/test/results/clientpositive/insert_into2.q.out
@@ -394,6 +394,75 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@insert_into2
 #### A masked pattern was here ####
 50
+PREHOOK: query: insert into table insert_into2 partition (ds='2') values(1, 'abc')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@insert_into2@ds=2
+POSTHOOK: query: insert into table insert_into2 partition (ds='2') values(1, 'abc')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@insert_into2@ds=2
+POSTHOOK: Lineage: insert_into2 PARTITION(ds=2).key EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: insert_into2 PARTITION(ds=2).value SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: explain
+SELECT COUNT(*) FROM insert_into2 where ds='2'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+SELECT COUNT(*) FROM insert_into2 where ds='2'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: insert_into2
+            Statistics: Num rows: 50 Data size: 530 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              Statistics: Num rows: 50 Data size: 530 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: count()
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select count(*) from insert_into2 where ds='2'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into2
+PREHOOK: Input: default@insert_into2@ds=2
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from insert_into2 where ds='2'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into2
+POSTHOOK: Input: default@insert_into2@ds=2
+#### A masked pattern was here ####
+51
 PREHOOK: query: DROP TABLE insert_into2
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@insert_into2

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/spark/bucket_map_join_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket_map_join_1.q.out b/ql/src/test/results/clientpositive/spark/bucket_map_join_1.q.out
index d3f433d..c1b13aa 100644
--- a/ql/src/test/results/clientpositive/spark/bucket_map_join_1.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket_map_join_1.q.out
@@ -149,8 +149,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.table2
                     numFiles 1
-                    numRows 0
-                    rawDataSize 0
                     serialization.ddl struct table2 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -171,8 +169,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.table2
                       numFiles 1
-                      numRows 0
-                      rawDataSize 0
                       serialization.ddl struct table2 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -242,8 +238,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.table1
                     numFiles 1
-                    numRows 0
-                    rawDataSize 0
                     serialization.ddl struct table1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -264,8 +258,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.table1
                       numFiles 1
-                      numRows 0
-                      rawDataSize 0
                       serialization.ddl struct table1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/spark/bucket_map_join_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket_map_join_2.q.out b/ql/src/test/results/clientpositive/spark/bucket_map_join_2.q.out
index 3d850db..580e098 100644
--- a/ql/src/test/results/clientpositive/spark/bucket_map_join_2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket_map_join_2.q.out
@@ -149,8 +149,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.table2
                     numFiles 1
-                    numRows 0
-                    rawDataSize 0
                     serialization.ddl struct table2 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -171,8 +169,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.table2
                       numFiles 1
-                      numRows 0
-                      rawDataSize 0
                       serialization.ddl struct table2 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -242,8 +238,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.table1
                     numFiles 1
-                    numRows 0
-                    rawDataSize 0
                     serialization.ddl struct table1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -264,8 +258,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.table1
                       numFiles 1
-                      numRows 0
-                      rawDataSize 0
                       serialization.ddl struct table1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/spark/insert_into1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/insert_into1.q.out b/ql/src/test/results/clientpositive/spark/insert_into1.q.out
index 38134a1..00e71ba 100644
--- a/ql/src/test/results/clientpositive/spark/insert_into1.q.out
+++ b/ql/src/test/results/clientpositive/spark/insert_into1.q.out
@@ -475,6 +475,122 @@ POSTHOOK: Input: default@insert_into1
 #### A masked pattern was here ####
 1	a
 2	b
+PREHOOK: query: explain
+insert into table insert_into1 values(1, 'abc')
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+insert into table insert_into1 values(1, 'abc')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: values__tmp__table__1
+                  Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: UDFToInteger(tmp_values_col1) (type: int), tmp_values_col2 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into1
+
+PREHOOK: query: insert into table insert_into1 values(1, 'abc')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@insert_into1
+POSTHOOK: query: insert into table insert_into1 values(1, 'abc')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@insert_into1
+POSTHOOK: Lineage: insert_into1.key EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: insert_into1.value SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: explain
+SELECT COUNT(*) FROM insert_into1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+SELECT COUNT(*) FROM insert_into1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP, 1)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: insert_into1
+                  Statistics: Num rows: -1 Data size: 14 Basic stats: PARTIAL Column stats: COMPLETE
+                  Select Operator
+                    Statistics: Num rows: -1 Data size: 14 Basic stats: PARTIAL Column stats: COMPLETE
+                    Group By Operator
+                      aggregations: count()
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: bigint)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select count(*) from insert_into1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into1
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from insert_into1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into1
+#### A masked pattern was here ####
+3
 PREHOOK: query: DROP TABLE insert_into1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@insert_into1

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/spark/insert_into2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/insert_into2.q.out b/ql/src/test/results/clientpositive/spark/insert_into2.q.out
index 578fae2..26bf1e6 100644
--- a/ql/src/test/results/clientpositive/spark/insert_into2.q.out
+++ b/ql/src/test/results/clientpositive/spark/insert_into2.q.out
@@ -412,6 +412,81 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@insert_into2
 #### A masked pattern was here ####
 50
+PREHOOK: query: insert into table insert_into2 partition (ds='2') values(1, 'abc')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@insert_into2@ds=2
+POSTHOOK: query: insert into table insert_into2 partition (ds='2') values(1, 'abc')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@insert_into2@ds=2
+POSTHOOK: Lineage: insert_into2 PARTITION(ds=2).key EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: insert_into2 PARTITION(ds=2).value SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: explain
+SELECT COUNT(*) FROM insert_into2 where ds='2'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+SELECT COUNT(*) FROM insert_into2 where ds='2'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP, 1)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: insert_into2
+                  Statistics: Num rows: 50 Data size: 530 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    Statistics: Num rows: 50 Data size: 530 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: count()
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: bigint)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select count(*) from insert_into2 where ds='2'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into2
+PREHOOK: Input: default@insert_into2@ds=2
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from insert_into2 where ds='2'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into2
+POSTHOOK: Input: default@insert_into2@ds=2
+#### A masked pattern was here ####
+51
 PREHOOK: query: DROP TABLE insert_into2
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@insert_into2

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/spark/stats3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/stats3.q.out b/ql/src/test/results/clientpositive/spark/stats3.q.out
index 2afb76e..cbd66e5 100644
--- a/ql/src/test/results/clientpositive/spark/stats3.q.out
+++ b/ql/src/test/results/clientpositive/spark/stats3.q.out
@@ -88,8 +88,6 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 	COLUMN_STATS_ACCURATE	true                
 	numFiles            	1                   
-	numRows             	0                   
-	rawDataSize         	0                   
 	totalSize           	11                  
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/stats3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats3.q.out b/ql/src/test/results/clientpositive/stats3.q.out
index 2afb76e..cbd66e5 100644
--- a/ql/src/test/results/clientpositive/stats3.q.out
+++ b/ql/src/test/results/clientpositive/stats3.q.out
@@ -88,8 +88,6 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 	COLUMN_STATS_ACCURATE	true                
 	numFiles            	1                   
-	numRows             	0                   
-	rawDataSize         	0                   
 	totalSize           	11                  
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/tez/insert_into1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/insert_into1.q.out b/ql/src/test/results/clientpositive/tez/insert_into1.q.out
index b24b407..0e82691 100644
--- a/ql/src/test/results/clientpositive/tez/insert_into1.q.out
+++ b/ql/src/test/results/clientpositive/tez/insert_into1.q.out
@@ -495,6 +495,126 @@ POSTHOOK: Input: default@insert_into1
 #### A masked pattern was here ####
 1	a
 2	b
+PREHOOK: query: explain
+insert into table insert_into1 values(1, 'abc')
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+insert into table insert_into1 values(1, 'abc')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: values__tmp__table__1
+                  Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: UDFToInteger(tmp_values_col1) (type: int), tmp_values_col2 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into1
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into1
+
+PREHOOK: query: insert into table insert_into1 values(1, 'abc')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@insert_into1
+POSTHOOK: query: insert into table insert_into1 values(1, 'abc')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@insert_into1
+POSTHOOK: Lineage: insert_into1.key EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: insert_into1.value SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: explain
+SELECT COUNT(*) FROM insert_into1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+SELECT COUNT(*) FROM insert_into1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: insert_into1
+                  Statistics: Num rows: -1 Data size: 14 Basic stats: PARTIAL Column stats: COMPLETE
+                  Select Operator
+                    Statistics: Num rows: -1 Data size: 14 Basic stats: PARTIAL Column stats: COMPLETE
+                    Group By Operator
+                      aggregations: count()
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: bigint)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select count(*) from insert_into1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into1
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from insert_into1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into1
+#### A masked pattern was here ####
+3
 PREHOOK: query: DROP TABLE insert_into1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@insert_into1

http://git-wip-us.apache.org/repos/asf/hive/blob/86346fb1/ql/src/test/results/clientpositive/tez/insert_into2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/insert_into2.q.out b/ql/src/test/results/clientpositive/tez/insert_into2.q.out
index 2c7ec63..b7668ff 100644
--- a/ql/src/test/results/clientpositive/tez/insert_into2.q.out
+++ b/ql/src/test/results/clientpositive/tez/insert_into2.q.out
@@ -424,6 +424,81 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@insert_into2
 #### A masked pattern was here ####
 50
+PREHOOK: query: insert into table insert_into2 partition (ds='2') values(1, 'abc')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@insert_into2@ds=2
+POSTHOOK: query: insert into table insert_into2 partition (ds='2') values(1, 'abc')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@insert_into2@ds=2
+POSTHOOK: Lineage: insert_into2 PARTITION(ds=2).key EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: insert_into2 PARTITION(ds=2).value SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: explain
+SELECT COUNT(*) FROM insert_into2 where ds='2'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+SELECT COUNT(*) FROM insert_into2 where ds='2'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: insert_into2
+                  Statistics: Num rows: 50 Data size: 530 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    Statistics: Num rows: 50 Data size: 530 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: count()
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: bigint)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select count(*) from insert_into2 where ds='2'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into2
+PREHOOK: Input: default@insert_into2@ds=2
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from insert_into2 where ds='2'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into2
+POSTHOOK: Input: default@insert_into2@ds=2
+#### A masked pattern was here ####
+51
 PREHOOK: query: DROP TABLE insert_into2
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@insert_into2


[02/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationHandle.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationHandle.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationHandle.java
index 75a3c89..d1d86fe 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationHandle.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationHandle.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TOperationHandle implements org.apache.thrift.TBase<TOperationHandle, TOperationHandle._Fields>, java.io.Serializable, Cloneable, Comparable<TOperationHandle> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TOperationHandle");
 
@@ -333,10 +333,10 @@ public class TOperationHandle implements org.apache.thrift.TBase<TOperationHandl
       return getOperationType();
 
     case HAS_RESULT_SET:
-      return Boolean.valueOf(isHasResultSet());
+      return isHasResultSet();
 
     case MODIFIED_ROW_COUNT:
-      return Double.valueOf(getModifiedRowCount());
+      return getModifiedRowCount();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationState.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationState.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationState.java
index 2dfde1c..fe029ef 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationState.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationState.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationType.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationType.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationType.java
index 6927740..f105259 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationType.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOperationType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TPrimitiveTypeEntry.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TPrimitiveTypeEntry.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TPrimitiveTypeEntry.java
index 706c6ba..38a5be3 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TPrimitiveTypeEntry.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TPrimitiveTypeEntry.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TPrimitiveTypeEntry implements org.apache.thrift.TBase<TPrimitiveTypeEntry, TPrimitiveTypeEntry._Fields>, java.io.Serializable, Cloneable, Comparable<TPrimitiveTypeEntry> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TPrimitiveTypeEntry");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TProtocolVersion.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TProtocolVersion.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TProtocolVersion.java
index 6e714c6..c936ada 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TProtocolVersion.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TProtocolVersion.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRenewDelegationTokenReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRenewDelegationTokenReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRenewDelegationTokenReq.java
index 9452fd8..e74bb29 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRenewDelegationTokenReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRenewDelegationTokenReq.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TRenewDelegationTokenReq implements org.apache.thrift.TBase<TRenewDelegationTokenReq, TRenewDelegationTokenReq._Fields>, java.io.Serializable, Cloneable, Comparable<TRenewDelegationTokenReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRenewDelegationTokenReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRenewDelegationTokenResp.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRenewDelegationTokenResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRenewDelegationTokenResp.java
index 8a0e78a..63633e3 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRenewDelegationTokenResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRenewDelegationTokenResp.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TRenewDelegationTokenResp implements org.apache.thrift.TBase<TRenewDelegationTokenResp, TRenewDelegationTokenResp._Fields>, java.io.Serializable, Cloneable, Comparable<TRenewDelegationTokenResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRenewDelegationTokenResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRow.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRow.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRow.java
index 96322db..28c34cd 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRow.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRow.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TRow implements org.apache.thrift.TBase<TRow, TRow._Fields>, java.io.Serializable, Cloneable, Comparable<TRow> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRow");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRowSet.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRowSet.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRowSet.java
index 05e0b89..9b9b2ac 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRowSet.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRowSet.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields>, java.io.Serializable, Cloneable, Comparable<TRowSet> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRowSet");
 
@@ -312,7 +312,7 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case START_ROW_OFFSET:
-      return Long.valueOf(getStartRowOffset());
+      return getStartRowOffset();
 
     case ROWS:
       return getRows();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TSessionHandle.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TSessionHandle.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TSessionHandle.java
index ff1e723..aeff2b6 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TSessionHandle.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TSessionHandle.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TSessionHandle implements org.apache.thrift.TBase<TSessionHandle, TSessionHandle._Fields>, java.io.Serializable, Cloneable, Comparable<TSessionHandle> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TSessionHandle");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStatus.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStatus.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStatus.java
index 5309d49..ccf281a 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStatus.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStatus.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TStatus implements org.apache.thrift.TBase<TStatus, TStatus._Fields>, java.io.Serializable, Cloneable, Comparable<TStatus> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TStatus");
 
@@ -391,7 +391,7 @@ public class TStatus implements org.apache.thrift.TBase<TStatus, TStatus._Fields
       return getSqlState();
 
     case ERROR_CODE:
-      return Integer.valueOf(getErrorCode());
+      return getErrorCode();
 
     case ERROR_MESSAGE:
       return getErrorMessage();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStatusCode.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStatusCode.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStatusCode.java
index 91d1a97..8900d4c 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStatusCode.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStatusCode.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStringColumn.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStringColumn.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStringColumn.java
index a987733..7c15ec1 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStringColumn.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStringColumn.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TStringColumn implements org.apache.thrift.TBase<TStringColumn, TStringColumn._Fields>, java.io.Serializable, Cloneable, Comparable<TStringColumn> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TStringColumn");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStringValue.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStringValue.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStringValue.java
index c902e0b..96e9be0 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStringValue.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStringValue.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TStringValue implements org.apache.thrift.TBase<TStringValue, TStringValue._Fields>, java.io.Serializable, Cloneable, Comparable<TStringValue> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TStringValue");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStructTypeEntry.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStructTypeEntry.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStructTypeEntry.java
index f477764..c90926a 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStructTypeEntry.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStructTypeEntry.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TStructTypeEntry implements org.apache.thrift.TBase<TStructTypeEntry, TStructTypeEntry._Fields>, java.io.Serializable, Cloneable, Comparable<TStructTypeEntry> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TStructTypeEntry");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTableSchema.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTableSchema.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTableSchema.java
index d97f593..7a38e15 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTableSchema.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTableSchema.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TTableSchema implements org.apache.thrift.TBase<TTableSchema, TTableSchema._Fields>, java.io.Serializable, Cloneable, Comparable<TTableSchema> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTableSchema");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeDesc.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeDesc.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeDesc.java
index c4fdd74..e187da2 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeDesc.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeDesc.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TTypeDesc implements org.apache.thrift.TBase<TTypeDesc, TTypeDesc._Fields>, java.io.Serializable, Cloneable, Comparable<TTypeDesc> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTypeDesc");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeEntry.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeEntry.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeEntry.java
index 203d3b9..a53eaa2 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeEntry.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeEntry.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeId.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeId.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeId.java
index e6f2336..379bc92 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeId.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeId.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifierValue.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifierValue.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifierValue.java
index 54df6dc..cc786b2 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifierValue.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifierValue.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifiers.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifiers.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifiers.java
index 9312d92..ac97b7f 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifiers.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeQualifiers.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TTypeQualifiers implements org.apache.thrift.TBase<TTypeQualifiers, TTypeQualifiers._Fields>, java.io.Serializable, Cloneable, Comparable<TTypeQualifiers> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTypeQualifiers");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TUnionTypeEntry.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TUnionTypeEntry.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TUnionTypeEntry.java
index 5302024..0c7120d 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TUnionTypeEntry.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TUnionTypeEntry.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TUnionTypeEntry implements org.apache.thrift.TBase<TUnionTypeEntry, TUnionTypeEntry._Fields>, java.io.Serializable, Cloneable, Comparable<TUnionTypeEntry> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TUnionTypeEntry");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TUserDefinedTypeEntry.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TUserDefinedTypeEntry.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TUserDefinedTypeEntry.java
index 5b60476..4ae9505 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TUserDefinedTypeEntry.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TUserDefinedTypeEntry.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class TUserDefinedTypeEntry implements org.apache.thrift.TBase<TUserDefinedTypeEntry, TUserDefinedTypeEntry._Fields>, java.io.Serializable, Cloneable, Comparable<TUserDefinedTypeEntry> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TUserDefinedTypeEntry");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-php/TCLIService.php
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-php/TCLIService.php b/service/src/gen/thrift/gen-php/TCLIService.php
index f5dd904..eba62f1 100644
--- a/service/src/gen/thrift/gen-php/TCLIService.php
+++ b/service/src/gen/thrift/gen-php/TCLIService.php
@@ -1,7 +1,6 @@
 <?php
-namespace ;
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-php/ThriftHive.php
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-php/ThriftHive.php b/service/src/gen/thrift/gen-php/ThriftHive.php
index 2c783b6..23dc8fd 100644
--- a/service/src/gen/thrift/gen-php/ThriftHive.php
+++ b/service/src/gen/thrift/gen-php/ThriftHive.php
@@ -1,7 +1,6 @@
 <?php
-namespace ;
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-php/Types.php
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-php/Types.php b/service/src/gen/thrift/gen-php/Types.php
index d8f3c5b..cbf4c29 100644
--- a/service/src/gen/thrift/gen-php/Types.php
+++ b/service/src/gen/thrift/gen-php/Types.php
@@ -1,8 +1,6 @@
 <?php
-namespace ;
-
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-py/TCLIService/TCLIService-remote
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-py/TCLIService/TCLIService-remote b/service/src/gen/thrift/gen-py/TCLIService/TCLIService-remote
index 2bfca56..56f5c5d 100755
--- a/service/src/gen/thrift/gen-py/TCLIService/TCLIService-remote
+++ b/service/src/gen/thrift/gen-py/TCLIService/TCLIService-remote
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-py/TCLIService/TCLIService.py
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-py/TCLIService/TCLIService.py b/service/src/gen/thrift/gen-py/TCLIService/TCLIService.py
index 0067195..ad2d71d 100644
--- a/service/src/gen/thrift/gen-py/TCLIService/TCLIService.py
+++ b/service/src/gen/thrift/gen-py/TCLIService/TCLIService.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #
@@ -7,6 +7,7 @@
 #
 
 from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+import logging
 from ttypes import *
 from thrift.Thrift import TProcessor
 from thrift.transport import TTransport
@@ -188,7 +189,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "OpenSession failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "OpenSession failed: unknown result")
 
   def CloseSession(self, req):
     """
@@ -219,7 +220,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "CloseSession failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "CloseSession failed: unknown result")
 
   def GetInfo(self, req):
     """
@@ -250,7 +251,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetInfo failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetInfo failed: unknown result")
 
   def ExecuteStatement(self, req):
     """
@@ -281,7 +282,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "ExecuteStatement failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "ExecuteStatement failed: unknown result")
 
   def GetTypeInfo(self, req):
     """
@@ -312,7 +313,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetTypeInfo failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetTypeInfo failed: unknown result")
 
   def GetCatalogs(self, req):
     """
@@ -343,7 +344,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetCatalogs failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetCatalogs failed: unknown result")
 
   def GetSchemas(self, req):
     """
@@ -374,7 +375,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetSchemas failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetSchemas failed: unknown result")
 
   def GetTables(self, req):
     """
@@ -405,7 +406,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetTables failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetTables failed: unknown result")
 
   def GetTableTypes(self, req):
     """
@@ -436,7 +437,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetTableTypes failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetTableTypes failed: unknown result")
 
   def GetColumns(self, req):
     """
@@ -467,7 +468,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetColumns failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetColumns failed: unknown result")
 
   def GetFunctions(self, req):
     """
@@ -498,7 +499,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetFunctions failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetFunctions failed: unknown result")
 
   def GetOperationStatus(self, req):
     """
@@ -529,7 +530,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetOperationStatus failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetOperationStatus failed: unknown result")
 
   def CancelOperation(self, req):
     """
@@ -560,7 +561,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "CancelOperation failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "CancelOperation failed: unknown result")
 
   def CloseOperation(self, req):
     """
@@ -591,7 +592,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "CloseOperation failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "CloseOperation failed: unknown result")
 
   def GetResultSetMetadata(self, req):
     """
@@ -622,7 +623,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetResultSetMetadata failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetResultSetMetadata failed: unknown result")
 
   def FetchResults(self, req):
     """
@@ -653,7 +654,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "FetchResults failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "FetchResults failed: unknown result")
 
   def GetDelegationToken(self, req):
     """
@@ -684,7 +685,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetDelegationToken failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetDelegationToken failed: unknown result")
 
   def CancelDelegationToken(self, req):
     """
@@ -715,7 +716,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "CancelDelegationToken failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "CancelDelegationToken failed: unknown result")
 
   def RenewDelegationToken(self, req):
     """
@@ -746,7 +747,7 @@ class Client(Iface):
     iprot.readMessageEnd()
     if result.success is not None:
       return result.success
-    raise TApplicationException(TApplicationException.MISSING_RESULT, "RenewDelegationToken failed: unknown result");
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "RenewDelegationToken failed: unknown result")
 
 
 class Processor(Iface, TProcessor):
@@ -793,8 +794,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = OpenSession_result()
-    result.success = self._handler.OpenSession(args.req)
-    oprot.writeMessageBegin("OpenSession", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.OpenSession(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("OpenSession", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -804,8 +813,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = CloseSession_result()
-    result.success = self._handler.CloseSession(args.req)
-    oprot.writeMessageBegin("CloseSession", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.CloseSession(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("CloseSession", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -815,8 +832,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = GetInfo_result()
-    result.success = self._handler.GetInfo(args.req)
-    oprot.writeMessageBegin("GetInfo", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.GetInfo(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("GetInfo", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -826,8 +851,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = ExecuteStatement_result()
-    result.success = self._handler.ExecuteStatement(args.req)
-    oprot.writeMessageBegin("ExecuteStatement", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.ExecuteStatement(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("ExecuteStatement", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -837,8 +870,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = GetTypeInfo_result()
-    result.success = self._handler.GetTypeInfo(args.req)
-    oprot.writeMessageBegin("GetTypeInfo", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.GetTypeInfo(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("GetTypeInfo", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -848,8 +889,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = GetCatalogs_result()
-    result.success = self._handler.GetCatalogs(args.req)
-    oprot.writeMessageBegin("GetCatalogs", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.GetCatalogs(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("GetCatalogs", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -859,8 +908,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = GetSchemas_result()
-    result.success = self._handler.GetSchemas(args.req)
-    oprot.writeMessageBegin("GetSchemas", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.GetSchemas(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("GetSchemas", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -870,8 +927,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = GetTables_result()
-    result.success = self._handler.GetTables(args.req)
-    oprot.writeMessageBegin("GetTables", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.GetTables(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("GetTables", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -881,8 +946,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = GetTableTypes_result()
-    result.success = self._handler.GetTableTypes(args.req)
-    oprot.writeMessageBegin("GetTableTypes", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.GetTableTypes(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("GetTableTypes", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -892,8 +965,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = GetColumns_result()
-    result.success = self._handler.GetColumns(args.req)
-    oprot.writeMessageBegin("GetColumns", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.GetColumns(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("GetColumns", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -903,8 +984,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = GetFunctions_result()
-    result.success = self._handler.GetFunctions(args.req)
-    oprot.writeMessageBegin("GetFunctions", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.GetFunctions(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("GetFunctions", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -914,8 +1003,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = GetOperationStatus_result()
-    result.success = self._handler.GetOperationStatus(args.req)
-    oprot.writeMessageBegin("GetOperationStatus", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.GetOperationStatus(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("GetOperationStatus", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -925,8 +1022,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = CancelOperation_result()
-    result.success = self._handler.CancelOperation(args.req)
-    oprot.writeMessageBegin("CancelOperation", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.CancelOperation(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("CancelOperation", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -936,8 +1041,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = CloseOperation_result()
-    result.success = self._handler.CloseOperation(args.req)
-    oprot.writeMessageBegin("CloseOperation", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.CloseOperation(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("CloseOperation", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -947,8 +1060,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = GetResultSetMetadata_result()
-    result.success = self._handler.GetResultSetMetadata(args.req)
-    oprot.writeMessageBegin("GetResultSetMetadata", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.GetResultSetMetadata(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("GetResultSetMetadata", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -958,8 +1079,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = FetchResults_result()
-    result.success = self._handler.FetchResults(args.req)
-    oprot.writeMessageBegin("FetchResults", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.FetchResults(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("FetchResults", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -969,8 +1098,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = GetDelegationToken_result()
-    result.success = self._handler.GetDelegationToken(args.req)
-    oprot.writeMessageBegin("GetDelegationToken", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.GetDelegationToken(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("GetDelegationToken", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -980,8 +1117,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = CancelDelegationToken_result()
-    result.success = self._handler.CancelDelegationToken(args.req)
-    oprot.writeMessageBegin("CancelDelegationToken", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.CancelDelegationToken(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("CancelDelegationToken", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
@@ -991,8 +1136,16 @@ class Processor(Iface, TProcessor):
     args.read(iprot)
     iprot.readMessageEnd()
     result = RenewDelegationToken_result()
-    result.success = self._handler.RenewDelegationToken(args.req)
-    oprot.writeMessageBegin("RenewDelegationToken", TMessageType.REPLY, seqid)
+    try:
+      result.success = self._handler.RenewDelegationToken(args.req)
+      msg_type = TMessageType.REPLY
+    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
+      raise
+    except Exception as ex:
+      msg_type = TMessageType.EXCEPTION
+      logging.exception(ex)
+      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
+    oprot.writeMessageBegin("RenewDelegationToken", msg_type, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
     oprot.trans.flush()
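
Note on the hunks above: with Thrift 0.9.3 every generated process_* method now wraps the handler call, so an unexpected exception raised by the handler is logged and reported back to the client as a generic TApplicationException(INTERNAL_ERROR) instead of the reply simply being dropped; transport errors, KeyboardInterrupt and SystemExit are re-raised as before, so a single failing call no longer takes the whole server thread down silently. A minimal, self-contained sketch of the emitted pattern follows; ExampleProcessor, Example_args and Example_result are hypothetical stand-ins for the generated classes shown in the diff above, not names from this patch.

  # Sketch of the 0.9.3-style process_* wrapper (hypothetical class names).
  import logging

  from thrift.Thrift import TMessageType, TApplicationException
  from thrift.transport import TTransport


  class Example_args(object):          # stand-in for a generated *_args struct
    def __init__(self):
      self.req = None

    def read(self, iprot):
      pass                             # the real generated code deserializes fields here


  class Example_result(object):        # stand-in for a generated *_result struct
    def __init__(self):
      self.success = None

    def write(self, oprot):
      pass                             # the real generated code serializes fields here


  class ExampleProcessor(object):
    def __init__(self, handler):
      self._handler = handler

    def process_Example(self, seqid, iprot, oprot):
      args = Example_args()
      args.read(iprot)
      iprot.readMessageEnd()
      result = Example_result()
      try:
        result.success = self._handler.Example(args.req)
        msg_type = TMessageType.REPLY
      except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        raise                          # transport and interpreter-level errors still propagate
      except Exception as ex:
        msg_type = TMessageType.EXCEPTION
        logging.exception(ex)
        result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
      oprot.writeMessageBegin("Example", msg_type, seqid)
      result.write(oprot)
      oprot.writeMessageEnd()
      oprot.trans.flush()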

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-py/TCLIService/constants.py
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-py/TCLIService/constants.py b/service/src/gen/thrift/gen-py/TCLIService/constants.py
index 9fb1a86..c8d4f8f 100644
--- a/service/src/gen/thrift/gen-py/TCLIService/constants.py
+++ b/service/src/gen/thrift/gen-py/TCLIService/constants.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-py/TCLIService/ttypes.py
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-py/TCLIService/ttypes.py b/service/src/gen/thrift/gen-py/TCLIService/ttypes.py
index efee8ef..ef5f5f5 100644
--- a/service/src/gen/thrift/gen-py/TCLIService/ttypes.py
+++ b/service/src/gen/thrift/gen-py/TCLIService/ttypes.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #
@@ -416,12 +416,12 @@ class TTypeQualifierValue:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.i32Value = iprot.readI32();
+          self.i32Value = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.stringValue = iprot.readString();
+          self.stringValue = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -494,7 +494,7 @@ class TTypeQualifiers:
           self.qualifiers = {}
           (_ktype1, _vtype2, _size0 ) = iprot.readMapBegin()
           for _i4 in xrange(_size0):
-            _key5 = iprot.readString();
+            _key5 = iprot.readString()
             _val6 = TTypeQualifierValue()
             _val6.read(iprot)
             self.qualifiers[_key5] = _val6
@@ -572,7 +572,7 @@ class TPrimitiveTypeEntry:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.type = iprot.readI32();
+          self.type = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
@@ -650,7 +650,7 @@ class TArrayTypeEntry:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.objectTypePtr = iprot.readI32();
+          self.objectTypePtr = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -720,12 +720,12 @@ class TMapTypeEntry:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.keyTypePtr = iprot.readI32();
+          self.keyTypePtr = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.valueTypePtr = iprot.readI32();
+          self.valueTypePtr = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -802,8 +802,8 @@ class TStructTypeEntry:
           self.nameToTypePtr = {}
           (_ktype10, _vtype11, _size9 ) = iprot.readMapBegin()
           for _i13 in xrange(_size9):
-            _key14 = iprot.readString();
-            _val15 = iprot.readI32();
+            _key14 = iprot.readString()
+            _val15 = iprot.readI32()
             self.nameToTypePtr[_key14] = _val15
           iprot.readMapEnd()
         else:
@@ -879,8 +879,8 @@ class TUnionTypeEntry:
           self.nameToTypePtr = {}
           (_ktype19, _vtype20, _size18 ) = iprot.readMapBegin()
           for _i22 in xrange(_size18):
-            _key23 = iprot.readString();
-            _val24 = iprot.readI32();
+            _key23 = iprot.readString()
+            _val24 = iprot.readI32()
             self.nameToTypePtr[_key23] = _val24
           iprot.readMapEnd()
         else:
@@ -953,7 +953,7 @@ class TUserDefinedTypeEntry:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.typeClassName = iprot.readString();
+          self.typeClassName = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -1241,7 +1241,7 @@ class TColumnDesc:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.columnName = iprot.readString();
+          self.columnName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
@@ -1252,12 +1252,12 @@ class TColumnDesc:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I32:
-          self.position = iprot.readI32();
+          self.position = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.comment = iprot.readString();
+          self.comment = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -1419,7 +1419,7 @@ class TBoolValue:
         break
       if fid == 1:
         if ftype == TType.BOOL:
-          self.value = iprot.readBool();
+          self.value = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -1484,7 +1484,7 @@ class TByteValue:
         break
       if fid == 1:
         if ftype == TType.BYTE:
-          self.value = iprot.readByte();
+          self.value = iprot.readByte()
         else:
           iprot.skip(ftype)
       else:
@@ -1549,7 +1549,7 @@ class TI16Value:
         break
       if fid == 1:
         if ftype == TType.I16:
-          self.value = iprot.readI16();
+          self.value = iprot.readI16()
         else:
           iprot.skip(ftype)
       else:
@@ -1614,7 +1614,7 @@ class TI32Value:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.value = iprot.readI32();
+          self.value = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -1679,7 +1679,7 @@ class TI64Value:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.value = iprot.readI64();
+          self.value = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -1744,7 +1744,7 @@ class TDoubleValue:
         break
       if fid == 1:
         if ftype == TType.DOUBLE:
-          self.value = iprot.readDouble();
+          self.value = iprot.readDouble()
         else:
           iprot.skip(ftype)
       else:
@@ -1809,7 +1809,7 @@ class TStringValue:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.value = iprot.readString();
+          self.value = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -2106,14 +2106,14 @@ class TBoolColumn:
           self.values = []
           (_etype51, _size48) = iprot.readListBegin()
           for _i52 in xrange(_size48):
-            _elem53 = iprot.readBool();
+            _elem53 = iprot.readBool()
             self.values.append(_elem53)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.nulls = iprot.readString();
+          self.nulls = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -2196,14 +2196,14 @@ class TByteColumn:
           self.values = []
           (_etype58, _size55) = iprot.readListBegin()
           for _i59 in xrange(_size55):
-            _elem60 = iprot.readByte();
+            _elem60 = iprot.readByte()
             self.values.append(_elem60)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.nulls = iprot.readString();
+          self.nulls = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -2286,14 +2286,14 @@ class TI16Column:
           self.values = []
           (_etype65, _size62) = iprot.readListBegin()
           for _i66 in xrange(_size62):
-            _elem67 = iprot.readI16();
+            _elem67 = iprot.readI16()
             self.values.append(_elem67)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.nulls = iprot.readString();
+          self.nulls = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -2376,14 +2376,14 @@ class TI32Column:
           self.values = []
           (_etype72, _size69) = iprot.readListBegin()
           for _i73 in xrange(_size69):
-            _elem74 = iprot.readI32();
+            _elem74 = iprot.readI32()
             self.values.append(_elem74)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.nulls = iprot.readString();
+          self.nulls = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -2466,14 +2466,14 @@ class TI64Column:
           self.values = []
           (_etype79, _size76) = iprot.readListBegin()
           for _i80 in xrange(_size76):
-            _elem81 = iprot.readI64();
+            _elem81 = iprot.readI64()
             self.values.append(_elem81)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.nulls = iprot.readString();
+          self.nulls = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -2556,14 +2556,14 @@ class TDoubleColumn:
           self.values = []
           (_etype86, _size83) = iprot.readListBegin()
           for _i87 in xrange(_size83):
-            _elem88 = iprot.readDouble();
+            _elem88 = iprot.readDouble()
             self.values.append(_elem88)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.nulls = iprot.readString();
+          self.nulls = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -2646,14 +2646,14 @@ class TStringColumn:
           self.values = []
           (_etype93, _size90) = iprot.readListBegin()
           for _i94 in xrange(_size90):
-            _elem95 = iprot.readString();
+            _elem95 = iprot.readString()
             self.values.append(_elem95)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.nulls = iprot.readString();
+          self.nulls = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -2736,14 +2736,14 @@ class TBinaryColumn:
           self.values = []
           (_etype100, _size97) = iprot.readListBegin()
           for _i101 in xrange(_size97):
-            _elem102 = iprot.readString();
+            _elem102 = iprot.readString()
             self.values.append(_elem102)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.nulls = iprot.readString();
+          self.nulls = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -2990,7 +2990,7 @@ class TRowSet:
         break
       if fid == 1:
         if ftype == TType.I64:
-          self.startRowOffset = iprot.readI64();
+          self.startRowOffset = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 2:
@@ -3109,7 +3109,7 @@ class TStatus:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.statusCode = iprot.readI32();
+          self.statusCode = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
@@ -3117,24 +3117,24 @@ class TStatus:
           self.infoMessages = []
           (_etype121, _size118) = iprot.readListBegin()
           for _i122 in xrange(_size118):
-            _elem123 = iprot.readString();
+            _elem123 = iprot.readString()
             self.infoMessages.append(_elem123)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.sqlState = iprot.readString();
+          self.sqlState = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I32:
-          self.errorCode = iprot.readI32();
+          self.errorCode = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.STRING:
-          self.errorMessage = iprot.readString();
+          self.errorMessage = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -3227,12 +3227,12 @@ class THandleIdentifier:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.guid = iprot.readString();
+          self.guid = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.secret = iprot.readString();
+          self.secret = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -3389,17 +3389,17 @@ class TOperationHandle:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.operationType = iprot.readI32();
+          self.operationType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.BOOL:
-          self.hasResultSet = iprot.readBool();
+          self.hasResultSet = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.DOUBLE:
-          self.modifiedRowCount = iprot.readDouble();
+          self.modifiedRowCount = iprot.readDouble()
         else:
           iprot.skip(ftype)
       else:
@@ -3494,17 +3494,17 @@ class TOpenSessionReq:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.client_protocol = iprot.readI32();
+          self.client_protocol = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.username = iprot.readString();
+          self.username = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.password = iprot.readString();
+          self.password = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
@@ -3512,8 +3512,8 @@ class TOpenSessionReq:
           self.configuration = {}
           (_ktype126, _vtype127, _size125 ) = iprot.readMapBegin()
           for _i129 in xrange(_size125):
-            _key130 = iprot.readString();
-            _val131 = iprot.readString();
+            _key130 = iprot.readString()
+            _val131 = iprot.readString()
             self.configuration[_key130] = _val131
           iprot.readMapEnd()
         else:
@@ -3616,7 +3616,7 @@ class TOpenSessionResp:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.serverProtocolVersion = iprot.readI32();
+          self.serverProtocolVersion = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -3630,8 +3630,8 @@ class TOpenSessionResp:
           self.configuration = {}
           (_ktype135, _vtype136, _size134 ) = iprot.readMapBegin()
           for _i138 in xrange(_size134):
-            _key139 = iprot.readString();
-            _val140 = iprot.readString();
+            _key139 = iprot.readString()
+            _val140 = iprot.readString()
             self.configuration[_key139] = _val140
           iprot.readMapEnd()
         else:
@@ -3872,32 +3872,32 @@ class TGetInfoValue:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.stringValue = iprot.readString();
+          self.stringValue = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I16:
-          self.smallIntValue = iprot.readI16();
+          self.smallIntValue = iprot.readI16()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I32:
-          self.integerBitmask = iprot.readI32();
+          self.integerBitmask = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I32:
-          self.integerFlag = iprot.readI32();
+          self.integerFlag = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.I32:
-          self.binaryValue = iprot.readI32();
+          self.binaryValue = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.I64:
-          self.lenValue = iprot.readI64();
+          self.lenValue = iprot.readI64()
         else:
           iprot.skip(ftype)
       else:
@@ -3996,7 +3996,7 @@ class TGetInfoReq:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.infoType = iprot.readI32();
+          self.infoType = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -4169,7 +4169,7 @@ class TExecuteStatementReq:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.statement = iprot.readString();
+          self.statement = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -4177,15 +4177,15 @@ class TExecuteStatementReq:
           self.confOverlay = {}
           (_ktype144, _vtype145, _size143 ) = iprot.readMapBegin()
           for _i147 in xrange(_size143):
-            _key148 = iprot.readString();
-            _val149 = iprot.readString();
+            _key148 = iprot.readString()
+            _val149 = iprot.readString()
             self.confOverlay[_key148] = _val149
           iprot.readMapEnd()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.BOOL:
-          self.runAsync = iprot.readBool();
+          self.runAsync = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -4667,12 +4667,12 @@ class TGetSchemasReq:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.catalogName = iprot.readString();
+          self.catalogName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.schemaName = iprot.readString();
+          self.schemaName = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -4849,17 +4849,17 @@ class TGetTablesReq:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.catalogName = iprot.readString();
+          self.catalogName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.schemaName = iprot.readString();
+          self.schemaName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.tableName = iprot.readString();
+          self.tableName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 5:
@@ -4867,7 +4867,7 @@ class TGetTablesReq:
           self.tableTypes = []
           (_etype155, _size152) = iprot.readListBegin()
           for _i156 in xrange(_size152):
-            _elem157 = iprot.readString();
+            _elem157 = iprot.readString()
             self.tableTypes.append(_elem157)
           iprot.readListEnd()
         else:
@@ -5209,22 +5209,22 @@ class TGetColumnsReq:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.catalogName = iprot.readString();
+          self.catalogName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.schemaName = iprot.readString();
+          self.schemaName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.tableName = iprot.readString();
+          self.tableName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.STRING:
-          self.columnName = iprot.readString();
+          self.columnName = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -5408,17 +5408,17 @@ class TGetFunctionsReq:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.catalogName = iprot.readString();
+          self.catalogName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.schemaName = iprot.readString();
+          self.schemaName = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.STRING:
-          self.functionName = iprot.readString();
+          self.functionName = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -5670,22 +5670,22 @@ class TGetOperationStatusResp:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.operationState = iprot.readI32();
+          self.operationState = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.sqlState = iprot.readString();
+          self.sqlState = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I32:
-          self.errorCode = iprot.readI32();
+          self.errorCode = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.STRING:
-          self.errorMessage = iprot.readString();
+          self.errorMessage = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -6209,17 +6209,17 @@ class TFetchResultsReq:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.orientation = iprot.readI32();
+          self.orientation = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I64:
-          self.maxRows = iprot.readI64();
+          self.maxRows = iprot.readI64()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.I16:
-          self.fetchType = iprot.readI16();
+          self.fetchType = iprot.readI16()
         else:
           iprot.skip(ftype)
       else:
@@ -6317,7 +6317,7 @@ class TFetchResultsResp:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.BOOL:
-          self.hasMoreRows = iprot.readBool();
+          self.hasMoreRows = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -6412,12 +6412,12 @@ class TGetDelegationTokenReq:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.owner = iprot.readString();
+          self.owner = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.STRING:
-          self.renewer = iprot.readString();
+          self.renewer = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -6507,7 +6507,7 @@ class TGetDelegationTokenResp:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.delegationToken = iprot.readString();
+          self.delegationToken = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -6588,7 +6588,7 @@ class TCancelDelegationTokenReq:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.delegationToken = iprot.readString();
+          self.delegationToken = iprot.readString()
         else:
           iprot.skip(ftype)
       else:
@@ -6739,7 +6739,7 @@ class TRenewDelegationTokenReq:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.delegationToken = iprot.readString();
+          self.delegationToken = iprot.readString()
         else:
           iprot.skip(ftype)
       else:

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-py/hive_service/ThriftHive-remote
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-py/hive_service/ThriftHive-remote b/service/src/gen/thrift/gen-py/hive_service/ThriftHive-remote
index 54d59a8..e167d5b 100755
--- a/service/src/gen/thrift/gen-py/hive_service/ThriftHive-remote
+++ b/service/src/gen/thrift/gen-py/hive_service/ThriftHive-remote
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #


[24/55] [abbrv] hive git commit: HIVE-12253 : revert HIVE-12061 (Sergey Shelukhin, reviewed by Prasanth Jayachandran)

Posted by xu...@apache.org.
HIVE-12253 : revert HIVE-12061 (Sergey Shelukhin, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3e0d87f8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3e0d87f8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3e0d87f8

Branch: refs/heads/spark
Commit: 3e0d87f8114508916c4268bf3317ba3da5523def
Parents: 2653537
Author: Sergey Shelukhin <se...@apache.org>
Authored: Fri Oct 23 16:42:16 2015 -0700
Committer: Sergey Shelukhin <se...@apache.org>
Committed: Fri Oct 23 16:49:14 2015 -0700

----------------------------------------------------------------------
 metastore/if/hive_metastore.thrift              |    8 +-
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp  | 1744 +++++++++---------
 .../gen/thrift/gen-cpp/hive_metastore_types.cpp |  402 ++--
 .../gen/thrift/gen-cpp/hive_metastore_types.h   |   20 +-
 .../metastore/api/FileMetadataExprType.java     |   42 -
 .../api/GetFileMetadataByExprRequest.java       |  126 +-
 .../src/gen/thrift/gen-php/metastore/Types.php  |   30 -
 .../gen/thrift/gen-py/hive_metastore/ttypes.py  |   26 +-
 .../gen/thrift/gen-rb/hive_metastore_types.rb   |   13 +-
 .../hive/metastore/FileMetadataHandler.java     |   30 -
 .../hadoop/hive/metastore/HiveMetaStore.java    |   15 +-
 .../hadoop/hive/metastore/ObjectStore.java      |    3 +-
 .../apache/hadoop/hive/metastore/RawStore.java  |    8 +-
 .../filemeta/OrcFileMetadataHandler.java        |   63 -
 .../hive/metastore/hbase/HBaseReadWrite.java    |    2 +-
 .../hadoop/hive/metastore/hbase/HBaseStore.java |   41 +-
 .../DummyRawStoreControlledCommit.java          |    3 +-
 .../DummyRawStoreForJdoConnection.java          |    3 +-
 18 files changed, 1095 insertions(+), 1484 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/if/hive_metastore.thrift
----------------------------------------------------------------------
diff --git a/metastore/if/hive_metastore.thrift b/metastore/if/hive_metastore.thrift
index 3e30f56..751cebe 100755
--- a/metastore/if/hive_metastore.thrift
+++ b/metastore/if/hive_metastore.thrift
@@ -725,17 +725,11 @@ struct GetFileMetadataByExprResult {
   2: required bool isSupported
 }
 
-enum FileMetadataExprType {
-  ORC_SARG = 1
-}
-
-
 // Request type for get_file_metadata_by_expr
 struct GetFileMetadataByExprRequest {
   1: required list<i64> fileIds,
   2: required binary expr,
-  3: optional bool doGetFooters,
-  4: optional FileMetadataExprType type
+  3: optional bool doGetFooters
 }
 
 // Return type for get_file_metadata
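
For context, after this revert GetFileMetadataByExprRequest carries only the three fields shown above. A rough sketch of building the request with the generated Python bindings follows; this is a sketch only — the field names come from the struct definition above, while the module path and all values are placeholders/assumptions, not part of this patch.

  # Sketch: assumes the generated hive_metastore Python bindings are importable.
  from hive_metastore.ttypes import GetFileMetadataByExprRequest

  req = GetFileMetadataByExprRequest(
      fileIds=[1001, 1002],    # hypothetical file IDs
      expr=b'\x00\x01',        # serialized filter expression (placeholder bytes)
      doGetFooters=True)       # optional: also return the cached footers
  # The optional 'type' field (FileMetadataExprType) no longer exists after this revert.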


[32/55] [abbrv] hive git commit: HIVE-12234: Beeline quit tries to connect again if no existing connections (Ferdinand Xu, reviewed by Szehon Ho)

Posted by xu...@apache.org.
HIVE-12234: Beeline quit tries to connect again if no existing connections (Ferdinand Xu, reviewed by Szehon Ho)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9ea51d12
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9ea51d12
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9ea51d12

Branch: refs/heads/spark
Commit: 9ea51d122bcab987a5eca4e8244b1d121b9d8041
Parents: 48a1e1f
Author: Ferdinand Xu <ch...@intel.com>
Authored: Mon Oct 26 00:48:49 2015 -0400
Committer: Ferdinand Xu <ch...@intel.com>
Committed: Mon Oct 26 00:48:49 2015 -0400

----------------------------------------------------------------------
 beeline/src/java/org/apache/hive/beeline/Commands.java         | 6 +++---
 .../src/java/org/apache/hive/beeline/DatabaseConnection.java   | 4 +++-
 2 files changed, 6 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/9ea51d12/beeline/src/java/org/apache/hive/beeline/Commands.java
----------------------------------------------------------------------
diff --git a/beeline/src/java/org/apache/hive/beeline/Commands.java b/beeline/src/java/org/apache/hive/beeline/Commands.java
index 44dd18b..745f694 100644
--- a/beeline/src/java/org/apache/hive/beeline/Commands.java
+++ b/beeline/src/java/org/apache/hive/beeline/Commands.java
@@ -1226,11 +1226,11 @@ public class Commands {
       return false;
     }
     try {
-      if (beeLine.getDatabaseConnection().getConnection() != null
-          && !(beeLine.getDatabaseConnection().getConnection().isClosed())) {
+      if (beeLine.getDatabaseConnection().getCurrentConnection() != null
+          && !(beeLine.getDatabaseConnection().getCurrentConnection().isClosed())) {
         int index = beeLine.getDatabaseConnections().getIndex();
         beeLine.info(beeLine.loc("closing", index, beeLine.getDatabaseConnection()));
-        beeLine.getDatabaseConnection().getConnection().close();
+        beeLine.getDatabaseConnection().getCurrentConnection().close();
       } else {
         beeLine.info(beeLine.loc("already-closed"));
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/9ea51d12/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java
----------------------------------------------------------------------
diff --git a/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java b/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java
index e349a49..791fec6 100644
--- a/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java
+++ b/beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java
@@ -201,7 +201,6 @@ class DatabaseConnection {
     return null;
   }
 
-
   public Connection getConnection() throws SQLException {
     if (connection != null) {
       return connection;
@@ -210,6 +209,9 @@ class DatabaseConnection {
     return connection;
   }
 
+  public Connection getCurrentConnection() {
+    return connection;
+  }
 
   public void reconnect() throws Exception {
     close();


[08/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/TCLIService.cpp
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-cpp/TCLIService.cpp b/service/src/gen/thrift/gen-cpp/TCLIService.cpp
index 7dae2f8..7e2c028 100644
--- a/service/src/gen/thrift/gen-cpp/TCLIService.cpp
+++ b/service/src/gen/thrift/gen-cpp/TCLIService.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -15,6 +15,7 @@ TCLIService_OpenSession_args::~TCLIService_OpenSession_args() throw() {
 
 uint32_t TCLIService_OpenSession_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -55,7 +56,7 @@ uint32_t TCLIService_OpenSession_args::read(::apache::thrift::protocol::TProtoco
 
 uint32_t TCLIService_OpenSession_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_OpenSession_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -64,7 +65,6 @@ uint32_t TCLIService_OpenSession_args::write(::apache::thrift::protocol::TProtoc
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -75,7 +75,7 @@ TCLIService_OpenSession_pargs::~TCLIService_OpenSession_pargs() throw() {
 
 uint32_t TCLIService_OpenSession_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_OpenSession_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -84,7 +84,6 @@ uint32_t TCLIService_OpenSession_pargs::write(::apache::thrift::protocol::TProto
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -95,6 +94,7 @@ TCLIService_OpenSession_result::~TCLIService_OpenSession_result() throw() {
 
 uint32_t TCLIService_OpenSession_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -156,6 +156,7 @@ TCLIService_OpenSession_presult::~TCLIService_OpenSession_presult() throw() {
 
 uint32_t TCLIService_OpenSession_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -201,6 +202,7 @@ TCLIService_CloseSession_args::~TCLIService_CloseSession_args() throw() {
 
 uint32_t TCLIService_CloseSession_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -241,7 +243,7 @@ uint32_t TCLIService_CloseSession_args::read(::apache::thrift::protocol::TProtoc
 
 uint32_t TCLIService_CloseSession_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_CloseSession_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -250,7 +252,6 @@ uint32_t TCLIService_CloseSession_args::write(::apache::thrift::protocol::TProto
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -261,7 +262,7 @@ TCLIService_CloseSession_pargs::~TCLIService_CloseSession_pargs() throw() {
 
 uint32_t TCLIService_CloseSession_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_CloseSession_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -270,7 +271,6 @@ uint32_t TCLIService_CloseSession_pargs::write(::apache::thrift::protocol::TProt
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -281,6 +281,7 @@ TCLIService_CloseSession_result::~TCLIService_CloseSession_result() throw() {
 
 uint32_t TCLIService_CloseSession_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -342,6 +343,7 @@ TCLIService_CloseSession_presult::~TCLIService_CloseSession_presult() throw() {
 
 uint32_t TCLIService_CloseSession_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -387,6 +389,7 @@ TCLIService_GetInfo_args::~TCLIService_GetInfo_args() throw() {
 
 uint32_t TCLIService_GetInfo_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -427,7 +430,7 @@ uint32_t TCLIService_GetInfo_args::read(::apache::thrift::protocol::TProtocol* i
 
 uint32_t TCLIService_GetInfo_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetInfo_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -436,7 +439,6 @@ uint32_t TCLIService_GetInfo_args::write(::apache::thrift::protocol::TProtocol*
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -447,7 +449,7 @@ TCLIService_GetInfo_pargs::~TCLIService_GetInfo_pargs() throw() {
 
 uint32_t TCLIService_GetInfo_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetInfo_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -456,7 +458,6 @@ uint32_t TCLIService_GetInfo_pargs::write(::apache::thrift::protocol::TProtocol*
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -467,6 +468,7 @@ TCLIService_GetInfo_result::~TCLIService_GetInfo_result() throw() {
 
 uint32_t TCLIService_GetInfo_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -528,6 +530,7 @@ TCLIService_GetInfo_presult::~TCLIService_GetInfo_presult() throw() {
 
 uint32_t TCLIService_GetInfo_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -573,6 +576,7 @@ TCLIService_ExecuteStatement_args::~TCLIService_ExecuteStatement_args() throw()
 
 uint32_t TCLIService_ExecuteStatement_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -613,7 +617,7 @@ uint32_t TCLIService_ExecuteStatement_args::read(::apache::thrift::protocol::TPr
 
 uint32_t TCLIService_ExecuteStatement_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_ExecuteStatement_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -622,7 +626,6 @@ uint32_t TCLIService_ExecuteStatement_args::write(::apache::thrift::protocol::TP
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -633,7 +636,7 @@ TCLIService_ExecuteStatement_pargs::~TCLIService_ExecuteStatement_pargs() throw(
 
 uint32_t TCLIService_ExecuteStatement_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_ExecuteStatement_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -642,7 +645,6 @@ uint32_t TCLIService_ExecuteStatement_pargs::write(::apache::thrift::protocol::T
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -653,6 +655,7 @@ TCLIService_ExecuteStatement_result::~TCLIService_ExecuteStatement_result() thro
 
 uint32_t TCLIService_ExecuteStatement_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -714,6 +717,7 @@ TCLIService_ExecuteStatement_presult::~TCLIService_ExecuteStatement_presult() th
 
 uint32_t TCLIService_ExecuteStatement_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -759,6 +763,7 @@ TCLIService_GetTypeInfo_args::~TCLIService_GetTypeInfo_args() throw() {
 
 uint32_t TCLIService_GetTypeInfo_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -799,7 +804,7 @@ uint32_t TCLIService_GetTypeInfo_args::read(::apache::thrift::protocol::TProtoco
 
 uint32_t TCLIService_GetTypeInfo_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetTypeInfo_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -808,7 +813,6 @@ uint32_t TCLIService_GetTypeInfo_args::write(::apache::thrift::protocol::TProtoc
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -819,7 +823,7 @@ TCLIService_GetTypeInfo_pargs::~TCLIService_GetTypeInfo_pargs() throw() {
 
 uint32_t TCLIService_GetTypeInfo_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetTypeInfo_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -828,7 +832,6 @@ uint32_t TCLIService_GetTypeInfo_pargs::write(::apache::thrift::protocol::TProto
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -839,6 +842,7 @@ TCLIService_GetTypeInfo_result::~TCLIService_GetTypeInfo_result() throw() {
 
 uint32_t TCLIService_GetTypeInfo_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -900,6 +904,7 @@ TCLIService_GetTypeInfo_presult::~TCLIService_GetTypeInfo_presult() throw() {
 
 uint32_t TCLIService_GetTypeInfo_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -945,6 +950,7 @@ TCLIService_GetCatalogs_args::~TCLIService_GetCatalogs_args() throw() {
 
 uint32_t TCLIService_GetCatalogs_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -985,7 +991,7 @@ uint32_t TCLIService_GetCatalogs_args::read(::apache::thrift::protocol::TProtoco
 
 uint32_t TCLIService_GetCatalogs_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetCatalogs_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -994,7 +1000,6 @@ uint32_t TCLIService_GetCatalogs_args::write(::apache::thrift::protocol::TProtoc
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1005,7 +1010,7 @@ TCLIService_GetCatalogs_pargs::~TCLIService_GetCatalogs_pargs() throw() {
 
 uint32_t TCLIService_GetCatalogs_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetCatalogs_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -1014,7 +1019,6 @@ uint32_t TCLIService_GetCatalogs_pargs::write(::apache::thrift::protocol::TProto
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1025,6 +1029,7 @@ TCLIService_GetCatalogs_result::~TCLIService_GetCatalogs_result() throw() {
 
 uint32_t TCLIService_GetCatalogs_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1086,6 +1091,7 @@ TCLIService_GetCatalogs_presult::~TCLIService_GetCatalogs_presult() throw() {
 
 uint32_t TCLIService_GetCatalogs_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1131,6 +1137,7 @@ TCLIService_GetSchemas_args::~TCLIService_GetSchemas_args() throw() {
 
 uint32_t TCLIService_GetSchemas_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1171,7 +1178,7 @@ uint32_t TCLIService_GetSchemas_args::read(::apache::thrift::protocol::TProtocol
 
 uint32_t TCLIService_GetSchemas_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetSchemas_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -1180,7 +1187,6 @@ uint32_t TCLIService_GetSchemas_args::write(::apache::thrift::protocol::TProtoco
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1191,7 +1197,7 @@ TCLIService_GetSchemas_pargs::~TCLIService_GetSchemas_pargs() throw() {
 
 uint32_t TCLIService_GetSchemas_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetSchemas_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -1200,7 +1206,6 @@ uint32_t TCLIService_GetSchemas_pargs::write(::apache::thrift::protocol::TProtoc
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1211,6 +1216,7 @@ TCLIService_GetSchemas_result::~TCLIService_GetSchemas_result() throw() {
 
 uint32_t TCLIService_GetSchemas_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1272,6 +1278,7 @@ TCLIService_GetSchemas_presult::~TCLIService_GetSchemas_presult() throw() {
 
 uint32_t TCLIService_GetSchemas_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1317,6 +1324,7 @@ TCLIService_GetTables_args::~TCLIService_GetTables_args() throw() {
 
 uint32_t TCLIService_GetTables_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1357,7 +1365,7 @@ uint32_t TCLIService_GetTables_args::read(::apache::thrift::protocol::TProtocol*
 
 uint32_t TCLIService_GetTables_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetTables_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -1366,7 +1374,6 @@ uint32_t TCLIService_GetTables_args::write(::apache::thrift::protocol::TProtocol
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1377,7 +1384,7 @@ TCLIService_GetTables_pargs::~TCLIService_GetTables_pargs() throw() {
 
 uint32_t TCLIService_GetTables_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetTables_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -1386,7 +1393,6 @@ uint32_t TCLIService_GetTables_pargs::write(::apache::thrift::protocol::TProtoco
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1397,6 +1403,7 @@ TCLIService_GetTables_result::~TCLIService_GetTables_result() throw() {
 
 uint32_t TCLIService_GetTables_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1458,6 +1465,7 @@ TCLIService_GetTables_presult::~TCLIService_GetTables_presult() throw() {
 
 uint32_t TCLIService_GetTables_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1503,6 +1511,7 @@ TCLIService_GetTableTypes_args::~TCLIService_GetTableTypes_args() throw() {
 
 uint32_t TCLIService_GetTableTypes_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1543,7 +1552,7 @@ uint32_t TCLIService_GetTableTypes_args::read(::apache::thrift::protocol::TProto
 
 uint32_t TCLIService_GetTableTypes_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetTableTypes_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -1552,7 +1561,6 @@ uint32_t TCLIService_GetTableTypes_args::write(::apache::thrift::protocol::TProt
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1563,7 +1571,7 @@ TCLIService_GetTableTypes_pargs::~TCLIService_GetTableTypes_pargs() throw() {
 
 uint32_t TCLIService_GetTableTypes_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetTableTypes_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -1572,7 +1580,6 @@ uint32_t TCLIService_GetTableTypes_pargs::write(::apache::thrift::protocol::TPro
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1583,6 +1590,7 @@ TCLIService_GetTableTypes_result::~TCLIService_GetTableTypes_result() throw() {
 
 uint32_t TCLIService_GetTableTypes_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1644,6 +1652,7 @@ TCLIService_GetTableTypes_presult::~TCLIService_GetTableTypes_presult() throw()
 
 uint32_t TCLIService_GetTableTypes_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1689,6 +1698,7 @@ TCLIService_GetColumns_args::~TCLIService_GetColumns_args() throw() {
 
 uint32_t TCLIService_GetColumns_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1729,7 +1739,7 @@ uint32_t TCLIService_GetColumns_args::read(::apache::thrift::protocol::TProtocol
 
 uint32_t TCLIService_GetColumns_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetColumns_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -1738,7 +1748,6 @@ uint32_t TCLIService_GetColumns_args::write(::apache::thrift::protocol::TProtoco
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1749,7 +1758,7 @@ TCLIService_GetColumns_pargs::~TCLIService_GetColumns_pargs() throw() {
 
 uint32_t TCLIService_GetColumns_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetColumns_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -1758,7 +1767,6 @@ uint32_t TCLIService_GetColumns_pargs::write(::apache::thrift::protocol::TProtoc
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1769,6 +1777,7 @@ TCLIService_GetColumns_result::~TCLIService_GetColumns_result() throw() {
 
 uint32_t TCLIService_GetColumns_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1830,6 +1839,7 @@ TCLIService_GetColumns_presult::~TCLIService_GetColumns_presult() throw() {
 
 uint32_t TCLIService_GetColumns_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1875,6 +1885,7 @@ TCLIService_GetFunctions_args::~TCLIService_GetFunctions_args() throw() {
 
 uint32_t TCLIService_GetFunctions_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1915,7 +1926,7 @@ uint32_t TCLIService_GetFunctions_args::read(::apache::thrift::protocol::TProtoc
 
 uint32_t TCLIService_GetFunctions_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetFunctions_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -1924,7 +1935,6 @@ uint32_t TCLIService_GetFunctions_args::write(::apache::thrift::protocol::TProto
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1935,7 +1945,7 @@ TCLIService_GetFunctions_pargs::~TCLIService_GetFunctions_pargs() throw() {
 
 uint32_t TCLIService_GetFunctions_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetFunctions_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -1944,7 +1954,6 @@ uint32_t TCLIService_GetFunctions_pargs::write(::apache::thrift::protocol::TProt
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1955,6 +1964,7 @@ TCLIService_GetFunctions_result::~TCLIService_GetFunctions_result() throw() {
 
 uint32_t TCLIService_GetFunctions_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2016,6 +2026,7 @@ TCLIService_GetFunctions_presult::~TCLIService_GetFunctions_presult() throw() {
 
 uint32_t TCLIService_GetFunctions_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2061,6 +2072,7 @@ TCLIService_GetOperationStatus_args::~TCLIService_GetOperationStatus_args() thro
 
 uint32_t TCLIService_GetOperationStatus_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2101,7 +2113,7 @@ uint32_t TCLIService_GetOperationStatus_args::read(::apache::thrift::protocol::T
 
 uint32_t TCLIService_GetOperationStatus_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetOperationStatus_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -2110,7 +2122,6 @@ uint32_t TCLIService_GetOperationStatus_args::write(::apache::thrift::protocol::
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2121,7 +2132,7 @@ TCLIService_GetOperationStatus_pargs::~TCLIService_GetOperationStatus_pargs() th
 
 uint32_t TCLIService_GetOperationStatus_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetOperationStatus_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -2130,7 +2141,6 @@ uint32_t TCLIService_GetOperationStatus_pargs::write(::apache::thrift::protocol:
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2141,6 +2151,7 @@ TCLIService_GetOperationStatus_result::~TCLIService_GetOperationStatus_result()
 
 uint32_t TCLIService_GetOperationStatus_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2202,6 +2213,7 @@ TCLIService_GetOperationStatus_presult::~TCLIService_GetOperationStatus_presult(
 
 uint32_t TCLIService_GetOperationStatus_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2247,6 +2259,7 @@ TCLIService_CancelOperation_args::~TCLIService_CancelOperation_args() throw() {
 
 uint32_t TCLIService_CancelOperation_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2287,7 +2300,7 @@ uint32_t TCLIService_CancelOperation_args::read(::apache::thrift::protocol::TPro
 
 uint32_t TCLIService_CancelOperation_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_CancelOperation_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -2296,7 +2309,6 @@ uint32_t TCLIService_CancelOperation_args::write(::apache::thrift::protocol::TPr
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2307,7 +2319,7 @@ TCLIService_CancelOperation_pargs::~TCLIService_CancelOperation_pargs() throw()
 
 uint32_t TCLIService_CancelOperation_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_CancelOperation_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -2316,7 +2328,6 @@ uint32_t TCLIService_CancelOperation_pargs::write(::apache::thrift::protocol::TP
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2327,6 +2338,7 @@ TCLIService_CancelOperation_result::~TCLIService_CancelOperation_result() throw(
 
 uint32_t TCLIService_CancelOperation_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2388,6 +2400,7 @@ TCLIService_CancelOperation_presult::~TCLIService_CancelOperation_presult() thro
 
 uint32_t TCLIService_CancelOperation_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2433,6 +2446,7 @@ TCLIService_CloseOperation_args::~TCLIService_CloseOperation_args() throw() {
 
 uint32_t TCLIService_CloseOperation_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2473,7 +2487,7 @@ uint32_t TCLIService_CloseOperation_args::read(::apache::thrift::protocol::TProt
 
 uint32_t TCLIService_CloseOperation_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_CloseOperation_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -2482,7 +2496,6 @@ uint32_t TCLIService_CloseOperation_args::write(::apache::thrift::protocol::TPro
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2493,7 +2506,7 @@ TCLIService_CloseOperation_pargs::~TCLIService_CloseOperation_pargs() throw() {
 
 uint32_t TCLIService_CloseOperation_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_CloseOperation_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -2502,7 +2515,6 @@ uint32_t TCLIService_CloseOperation_pargs::write(::apache::thrift::protocol::TPr
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2513,6 +2525,7 @@ TCLIService_CloseOperation_result::~TCLIService_CloseOperation_result() throw()
 
 uint32_t TCLIService_CloseOperation_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2574,6 +2587,7 @@ TCLIService_CloseOperation_presult::~TCLIService_CloseOperation_presult() throw(
 
 uint32_t TCLIService_CloseOperation_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2619,6 +2633,7 @@ TCLIService_GetResultSetMetadata_args::~TCLIService_GetResultSetMetadata_args()
 
 uint32_t TCLIService_GetResultSetMetadata_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2659,7 +2674,7 @@ uint32_t TCLIService_GetResultSetMetadata_args::read(::apache::thrift::protocol:
 
 uint32_t TCLIService_GetResultSetMetadata_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetResultSetMetadata_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -2668,7 +2683,6 @@ uint32_t TCLIService_GetResultSetMetadata_args::write(::apache::thrift::protocol
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2679,7 +2693,7 @@ TCLIService_GetResultSetMetadata_pargs::~TCLIService_GetResultSetMetadata_pargs(
 
 uint32_t TCLIService_GetResultSetMetadata_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetResultSetMetadata_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -2688,7 +2702,6 @@ uint32_t TCLIService_GetResultSetMetadata_pargs::write(::apache::thrift::protoco
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2699,6 +2712,7 @@ TCLIService_GetResultSetMetadata_result::~TCLIService_GetResultSetMetadata_resul
 
 uint32_t TCLIService_GetResultSetMetadata_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2760,6 +2774,7 @@ TCLIService_GetResultSetMetadata_presult::~TCLIService_GetResultSetMetadata_pres
 
 uint32_t TCLIService_GetResultSetMetadata_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2805,6 +2820,7 @@ TCLIService_FetchResults_args::~TCLIService_FetchResults_args() throw() {
 
 uint32_t TCLIService_FetchResults_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2845,7 +2861,7 @@ uint32_t TCLIService_FetchResults_args::read(::apache::thrift::protocol::TProtoc
 
 uint32_t TCLIService_FetchResults_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_FetchResults_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -2854,7 +2870,6 @@ uint32_t TCLIService_FetchResults_args::write(::apache::thrift::protocol::TProto
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2865,7 +2880,7 @@ TCLIService_FetchResults_pargs::~TCLIService_FetchResults_pargs() throw() {
 
 uint32_t TCLIService_FetchResults_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_FetchResults_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -2874,7 +2889,6 @@ uint32_t TCLIService_FetchResults_pargs::write(::apache::thrift::protocol::TProt
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2885,6 +2899,7 @@ TCLIService_FetchResults_result::~TCLIService_FetchResults_result() throw() {
 
 uint32_t TCLIService_FetchResults_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2946,6 +2961,7 @@ TCLIService_FetchResults_presult::~TCLIService_FetchResults_presult() throw() {
 
 uint32_t TCLIService_FetchResults_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2991,6 +3007,7 @@ TCLIService_GetDelegationToken_args::~TCLIService_GetDelegationToken_args() thro
 
 uint32_t TCLIService_GetDelegationToken_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3031,7 +3048,7 @@ uint32_t TCLIService_GetDelegationToken_args::read(::apache::thrift::protocol::T
 
 uint32_t TCLIService_GetDelegationToken_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetDelegationToken_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -3040,7 +3057,6 @@ uint32_t TCLIService_GetDelegationToken_args::write(::apache::thrift::protocol::
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3051,7 +3067,7 @@ TCLIService_GetDelegationToken_pargs::~TCLIService_GetDelegationToken_pargs() th
 
 uint32_t TCLIService_GetDelegationToken_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_GetDelegationToken_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -3060,7 +3076,6 @@ uint32_t TCLIService_GetDelegationToken_pargs::write(::apache::thrift::protocol:
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3071,6 +3086,7 @@ TCLIService_GetDelegationToken_result::~TCLIService_GetDelegationToken_result()
 
 uint32_t TCLIService_GetDelegationToken_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3132,6 +3148,7 @@ TCLIService_GetDelegationToken_presult::~TCLIService_GetDelegationToken_presult(
 
 uint32_t TCLIService_GetDelegationToken_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3177,6 +3194,7 @@ TCLIService_CancelDelegationToken_args::~TCLIService_CancelDelegationToken_args(
 
 uint32_t TCLIService_CancelDelegationToken_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3217,7 +3235,7 @@ uint32_t TCLIService_CancelDelegationToken_args::read(::apache::thrift::protocol
 
 uint32_t TCLIService_CancelDelegationToken_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_CancelDelegationToken_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -3226,7 +3244,6 @@ uint32_t TCLIService_CancelDelegationToken_args::write(::apache::thrift::protoco
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3237,7 +3254,7 @@ TCLIService_CancelDelegationToken_pargs::~TCLIService_CancelDelegationToken_parg
 
 uint32_t TCLIService_CancelDelegationToken_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_CancelDelegationToken_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -3246,7 +3263,6 @@ uint32_t TCLIService_CancelDelegationToken_pargs::write(::apache::thrift::protoc
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3257,6 +3273,7 @@ TCLIService_CancelDelegationToken_result::~TCLIService_CancelDelegationToken_res
 
 uint32_t TCLIService_CancelDelegationToken_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3318,6 +3335,7 @@ TCLIService_CancelDelegationToken_presult::~TCLIService_CancelDelegationToken_pr
 
 uint32_t TCLIService_CancelDelegationToken_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3363,6 +3381,7 @@ TCLIService_RenewDelegationToken_args::~TCLIService_RenewDelegationToken_args()
 
 uint32_t TCLIService_RenewDelegationToken_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3403,7 +3422,7 @@ uint32_t TCLIService_RenewDelegationToken_args::read(::apache::thrift::protocol:
 
 uint32_t TCLIService_RenewDelegationToken_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_RenewDelegationToken_args");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -3412,7 +3431,6 @@ uint32_t TCLIService_RenewDelegationToken_args::write(::apache::thrift::protocol
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3423,7 +3441,7 @@ TCLIService_RenewDelegationToken_pargs::~TCLIService_RenewDelegationToken_pargs(
 
 uint32_t TCLIService_RenewDelegationToken_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("TCLIService_RenewDelegationToken_pargs");
 
   xfer += oprot->writeFieldBegin("req", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -3432,7 +3450,6 @@ uint32_t TCLIService_RenewDelegationToken_pargs::write(::apache::thrift::protoco
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3443,6 +3460,7 @@ TCLIService_RenewDelegationToken_result::~TCLIService_RenewDelegationToken_resul
 
 uint32_t TCLIService_RenewDelegationToken_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3504,6 +3522,7 @@ TCLIService_RenewDelegationToken_presult::~TCLIService_RenewDelegationToken_pres
 
 uint32_t TCLIService_RenewDelegationToken_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5695,5 +5714,1602 @@ void TCLIServiceProcessor::process_RenewDelegationToken(int32_t seqid, ::apache:
   ::boost::shared_ptr< ::apache::thrift::TProcessor > processor(new TCLIServiceProcessor(handler));
   return processor;
 }
+
+void TCLIServiceConcurrentClient::OpenSession(TOpenSessionResp& _return, const TOpenSessionReq& req)
+{
+  int32_t seqid = send_OpenSession(req);
+  recv_OpenSession(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_OpenSession(const TOpenSessionReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("OpenSession", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_OpenSession_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_OpenSession(TOpenSessionResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("OpenSession") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_OpenSession_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "OpenSession failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void TCLIServiceConcurrentClient::CloseSession(TCloseSessionResp& _return, const TCloseSessionReq& req)
+{
+  int32_t seqid = send_CloseSession(req);
+  recv_CloseSession(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_CloseSession(const TCloseSessionReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("CloseSession", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_CloseSession_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_CloseSession(TCloseSessionResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("CloseSession") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_CloseSession_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "CloseSession failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void TCLIServiceConcurrentClient::GetInfo(TGetInfoResp& _return, const TGetInfoReq& req)
+{
+  int32_t seqid = send_GetInfo(req);
+  recv_GetInfo(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_GetInfo(const TGetInfoReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("GetInfo", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_GetInfo_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_GetInfo(TGetInfoResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("GetInfo") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_GetInfo_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "GetInfo failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void TCLIServiceConcurrentClient::ExecuteStatement(TExecuteStatementResp& _return, const TExecuteStatementReq& req)
+{
+  int32_t seqid = send_ExecuteStatement(req);
+  recv_ExecuteStatement(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_ExecuteStatement(const TExecuteStatementReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("ExecuteStatement", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_ExecuteStatement_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_ExecuteStatement(TExecuteStatementResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("ExecuteStatement") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_ExecuteStatement_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "ExecuteStatement failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void TCLIServiceConcurrentClient::GetTypeInfo(TGetTypeInfoResp& _return, const TGetTypeInfoReq& req)
+{
+  int32_t seqid = send_GetTypeInfo(req);
+  recv_GetTypeInfo(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_GetTypeInfo(const TGetTypeInfoReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("GetTypeInfo", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_GetTypeInfo_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_GetTypeInfo(TGetTypeInfoResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("GetTypeInfo") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_GetTypeInfo_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "GetTypeInfo failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void TCLIServiceConcurrentClient::GetCatalogs(TGetCatalogsResp& _return, const TGetCatalogsReq& req)
+{
+  int32_t seqid = send_GetCatalogs(req);
+  recv_GetCatalogs(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_GetCatalogs(const TGetCatalogsReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("GetCatalogs", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_GetCatalogs_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_GetCatalogs(TGetCatalogsResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("GetCatalogs") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_GetCatalogs_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "GetCatalogs failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void TCLIServiceConcurrentClient::GetSchemas(TGetSchemasResp& _return, const TGetSchemasReq& req)
+{
+  int32_t seqid = send_GetSchemas(req);
+  recv_GetSchemas(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_GetSchemas(const TGetSchemasReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("GetSchemas", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_GetSchemas_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_GetSchemas(TGetSchemasResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("GetSchemas") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_GetSchemas_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "GetSchemas failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void TCLIServiceConcurrentClient::GetTables(TGetTablesResp& _return, const TGetTablesReq& req)
+{
+  int32_t seqid = send_GetTables(req);
+  recv_GetTables(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_GetTables(const TGetTablesReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("GetTables", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_GetTables_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_GetTables(TGetTablesResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("GetTables") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_GetTables_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "GetTables failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void TCLIServiceConcurrentClient::GetTableTypes(TGetTableTypesResp& _return, const TGetTableTypesReq& req)
+{
+  int32_t seqid = send_GetTableTypes(req);
+  recv_GetTableTypes(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_GetTableTypes(const TGetTableTypesReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("GetTableTypes", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_GetTableTypes_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_GetTableTypes(TGetTableTypesResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("GetTableTypes") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_GetTableTypes_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "GetTableTypes failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void TCLIServiceConcurrentClient::GetColumns(TGetColumnsResp& _return, const TGetColumnsReq& req)
+{
+  int32_t seqid = send_GetColumns(req);
+  recv_GetColumns(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_GetColumns(const TGetColumnsReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("GetColumns", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_GetColumns_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_GetColumns(TGetColumnsResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("GetColumns") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_GetColumns_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "GetColumns failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void TCLIServiceConcurrentClient::GetFunctions(TGetFunctionsResp& _return, const TGetFunctionsReq& req)
+{
+  int32_t seqid = send_GetFunctions(req);
+  recv_GetFunctions(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_GetFunctions(const TGetFunctionsReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("GetFunctions", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_GetFunctions_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_GetFunctions(TGetFunctionsResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("GetFunctions") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_GetFunctions_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "GetFunctions failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void TCLIServiceConcurrentClient::GetOperationStatus(TGetOperationStatusResp& _return, const TGetOperationStatusReq& req)
+{
+  int32_t seqid = send_GetOperationStatus(req);
+  recv_GetOperationStatus(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_GetOperationStatus(const TGetOperationStatusReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("GetOperationStatus", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_GetOperationStatus_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_GetOperationStatus(TGetOperationStatusResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("GetOperationStatus") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_GetOperationStatus_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "GetOperationStatus failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void TCLIServiceConcurrentClient::CancelOperation(TCancelOperationResp& _return, const TCancelOperationReq& req)
+{
+  int32_t seqid = send_CancelOperation(req);
+  recv_CancelOperation(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_CancelOperation(const TCancelOperationReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("CancelOperation", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_CancelOperation_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_CancelOperation(TCancelOperationResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("CancelOperation") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_CancelOperation_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "CancelOperation failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void TCLIServiceConcurrentClient::CloseOperation(TCloseOperationResp& _return, const TCloseOperationReq& req)
+{
+  int32_t seqid = send_CloseOperation(req);
+  recv_CloseOperation(_return, seqid);
+}
+
+int32_t TCLIServiceConcurrentClient::send_CloseOperation(const TCloseOperationReq& req)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("CloseOperation", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  TCLIService_CloseOperation_pargs args;
+  args.req = &req;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void TCLIServiceConcurrentClient::recv_CloseOperation(TCloseOperationResp& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("CloseOperation") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      TCLIService_CloseOperation_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "CloseOperation failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will tempo

<TRUNCATED>
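
The generated concurrent client above lets several callers share one transport by tagging each call with a sequence id: send_* serializes the request under a send sentry and returns the id, while recv_* loops reading frames, parking replies that belong to other callers (updatePending) and waiting (waitForWork) until a frame with its own id shows up. Below is a minimal, self-contained sketch of that demultiplexing idea, not Thrift's generated Java client API: the names SeqIdDemux, Transport, readOneResponse and seqIdOf are illustrative only, and unlike the generated code the sketch keeps the reader role until its own reply arrives instead of yielding it via waitForWork().

import java.util.HashMap;
import java.util.Map;

final class SeqIdDemux<T> {

  // Minimal view of the shared connection: read one framed reply, and tell us whose it is.
  interface Transport<R> {
    R readOneResponse() throws Exception;
    int seqIdOf(R response);
  }

  private int nextSeqId = 0;
  private boolean readerActive = false;                     // at most one thread reads the socket
  private final Map<Integer, T> pending = new HashMap<>();  // replies parked for other callers

  // Sends would be serialized the same way (the role of TConcurrentSendSentry in the C++ code).
  synchronized int generateSeqId() {
    return nextSeqId++;
  }

  T receive(int mySeqId, Transport<T> transport) throws Exception {
    synchronized (this) {
      while (true) {
        T parked = pending.remove(mySeqId);
        if (parked != null) {
          return parked;                                    // someone else already read our reply
        }
        if (!readerActive) {
          readerActive = true;                              // become the reader
          break;
        }
        wait();                                             // another thread is reading; wait to be woken
      }
    }
    try {
      while (true) {
        T response = transport.readOneResponse();           // blocking I/O, done outside the lock
        int seqId = transport.seqIdOf(response);
        if (seqId == mySeqId) {
          return response;
        }
        synchronized (this) {
          pending.put(seqId, response);                     // park a reply meant for another caller
          notifyAll();                                      // wake its owner so it can pick it up
        }
      }
    } finally {
      synchronized (this) {
        readerActive = false;                               // hand the reader role to the next waiter
        notifyAll();
      }
    }
  }
}

With this shape, any number of threads can call receive() with their own sequence ids over the same connection, which is what the TCLIServiceConcurrentClient recv_* methods above rely on.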

[29/55] [abbrv] hive git commit: HIVE-11688 : OrcRawRecordMerger does not close primary reader if not fully consumed (Sudheesh Katkam via Prasanth J)

Posted by xu...@apache.org.
HIVE-11688 : OrcRawRecordMerger does not close primary reader if not fully consumed (Sudheesh Katkam via Prasanth J)

Signed-off-by: Ashutosh Chauhan <ha...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/24ec6bed
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/24ec6bed
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/24ec6bed

Branch: refs/heads/spark
Commit: 24ec6bedaaf683ab7dd4fc28801f3283b5a1744f
Parents: a6da5d1
Author: Sudheesh Katkam <sk...@maprtech.com>
Authored: Fri Aug 28 15:48:00 2015 -0800
Committer: Ashutosh Chauhan <ha...@apache.org>
Committed: Sat Oct 24 18:10:45 2015 -0700

----------------------------------------------------------------------
 .../java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java | 3 +++
 1 file changed, 3 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/24ec6bed/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
index 58b85ef..fb5110d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
@@ -660,6 +660,9 @@ public class OrcRawRecordMerger implements AcidInputFormat.RawReader<OrcStruct>{
 
   @Override
   public void close() throws IOException {
+    if (primary != null) {
+      primary.recordReader.close();
+    }
     for(ReaderPair pair: readers.values()) {
       pair.recordReader.close();
     }
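
The three added lines do exactly what the summary says: when the caller stops reading before the merger is exhausted, the currently active ("primary") reader pair is never released on its own, so close() has to close it explicitly in addition to the pairs still queued in the readers map. A hypothetical, self-contained sketch of that shape follows; RecordMergerSketch and ReaderPair are illustrative stand-ins, not Hive's real classes beyond what the diff above shows.

import java.io.Closeable;
import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;

final class RecordMergerSketch implements Closeable {

  static final class ReaderPair {
    final Closeable recordReader;
    ReaderPair(Closeable recordReader) { this.recordReader = recordReader; }
  }

  private ReaderPair primary;                                       // pair currently being consumed
  private final Map<Integer, ReaderPair> readers = new TreeMap<>(); // pairs waiting their turn

  @Override
  public void close() throws IOException {
    if (primary != null) {
      primary.recordReader.close();   // the HIVE-11688 change: close even when not fully consumed
    }
    for (ReaderPair pair : readers.values()) {
      pair.recordReader.close();
    }
  }
}

A caller that breaks out of its read loop early and then closes the merger (for example via try-with-resources) now releases the primary pair's underlying record reader instead of leaking it.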


[16/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
index a4eb625..53ab272 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -390,9 +390,6 @@ typedef struct _Version__isset {
 class Version {
  public:
 
-  static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
-  static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
   Version(const Version&);
   Version& operator=(const Version&);
   Version() : version(), comments() {
@@ -425,11 +422,17 @@ class Version {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Version& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Version &a, Version &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Version& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _FieldSchema__isset {
   _FieldSchema__isset() : name(false), type(false), comment(false) {}
   bool name :1;
@@ -440,9 +443,6 @@ typedef struct _FieldSchema__isset {
 class FieldSchema {
  public:
 
-  static const char* ascii_fingerprint; // = "AB879940BD15B6B25691265F7384B271";
-  static const uint8_t binary_fingerprint[16]; // = {0xAB,0x87,0x99,0x40,0xBD,0x15,0xB6,0xB2,0x56,0x91,0x26,0x5F,0x73,0x84,0xB2,0x71};
-
   FieldSchema(const FieldSchema&);
   FieldSchema& operator=(const FieldSchema&);
   FieldSchema() : name(), type(), comment() {
@@ -480,11 +480,17 @@ class FieldSchema {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const FieldSchema& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(FieldSchema &a, FieldSchema &b);
 
+inline std::ostream& operator<<(std::ostream& out, const FieldSchema& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Type__isset {
   _Type__isset() : name(false), type1(false), type2(false), fields(false) {}
   bool name :1;
@@ -496,9 +502,6 @@ typedef struct _Type__isset {
 class Type {
  public:
 
-  static const char* ascii_fingerprint; // = "20DF02DE523C27F7066C7BD4D9120842";
-  static const uint8_t binary_fingerprint[16]; // = {0x20,0xDF,0x02,0xDE,0x52,0x3C,0x27,0xF7,0x06,0x6C,0x7B,0xD4,0xD9,0x12,0x08,0x42};
-
   Type(const Type&);
   Type& operator=(const Type&);
   Type() : name(), type1(), type2() {
@@ -547,11 +550,17 @@ class Type {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Type& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Type &a, Type &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Type& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _HiveObjectRef__isset {
   _HiveObjectRef__isset() : objectType(false), dbName(false), objectName(false), partValues(false), columnName(false) {}
   bool objectType :1;
@@ -564,9 +573,6 @@ typedef struct _HiveObjectRef__isset {
 class HiveObjectRef {
  public:
 
-  static const char* ascii_fingerprint; // = "205CD8311CF3AA9EC161BAEF8D7C933C";
-  static const uint8_t binary_fingerprint[16]; // = {0x20,0x5C,0xD8,0x31,0x1C,0xF3,0xAA,0x9E,0xC1,0x61,0xBA,0xEF,0x8D,0x7C,0x93,0x3C};
-
   HiveObjectRef(const HiveObjectRef&);
   HiveObjectRef& operator=(const HiveObjectRef&);
   HiveObjectRef() : objectType((HiveObjectType::type)0), dbName(), objectName(), columnName() {
@@ -614,11 +620,17 @@ class HiveObjectRef {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const HiveObjectRef& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(HiveObjectRef &a, HiveObjectRef &b);
 
+inline std::ostream& operator<<(std::ostream& out, const HiveObjectRef& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _PrivilegeGrantInfo__isset {
   _PrivilegeGrantInfo__isset() : privilege(false), createTime(false), grantor(false), grantorType(false), grantOption(false) {}
   bool privilege :1;
@@ -631,9 +643,6 @@ typedef struct _PrivilegeGrantInfo__isset {
 class PrivilegeGrantInfo {
  public:
 
-  static const char* ascii_fingerprint; // = "A58923AF7294BE492D6F90E07E8CEE1F";
-  static const uint8_t binary_fingerprint[16]; // = {0xA5,0x89,0x23,0xAF,0x72,0x94,0xBE,0x49,0x2D,0x6F,0x90,0xE0,0x7E,0x8C,0xEE,0x1F};
-
   PrivilegeGrantInfo(const PrivilegeGrantInfo&);
   PrivilegeGrantInfo& operator=(const PrivilegeGrantInfo&);
   PrivilegeGrantInfo() : privilege(), createTime(0), grantor(), grantorType((PrincipalType::type)0), grantOption(0) {
@@ -681,11 +690,17 @@ class PrivilegeGrantInfo {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const PrivilegeGrantInfo& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(PrivilegeGrantInfo &a, PrivilegeGrantInfo &b);
 
+inline std::ostream& operator<<(std::ostream& out, const PrivilegeGrantInfo& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _HiveObjectPrivilege__isset {
   _HiveObjectPrivilege__isset() : hiveObject(false), principalName(false), principalType(false), grantInfo(false) {}
   bool hiveObject :1;
@@ -697,9 +712,6 @@ typedef struct _HiveObjectPrivilege__isset {
 class HiveObjectPrivilege {
  public:
 
-  static const char* ascii_fingerprint; // = "83D71969B23BD853E29DBA9D43B29AF8";
-  static const uint8_t binary_fingerprint[16]; // = {0x83,0xD7,0x19,0x69,0xB2,0x3B,0xD8,0x53,0xE2,0x9D,0xBA,0x9D,0x43,0xB2,0x9A,0xF8};
-
   HiveObjectPrivilege(const HiveObjectPrivilege&);
   HiveObjectPrivilege& operator=(const HiveObjectPrivilege&);
   HiveObjectPrivilege() : principalName(), principalType((PrincipalType::type)0) {
@@ -742,11 +754,17 @@ class HiveObjectPrivilege {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const HiveObjectPrivilege& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(HiveObjectPrivilege &a, HiveObjectPrivilege &b);
 
+inline std::ostream& operator<<(std::ostream& out, const HiveObjectPrivilege& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _PrivilegeBag__isset {
   _PrivilegeBag__isset() : privileges(false) {}
   bool privileges :1;
@@ -755,9 +773,6 @@ typedef struct _PrivilegeBag__isset {
 class PrivilegeBag {
  public:
 
-  static const char* ascii_fingerprint; // = "BB89E4701B7B709B046A74C90B1147F2";
-  static const uint8_t binary_fingerprint[16]; // = {0xBB,0x89,0xE4,0x70,0x1B,0x7B,0x70,0x9B,0x04,0x6A,0x74,0xC9,0x0B,0x11,0x47,0xF2};
-
   PrivilegeBag(const PrivilegeBag&);
   PrivilegeBag& operator=(const PrivilegeBag&);
   PrivilegeBag() {
@@ -785,11 +800,17 @@ class PrivilegeBag {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const PrivilegeBag& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(PrivilegeBag &a, PrivilegeBag &b);
 
+inline std::ostream& operator<<(std::ostream& out, const PrivilegeBag& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _PrincipalPrivilegeSet__isset {
   _PrincipalPrivilegeSet__isset() : userPrivileges(false), groupPrivileges(false), rolePrivileges(false) {}
   bool userPrivileges :1;
@@ -800,9 +821,6 @@ typedef struct _PrincipalPrivilegeSet__isset {
 class PrincipalPrivilegeSet {
  public:
 
-  static const char* ascii_fingerprint; // = "08F75D2533906EA87BE34EA640856683";
-  static const uint8_t binary_fingerprint[16]; // = {0x08,0xF7,0x5D,0x25,0x33,0x90,0x6E,0xA8,0x7B,0xE3,0x4E,0xA6,0x40,0x85,0x66,0x83};
-
   PrincipalPrivilegeSet(const PrincipalPrivilegeSet&);
   PrincipalPrivilegeSet& operator=(const PrincipalPrivilegeSet&);
   PrincipalPrivilegeSet() {
@@ -840,11 +858,17 @@ class PrincipalPrivilegeSet {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const PrincipalPrivilegeSet& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(PrincipalPrivilegeSet &a, PrincipalPrivilegeSet &b);
 
+inline std::ostream& operator<<(std::ostream& out, const PrincipalPrivilegeSet& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _GrantRevokePrivilegeRequest__isset {
   _GrantRevokePrivilegeRequest__isset() : requestType(false), privileges(false), revokeGrantOption(false) {}
   bool requestType :1;
@@ -855,9 +879,6 @@ typedef struct _GrantRevokePrivilegeRequest__isset {
 class GrantRevokePrivilegeRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "DF474A3CB526AD40DC0F2C3702F7AA2C";
-  static const uint8_t binary_fingerprint[16]; // = {0xDF,0x47,0x4A,0x3C,0xB5,0x26,0xAD,0x40,0xDC,0x0F,0x2C,0x37,0x02,0xF7,0xAA,0x2C};
-
   GrantRevokePrivilegeRequest(const GrantRevokePrivilegeRequest&);
   GrantRevokePrivilegeRequest& operator=(const GrantRevokePrivilegeRequest&);
   GrantRevokePrivilegeRequest() : requestType((GrantRevokeType::type)0), revokeGrantOption(0) {
@@ -897,11 +918,17 @@ class GrantRevokePrivilegeRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const GrantRevokePrivilegeRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(GrantRevokePrivilegeRequest &a, GrantRevokePrivilegeRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const GrantRevokePrivilegeRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _GrantRevokePrivilegeResponse__isset {
   _GrantRevokePrivilegeResponse__isset() : success(false) {}
   bool success :1;
@@ -910,9 +937,6 @@ typedef struct _GrantRevokePrivilegeResponse__isset {
 class GrantRevokePrivilegeResponse {
  public:
 
-  static const char* ascii_fingerprint; // = "BF054652DEF86253C2BEE7D947F167DD";
-  static const uint8_t binary_fingerprint[16]; // = {0xBF,0x05,0x46,0x52,0xDE,0xF8,0x62,0x53,0xC2,0xBE,0xE7,0xD9,0x47,0xF1,0x67,0xDD};
-
   GrantRevokePrivilegeResponse(const GrantRevokePrivilegeResponse&);
   GrantRevokePrivilegeResponse& operator=(const GrantRevokePrivilegeResponse&);
   GrantRevokePrivilegeResponse() : success(0) {
@@ -942,11 +966,17 @@ class GrantRevokePrivilegeResponse {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const GrantRevokePrivilegeResponse& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(GrantRevokePrivilegeResponse &a, GrantRevokePrivilegeResponse &b);
 
+inline std::ostream& operator<<(std::ostream& out, const GrantRevokePrivilegeResponse& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Role__isset {
   _Role__isset() : roleName(false), createTime(false), ownerName(false) {}
   bool roleName :1;
@@ -957,9 +987,6 @@ typedef struct _Role__isset {
 class Role {
  public:
 
-  static const char* ascii_fingerprint; // = "70563A0628F75DF9555F4D24690B1E26";
-  static const uint8_t binary_fingerprint[16]; // = {0x70,0x56,0x3A,0x06,0x28,0xF7,0x5D,0xF9,0x55,0x5F,0x4D,0x24,0x69,0x0B,0x1E,0x26};
-
   Role(const Role&);
   Role& operator=(const Role&);
   Role() : roleName(), createTime(0), ownerName() {
@@ -997,11 +1024,17 @@ class Role {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Role& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Role &a, Role &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Role& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _RolePrincipalGrant__isset {
   _RolePrincipalGrant__isset() : roleName(false), principalName(false), principalType(false), grantOption(false), grantTime(false), grantorName(false), grantorPrincipalType(false) {}
   bool roleName :1;
@@ -1016,9 +1049,6 @@ typedef struct _RolePrincipalGrant__isset {
 class RolePrincipalGrant {
  public:
 
-  static const char* ascii_fingerprint; // = "899BA3F6214DD1B79D27206BA857C772";
-  static const uint8_t binary_fingerprint[16]; // = {0x89,0x9B,0xA3,0xF6,0x21,0x4D,0xD1,0xB7,0x9D,0x27,0x20,0x6B,0xA8,0x57,0xC7,0x72};
-
   RolePrincipalGrant(const RolePrincipalGrant&);
   RolePrincipalGrant& operator=(const RolePrincipalGrant&);
   RolePrincipalGrant() : roleName(), principalName(), principalType((PrincipalType::type)0), grantOption(0), grantTime(0), grantorName(), grantorPrincipalType((PrincipalType::type)0) {
@@ -1076,18 +1106,21 @@ class RolePrincipalGrant {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const RolePrincipalGrant& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(RolePrincipalGrant &a, RolePrincipalGrant &b);
 
+inline std::ostream& operator<<(std::ostream& out, const RolePrincipalGrant& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class GetRoleGrantsForPrincipalRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "D6FD826D949221396F4FFC3ECCD3D192";
-  static const uint8_t binary_fingerprint[16]; // = {0xD6,0xFD,0x82,0x6D,0x94,0x92,0x21,0x39,0x6F,0x4F,0xFC,0x3E,0xCC,0xD3,0xD1,0x92};
-
   GetRoleGrantsForPrincipalRequest(const GetRoleGrantsForPrincipalRequest&);
   GetRoleGrantsForPrincipalRequest& operator=(const GetRoleGrantsForPrincipalRequest&);
   GetRoleGrantsForPrincipalRequest() : principal_name(), principal_type((PrincipalType::type)0) {
@@ -1118,18 +1151,21 @@ class GetRoleGrantsForPrincipalRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const GetRoleGrantsForPrincipalRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(GetRoleGrantsForPrincipalRequest &a, GetRoleGrantsForPrincipalRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const GetRoleGrantsForPrincipalRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class GetRoleGrantsForPrincipalResponse {
  public:
 
-  static const char* ascii_fingerprint; // = "5926B4B3541A62E17663820C7E3BE690";
-  static const uint8_t binary_fingerprint[16]; // = {0x59,0x26,0xB4,0xB3,0x54,0x1A,0x62,0xE1,0x76,0x63,0x82,0x0C,0x7E,0x3B,0xE6,0x90};
-
   GetRoleGrantsForPrincipalResponse(const GetRoleGrantsForPrincipalResponse&);
   GetRoleGrantsForPrincipalResponse& operator=(const GetRoleGrantsForPrincipalResponse&);
   GetRoleGrantsForPrincipalResponse() {
@@ -1155,18 +1191,21 @@ class GetRoleGrantsForPrincipalResponse {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const GetRoleGrantsForPrincipalResponse& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(GetRoleGrantsForPrincipalResponse &a, GetRoleGrantsForPrincipalResponse &b);
 
+inline std::ostream& operator<<(std::ostream& out, const GetRoleGrantsForPrincipalResponse& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class GetPrincipalsInRoleRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
   GetPrincipalsInRoleRequest(const GetPrincipalsInRoleRequest&);
   GetPrincipalsInRoleRequest& operator=(const GetPrincipalsInRoleRequest&);
   GetPrincipalsInRoleRequest() : roleName() {
@@ -1192,18 +1231,21 @@ class GetPrincipalsInRoleRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const GetPrincipalsInRoleRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(GetPrincipalsInRoleRequest &a, GetPrincipalsInRoleRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const GetPrincipalsInRoleRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class GetPrincipalsInRoleResponse {
  public:
 
-  static const char* ascii_fingerprint; // = "5926B4B3541A62E17663820C7E3BE690";
-  static const uint8_t binary_fingerprint[16]; // = {0x59,0x26,0xB4,0xB3,0x54,0x1A,0x62,0xE1,0x76,0x63,0x82,0x0C,0x7E,0x3B,0xE6,0x90};
-
   GetPrincipalsInRoleResponse(const GetPrincipalsInRoleResponse&);
   GetPrincipalsInRoleResponse& operator=(const GetPrincipalsInRoleResponse&);
   GetPrincipalsInRoleResponse() {
@@ -1229,11 +1271,17 @@ class GetPrincipalsInRoleResponse {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const GetPrincipalsInRoleResponse& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(GetPrincipalsInRoleResponse &a, GetPrincipalsInRoleResponse &b);
 
+inline std::ostream& operator<<(std::ostream& out, const GetPrincipalsInRoleResponse& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _GrantRevokeRoleRequest__isset {
   _GrantRevokeRoleRequest__isset() : requestType(false), roleName(false), principalName(false), principalType(false), grantor(false), grantorType(false), grantOption(false) {}
   bool requestType :1;
@@ -1248,9 +1296,6 @@ typedef struct _GrantRevokeRoleRequest__isset {
 class GrantRevokeRoleRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "907DEA796F2BA7AF76DC2566E75FAEE7";
-  static const uint8_t binary_fingerprint[16]; // = {0x90,0x7D,0xEA,0x79,0x6F,0x2B,0xA7,0xAF,0x76,0xDC,0x25,0x66,0xE7,0x5F,0xAE,0xE7};
-
   GrantRevokeRoleRequest(const GrantRevokeRoleRequest&);
   GrantRevokeRoleRequest& operator=(const GrantRevokeRoleRequest&);
   GrantRevokeRoleRequest() : requestType((GrantRevokeType::type)0), roleName(), principalName(), principalType((PrincipalType::type)0), grantor(), grantorType((PrincipalType::type)0), grantOption(0) {
@@ -1314,11 +1359,17 @@ class GrantRevokeRoleRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const GrantRevokeRoleRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(GrantRevokeRoleRequest &a, GrantRevokeRoleRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const GrantRevokeRoleRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _GrantRevokeRoleResponse__isset {
   _GrantRevokeRoleResponse__isset() : success(false) {}
   bool success :1;
@@ -1327,9 +1378,6 @@ typedef struct _GrantRevokeRoleResponse__isset {
 class GrantRevokeRoleResponse {
  public:
 
-  static const char* ascii_fingerprint; // = "BF054652DEF86253C2BEE7D947F167DD";
-  static const uint8_t binary_fingerprint[16]; // = {0xBF,0x05,0x46,0x52,0xDE,0xF8,0x62,0x53,0xC2,0xBE,0xE7,0xD9,0x47,0xF1,0x67,0xDD};
-
   GrantRevokeRoleResponse(const GrantRevokeRoleResponse&);
   GrantRevokeRoleResponse& operator=(const GrantRevokeRoleResponse&);
   GrantRevokeRoleResponse() : success(0) {
@@ -1359,11 +1407,17 @@ class GrantRevokeRoleResponse {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const GrantRevokeRoleResponse& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(GrantRevokeRoleResponse &a, GrantRevokeRoleResponse &b);
 
+inline std::ostream& operator<<(std::ostream& out, const GrantRevokeRoleResponse& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Database__isset {
   _Database__isset() : name(false), description(false), locationUri(false), parameters(false), privileges(false), ownerName(false), ownerType(false) {}
   bool name :1;
@@ -1378,9 +1432,6 @@ typedef struct _Database__isset {
 class Database {
  public:
 
-  static const char* ascii_fingerprint; // = "553495CAE243A1C583D5C3DD990AED53";
-  static const uint8_t binary_fingerprint[16]; // = {0x55,0x34,0x95,0xCA,0xE2,0x43,0xA1,0xC5,0x83,0xD5,0xC3,0xDD,0x99,0x0A,0xED,0x53};
-
   Database(const Database&);
   Database& operator=(const Database&);
   Database() : name(), description(), locationUri(), ownerName(), ownerType((PrincipalType::type)0) {
@@ -1444,11 +1495,17 @@ class Database {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Database& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Database &a, Database &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Database& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _SerDeInfo__isset {
   _SerDeInfo__isset() : name(false), serializationLib(false), parameters(false) {}
   bool name :1;
@@ -1459,9 +1516,6 @@ typedef struct _SerDeInfo__isset {
 class SerDeInfo {
  public:
 
-  static const char* ascii_fingerprint; // = "B1021C32A35A2AEFCD2F57A5424159A7";
-  static const uint8_t binary_fingerprint[16]; // = {0xB1,0x02,0x1C,0x32,0xA3,0x5A,0x2A,0xEF,0xCD,0x2F,0x57,0xA5,0x42,0x41,0x59,0xA7};
-
   SerDeInfo(const SerDeInfo&);
   SerDeInfo& operator=(const SerDeInfo&);
   SerDeInfo() : name(), serializationLib() {
@@ -1499,11 +1553,17 @@ class SerDeInfo {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const SerDeInfo& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(SerDeInfo &a, SerDeInfo &b);
 
+inline std::ostream& operator<<(std::ostream& out, const SerDeInfo& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Order__isset {
   _Order__isset() : col(false), order(false) {}
   bool col :1;
@@ -1513,9 +1573,6 @@ typedef struct _Order__isset {
 class Order {
  public:
 
-  static const char* ascii_fingerprint; // = "EEBC915CE44901401D881E6091423036";
-  static const uint8_t binary_fingerprint[16]; // = {0xEE,0xBC,0x91,0x5C,0xE4,0x49,0x01,0x40,0x1D,0x88,0x1E,0x60,0x91,0x42,0x30,0x36};
-
   Order(const Order&);
   Order& operator=(const Order&);
   Order() : col(), order(0) {
@@ -1548,11 +1605,17 @@ class Order {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Order& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Order &a, Order &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Order& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _SkewedInfo__isset {
   _SkewedInfo__isset() : skewedColNames(false), skewedColValues(false), skewedColValueLocationMaps(false) {}
   bool skewedColNames :1;
@@ -1563,9 +1626,6 @@ typedef struct _SkewedInfo__isset {
 class SkewedInfo {
  public:
 
-  static const char* ascii_fingerprint; // = "4BF2ED84BC3C3EB297A2AE2FA8427EB1";
-  static const uint8_t binary_fingerprint[16]; // = {0x4B,0xF2,0xED,0x84,0xBC,0x3C,0x3E,0xB2,0x97,0xA2,0xAE,0x2F,0xA8,0x42,0x7E,0xB1};
-
   SkewedInfo(const SkewedInfo&);
   SkewedInfo& operator=(const SkewedInfo&);
   SkewedInfo() {
@@ -1603,11 +1663,17 @@ class SkewedInfo {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const SkewedInfo& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(SkewedInfo &a, SkewedInfo &b);
 
+inline std::ostream& operator<<(std::ostream& out, const SkewedInfo& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _StorageDescriptor__isset {
   _StorageDescriptor__isset() : cols(false), location(false), inputFormat(false), outputFormat(false), compressed(false), numBuckets(false), serdeInfo(false), bucketCols(false), sortCols(false), parameters(false), skewedInfo(false), storedAsSubDirectories(false) {}
   bool cols :1;
@@ -1627,9 +1693,6 @@ typedef struct _StorageDescriptor__isset {
 class StorageDescriptor {
  public:
 
-  static const char* ascii_fingerprint; // = "CA8C9AA5FE4C32643757D8639CEF0CD7";
-  static const uint8_t binary_fingerprint[16]; // = {0xCA,0x8C,0x9A,0xA5,0xFE,0x4C,0x32,0x64,0x37,0x57,0xD8,0x63,0x9C,0xEF,0x0C,0xD7};
-
   StorageDescriptor(const StorageDescriptor&);
   StorageDescriptor& operator=(const StorageDescriptor&);
   StorageDescriptor() : location(), inputFormat(), outputFormat(), compressed(0), numBuckets(0), storedAsSubDirectories(0) {
@@ -1716,11 +1779,17 @@ class StorageDescriptor {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const StorageDescriptor& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(StorageDescriptor &a, StorageDescriptor &b);
 
+inline std::ostream& operator<<(std::ostream& out, const StorageDescriptor& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Table__isset {
   _Table__isset() : tableName(false), dbName(false), owner(false), createTime(false), lastAccessTime(false), retention(false), sd(false), partitionKeys(false), parameters(false), viewOriginalText(false), viewExpandedText(false), tableType(false), privileges(false), temporary(true) {}
   bool tableName :1;
@@ -1742,9 +1811,6 @@ typedef struct _Table__isset {
 class Table {
  public:
 
-  static const char* ascii_fingerprint; // = "29EFB2A5970EF572039E5D94CC78AA85";
-  static const uint8_t binary_fingerprint[16]; // = {0x29,0xEF,0xB2,0xA5,0x97,0x0E,0xF5,0x72,0x03,0x9E,0x5D,0x94,0xCC,0x78,0xAA,0x85};
-
   Table(const Table&);
   Table& operator=(const Table&);
   Table() : tableName(), dbName(), owner(), createTime(0), lastAccessTime(0), retention(0), viewOriginalText(), viewExpandedText(), tableType(), temporary(false) {
@@ -1841,11 +1907,17 @@ class Table {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Table& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Table &a, Table &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Table& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Partition__isset {
   _Partition__isset() : values(false), dbName(false), tableName(false), createTime(false), lastAccessTime(false), sd(false), parameters(false), privileges(false) {}
   bool values :1;
@@ -1861,9 +1933,6 @@ typedef struct _Partition__isset {
 class Partition {
  public:
 
-  static const char* ascii_fingerprint; // = "31A52241B88A426C34087FE38343FF51";
-  static const uint8_t binary_fingerprint[16]; // = {0x31,0xA5,0x22,0x41,0xB8,0x8A,0x42,0x6C,0x34,0x08,0x7F,0xE3,0x83,0x43,0xFF,0x51};
-
   Partition(const Partition&);
   Partition& operator=(const Partition&);
   Partition() : dbName(), tableName(), createTime(0), lastAccessTime(0) {
@@ -1928,11 +1997,17 @@ class Partition {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Partition& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Partition &a, Partition &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Partition& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _PartitionWithoutSD__isset {
   _PartitionWithoutSD__isset() : values(false), createTime(false), lastAccessTime(false), relativePath(false), parameters(false), privileges(false) {}
   bool values :1;
@@ -1946,9 +2021,6 @@ typedef struct _PartitionWithoutSD__isset {
 class PartitionWithoutSD {
  public:
 
-  static const char* ascii_fingerprint; // = "D79FA44499888D0E50B5625E0C536DEA";
-  static const uint8_t binary_fingerprint[16]; // = {0xD7,0x9F,0xA4,0x44,0x99,0x88,0x8D,0x0E,0x50,0xB5,0x62,0x5E,0x0C,0x53,0x6D,0xEA};
-
   PartitionWithoutSD(const PartitionWithoutSD&);
   PartitionWithoutSD& operator=(const PartitionWithoutSD&);
   PartitionWithoutSD() : createTime(0), lastAccessTime(0), relativePath() {
@@ -2003,11 +2075,17 @@ class PartitionWithoutSD {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const PartitionWithoutSD& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(PartitionWithoutSD &a, PartitionWithoutSD &b);
 
+inline std::ostream& operator<<(std::ostream& out, const PartitionWithoutSD& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _PartitionSpecWithSharedSD__isset {
   _PartitionSpecWithSharedSD__isset() : partitions(false), sd(false) {}
   bool partitions :1;
@@ -2017,9 +2095,6 @@ typedef struct _PartitionSpecWithSharedSD__isset {
 class PartitionSpecWithSharedSD {
  public:
 
-  static const char* ascii_fingerprint; // = "7BEE9305B42DCD083FF06BEE6DDC61CF";
-  static const uint8_t binary_fingerprint[16]; // = {0x7B,0xEE,0x93,0x05,0xB4,0x2D,0xCD,0x08,0x3F,0xF0,0x6B,0xEE,0x6D,0xDC,0x61,0xCF};
-
   PartitionSpecWithSharedSD(const PartitionSpecWithSharedSD&);
   PartitionSpecWithSharedSD& operator=(const PartitionSpecWithSharedSD&);
   PartitionSpecWithSharedSD() {
@@ -2052,11 +2127,17 @@ class PartitionSpecWithSharedSD {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const PartitionSpecWithSharedSD& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(PartitionSpecWithSharedSD &a, PartitionSpecWithSharedSD &b);
 
+inline std::ostream& operator<<(std::ostream& out, const PartitionSpecWithSharedSD& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _PartitionListComposingSpec__isset {
   _PartitionListComposingSpec__isset() : partitions(false) {}
   bool partitions :1;
@@ -2065,9 +2146,6 @@ typedef struct _PartitionListComposingSpec__isset {
 class PartitionListComposingSpec {
  public:
 
-  static const char* ascii_fingerprint; // = "A048235CB9A257C8A74E3691BEFE0674";
-  static const uint8_t binary_fingerprint[16]; // = {0xA0,0x48,0x23,0x5C,0xB9,0xA2,0x57,0xC8,0xA7,0x4E,0x36,0x91,0xBE,0xFE,0x06,0x74};
-
   PartitionListComposingSpec(const PartitionListComposingSpec&);
   PartitionListComposingSpec& operator=(const PartitionListComposingSpec&);
   PartitionListComposingSpec() {
@@ -2095,11 +2173,17 @@ class PartitionListComposingSpec {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const PartitionListComposingSpec& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(PartitionListComposingSpec &a, PartitionListComposingSpec &b);
 
+inline std::ostream& operator<<(std::ostream& out, const PartitionListComposingSpec& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _PartitionSpec__isset {
   _PartitionSpec__isset() : dbName(false), tableName(false), rootPath(false), sharedSDPartitionSpec(false), partitionList(false) {}
   bool dbName :1;
@@ -2112,9 +2196,6 @@ typedef struct _PartitionSpec__isset {
 class PartitionSpec {
  public:
 
-  static const char* ascii_fingerprint; // = "C3F548C24D072CF6422F25096143E3E8";
-  static const uint8_t binary_fingerprint[16]; // = {0xC3,0xF5,0x48,0xC2,0x4D,0x07,0x2C,0xF6,0x42,0x2F,0x25,0x09,0x61,0x43,0xE3,0xE8};
-
   PartitionSpec(const PartitionSpec&);
   PartitionSpec& operator=(const PartitionSpec&);
   PartitionSpec() : dbName(), tableName(), rootPath() {
@@ -2166,11 +2247,17 @@ class PartitionSpec {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const PartitionSpec& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(PartitionSpec &a, PartitionSpec &b);
 
+inline std::ostream& operator<<(std::ostream& out, const PartitionSpec& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Index__isset {
   _Index__isset() : indexName(false), indexHandlerClass(false), dbName(false), origTableName(false), createTime(false), lastAccessTime(false), indexTableName(false), sd(false), parameters(false), deferredRebuild(false) {}
   bool indexName :1;
@@ -2188,9 +2275,6 @@ typedef struct _Index__isset {
 class Index {
  public:
 
-  static const char* ascii_fingerprint; // = "09EEF655216AC81802850988D6C470A6";
-  static const uint8_t binary_fingerprint[16]; // = {0x09,0xEE,0xF6,0x55,0x21,0x6A,0xC8,0x18,0x02,0x85,0x09,0x88,0xD6,0xC4,0x70,0xA6};
-
   Index(const Index&);
   Index& operator=(const Index&);
   Index() : indexName(), indexHandlerClass(), dbName(), origTableName(), createTime(0), lastAccessTime(0), indexTableName(), deferredRebuild(0) {
@@ -2263,18 +2347,21 @@ class Index {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Index& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Index &a, Index &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Index& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class BooleanColumnStatsData {
  public:
 
-  static const char* ascii_fingerprint; // = "EA2D65F1E0BB78760205682082304B41";
-  static const uint8_t binary_fingerprint[16]; // = {0xEA,0x2D,0x65,0xF1,0xE0,0xBB,0x78,0x76,0x02,0x05,0x68,0x20,0x82,0x30,0x4B,0x41};
-
   BooleanColumnStatsData(const BooleanColumnStatsData&);
   BooleanColumnStatsData& operator=(const BooleanColumnStatsData&);
   BooleanColumnStatsData() : numTrues(0), numFalses(0), numNulls(0) {
@@ -2310,11 +2397,17 @@ class BooleanColumnStatsData {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const BooleanColumnStatsData& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(BooleanColumnStatsData &a, BooleanColumnStatsData &b);
 
+inline std::ostream& operator<<(std::ostream& out, const BooleanColumnStatsData& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _DoubleColumnStatsData__isset {
   _DoubleColumnStatsData__isset() : lowValue(false), highValue(false) {}
   bool lowValue :1;
@@ -2324,9 +2417,6 @@ typedef struct _DoubleColumnStatsData__isset {
 class DoubleColumnStatsData {
  public:
 
-  static const char* ascii_fingerprint; // = "DA7C011321D74C48396AA002E61A0CBB";
-  static const uint8_t binary_fingerprint[16]; // = {0xDA,0x7C,0x01,0x13,0x21,0xD7,0x4C,0x48,0x39,0x6A,0xA0,0x02,0xE6,0x1A,0x0C,0xBB};
-
   DoubleColumnStatsData(const DoubleColumnStatsData&);
   DoubleColumnStatsData& operator=(const DoubleColumnStatsData&);
   DoubleColumnStatsData() : lowValue(0), highValue(0), numNulls(0), numDVs(0) {
@@ -2373,11 +2463,17 @@ class DoubleColumnStatsData {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const DoubleColumnStatsData& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(DoubleColumnStatsData &a, DoubleColumnStatsData &b);
 
+inline std::ostream& operator<<(std::ostream& out, const DoubleColumnStatsData& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _LongColumnStatsData__isset {
   _LongColumnStatsData__isset() : lowValue(false), highValue(false) {}
   bool lowValue :1;
@@ -2387,9 +2483,6 @@ typedef struct _LongColumnStatsData__isset {
 class LongColumnStatsData {
  public:
 
-  static const char* ascii_fingerprint; // = "E685FC220B24E3B8B93604790DCB9AEA";
-  static const uint8_t binary_fingerprint[16]; // = {0xE6,0x85,0xFC,0x22,0x0B,0x24,0xE3,0xB8,0xB9,0x36,0x04,0x79,0x0D,0xCB,0x9A,0xEA};
-
   LongColumnStatsData(const LongColumnStatsData&);
   LongColumnStatsData& operator=(const LongColumnStatsData&);
   LongColumnStatsData() : lowValue(0), highValue(0), numNulls(0), numDVs(0) {
@@ -2436,18 +2529,21 @@ class LongColumnStatsData {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const LongColumnStatsData& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(LongColumnStatsData &a, LongColumnStatsData &b);
 
+inline std::ostream& operator<<(std::ostream& out, const LongColumnStatsData& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class StringColumnStatsData {
  public:
 
-  static const char* ascii_fingerprint; // = "D017B08C3DF12C3AB98788B2E67DAAB3";
-  static const uint8_t binary_fingerprint[16]; // = {0xD0,0x17,0xB0,0x8C,0x3D,0xF1,0x2C,0x3A,0xB9,0x87,0x88,0xB2,0xE6,0x7D,0xAA,0xB3};
-
   StringColumnStatsData(const StringColumnStatsData&);
   StringColumnStatsData& operator=(const StringColumnStatsData&);
   StringColumnStatsData() : maxColLen(0), avgColLen(0), numNulls(0), numDVs(0) {
@@ -2488,18 +2584,21 @@ class StringColumnStatsData {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const StringColumnStatsData& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(StringColumnStatsData &a, StringColumnStatsData &b);
 
+inline std::ostream& operator<<(std::ostream& out, const StringColumnStatsData& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class BinaryColumnStatsData {
  public:
 
-  static const char* ascii_fingerprint; // = "22B0CB67183FCDB945892B9974518D06";
-  static const uint8_t binary_fingerprint[16]; // = {0x22,0xB0,0xCB,0x67,0x18,0x3F,0xCD,0xB9,0x45,0x89,0x2B,0x99,0x74,0x51,0x8D,0x06};
-
   BinaryColumnStatsData(const BinaryColumnStatsData&);
   BinaryColumnStatsData& operator=(const BinaryColumnStatsData&);
   BinaryColumnStatsData() : maxColLen(0), avgColLen(0), numNulls(0) {
@@ -2535,18 +2634,21 @@ class BinaryColumnStatsData {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const BinaryColumnStatsData& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(BinaryColumnStatsData &a, BinaryColumnStatsData &b);
 
+inline std::ostream& operator<<(std::ostream& out, const BinaryColumnStatsData& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class Decimal {
  public:
 
-  static const char* ascii_fingerprint; // = "C4DDF6759F9B17C5C380806CE743DE8E";
-  static const uint8_t binary_fingerprint[16]; // = {0xC4,0xDD,0xF6,0x75,0x9F,0x9B,0x17,0xC5,0xC3,0x80,0x80,0x6C,0xE7,0x43,0xDE,0x8E};
-
   Decimal(const Decimal&);
   Decimal& operator=(const Decimal&);
   Decimal() : unscaled(), scale(0) {
@@ -2577,11 +2679,17 @@ class Decimal {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Decimal& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Decimal &a, Decimal &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Decimal& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _DecimalColumnStatsData__isset {
   _DecimalColumnStatsData__isset() : lowValue(false), highValue(false) {}
   bool lowValue :1;
@@ -2591,9 +2699,6 @@ typedef struct _DecimalColumnStatsData__isset {
 class DecimalColumnStatsData {
  public:
 
-  static const char* ascii_fingerprint; // = "B6D47E7A28922BFA93FE05E9F1B04748";
-  static const uint8_t binary_fingerprint[16]; // = {0xB6,0xD4,0x7E,0x7A,0x28,0x92,0x2B,0xFA,0x93,0xFE,0x05,0xE9,0xF1,0xB0,0x47,0x48};
-
   DecimalColumnStatsData(const DecimalColumnStatsData&);
   DecimalColumnStatsData& operator=(const DecimalColumnStatsData&);
   DecimalColumnStatsData() : numNulls(0), numDVs(0) {
@@ -2640,18 +2745,21 @@ class DecimalColumnStatsData {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const DecimalColumnStatsData& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(DecimalColumnStatsData &a, DecimalColumnStatsData &b);
 
+inline std::ostream& operator<<(std::ostream& out, const DecimalColumnStatsData& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class Date {
  public:
 
-  static const char* ascii_fingerprint; // = "56A59CE7FFAF82BCA8A19FAACDE4FB75";
-  static const uint8_t binary_fingerprint[16]; // = {0x56,0xA5,0x9C,0xE7,0xFF,0xAF,0x82,0xBC,0xA8,0xA1,0x9F,0xAA,0xCD,0xE4,0xFB,0x75};
-
   Date(const Date&);
   Date& operator=(const Date&);
   Date() : daysSinceEpoch(0) {
@@ -2677,11 +2785,17 @@ class Date {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Date& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Date &a, Date &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Date& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _DateColumnStatsData__isset {
   _DateColumnStatsData__isset() : lowValue(false), highValue(false) {}
   bool lowValue :1;
@@ -2691,9 +2805,6 @@ typedef struct _DateColumnStatsData__isset {
 class DateColumnStatsData {
  public:
 
-  static const char* ascii_fingerprint; // = "D0719F3BBA8248297BB5287552897F59";
-  static const uint8_t binary_fingerprint[16]; // = {0xD0,0x71,0x9F,0x3B,0xBA,0x82,0x48,0x29,0x7B,0xB5,0x28,0x75,0x52,0x89,0x7F,0x59};
-
   DateColumnStatsData(const DateColumnStatsData&);
   DateColumnStatsData& operator=(const DateColumnStatsData&);
   DateColumnStatsData() : numNulls(0), numDVs(0) {
@@ -2740,11 +2851,17 @@ class DateColumnStatsData {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const DateColumnStatsData& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(DateColumnStatsData &a, DateColumnStatsData &b);
 
+inline std::ostream& operator<<(std::ostream& out, const DateColumnStatsData& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _ColumnStatisticsData__isset {
   _ColumnStatisticsData__isset() : booleanStats(false), longStats(false), doubleStats(false), stringStats(false), binaryStats(false), decimalStats(false), dateStats(false) {}
   bool booleanStats :1;
@@ -2759,9 +2876,6 @@ typedef struct _ColumnStatisticsData__isset {
 class ColumnStatisticsData {
  public:
 
-  static const char* ascii_fingerprint; // = "15E449CA15A23E37F2D54C31ACA52106";
-  static const uint8_t binary_fingerprint[16]; // = {0x15,0xE4,0x49,0xCA,0x15,0xA2,0x3E,0x37,0xF2,0xD5,0x4C,0x31,0xAC,0xA5,0x21,0x06};
-
   ColumnStatisticsData(const ColumnStatisticsData&);
   ColumnStatisticsData& operator=(const ColumnStatisticsData&);
   ColumnStatisticsData() {
@@ -2819,18 +2933,21 @@ class ColumnStatisticsData {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ColumnStatisticsData& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(ColumnStatisticsData &a, ColumnStatisticsData &b);
 
+inline std::ostream& operator<<(std::ostream& out, const ColumnStatisticsData& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class ColumnStatisticsObj {
  public:
 
-  static const char* ascii_fingerprint; // = "A82BA819C9FFEDDEBC0D50F6F4E46534";
-  static const uint8_t binary_fingerprint[16]; // = {0xA8,0x2B,0xA8,0x19,0xC9,0xFF,0xED,0xDE,0xBC,0x0D,0x50,0xF6,0xF4,0xE4,0x65,0x34};
-
   ColumnStatisticsObj(const ColumnStatisticsObj&);
   ColumnStatisticsObj& operator=(const ColumnStatisticsObj&);
   ColumnStatisticsObj() : colName(), colType() {
@@ -2866,11 +2983,17 @@ class ColumnStatisticsObj {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ColumnStatisticsObj& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(ColumnStatisticsObj &a, ColumnStatisticsObj &b);
 
+inline std::ostream& operator<<(std::ostream& out, const ColumnStatisticsObj& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _ColumnStatisticsDesc__isset {
   _ColumnStatisticsDesc__isset() : partName(false), lastAnalyzed(false) {}
   bool partName :1;
@@ -2880,9 +3003,6 @@ typedef struct _ColumnStatisticsDesc__isset {
 class ColumnStatisticsDesc {
  public:
 
-  static const char* ascii_fingerprint; // = "261759FF6F8FAB53F941453007FE18CB";
-  static const uint8_t binary_fingerprint[16]; // = {0x26,0x17,0x59,0xFF,0x6F,0x8F,0xAB,0x53,0xF9,0x41,0x45,0x30,0x07,0xFE,0x18,0xCB};
-
   ColumnStatisticsDesc(const ColumnStatisticsDesc&);
   ColumnStatisticsDesc& operator=(const ColumnStatisticsDesc&);
   ColumnStatisticsDesc() : isTblLevel(0), dbName(), tableName(), partName(), lastAnalyzed(0) {
@@ -2934,18 +3054,21 @@ class ColumnStatisticsDesc {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ColumnStatisticsDesc& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(ColumnStatisticsDesc &a, ColumnStatisticsDesc &b);
 
+inline std::ostream& operator<<(std::ostream& out, const ColumnStatisticsDesc& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class ColumnStatistics {
  public:
 
-  static const char* ascii_fingerprint; // = "8A64D0A67FFD3A372726A320B3913D5A";
-  static const uint8_t binary_fingerprint[16]; // = {0x8A,0x64,0xD0,0xA6,0x7F,0xFD,0x3A,0x37,0x27,0x26,0xA3,0x20,0xB3,0x91,0x3D,0x5A};
-
   ColumnStatistics(const ColumnStatistics&);
   ColumnStatistics& operator=(const ColumnStatistics&);
   ColumnStatistics() {
@@ -2976,18 +3099,21 @@ class ColumnStatistics {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ColumnStatistics& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(ColumnStatistics &a, ColumnStatistics &b);
 
+inline std::ostream& operator<<(std::ostream& out, const ColumnStatistics& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class AggrStats {
  public:
 
-  static const char* ascii_fingerprint; // = "1FC765A3AB2954591C14B0D946778213";
-  static const uint8_t binary_fingerprint[16]; // = {0x1F,0xC7,0x65,0xA3,0xAB,0x29,0x54,0x59,0x1C,0x14,0xB0,0xD9,0x46,0x77,0x82,0x13};
-
   AggrStats(const AggrStats&);
   AggrStats& operator=(const AggrStats&);
   AggrStats() : partsFound(0) {
@@ -3018,18 +3144,21 @@ class AggrStats {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const AggrStats& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(AggrStats &a, AggrStats &b);
 
+inline std::ostream& operator<<(std::ostream& out, const AggrStats& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class SetPartitionsStatsRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "8BCBF2CE9E942D0FB7D8F4ED57D8DC27";
-  static const uint8_t binary_fingerprint[16]; // = {0x8B,0xCB,0xF2,0xCE,0x9E,0x94,0x2D,0x0F,0xB7,0xD8,0xF4,0xED,0x57,0xD8,0xDC,0x27};
-
   SetPartitionsStatsRequest(const SetPartitionsStatsRequest&);
   SetPartitionsStatsRequest& operator=(const SetPartitionsStatsRequest&);
   SetPartitionsStatsRequest() {
@@ -3055,11 +3184,17 @@ class SetPartitionsStatsRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const SetPartitionsStatsRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(SetPartitionsStatsRequest &a, SetPartitionsStatsRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const SetPartitionsStatsRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Schema__isset {
   _Schema__isset() : fieldSchemas(false), properties(false) {}
   bool fieldSchemas :1;
@@ -3069,9 +3204,6 @@ typedef struct _Schema__isset {
 class Schema {
  public:
 
-  static const char* ascii_fingerprint; // = "5CFEE46C975F4E2368D905109B8E3B5B";
-  static const uint8_t binary_fingerprint[16]; // = {0x5C,0xFE,0xE4,0x6C,0x97,0x5F,0x4E,0x23,0x68,0xD9,0x05,0x10,0x9B,0x8E,0x3B,0x5B};
-
   Schema(const Schema&);
   Schema& operator=(const Schema&);
   Schema() {
@@ -3104,11 +3236,17 @@ class Schema {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Schema& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Schema &a, Schema &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Schema& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _EnvironmentContext__isset {
   _EnvironmentContext__isset() : properties(false) {}
   bool properties :1;
@@ -3117,9 +3255,6 @@ typedef struct _EnvironmentContext__isset {
 class EnvironmentContext {
  public:
 
-  static const char* ascii_fingerprint; // = "5EA2D527ECA3BA20C77AFC023EE8C05F";
-  static const uint8_t binary_fingerprint[16]; // = {0x5E,0xA2,0xD5,0x27,0xEC,0xA3,0xBA,0x20,0xC7,0x7A,0xFC,0x02,0x3E,0xE8,0xC0,0x5F};
-
   EnvironmentContext(const EnvironmentContext&);
   EnvironmentContext& operator=(const EnvironmentContext&);
   EnvironmentContext() {
@@ -3147,18 +3282,21 @@ class EnvironmentContext {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const EnvironmentContext& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(EnvironmentContext &a, EnvironmentContext &b);
 
+inline std::ostream& operator<<(std::ostream& out, const EnvironmentContext& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class PartitionsByExprResult {
  public:
 
-  static const char* ascii_fingerprint; // = "40B789CC91B508FE36600A14E3F80425";
-  static const uint8_t binary_fingerprint[16]; // = {0x40,0xB7,0x89,0xCC,0x91,0xB5,0x08,0xFE,0x36,0x60,0x0A,0x14,0xE3,0xF8,0x04,0x25};
-
   PartitionsByExprResult(const PartitionsByExprResult&);
   PartitionsByExprResult& operator=(const PartitionsByExprResult&);
   PartitionsByExprResult() : hasUnknownPartitions(0) {
@@ -3189,11 +3327,17 @@ class PartitionsByExprResult {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const PartitionsByExprResult& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(PartitionsByExprResult &a, PartitionsByExprResult &b);
 
+inline std::ostream& operator<<(std::ostream& out, const PartitionsByExprResult& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _PartitionsByExprRequest__isset {
   _PartitionsByExprRequest__isset() : defaultPartitionName(false), maxParts(true) {}
   bool defaultPartitionName :1;
@@ -3203,9 +3347,6 @@ typedef struct _PartitionsByExprRequest__isset {
 class PartitionsByExprRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "835944417A026FE6ABD0DF5A35BF52C5";
-  static const uint8_t binary_fingerprint[16]; // = {0x83,0x59,0x44,0x41,0x7A,0x02,0x6F,0xE6,0xAB,0xD0,0xDF,0x5A,0x35,0xBF,0x52,0xC5};
-
   PartitionsByExprRequest(const PartitionsByExprRequest&);
   PartitionsByExprRequest& operator=(const PartitionsByExprRequest&);
   PartitionsByExprRequest() : dbName(), tblName(), expr(), defaultPartitionName(), maxParts(-1) {
@@ -3257,18 +3398,21 @@ class PartitionsByExprRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const PartitionsByExprRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(PartitionsByExprRequest &a, PartitionsByExprRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const PartitionsByExprRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TableStatsResult {
  public:
 
-  static const char* ascii_fingerprint; // = "2E398BAC949C93A194F1A19CA5074FF8";
-  static const uint8_t binary_fingerprint[16]; // = {0x2E,0x39,0x8B,0xAC,0x94,0x9C,0x93,0xA1,0x94,0xF1,0xA1,0x9C,0xA5,0x07,0x4F,0xF8};
-
   TableStatsResult(const TableStatsResult&);
   TableStatsResult& operator=(const TableStatsResult&);
   TableStatsResult() {
@@ -3294,18 +3438,21 @@ class TableStatsResult {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TableStatsResult& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TableStatsResult &a, TableStatsResult &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TableStatsResult& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class PartitionsStatsResult {
  public:
 
-  static const char* ascii_fingerprint; // = "DD38B4AA7D02E288659AA12CBAAEF33B";
-  static const uint8_t binary_fingerprint[16]; // = {0xDD,0x38,0xB4,0xAA,0x7D,0x02,0xE2,0x88,0x65,0x9A,0xA1,0x2C,0xBA,0xAE,0xF3,0x3B};
-
   PartitionsStatsResult(const PartitionsStatsResult&);
   PartitionsStatsResult& operator=(const PartitionsStatsResult&);
   PartitionsStatsResult() {
@@ -3331,18 +3478,21 @@ class PartitionsStatsResult {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const PartitionsStatsResult& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(PartitionsStatsResult &a, PartitionsStatsResult &b);
 
+inline std::ostream& operator<<(std::ostream& out, const PartitionsStatsResult& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TableStatsRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "8E2AD6401E83558ECFD6A13D74DD0A3F";
-  static const uint8_t binary_fingerprint[16]; // = {0x8E,0x2A,0xD6,0x40,0x1E,0x83,0x55,0x8E,0xCF,0xD6,0xA1,0x3D,0x74,0xDD,0x0A,0x3F};
-
   TableStatsRequest(const TableStatsRequest&);
   TableStatsRequest& operator=(const TableStatsRequest&);
   TableStatsRequest() : dbName(), tblName() {
@@ -3378,18 +3528,21 @@ class TableStatsRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TableStatsRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TableStatsRequest &a, TableStatsRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TableStatsRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class PartitionsStatsRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "5F51D90BC323BCE4B704B7D98EDA0BD4";
-  static const uint8_t binary_fingerprint[16]; // = {0x5F,0x51,0xD9,0x0B,0xC3,0x23,0xBC,0xE4,0xB7,0x04,0xB7,0xD9,0x8E,0xDA,0x0B,0xD4};
-
   PartitionsStatsRequest(const PartitionsStatsRequest&);
   PartitionsStatsRequest& operator=(const PartitionsStatsRequest&);
   PartitionsStatsRequest() : dbName(), tblName() {
@@ -3430,11 +3583,17 @@ class PartitionsStatsRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const PartitionsStatsRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(PartitionsStatsRequest &a, PartitionsStatsRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const PartitionsStatsRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _AddPartitionsResult__isset {
   _AddPartitionsResult__isset() : partitions(false) {}
   bool partitions :1;
@@ -3443,9 +3602,6 @@ typedef struct _AddPartitionsResult__isset {
 class AddPartitionsResult {
  public:
 
-  static const char* ascii_fingerprint; // = "5A689D0823E7BFBB60C799BA60065C31";
-  static const uint8_t binary_fingerprint[16]; // = {0x5A,0x68,0x9D,0x08,0x23,0xE7,0xBF,0xBB,0x60,0xC7,0x99,0xBA,0x60,0x06,0x5C,0x31};
-
   AddPartitionsResult(const AddPartitionsResult&);
   AddPartitionsResult& operator=(const AddPartitionsResult&);
   AddPartitionsResult() {
@@ -3475,11 +3631,17 @@ class AddPartitionsResult {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const AddPartitionsResult& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(AddPartitionsResult &a, AddPartitionsResult &b);
 
+inline std::ostream& operator<<(std::ostream& out, const AddPartitionsResult& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _AddPartitionsRequest__isset {
   _AddPartitionsRequest__isset() : needResult(true) {}
   bool needResult :1;
@@ -3488,9 +3650,6 @@ typedef struct _AddPartitionsRequest__isset {
 class AddPartitionsRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "94F938D035892CF6873DEDB99358F069";
-  static const uint8_t binary_fingerprint[16]; // = {0x94,0xF9,0x38,0xD0,0x35,0x89,0x2C,0xF6,0x87,0x3D,0xED,0xB9,0x93,0x58,0xF0,0x69};
-
   AddPartitionsRequest(const AddPartitionsRequest&);
   AddPartitionsRequest& operator=(const AddPartitionsRequest&);
   AddPartitionsRequest() : dbName(), tblName(), ifNotExists(0), needResult(true) {
@@ -3540,11 +3699,17 @@ class AddPartitionsRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const AddPartitionsRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(AddPartitionsRequest &a, AddPartitionsRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const AddPartitionsRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _DropPartitionsResult__isset {
   _DropPartitionsResult__isset() : partitions(false) {}
   bool partitions :1;
@@ -3553,9 +3718,6 @@ typedef struct _DropPartitionsResult__isset {
 class DropPartitionsResult {
  public:
 
-  static const char* ascii_fingerprint; // = "5A689D0823E7BFBB60C799BA60065C31";
-  static const uint8_t binary_fingerprint[16]; // = {0x5A,0x68,0x9D,0x08,0x23,0xE7,0xBF,0xBB,0x60,0xC7,0x99,0xBA,0x60,0x06,0x5C,0x31};
-
   DropPartitionsResult(const DropPartitionsResult&);
   DropPartitionsResult& operator=(const DropPartitionsResult&);
   DropPartitionsResult() {
@@ -3585,11 +3747,17 @@ class DropPartitionsResult {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const DropPartitionsResult& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(DropPartitionsResult &a, DropPartitionsResult &b);
 
+inline std::ostream& operator<<(std::ostream& out, const DropPartitionsResult& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _DropPartitionsExpr__isset {
   _DropPartitionsExpr__isset() : partArchiveLevel(false) {}
   bool partArchiveLevel :1;
@@ -3598,9 +3766,6 @@ typedef struct _DropPartitionsExpr__isset {
 class DropPartitionsExpr {
  public:
 
-  static const char* ascii_fingerprint; // = "18B162B1D15D8D46509D3911A9F1C2AA";
-  static const uint8_t binary_fingerprint[16]; // = {0x18,0xB1,0x62,0xB1,0xD1,0x5D,0x8D,0x46,0x50,0x9D,0x39,0x11,0xA9,0xF1,0xC2,0xAA};
-
   DropPartitionsExpr(const DropPartitionsExpr&);
   DropPartitionsExpr& operator=(const DropPartitionsExpr&);
   DropPartitionsExpr() : expr(), partArchiveLevel(0) {
@@ -3635,11 +3800,17 @@ class DropPartitionsExpr {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const DropPartitionsExpr& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(DropPartitionsExpr &a, DropPartitionsExpr &b);
 
+inline std::ostream& operator<<(std::ostream& out, const DropPartitionsExpr& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _RequestPartsSpec__isset {
   _RequestPartsSpec__isset() : names(false), exprs(false) {}
   bool names :1;
@@ -3649,9 +3820,6 @@ typedef struct _RequestPartsSpec__isset {
 class RequestPartsSpec {
  public:
 
-  static const char* ascii_fingerprint; // = "864492ECAB27996CD222AACDA10C292E";
-  static const uint8_t binary_fingerprint[16]; // = {0x86,0x44,0x92,0xEC,0xAB,0x27,0x99,0x6C,0xD2,0x22,0xAA,0xCD,0xA1,0x0C,0x29,0x2E};
-
   RequestPartsSpec(const RequestPartsSpec&);
   RequestPartsSpec& operator=(const RequestPartsSpec&);
   RequestPartsSpec() {
@@ -3684,11 +3852,17 @@ class RequestPartsSpec {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const RequestPartsSpec& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(RequestPartsSpec &a, RequestPartsSpec &b);
 
+inline std::ostream& operator<<(std::ostream& out, const RequestPartsSpec& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _DropPartitionsRequest__isset {
   _DropPartitionsRequest__isset() : deleteData(false), ifExists(true), ignoreProtection(false), environmentContext(false), needResult(true) {}
   bool deleteData :1;
@@ -3701,9 +3875,6 @@ typedef struct _DropPartitionsRequest__isset {
 class DropPartitionsRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "EB263FBA01215C480A9A24C11D69E672";
-  static const uint8_t binary_fingerprint[16]; // = {0xEB,0x26,0x3F,0xBA,0x01,0x21,0x5C,0x48,0x0A,0x9A,0x24,0xC1,0x1D,0x69,0xE6,0x72};
-
   DropPartitionsRequest(const DropPartitionsRequest&);
   DropPartitionsRequest& operator=(const DropPartitionsRequest&);
   DropPartitionsRequest() : dbName(), tblName(), deleteData(0), ifExists(true), ignoreProtection(0), needResult(true) {
@@ -3776,11 +3947,17 @@ class DropPartitionsRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const DropPartitionsRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(DropPartitionsRequest &a, DropPartitionsRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const DropPartitionsRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _ResourceUri__isset {
   _ResourceUri__isset() : resourceType(false), uri(false) {}
   bool resourceType :1;
@@ -3790,9 +3967,6 @@ typedef struct _ResourceUri__isset {
 class ResourceUri {
  public:
 
-  static const char* ascii_fingerprint; // = "19B5240589E680301A7E32DF3971EFBE";
-  static const uint8_t binary_fingerprint[16]; // = {0x19,0xB5,0x24,0x05,0x89,0xE6,0x80,0x30,0x1A,0x7E,0x32,0xDF,0x39,0x71,0xEF,0xBE};
-
   ResourceUri(const ResourceUri&);
   ResourceUri& operator=(const ResourceUri&);
   ResourceUri() : resourceType((ResourceType::type)0), uri() {
@@ -3825,11 +3999,17 @@ class ResourceUri {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ResourceUri& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(ResourceUri &a, ResourceUri &b);
 
+inline std::ostream& operator<<(std::ostream& out, const ResourceUri& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Function__isset {
   _Function__isset() : functionName(false), dbName(false), className(false), ownerName(false), ownerType(false), createTime(false), functionType(false), resourceUris(false) {}
   bool functionName :1;
@@ -3845,9 +4025,6 @@ typedef struct _Function__isset {
 class Function {
  public:
 
-  static const char* ascii_fingerprint; // = "72279C515E70F888568542F97616ADB8";
-  static const uint8_t binary_fingerprint[16]; // = {0x72,0x27,0x9C,0x51,0x5E,0x70,0xF8,0x88,0x56,0x85,0x42,0xF9,0x76,0x16,0xAD,0xB8};
-
   Function(const Function&);
   Function& operator=(const Function&);
   Function() : functionName(), dbName(), className(), ownerName(), ownerType((PrincipalType::type)0), createTime(0), functionType((FunctionType::type)0) {
@@ -3910,18 +4087,21 @@ class Function {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Function& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Function &a, Function &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Function& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TxnInfo {
  public:
 
-  static const char* ascii_fingerprint; // = "6C5C0773A901CCA3BE9D085B3B47A767";
-  static const uint8_t binary_fingerprint[16]; // = {0x6C,0x5C,0x07,0x73,0xA9,0x01,0xCC,0xA3,0xBE,0x9D,0x08,0x5B,0x3B,0x47,0xA7,0x67};
-
   TxnInfo(const TxnInfo&);
   TxnInfo& operator=(const TxnInfo&);
   TxnInfo() : id(0), state((TxnState::type)0), user(), hostname() {
@@ -3962,18 +4142,21 @@ class TxnInfo {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TxnInfo& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TxnInfo &a, TxnInfo &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TxnInfo& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class GetOpenTxnsInfoResponse {
  public:
 
-  static const char* ascii_fingerprint; // = "CCF769BBD33005B61F2079A6665E3B9C";
-  static const uint8_t binary_fingerprint[16]; // = {0xCC,0xF7,0x69,0xBB,0xD3,0x30,0x05,0xB6,0x1F,0x20,0x79,0xA6,0x66,0x5E,0x3B,0x9C};
-
   GetOpenTxnsInfoResponse(const GetOpenTxnsInfoResponse&);
   GetOpenTxnsInfoResponse& operator=(const GetOpenTxnsInfoResponse&);
   GetOpenTxnsInfoResponse() : txn_high_water_mark(0) {
@@ -4004,18 +4187,21 @@ class GetOpenTxnsInfoResponse {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const GetOpenTxnsInfoResponse& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(GetOpenTxnsInfoResponse &a, GetOpenTxnsInfoResponse &b);
 
+inline std::ostream& operator<<(std::ostream& out, const GetOpenTxnsInfoResponse& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class GetOpenTxnsResponse {
  public:
 
-  static const char* ascii_fingerprint; // = "590531FF1BE8611678B255374F6109EE";
-  static const uint8_t binary_fingerprint[16]; // = {0x59,0x05,0x31,0xFF,0x1B,0xE8,0x61,0x16,0x78,0xB2,0x55,0x37,0x4F,0x61,0x09,0xEE};
-
   GetOpenTxnsResponse(const GetOpenTxnsResponse&);
   GetOpenTxnsResponse& operator=(const GetOpenTxnsResponse&);
   GetOpenTxnsResponse() : txn_high_water_mark(0) {
@@ -4046,18 +4232,21 @@ class GetOpenTxnsResponse {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const GetOpenTxnsResponse& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(GetOpenTxnsResponse &a, GetOpenTxnsResponse &b);
 
+inline std::ostream& operator<<(std::ostream& out, const GetOpenTxnsResponse& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class OpenTxnRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "3368C2F81F2FEF71F11EDACDB2A3ECEF";
-  static const uint8_t binary_fingerprint[16]; // = {0x33,0x68,0xC2,0xF8,0x1F,0x2F,0xEF,0x71,0xF1,0x1E,0xDA,0xCD,0xB2,0xA3,0xEC,0xEF};
-
   OpenTxnRequest(const OpenTxnRequest&);
   OpenTxnRequest& operator=(const OpenTxnRequest&);
   OpenTxnRequest() : num_txns(0), user(), hostname() {
@@ -4093,18 +4282,21 @@ class OpenTxnRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const OpenTxnRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(OpenTxnRequest &a, OpenTxnRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const OpenTxnRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class OpenTxnsResponse {
  public:
 
-  static const char* ascii_fingerprint; // = "E49D7D1A9013CC81CD0F69D631EF82E4";
-  static const uint8_t binary_fingerprint[16]; // = {0xE4,0x9D,0x7D,0x1A,0x90,0x13,0xCC,0x81,0xCD,0x0F,0x69,0xD6,0x31,0xEF,0x82,0xE4};
-
   OpenTxnsResponse(const OpenTxnsResponse&);
   OpenTxnsResponse& operator=(const OpenTxnsResponse&);
   OpenTxnsResponse() {
@@ -4130,18 +4322,21 @@ class OpenTxnsResponse {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const OpenTxnsResponse& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(OpenTxnsResponse &a, OpenTxnsResponse &b);
 
+inline std::ostream& operator<<(std::ostream& out, const OpenTxnsResponse& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class AbortTxnRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "56A59CE7FFAF82BCA8A19FAACDE4FB75";
-  static const uint8_t binary_fingerprint[16]; // = {0x56,0xA5,0x9C,0xE7,0xFF,0xAF,0x82,0xBC,0xA8,0xA1,0x9F,0xAA,0xCD,0xE4,0xFB,0x75};
-
   AbortTxnRequest(const AbortTxnRequest&);
   AbortTxnRequest& operator=(const AbortTxnRequest&);
   AbortTxnRequest() : txnid(0) {
@@ -4167,18 +4362,21 @@ class AbortTxnRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const AbortTxnRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(AbortTxnRequest &a, AbortTxnRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const AbortTxnRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class CommitTxnRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "56A59CE7FFAF82BCA8A19FAACDE4FB75";
-  static const uint8_t binary_fingerprint[16]; // = {0x56,0xA5,0x9C,0xE7,0xFF,0xAF,0x82,0xBC,0xA8,0xA1,0x9F,0xAA,0xCD,0xE4,0xFB,0x75};
-
   CommitTxnRequest(const CommitTxnRequest&);
   CommitTxnRequest& operator=(const CommitTxnRequest&);
   CommitTxnRequest() : txnid(0) {
@@ -4204,11 +4402,17 @@ class CommitTxnRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const CommitTxnRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(CommitTxnRequest &a, CommitTxnRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const CommitTxnRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _LockComponent__isset {
   _LockComponent__isset() : tablename(false), partitionname(false) {}
   bool tablename :1;
@@ -4218,9 +4422,6 @@ typedef struct _LockComponent__isset {
 class LockComponent {
  public:
 
-  static const char* ascii_fingerprint; // = "38B02531B0840AC9C72904A4649FD15F";
-  static const uint8_t binary_fingerprint[16]; // = {0x38,0xB0,0x25,0x31,0xB0,0x84,0x0A,0xC9,0xC7,0x29,0x04,0xA4,0x64,0x9F,0xD1,0x5F};
-
   LockComponent(const LockComponent&);
   LockComponent& operator=(const LockComponent&);
   LockComponent() : type((LockType::type)0), level((LockLevel::type)0), dbname(), tablename(), partitionname() {
@@ -4272,11 +4473,17 @@ class LockComponent {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const LockComponent& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(LockComponent &a, LockComponent &b);
 
+inline std::ostream& operator<<(std::ostream& out, const LockComponent& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _LockRequest__isset {
   _LockRequest__isset() : txnid(false) {}
   bool txnid :1;
@@ -4285,9 +4492,6 @@ typedef struct _LockRequest__isset {
 class LockRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "46BC5ED7196BC16CB216AD5CC67C6930";
-  static const uint8_t binary_fingerprint[16]; // = {0x46,0xBC,0x5E,0xD7,0x19,0x6B,0xC1,0x6C,0xB2,0x16,0xAD,0x5C,0xC6,0x7C,0x69,0x30};
-
   LockRequest(const LockRequest&);
   LockRequest& operator=(const LockRequest&);
   LockRequest() : txnid(0), user(), hostname() {
@@ -4332,18 +4536,21 @@ class LockRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const LockRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(LockRequest &a, LockRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const LockRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class LockResponse {
  public:
 
-  static const char* ascii_fingerprint; // = "DFA40D9D2884599F3D1E7A57578F1384";
-  static const uint8_t binary_fingerprint[16]; // = {0xDF,0xA4,0x0D,0x9D,0x28,0x84,0x59,0x9F,0x3D,0x1E,0x7A,0x57,0x57,0x8F,0x13,0x84};
-
   LockResponse(const LockResponse&);
   LockResponse& operator=(const LockResponse&);
   LockResponse() : lockid(0), state((LockState::type)0) {
@@ -4374,18 +4581,21 @@ class LockResponse {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const LockResponse& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(LockResponse &a, LockResponse &b);
 
+inline std::ostream& operator<<(std::ostream& out, const LockResponse& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class CheckLockRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "56A59CE7FFAF82BCA8A19FAACDE4FB75";
-  static const uint8_t binary_fingerprint[16]; // = {0x56,0xA5,0x9C,0xE7,0xFF,0xAF,0x82,0xBC,0xA8,0xA1,0x9F,0xAA,0xCD,0xE4,0xFB,0x75};
-
   CheckLockRequest(const CheckLockRequest&);
   CheckLockRequest& operator=(const CheckLockRequest&);
   CheckLockRequest() : lockid(0) {
@@ -4411,18 +4621,21 @@ class CheckLockRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const CheckLockRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(CheckLockRequest &a, CheckLockRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const CheckLockRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class UnlockRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "56A59CE7FFAF82BCA8A19FAACDE4FB75";
-  static const uint8_t binary_fingerprint[16]; // = {0x56,0xA5,0x9C,0xE7,0xFF,0xAF,0x82,0xBC,0xA8,0xA1,0x9F,0xAA,0xCD,0xE4,0xFB,0x75};
-
   UnlockRequest(const UnlockRequest&);
   UnlockRequest& operator=(const UnlockRequest&);
   UnlockRequest() : lockid(0) {
@@ -4448,18 +4661,21 @@ class UnlockRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const UnlockRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(UnlockRequest &a, UnlockRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const UnlockRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class ShowLocksRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
   ShowLocksRequest(const ShowLocksRequest&);
   ShowLocksRequest& operator=(const ShowLocksRequest&);
   ShowLocksRequest() {
@@ -4480,11 +4696,17 @@ class ShowLocksRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ShowLocksRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(ShowLocksRequest &a, ShowLocksRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const ShowLocksRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _ShowLocksResponseElement__isset {
   _ShowLocksResponseElement__isset() : tablename(false), partname(false), txnid(false), acquiredat(false) {}
   bool tablename :1;
@@ -4496,9 +4718,6 @@ typedef struct _ShowLocksResponseElement__isset {
 class ShowLocksResponseElement {
  public:
 
-  static const char* ascii_fingerprint; // = "5AD11F0E0EF1EE0A7C08B00FEFCFF24F";
-  static const uint8_t binary_fingerprint[16]; // = {0x5A,0xD1,0x1F,0x0E,0x0E,0xF1,0xEE,0x0A,0x7C,0x08,0xB0,0x0F,0xEF,0xCF,0xF2,0x4F};
-
   ShowLocksResponseElement(const ShowLocksResponseElement&);
   ShowLocksResponseElement& operator=(const ShowLocksResponseElement&);
   ShowLocksResponseElement() : lockid(0), dbname(), tablename(), partname(), state((LockState::type)0), type((LockType::type)0), txnid(0), lastheartbeat(0), acquiredat(0), user(), hostname() {
@@ -4584,11 +4803,17 @@ class ShowLocksResponseElement {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ShowLocksResponseElement& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(ShowLocksResponseElement &a, ShowLocksResponseElement &b);
 
+inline std::ostream& operator<<(std::ostream& out, const ShowLocksResponseElement& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _ShowLocksResponse__isset {
   _ShowLocksResponse__isset() : locks(false) {}
   bool locks :1;
@@ -4597,9 +4822,6 @@ typedef struct _ShowLocksResponse__isset {
 class ShowLocksResponse {
  public:
 
-  static const char* ascii_fingerprint; // = "BD598AA60FE941361FB54C43973C011F";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0x59,0x8A,0xA6,0x0F,0xE9,0x41,0x36,0x1F,0xB5,0x4C,0x43,0x97,0x3C,0x01,0x1F};
-
   ShowLocksResponse(const ShowLocksResponse&);
   ShowLocksResponse& operator=(const ShowLocksResponse&);
   ShowLocksResponse() {
@@ -4627,11 +4849,17 @@ class ShowLocksResponse {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ShowLocksResponse& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(ShowLocksResponse &a, ShowLocksResponse &b);
 
+inline std::ostream& operator<<(std::ostream& out, const ShowLocksResponse& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _HeartbeatRequest__isset {
   _HeartbeatRequest__isset() : lockid(false), txnid(false) {}
   bool lockid :1;
@@ -4641,9 +4869,6 @@ typedef struct _HeartbeatRequest__isset {
 class HeartbeatRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "0354D07C94CB8542872CA1277008860A";
-  static const uint8_t binary_fingerprint[16]; // = {0x03,0x54,0xD0,0x7C,0x94,0xCB,0x85,0x42,0x87,0x2C,0xA1,0x27,0x70,0x08,0x86,0x0A};
-
   HeartbeatRequest(const HeartbeatRequest&);
   HeartbeatRequest& operator=(const HeartbeatRequest&);
   HeartbeatRequest() : lockid(0), txnid(0) {
@@ -4680,18 +4905,21 @@ class HeartbeatRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const HeartbeatRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(HeartbeatRequest &a, HeartbeatRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const HeartbeatRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class HeartbeatTxnRangeRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "F33135321253DAEB67B0E79E416CA831";
-  static const uint8_t binary_fingerprint[16]; // = {0xF3,0x31,0x35,0x32,0x12,0x53,0xDA,0xEB,0x67,0xB0,0xE7,0x9E,0x41,0x6C,0xA8,0x31};
-
   HeartbeatTxnRangeRequest(const HeartbeatTxnRangeRequest&);
   HeartbeatTxnRangeRequest& operator=(const HeartbeatTxnRangeRequest&);
   HeartbeatTxnRangeRequest() : min(0), max(0) {
@@ -4722,18 +4950,21 @@ class HeartbeatTxnRangeRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const HeartbeatTxnRangeRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(HeartbeatTxnRangeRequest &a, HeartbeatTxnRangeRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const HeartbeatTxnRangeRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class HeartbeatTxnRangeResponse {
  public:
 
-  static const char* ascii_fingerprint; // = "33E49A70BD5C04262A0F407E3656E3CF";
-  static const uint8_t binary_fingerprint[16]; // = {0x33,0xE4,0x9A,0x70,0xBD,0x5C,0x04,0x26,0x2A,0x0F,0x40,0x7E,0x36,0x56,0xE3,0xCF};
-
   HeartbeatTxnRangeResponse(const HeartbeatTxnRangeResponse&);
   HeartbeatTxnRangeResponse& operator=(const HeartbeatTxnRangeResponse&);
   HeartbeatTxnRangeResponse() {
@@ -4764,11 +4995,17 @@ class HeartbeatTxnRangeResponse {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const HeartbeatTxnRangeResponse& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(HeartbeatTxnRangeResponse &a, HeartbeatTxnRangeResponse &b);
 
+inline std::ostream& operator<<(std::ostream& out, const HeartbeatTxnRangeResponse& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _CompactionRequest__isset {
   _CompactionRequest__isset() : partitionname(false), runas(false) {}
   bool partitionname :1;
@@ -4778,9 +5015,6 @@ typedef struct _CompactionRequest__isset {
 class CompactionRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "899FD1F339D8318D628687CC2CE2864B";
-  static const uint8_t binary_fingerprint[16]; // = {0x89,0x9F,0xD1,0xF3,0x39,0xD8,0x31,0x8D,0x62,0x86,0x87,0xCC,0x2C,0xE2,0x86,0x4B};
-
   CompactionRequest(const CompactionRequest&);
   CompactionRequest& operator=(const CompactionRequest&);
   CompactionRequest() : dbname(), tablename(), partitionname(), type((CompactionType::type)0), runas() {
@@ -4832,18 +5066,21 @@ class CompactionRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const CompactionRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(CompactionRequest &a, CompactionRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const CompactionRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class ShowCompactRequest {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
   ShowCompactRequest(const ShowCompactRequest&);
   ShowCompactRequest& operator=(const ShowCompactRequest&);
   ShowCompactRequest() {
@@ -4864,11 +5101,17 @@ class ShowCompactRequest {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ShowCompactRequest& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(ShowCompactRequest &a, ShowCompactRequest &b);
 
+inline std::ostream& operator<<(std::ostream& out, const ShowCompactRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _ShowCompactResponseElement__isset {
   _ShowCompactResponseElement__isset() : partitionname(false), workerid(false), start(false), runAs(false) {}
   bool partitionname :1;
@@ -4880,9 +5123,6 @@ typedef struct _ShowCompactResponseElement__isset {
 class ShowCompactResponseElement {
  public:
 
-  static const char* ascii_fingerprint; // = "2F338C265DC4FD82DD13F4966FE43F13";
-  static const uint8_t binary_fingerprint[16]; // = {0x2F,0x33,0x8C,0x26,0x5D,0xC4,0xFD,0x82,0xDD,0x13,0xF4,0x96,0x6F,0xE4,0x3F,0x13};
-
   ShowCompactResponseElement(const ShowCompactResponseElement&);
   ShowCompactResponseElement& operator=(const ShowCompactResponseElement&);
   ShowCompactResponseElement() : dbname(), tablename(), partitionname(), type((CompactionType::type)0), state(), workerid(), start(0), runAs() {
@@ -4953,18 +5193,21 @@ class ShowCompactResponseElement {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ShowCompactResponseElement& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(ShowCompactResponseElement &a, ShowCompactResponseElement &b);
 
+inline std::ostream& operator<<(std::ostream& out, const ShowCompactResponseElement& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class ShowCompactResponse {
  public:
 
-  static const char* ascii_fingerprint; // = "915B7B8DB8966D65769

<TRUNCATED>

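A note on the pattern repeated throughout the hunks above: Thrift 0.9.3 drops the per-struct ascii_fingerprint/binary_fingerprint constants and replaces the in-class "friend std::ostream& operator<<" declaration with a virtual printTo(std::ostream&) member plus a free inline operator<< that delegates to it. The sketch below is a minimal, hand-written illustration of that shape, not code taken from the generated headers; the struct name ExampleStats and its field are hypothetical.

    #include <cstdint>
    #include <iostream>
    #include <ostream>

    // Old style (Thrift 0.9.2): the generated struct declared
    //   friend std::ostream& operator<<(std::ostream& out, const ExampleStats& obj);
    // and the operator body lived in the generated .cpp file.

    // New style (Thrift 0.9.3): streaming goes through a virtual printTo hook.
    class ExampleStats {
     public:
      int64_t numNulls = 0;  // hypothetical field, for illustration only

      // Generated structs only declare printTo; the body is emitted in the
      // *_types.cpp file. It is defined inline here so the sketch compiles alone.
      virtual void printTo(std::ostream& out) const {
        out << "ExampleStats(numNulls=" << numNulls << ")";
      }
      virtual ~ExampleStats() = default;
    };

    // The free operator<< is defined inline in the header and simply delegates
    // to printTo, so it no longer needs friend access to the struct.
    inline std::ostream& operator<<(std::ostream& out, const ExampleStats& obj) {
      obj.printTo(out);
      return out;
    }

    int main() {
      ExampleStats s;
      s.numNulls = 42;
      std::cout << s << std::endl;  // prints: ExampleStats(numNulls=42)
      return 0;
    }

Because printTo is virtual, subclasses of a generated struct are streamed polymorphically through the same operator<<. The RPC helper types in the service headers (the *_args/_pargs/_result/_presult classes in the [07/55] hunk below) take the simpler path: their operator<< declarations are removed outright and no printTo is added.
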
[07/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/TCLIService.h
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-cpp/TCLIService.h b/service/src/gen/thrift/gen-cpp/TCLIService.h
index 29a9f4a..2ea80c7 100644
--- a/service/src/gen/thrift/gen-cpp/TCLIService.h
+++ b/service/src/gen/thrift/gen-cpp/TCLIService.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -8,10 +8,16 @@
 #define TCLIService_H
 
 #include <thrift/TDispatchProcessor.h>
+#include <thrift/async/TConcurrentClientSyncInfo.h>
 #include "TCLIService_types.h"
 
 namespace apache { namespace hive { namespace service { namespace cli { namespace thrift {
 
+#ifdef _WIN32
+  #pragma warning( push )
+  #pragma warning (disable : 4250 ) //inheriting methods via dominance 
+#endif
+
 class TCLIServiceIf {
  public:
   virtual ~TCLIServiceIf() {}
@@ -130,9 +136,6 @@ typedef struct _TCLIService_OpenSession_args__isset {
 class TCLIService_OpenSession_args {
  public:
 
-  static const char* ascii_fingerprint; // = "657FF0677838A57698AD9D58A923940A";
-  static const uint8_t binary_fingerprint[16]; // = {0x65,0x7F,0xF0,0x67,0x78,0x38,0xA5,0x76,0x98,0xAD,0x9D,0x58,0xA9,0x23,0x94,0x0A};
-
   TCLIService_OpenSession_args(const TCLIService_OpenSession_args&);
   TCLIService_OpenSession_args& operator=(const TCLIService_OpenSession_args&);
   TCLIService_OpenSession_args() {
@@ -160,23 +163,18 @@ class TCLIService_OpenSession_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_OpenSession_args& obj);
 };
 
 
 class TCLIService_OpenSession_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "657FF0677838A57698AD9D58A923940A";
-  static const uint8_t binary_fingerprint[16]; // = {0x65,0x7F,0xF0,0x67,0x78,0x38,0xA5,0x76,0x98,0xAD,0x9D,0x58,0xA9,0x23,0x94,0x0A};
-
 
   virtual ~TCLIService_OpenSession_pargs() throw();
   const TOpenSessionReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_OpenSession_pargs& obj);
 };
 
 typedef struct _TCLIService_OpenSession_result__isset {
@@ -187,9 +185,6 @@ typedef struct _TCLIService_OpenSession_result__isset {
 class TCLIService_OpenSession_result {
  public:
 
-  static const char* ascii_fingerprint; // = "C55268D57D6DC6A256619A7DB419699E";
-  static const uint8_t binary_fingerprint[16]; // = {0xC5,0x52,0x68,0xD5,0x7D,0x6D,0xC6,0xA2,0x56,0x61,0x9A,0x7D,0xB4,0x19,0x69,0x9E};
-
   TCLIService_OpenSession_result(const TCLIService_OpenSession_result&);
   TCLIService_OpenSession_result& operator=(const TCLIService_OpenSession_result&);
   TCLIService_OpenSession_result() {
@@ -217,7 +212,6 @@ class TCLIService_OpenSession_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_OpenSession_result& obj);
 };
 
 typedef struct _TCLIService_OpenSession_presult__isset {
@@ -228,9 +222,6 @@ typedef struct _TCLIService_OpenSession_presult__isset {
 class TCLIService_OpenSession_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "C55268D57D6DC6A256619A7DB419699E";
-  static const uint8_t binary_fingerprint[16]; // = {0xC5,0x52,0x68,0xD5,0x7D,0x6D,0xC6,0xA2,0x56,0x61,0x9A,0x7D,0xB4,0x19,0x69,0x9E};
-
 
   virtual ~TCLIService_OpenSession_presult() throw();
   TOpenSessionResp* success;
@@ -239,7 +230,6 @@ class TCLIService_OpenSession_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_OpenSession_presult& obj);
 };
 
 typedef struct _TCLIService_CloseSession_args__isset {
@@ -250,9 +240,6 @@ typedef struct _TCLIService_CloseSession_args__isset {
 class TCLIService_CloseSession_args {
  public:
 
-  static const char* ascii_fingerprint; // = "FD7076C37D193E2A343D9691B59D94EC";
-  static const uint8_t binary_fingerprint[16]; // = {0xFD,0x70,0x76,0xC3,0x7D,0x19,0x3E,0x2A,0x34,0x3D,0x96,0x91,0xB5,0x9D,0x94,0xEC};
-
   TCLIService_CloseSession_args(const TCLIService_CloseSession_args&);
   TCLIService_CloseSession_args& operator=(const TCLIService_CloseSession_args&);
   TCLIService_CloseSession_args() {
@@ -280,23 +267,18 @@ class TCLIService_CloseSession_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CloseSession_args& obj);
 };
 
 
 class TCLIService_CloseSession_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "FD7076C37D193E2A343D9691B59D94EC";
-  static const uint8_t binary_fingerprint[16]; // = {0xFD,0x70,0x76,0xC3,0x7D,0x19,0x3E,0x2A,0x34,0x3D,0x96,0x91,0xB5,0x9D,0x94,0xEC};
-
 
   virtual ~TCLIService_CloseSession_pargs() throw();
   const TCloseSessionReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CloseSession_pargs& obj);
 };
 
 typedef struct _TCLIService_CloseSession_result__isset {
@@ -307,9 +289,6 @@ typedef struct _TCLIService_CloseSession_result__isset {
 class TCLIService_CloseSession_result {
  public:
 
-  static const char* ascii_fingerprint; // = "BDB51A479DCD1EB1DB636FA1B4BE02A7";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0xB5,0x1A,0x47,0x9D,0xCD,0x1E,0xB1,0xDB,0x63,0x6F,0xA1,0xB4,0xBE,0x02,0xA7};
-
   TCLIService_CloseSession_result(const TCLIService_CloseSession_result&);
   TCLIService_CloseSession_result& operator=(const TCLIService_CloseSession_result&);
   TCLIService_CloseSession_result() {
@@ -337,7 +316,6 @@ class TCLIService_CloseSession_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CloseSession_result& obj);
 };
 
 typedef struct _TCLIService_CloseSession_presult__isset {
@@ -348,9 +326,6 @@ typedef struct _TCLIService_CloseSession_presult__isset {
 class TCLIService_CloseSession_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "BDB51A479DCD1EB1DB636FA1B4BE02A7";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0xB5,0x1A,0x47,0x9D,0xCD,0x1E,0xB1,0xDB,0x63,0x6F,0xA1,0xB4,0xBE,0x02,0xA7};
-
 
   virtual ~TCLIService_CloseSession_presult() throw();
   TCloseSessionResp* success;
@@ -359,7 +334,6 @@ class TCLIService_CloseSession_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CloseSession_presult& obj);
 };
 
 typedef struct _TCLIService_GetInfo_args__isset {
@@ -370,9 +344,6 @@ typedef struct _TCLIService_GetInfo_args__isset {
 class TCLIService_GetInfo_args {
  public:
 
-  static const char* ascii_fingerprint; // = "482A174DD6064955A19F28C5395E27FA";
-  static const uint8_t binary_fingerprint[16]; // = {0x48,0x2A,0x17,0x4D,0xD6,0x06,0x49,0x55,0xA1,0x9F,0x28,0xC5,0x39,0x5E,0x27,0xFA};
-
   TCLIService_GetInfo_args(const TCLIService_GetInfo_args&);
   TCLIService_GetInfo_args& operator=(const TCLIService_GetInfo_args&);
   TCLIService_GetInfo_args() {
@@ -400,23 +371,18 @@ class TCLIService_GetInfo_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetInfo_args& obj);
 };
 
 
 class TCLIService_GetInfo_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "482A174DD6064955A19F28C5395E27FA";
-  static const uint8_t binary_fingerprint[16]; // = {0x48,0x2A,0x17,0x4D,0xD6,0x06,0x49,0x55,0xA1,0x9F,0x28,0xC5,0x39,0x5E,0x27,0xFA};
-
 
   virtual ~TCLIService_GetInfo_pargs() throw();
   const TGetInfoReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetInfo_pargs& obj);
 };
 
 typedef struct _TCLIService_GetInfo_result__isset {
@@ -427,9 +393,6 @@ typedef struct _TCLIService_GetInfo_result__isset {
 class TCLIService_GetInfo_result {
  public:
 
-  static const char* ascii_fingerprint; // = "95AE9E06990A55202EF5D96DABE20D75";
-  static const uint8_t binary_fingerprint[16]; // = {0x95,0xAE,0x9E,0x06,0x99,0x0A,0x55,0x20,0x2E,0xF5,0xD9,0x6D,0xAB,0xE2,0x0D,0x75};
-
   TCLIService_GetInfo_result(const TCLIService_GetInfo_result&);
   TCLIService_GetInfo_result& operator=(const TCLIService_GetInfo_result&);
   TCLIService_GetInfo_result() {
@@ -457,7 +420,6 @@ class TCLIService_GetInfo_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetInfo_result& obj);
 };
 
 typedef struct _TCLIService_GetInfo_presult__isset {
@@ -468,9 +430,6 @@ typedef struct _TCLIService_GetInfo_presult__isset {
 class TCLIService_GetInfo_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "95AE9E06990A55202EF5D96DABE20D75";
-  static const uint8_t binary_fingerprint[16]; // = {0x95,0xAE,0x9E,0x06,0x99,0x0A,0x55,0x20,0x2E,0xF5,0xD9,0x6D,0xAB,0xE2,0x0D,0x75};
-
 
   virtual ~TCLIService_GetInfo_presult() throw();
   TGetInfoResp* success;
@@ -479,7 +438,6 @@ class TCLIService_GetInfo_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetInfo_presult& obj);
 };
 
 typedef struct _TCLIService_ExecuteStatement_args__isset {
@@ -490,9 +448,6 @@ typedef struct _TCLIService_ExecuteStatement_args__isset {
 class TCLIService_ExecuteStatement_args {
  public:
 
-  static const char* ascii_fingerprint; // = "BD5534ACDA7A523F638927AC476C2173";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0x55,0x34,0xAC,0xDA,0x7A,0x52,0x3F,0x63,0x89,0x27,0xAC,0x47,0x6C,0x21,0x73};
-
   TCLIService_ExecuteStatement_args(const TCLIService_ExecuteStatement_args&);
   TCLIService_ExecuteStatement_args& operator=(const TCLIService_ExecuteStatement_args&);
   TCLIService_ExecuteStatement_args() {
@@ -520,23 +475,18 @@ class TCLIService_ExecuteStatement_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_ExecuteStatement_args& obj);
 };
 
 
 class TCLIService_ExecuteStatement_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "BD5534ACDA7A523F638927AC476C2173";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0x55,0x34,0xAC,0xDA,0x7A,0x52,0x3F,0x63,0x89,0x27,0xAC,0x47,0x6C,0x21,0x73};
-
 
   virtual ~TCLIService_ExecuteStatement_pargs() throw();
   const TExecuteStatementReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_ExecuteStatement_pargs& obj);
 };
 
 typedef struct _TCLIService_ExecuteStatement_result__isset {
@@ -547,9 +497,6 @@ typedef struct _TCLIService_ExecuteStatement_result__isset {
 class TCLIService_ExecuteStatement_result {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
   TCLIService_ExecuteStatement_result(const TCLIService_ExecuteStatement_result&);
   TCLIService_ExecuteStatement_result& operator=(const TCLIService_ExecuteStatement_result&);
   TCLIService_ExecuteStatement_result() {
@@ -577,7 +524,6 @@ class TCLIService_ExecuteStatement_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_ExecuteStatement_result& obj);
 };
 
 typedef struct _TCLIService_ExecuteStatement_presult__isset {
@@ -588,9 +534,6 @@ typedef struct _TCLIService_ExecuteStatement_presult__isset {
 class TCLIService_ExecuteStatement_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
 
   virtual ~TCLIService_ExecuteStatement_presult() throw();
   TExecuteStatementResp* success;
@@ -599,7 +542,6 @@ class TCLIService_ExecuteStatement_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_ExecuteStatement_presult& obj);
 };
 
 typedef struct _TCLIService_GetTypeInfo_args__isset {
@@ -610,9 +552,6 @@ typedef struct _TCLIService_GetTypeInfo_args__isset {
 class TCLIService_GetTypeInfo_args {
  public:
 
-  static const char* ascii_fingerprint; // = "FD7076C37D193E2A343D9691B59D94EC";
-  static const uint8_t binary_fingerprint[16]; // = {0xFD,0x70,0x76,0xC3,0x7D,0x19,0x3E,0x2A,0x34,0x3D,0x96,0x91,0xB5,0x9D,0x94,0xEC};
-
   TCLIService_GetTypeInfo_args(const TCLIService_GetTypeInfo_args&);
   TCLIService_GetTypeInfo_args& operator=(const TCLIService_GetTypeInfo_args&);
   TCLIService_GetTypeInfo_args() {
@@ -640,23 +579,18 @@ class TCLIService_GetTypeInfo_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetTypeInfo_args& obj);
 };
 
 
 class TCLIService_GetTypeInfo_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "FD7076C37D193E2A343D9691B59D94EC";
-  static const uint8_t binary_fingerprint[16]; // = {0xFD,0x70,0x76,0xC3,0x7D,0x19,0x3E,0x2A,0x34,0x3D,0x96,0x91,0xB5,0x9D,0x94,0xEC};
-
 
   virtual ~TCLIService_GetTypeInfo_pargs() throw();
   const TGetTypeInfoReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetTypeInfo_pargs& obj);
 };
 
 typedef struct _TCLIService_GetTypeInfo_result__isset {
@@ -667,9 +601,6 @@ typedef struct _TCLIService_GetTypeInfo_result__isset {
 class TCLIService_GetTypeInfo_result {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
   TCLIService_GetTypeInfo_result(const TCLIService_GetTypeInfo_result&);
   TCLIService_GetTypeInfo_result& operator=(const TCLIService_GetTypeInfo_result&);
   TCLIService_GetTypeInfo_result() {
@@ -697,7 +628,6 @@ class TCLIService_GetTypeInfo_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetTypeInfo_result& obj);
 };
 
 typedef struct _TCLIService_GetTypeInfo_presult__isset {
@@ -708,9 +638,6 @@ typedef struct _TCLIService_GetTypeInfo_presult__isset {
 class TCLIService_GetTypeInfo_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
 
   virtual ~TCLIService_GetTypeInfo_presult() throw();
   TGetTypeInfoResp* success;
@@ -719,7 +646,6 @@ class TCLIService_GetTypeInfo_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetTypeInfo_presult& obj);
 };
 
 typedef struct _TCLIService_GetCatalogs_args__isset {
@@ -730,9 +656,6 @@ typedef struct _TCLIService_GetCatalogs_args__isset {
 class TCLIService_GetCatalogs_args {
  public:
 
-  static const char* ascii_fingerprint; // = "FD7076C37D193E2A343D9691B59D94EC";
-  static const uint8_t binary_fingerprint[16]; // = {0xFD,0x70,0x76,0xC3,0x7D,0x19,0x3E,0x2A,0x34,0x3D,0x96,0x91,0xB5,0x9D,0x94,0xEC};
-
   TCLIService_GetCatalogs_args(const TCLIService_GetCatalogs_args&);
   TCLIService_GetCatalogs_args& operator=(const TCLIService_GetCatalogs_args&);
   TCLIService_GetCatalogs_args() {
@@ -760,23 +683,18 @@ class TCLIService_GetCatalogs_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetCatalogs_args& obj);
 };
 
 
 class TCLIService_GetCatalogs_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "FD7076C37D193E2A343D9691B59D94EC";
-  static const uint8_t binary_fingerprint[16]; // = {0xFD,0x70,0x76,0xC3,0x7D,0x19,0x3E,0x2A,0x34,0x3D,0x96,0x91,0xB5,0x9D,0x94,0xEC};
-
 
   virtual ~TCLIService_GetCatalogs_pargs() throw();
   const TGetCatalogsReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetCatalogs_pargs& obj);
 };
 
 typedef struct _TCLIService_GetCatalogs_result__isset {
@@ -787,9 +705,6 @@ typedef struct _TCLIService_GetCatalogs_result__isset {
 class TCLIService_GetCatalogs_result {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
   TCLIService_GetCatalogs_result(const TCLIService_GetCatalogs_result&);
   TCLIService_GetCatalogs_result& operator=(const TCLIService_GetCatalogs_result&);
   TCLIService_GetCatalogs_result() {
@@ -817,7 +732,6 @@ class TCLIService_GetCatalogs_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetCatalogs_result& obj);
 };
 
 typedef struct _TCLIService_GetCatalogs_presult__isset {
@@ -828,9 +742,6 @@ typedef struct _TCLIService_GetCatalogs_presult__isset {
 class TCLIService_GetCatalogs_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
 
   virtual ~TCLIService_GetCatalogs_presult() throw();
   TGetCatalogsResp* success;
@@ -839,7 +750,6 @@ class TCLIService_GetCatalogs_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetCatalogs_presult& obj);
 };
 
 typedef struct _TCLIService_GetSchemas_args__isset {
@@ -850,9 +760,6 @@ typedef struct _TCLIService_GetSchemas_args__isset {
 class TCLIService_GetSchemas_args {
  public:
 
-  static const char* ascii_fingerprint; // = "34B9FACB4B4C34ABAEDCF0A2B60345DE";
-  static const uint8_t binary_fingerprint[16]; // = {0x34,0xB9,0xFA,0xCB,0x4B,0x4C,0x34,0xAB,0xAE,0xDC,0xF0,0xA2,0xB6,0x03,0x45,0xDE};
-
   TCLIService_GetSchemas_args(const TCLIService_GetSchemas_args&);
   TCLIService_GetSchemas_args& operator=(const TCLIService_GetSchemas_args&);
   TCLIService_GetSchemas_args() {
@@ -880,23 +787,18 @@ class TCLIService_GetSchemas_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetSchemas_args& obj);
 };
 
 
 class TCLIService_GetSchemas_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "34B9FACB4B4C34ABAEDCF0A2B60345DE";
-  static const uint8_t binary_fingerprint[16]; // = {0x34,0xB9,0xFA,0xCB,0x4B,0x4C,0x34,0xAB,0xAE,0xDC,0xF0,0xA2,0xB6,0x03,0x45,0xDE};
-
 
   virtual ~TCLIService_GetSchemas_pargs() throw();
   const TGetSchemasReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetSchemas_pargs& obj);
 };
 
 typedef struct _TCLIService_GetSchemas_result__isset {
@@ -907,9 +809,6 @@ typedef struct _TCLIService_GetSchemas_result__isset {
 class TCLIService_GetSchemas_result {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
   TCLIService_GetSchemas_result(const TCLIService_GetSchemas_result&);
   TCLIService_GetSchemas_result& operator=(const TCLIService_GetSchemas_result&);
   TCLIService_GetSchemas_result() {
@@ -937,7 +836,6 @@ class TCLIService_GetSchemas_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetSchemas_result& obj);
 };
 
 typedef struct _TCLIService_GetSchemas_presult__isset {
@@ -948,9 +846,6 @@ typedef struct _TCLIService_GetSchemas_presult__isset {
 class TCLIService_GetSchemas_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
 
   virtual ~TCLIService_GetSchemas_presult() throw();
   TGetSchemasResp* success;
@@ -959,7 +854,6 @@ class TCLIService_GetSchemas_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetSchemas_presult& obj);
 };
 
 typedef struct _TCLIService_GetTables_args__isset {
@@ -970,9 +864,6 @@ typedef struct _TCLIService_GetTables_args__isset {
 class TCLIService_GetTables_args {
  public:
 
-  static const char* ascii_fingerprint; // = "58075D8350502C9B1F3532079C1CF7A6";
-  static const uint8_t binary_fingerprint[16]; // = {0x58,0x07,0x5D,0x83,0x50,0x50,0x2C,0x9B,0x1F,0x35,0x32,0x07,0x9C,0x1C,0xF7,0xA6};
-
   TCLIService_GetTables_args(const TCLIService_GetTables_args&);
   TCLIService_GetTables_args& operator=(const TCLIService_GetTables_args&);
   TCLIService_GetTables_args() {
@@ -1000,23 +891,18 @@ class TCLIService_GetTables_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetTables_args& obj);
 };
 
 
 class TCLIService_GetTables_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "58075D8350502C9B1F3532079C1CF7A6";
-  static const uint8_t binary_fingerprint[16]; // = {0x58,0x07,0x5D,0x83,0x50,0x50,0x2C,0x9B,0x1F,0x35,0x32,0x07,0x9C,0x1C,0xF7,0xA6};
-
 
   virtual ~TCLIService_GetTables_pargs() throw();
   const TGetTablesReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetTables_pargs& obj);
 };
 
 typedef struct _TCLIService_GetTables_result__isset {
@@ -1027,9 +913,6 @@ typedef struct _TCLIService_GetTables_result__isset {
 class TCLIService_GetTables_result {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
   TCLIService_GetTables_result(const TCLIService_GetTables_result&);
   TCLIService_GetTables_result& operator=(const TCLIService_GetTables_result&);
   TCLIService_GetTables_result() {
@@ -1057,7 +940,6 @@ class TCLIService_GetTables_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetTables_result& obj);
 };
 
 typedef struct _TCLIService_GetTables_presult__isset {
@@ -1068,9 +950,6 @@ typedef struct _TCLIService_GetTables_presult__isset {
 class TCLIService_GetTables_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
 
   virtual ~TCLIService_GetTables_presult() throw();
   TGetTablesResp* success;
@@ -1079,7 +958,6 @@ class TCLIService_GetTables_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetTables_presult& obj);
 };
 
 typedef struct _TCLIService_GetTableTypes_args__isset {
@@ -1090,9 +968,6 @@ typedef struct _TCLIService_GetTableTypes_args__isset {
 class TCLIService_GetTableTypes_args {
  public:
 
-  static const char* ascii_fingerprint; // = "FD7076C37D193E2A343D9691B59D94EC";
-  static const uint8_t binary_fingerprint[16]; // = {0xFD,0x70,0x76,0xC3,0x7D,0x19,0x3E,0x2A,0x34,0x3D,0x96,0x91,0xB5,0x9D,0x94,0xEC};
-
   TCLIService_GetTableTypes_args(const TCLIService_GetTableTypes_args&);
   TCLIService_GetTableTypes_args& operator=(const TCLIService_GetTableTypes_args&);
   TCLIService_GetTableTypes_args() {
@@ -1120,23 +995,18 @@ class TCLIService_GetTableTypes_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetTableTypes_args& obj);
 };
 
 
 class TCLIService_GetTableTypes_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "FD7076C37D193E2A343D9691B59D94EC";
-  static const uint8_t binary_fingerprint[16]; // = {0xFD,0x70,0x76,0xC3,0x7D,0x19,0x3E,0x2A,0x34,0x3D,0x96,0x91,0xB5,0x9D,0x94,0xEC};
-
 
   virtual ~TCLIService_GetTableTypes_pargs() throw();
   const TGetTableTypesReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetTableTypes_pargs& obj);
 };
 
 typedef struct _TCLIService_GetTableTypes_result__isset {
@@ -1147,9 +1017,6 @@ typedef struct _TCLIService_GetTableTypes_result__isset {
 class TCLIService_GetTableTypes_result {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
   TCLIService_GetTableTypes_result(const TCLIService_GetTableTypes_result&);
   TCLIService_GetTableTypes_result& operator=(const TCLIService_GetTableTypes_result&);
   TCLIService_GetTableTypes_result() {
@@ -1177,7 +1044,6 @@ class TCLIService_GetTableTypes_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetTableTypes_result& obj);
 };
 
 typedef struct _TCLIService_GetTableTypes_presult__isset {
@@ -1188,9 +1054,6 @@ typedef struct _TCLIService_GetTableTypes_presult__isset {
 class TCLIService_GetTableTypes_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
 
   virtual ~TCLIService_GetTableTypes_presult() throw();
   TGetTableTypesResp* success;
@@ -1199,7 +1062,6 @@ class TCLIService_GetTableTypes_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetTableTypes_presult& obj);
 };
 
 typedef struct _TCLIService_GetColumns_args__isset {
@@ -1210,9 +1072,6 @@ typedef struct _TCLIService_GetColumns_args__isset {
 class TCLIService_GetColumns_args {
  public:
 
-  static const char* ascii_fingerprint; // = "7894A0356591B039C72040E21BAAC3E3";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x94,0xA0,0x35,0x65,0x91,0xB0,0x39,0xC7,0x20,0x40,0xE2,0x1B,0xAA,0xC3,0xE3};
-
   TCLIService_GetColumns_args(const TCLIService_GetColumns_args&);
   TCLIService_GetColumns_args& operator=(const TCLIService_GetColumns_args&);
   TCLIService_GetColumns_args() {
@@ -1240,23 +1099,18 @@ class TCLIService_GetColumns_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetColumns_args& obj);
 };
 
 
 class TCLIService_GetColumns_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "7894A0356591B039C72040E21BAAC3E3";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x94,0xA0,0x35,0x65,0x91,0xB0,0x39,0xC7,0x20,0x40,0xE2,0x1B,0xAA,0xC3,0xE3};
-
 
   virtual ~TCLIService_GetColumns_pargs() throw();
   const TGetColumnsReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetColumns_pargs& obj);
 };
 
 typedef struct _TCLIService_GetColumns_result__isset {
@@ -1267,9 +1121,6 @@ typedef struct _TCLIService_GetColumns_result__isset {
 class TCLIService_GetColumns_result {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
   TCLIService_GetColumns_result(const TCLIService_GetColumns_result&);
   TCLIService_GetColumns_result& operator=(const TCLIService_GetColumns_result&);
   TCLIService_GetColumns_result() {
@@ -1297,7 +1148,6 @@ class TCLIService_GetColumns_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetColumns_result& obj);
 };
 
 typedef struct _TCLIService_GetColumns_presult__isset {
@@ -1308,9 +1158,6 @@ typedef struct _TCLIService_GetColumns_presult__isset {
 class TCLIService_GetColumns_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
 
   virtual ~TCLIService_GetColumns_presult() throw();
   TGetColumnsResp* success;
@@ -1319,7 +1166,6 @@ class TCLIService_GetColumns_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetColumns_presult& obj);
 };
 
 typedef struct _TCLIService_GetFunctions_args__isset {
@@ -1330,9 +1176,6 @@ typedef struct _TCLIService_GetFunctions_args__isset {
 class TCLIService_GetFunctions_args {
  public:
 
-  static const char* ascii_fingerprint; // = "AC28BA383D0EC96F55B7C42FA3E1AF52";
-  static const uint8_t binary_fingerprint[16]; // = {0xAC,0x28,0xBA,0x38,0x3D,0x0E,0xC9,0x6F,0x55,0xB7,0xC4,0x2F,0xA3,0xE1,0xAF,0x52};
-
   TCLIService_GetFunctions_args(const TCLIService_GetFunctions_args&);
   TCLIService_GetFunctions_args& operator=(const TCLIService_GetFunctions_args&);
   TCLIService_GetFunctions_args() {
@@ -1360,23 +1203,18 @@ class TCLIService_GetFunctions_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetFunctions_args& obj);
 };
 
 
 class TCLIService_GetFunctions_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "AC28BA383D0EC96F55B7C42FA3E1AF52";
-  static const uint8_t binary_fingerprint[16]; // = {0xAC,0x28,0xBA,0x38,0x3D,0x0E,0xC9,0x6F,0x55,0xB7,0xC4,0x2F,0xA3,0xE1,0xAF,0x52};
-
 
   virtual ~TCLIService_GetFunctions_pargs() throw();
   const TGetFunctionsReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetFunctions_pargs& obj);
 };
 
 typedef struct _TCLIService_GetFunctions_result__isset {
@@ -1387,9 +1225,6 @@ typedef struct _TCLIService_GetFunctions_result__isset {
 class TCLIService_GetFunctions_result {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
   TCLIService_GetFunctions_result(const TCLIService_GetFunctions_result&);
   TCLIService_GetFunctions_result& operator=(const TCLIService_GetFunctions_result&);
   TCLIService_GetFunctions_result() {
@@ -1417,7 +1252,6 @@ class TCLIService_GetFunctions_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetFunctions_result& obj);
 };
 
 typedef struct _TCLIService_GetFunctions_presult__isset {
@@ -1428,9 +1262,6 @@ typedef struct _TCLIService_GetFunctions_presult__isset {
 class TCLIService_GetFunctions_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "783BF5EE3B1FEAC8DF5FDAEF1F551CDF";
-  static const uint8_t binary_fingerprint[16]; // = {0x78,0x3B,0xF5,0xEE,0x3B,0x1F,0xEA,0xC8,0xDF,0x5F,0xDA,0xEF,0x1F,0x55,0x1C,0xDF};
-
 
   virtual ~TCLIService_GetFunctions_presult() throw();
   TGetFunctionsResp* success;
@@ -1439,7 +1270,6 @@ class TCLIService_GetFunctions_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetFunctions_presult& obj);
 };
 
 typedef struct _TCLIService_GetOperationStatus_args__isset {
@@ -1450,9 +1280,6 @@ typedef struct _TCLIService_GetOperationStatus_args__isset {
 class TCLIService_GetOperationStatus_args {
  public:
 
-  static const char* ascii_fingerprint; // = "2A0009415DC2A8A9CDCF3A75C16ADBE7";
-  static const uint8_t binary_fingerprint[16]; // = {0x2A,0x00,0x09,0x41,0x5D,0xC2,0xA8,0xA9,0xCD,0xCF,0x3A,0x75,0xC1,0x6A,0xDB,0xE7};
-
   TCLIService_GetOperationStatus_args(const TCLIService_GetOperationStatus_args&);
   TCLIService_GetOperationStatus_args& operator=(const TCLIService_GetOperationStatus_args&);
   TCLIService_GetOperationStatus_args() {
@@ -1480,23 +1307,18 @@ class TCLIService_GetOperationStatus_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetOperationStatus_args& obj);
 };
 
 
 class TCLIService_GetOperationStatus_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "2A0009415DC2A8A9CDCF3A75C16ADBE7";
-  static const uint8_t binary_fingerprint[16]; // = {0x2A,0x00,0x09,0x41,0x5D,0xC2,0xA8,0xA9,0xCD,0xCF,0x3A,0x75,0xC1,0x6A,0xDB,0xE7};
-
 
   virtual ~TCLIService_GetOperationStatus_pargs() throw();
   const TGetOperationStatusReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetOperationStatus_pargs& obj);
 };
 
 typedef struct _TCLIService_GetOperationStatus_result__isset {
@@ -1507,9 +1329,6 @@ typedef struct _TCLIService_GetOperationStatus_result__isset {
 class TCLIService_GetOperationStatus_result {
  public:
 
-  static const char* ascii_fingerprint; // = "C399947D185D259358E6B922C40ACF85";
-  static const uint8_t binary_fingerprint[16]; // = {0xC3,0x99,0x94,0x7D,0x18,0x5D,0x25,0x93,0x58,0xE6,0xB9,0x22,0xC4,0x0A,0xCF,0x85};
-
   TCLIService_GetOperationStatus_result(const TCLIService_GetOperationStatus_result&);
   TCLIService_GetOperationStatus_result& operator=(const TCLIService_GetOperationStatus_result&);
   TCLIService_GetOperationStatus_result() {
@@ -1537,7 +1356,6 @@ class TCLIService_GetOperationStatus_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetOperationStatus_result& obj);
 };
 
 typedef struct _TCLIService_GetOperationStatus_presult__isset {
@@ -1548,9 +1366,6 @@ typedef struct _TCLIService_GetOperationStatus_presult__isset {
 class TCLIService_GetOperationStatus_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "C399947D185D259358E6B922C40ACF85";
-  static const uint8_t binary_fingerprint[16]; // = {0xC3,0x99,0x94,0x7D,0x18,0x5D,0x25,0x93,0x58,0xE6,0xB9,0x22,0xC4,0x0A,0xCF,0x85};
-
 
   virtual ~TCLIService_GetOperationStatus_presult() throw();
   TGetOperationStatusResp* success;
@@ -1559,7 +1374,6 @@ class TCLIService_GetOperationStatus_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetOperationStatus_presult& obj);
 };
 
 typedef struct _TCLIService_CancelOperation_args__isset {
@@ -1570,9 +1384,6 @@ typedef struct _TCLIService_CancelOperation_args__isset {
 class TCLIService_CancelOperation_args {
  public:
 
-  static const char* ascii_fingerprint; // = "2A0009415DC2A8A9CDCF3A75C16ADBE7";
-  static const uint8_t binary_fingerprint[16]; // = {0x2A,0x00,0x09,0x41,0x5D,0xC2,0xA8,0xA9,0xCD,0xCF,0x3A,0x75,0xC1,0x6A,0xDB,0xE7};
-
   TCLIService_CancelOperation_args(const TCLIService_CancelOperation_args&);
   TCLIService_CancelOperation_args& operator=(const TCLIService_CancelOperation_args&);
   TCLIService_CancelOperation_args() {
@@ -1600,23 +1411,18 @@ class TCLIService_CancelOperation_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CancelOperation_args& obj);
 };
 
 
 class TCLIService_CancelOperation_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "2A0009415DC2A8A9CDCF3A75C16ADBE7";
-  static const uint8_t binary_fingerprint[16]; // = {0x2A,0x00,0x09,0x41,0x5D,0xC2,0xA8,0xA9,0xCD,0xCF,0x3A,0x75,0xC1,0x6A,0xDB,0xE7};
-
 
   virtual ~TCLIService_CancelOperation_pargs() throw();
   const TCancelOperationReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CancelOperation_pargs& obj);
 };
 
 typedef struct _TCLIService_CancelOperation_result__isset {
@@ -1627,9 +1433,6 @@ typedef struct _TCLIService_CancelOperation_result__isset {
 class TCLIService_CancelOperation_result {
  public:
 
-  static const char* ascii_fingerprint; // = "BDB51A479DCD1EB1DB636FA1B4BE02A7";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0xB5,0x1A,0x47,0x9D,0xCD,0x1E,0xB1,0xDB,0x63,0x6F,0xA1,0xB4,0xBE,0x02,0xA7};
-
   TCLIService_CancelOperation_result(const TCLIService_CancelOperation_result&);
   TCLIService_CancelOperation_result& operator=(const TCLIService_CancelOperation_result&);
   TCLIService_CancelOperation_result() {
@@ -1657,7 +1460,6 @@ class TCLIService_CancelOperation_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CancelOperation_result& obj);
 };
 
 typedef struct _TCLIService_CancelOperation_presult__isset {
@@ -1668,9 +1470,6 @@ typedef struct _TCLIService_CancelOperation_presult__isset {
 class TCLIService_CancelOperation_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "BDB51A479DCD1EB1DB636FA1B4BE02A7";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0xB5,0x1A,0x47,0x9D,0xCD,0x1E,0xB1,0xDB,0x63,0x6F,0xA1,0xB4,0xBE,0x02,0xA7};
-
 
   virtual ~TCLIService_CancelOperation_presult() throw();
   TCancelOperationResp* success;
@@ -1679,7 +1478,6 @@ class TCLIService_CancelOperation_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CancelOperation_presult& obj);
 };
 
 typedef struct _TCLIService_CloseOperation_args__isset {
@@ -1690,9 +1488,6 @@ typedef struct _TCLIService_CloseOperation_args__isset {
 class TCLIService_CloseOperation_args {
  public:
 
-  static const char* ascii_fingerprint; // = "2A0009415DC2A8A9CDCF3A75C16ADBE7";
-  static const uint8_t binary_fingerprint[16]; // = {0x2A,0x00,0x09,0x41,0x5D,0xC2,0xA8,0xA9,0xCD,0xCF,0x3A,0x75,0xC1,0x6A,0xDB,0xE7};
-
   TCLIService_CloseOperation_args(const TCLIService_CloseOperation_args&);
   TCLIService_CloseOperation_args& operator=(const TCLIService_CloseOperation_args&);
   TCLIService_CloseOperation_args() {
@@ -1720,23 +1515,18 @@ class TCLIService_CloseOperation_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CloseOperation_args& obj);
 };
 
 
 class TCLIService_CloseOperation_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "2A0009415DC2A8A9CDCF3A75C16ADBE7";
-  static const uint8_t binary_fingerprint[16]; // = {0x2A,0x00,0x09,0x41,0x5D,0xC2,0xA8,0xA9,0xCD,0xCF,0x3A,0x75,0xC1,0x6A,0xDB,0xE7};
-
 
   virtual ~TCLIService_CloseOperation_pargs() throw();
   const TCloseOperationReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CloseOperation_pargs& obj);
 };
 
 typedef struct _TCLIService_CloseOperation_result__isset {
@@ -1747,9 +1537,6 @@ typedef struct _TCLIService_CloseOperation_result__isset {
 class TCLIService_CloseOperation_result {
  public:
 
-  static const char* ascii_fingerprint; // = "BDB51A479DCD1EB1DB636FA1B4BE02A7";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0xB5,0x1A,0x47,0x9D,0xCD,0x1E,0xB1,0xDB,0x63,0x6F,0xA1,0xB4,0xBE,0x02,0xA7};
-
   TCLIService_CloseOperation_result(const TCLIService_CloseOperation_result&);
   TCLIService_CloseOperation_result& operator=(const TCLIService_CloseOperation_result&);
   TCLIService_CloseOperation_result() {
@@ -1777,7 +1564,6 @@ class TCLIService_CloseOperation_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CloseOperation_result& obj);
 };
 
 typedef struct _TCLIService_CloseOperation_presult__isset {
@@ -1788,9 +1574,6 @@ typedef struct _TCLIService_CloseOperation_presult__isset {
 class TCLIService_CloseOperation_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "BDB51A479DCD1EB1DB636FA1B4BE02A7";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0xB5,0x1A,0x47,0x9D,0xCD,0x1E,0xB1,0xDB,0x63,0x6F,0xA1,0xB4,0xBE,0x02,0xA7};
-
 
   virtual ~TCLIService_CloseOperation_presult() throw();
   TCloseOperationResp* success;
@@ -1799,7 +1582,6 @@ class TCLIService_CloseOperation_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CloseOperation_presult& obj);
 };
 
 typedef struct _TCLIService_GetResultSetMetadata_args__isset {
@@ -1810,9 +1592,6 @@ typedef struct _TCLIService_GetResultSetMetadata_args__isset {
 class TCLIService_GetResultSetMetadata_args {
  public:
 
-  static const char* ascii_fingerprint; // = "2A0009415DC2A8A9CDCF3A75C16ADBE7";
-  static const uint8_t binary_fingerprint[16]; // = {0x2A,0x00,0x09,0x41,0x5D,0xC2,0xA8,0xA9,0xCD,0xCF,0x3A,0x75,0xC1,0x6A,0xDB,0xE7};
-
   TCLIService_GetResultSetMetadata_args(const TCLIService_GetResultSetMetadata_args&);
   TCLIService_GetResultSetMetadata_args& operator=(const TCLIService_GetResultSetMetadata_args&);
   TCLIService_GetResultSetMetadata_args() {
@@ -1840,23 +1619,18 @@ class TCLIService_GetResultSetMetadata_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetResultSetMetadata_args& obj);
 };
 
 
 class TCLIService_GetResultSetMetadata_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "2A0009415DC2A8A9CDCF3A75C16ADBE7";
-  static const uint8_t binary_fingerprint[16]; // = {0x2A,0x00,0x09,0x41,0x5D,0xC2,0xA8,0xA9,0xCD,0xCF,0x3A,0x75,0xC1,0x6A,0xDB,0xE7};
-
 
   virtual ~TCLIService_GetResultSetMetadata_pargs() throw();
   const TGetResultSetMetadataReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetResultSetMetadata_pargs& obj);
 };
 
 typedef struct _TCLIService_GetResultSetMetadata_result__isset {
@@ -1867,9 +1641,6 @@ typedef struct _TCLIService_GetResultSetMetadata_result__isset {
 class TCLIService_GetResultSetMetadata_result {
  public:
 
-  static const char* ascii_fingerprint; // = "748CA3BE2055D5C1AD7EAAEF01F7C463";
-  static const uint8_t binary_fingerprint[16]; // = {0x74,0x8C,0xA3,0xBE,0x20,0x55,0xD5,0xC1,0xAD,0x7E,0xAA,0xEF,0x01,0xF7,0xC4,0x63};
-
   TCLIService_GetResultSetMetadata_result(const TCLIService_GetResultSetMetadata_result&);
   TCLIService_GetResultSetMetadata_result& operator=(const TCLIService_GetResultSetMetadata_result&);
   TCLIService_GetResultSetMetadata_result() {
@@ -1897,7 +1668,6 @@ class TCLIService_GetResultSetMetadata_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetResultSetMetadata_result& obj);
 };
 
 typedef struct _TCLIService_GetResultSetMetadata_presult__isset {
@@ -1908,9 +1678,6 @@ typedef struct _TCLIService_GetResultSetMetadata_presult__isset {
 class TCLIService_GetResultSetMetadata_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "748CA3BE2055D5C1AD7EAAEF01F7C463";
-  static const uint8_t binary_fingerprint[16]; // = {0x74,0x8C,0xA3,0xBE,0x20,0x55,0xD5,0xC1,0xAD,0x7E,0xAA,0xEF,0x01,0xF7,0xC4,0x63};
-
 
   virtual ~TCLIService_GetResultSetMetadata_presult() throw();
   TGetResultSetMetadataResp* success;
@@ -1919,7 +1686,6 @@ class TCLIService_GetResultSetMetadata_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetResultSetMetadata_presult& obj);
 };
 
 typedef struct _TCLIService_FetchResults_args__isset {
@@ -1930,9 +1696,6 @@ typedef struct _TCLIService_FetchResults_args__isset {
 class TCLIService_FetchResults_args {
  public:
 
-  static const char* ascii_fingerprint; // = "9861C5443566158A1DAEAC80886635C8";
-  static const uint8_t binary_fingerprint[16]; // = {0x98,0x61,0xC5,0x44,0x35,0x66,0x15,0x8A,0x1D,0xAE,0xAC,0x80,0x88,0x66,0x35,0xC8};
-
   TCLIService_FetchResults_args(const TCLIService_FetchResults_args&);
   TCLIService_FetchResults_args& operator=(const TCLIService_FetchResults_args&);
   TCLIService_FetchResults_args() {
@@ -1960,23 +1723,18 @@ class TCLIService_FetchResults_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_FetchResults_args& obj);
 };
 
 
 class TCLIService_FetchResults_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "9861C5443566158A1DAEAC80886635C8";
-  static const uint8_t binary_fingerprint[16]; // = {0x98,0x61,0xC5,0x44,0x35,0x66,0x15,0x8A,0x1D,0xAE,0xAC,0x80,0x88,0x66,0x35,0xC8};
-
 
   virtual ~TCLIService_FetchResults_pargs() throw();
   const TFetchResultsReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_FetchResults_pargs& obj);
 };
 
 typedef struct _TCLIService_FetchResults_result__isset {
@@ -1987,9 +1745,6 @@ typedef struct _TCLIService_FetchResults_result__isset {
 class TCLIService_FetchResults_result {
  public:
 
-  static const char* ascii_fingerprint; // = "CDEF797B9FFCA6B2CD18163A3D78F196";
-  static const uint8_t binary_fingerprint[16]; // = {0xCD,0xEF,0x79,0x7B,0x9F,0xFC,0xA6,0xB2,0xCD,0x18,0x16,0x3A,0x3D,0x78,0xF1,0x96};
-
   TCLIService_FetchResults_result(const TCLIService_FetchResults_result&);
   TCLIService_FetchResults_result& operator=(const TCLIService_FetchResults_result&);
   TCLIService_FetchResults_result() {
@@ -2017,7 +1772,6 @@ class TCLIService_FetchResults_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_FetchResults_result& obj);
 };
 
 typedef struct _TCLIService_FetchResults_presult__isset {
@@ -2028,9 +1782,6 @@ typedef struct _TCLIService_FetchResults_presult__isset {
 class TCLIService_FetchResults_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "CDEF797B9FFCA6B2CD18163A3D78F196";
-  static const uint8_t binary_fingerprint[16]; // = {0xCD,0xEF,0x79,0x7B,0x9F,0xFC,0xA6,0xB2,0xCD,0x18,0x16,0x3A,0x3D,0x78,0xF1,0x96};
-
 
   virtual ~TCLIService_FetchResults_presult() throw();
   TFetchResultsResp* success;
@@ -2039,7 +1790,6 @@ class TCLIService_FetchResults_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_FetchResults_presult& obj);
 };
 
 typedef struct _TCLIService_GetDelegationToken_args__isset {
@@ -2050,9 +1800,6 @@ typedef struct _TCLIService_GetDelegationToken_args__isset {
 class TCLIService_GetDelegationToken_args {
  public:
 
-  static const char* ascii_fingerprint; // = "BF8EEEB8C67FB6195D3D9BA5BA2C58A4";
-  static const uint8_t binary_fingerprint[16]; // = {0xBF,0x8E,0xEE,0xB8,0xC6,0x7F,0xB6,0x19,0x5D,0x3D,0x9B,0xA5,0xBA,0x2C,0x58,0xA4};
-
   TCLIService_GetDelegationToken_args(const TCLIService_GetDelegationToken_args&);
   TCLIService_GetDelegationToken_args& operator=(const TCLIService_GetDelegationToken_args&);
   TCLIService_GetDelegationToken_args() {
@@ -2080,23 +1827,18 @@ class TCLIService_GetDelegationToken_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetDelegationToken_args& obj);
 };
 
 
 class TCLIService_GetDelegationToken_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "BF8EEEB8C67FB6195D3D9BA5BA2C58A4";
-  static const uint8_t binary_fingerprint[16]; // = {0xBF,0x8E,0xEE,0xB8,0xC6,0x7F,0xB6,0x19,0x5D,0x3D,0x9B,0xA5,0xBA,0x2C,0x58,0xA4};
-
 
   virtual ~TCLIService_GetDelegationToken_pargs() throw();
   const TGetDelegationTokenReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetDelegationToken_pargs& obj);
 };
 
 typedef struct _TCLIService_GetDelegationToken_result__isset {
@@ -2107,9 +1849,6 @@ typedef struct _TCLIService_GetDelegationToken_result__isset {
 class TCLIService_GetDelegationToken_result {
  public:
 
-  static const char* ascii_fingerprint; // = "58C1B84BE91071117FFEBB0AFA1636F7";
-  static const uint8_t binary_fingerprint[16]; // = {0x58,0xC1,0xB8,0x4B,0xE9,0x10,0x71,0x11,0x7F,0xFE,0xBB,0x0A,0xFA,0x16,0x36,0xF7};
-
   TCLIService_GetDelegationToken_result(const TCLIService_GetDelegationToken_result&);
   TCLIService_GetDelegationToken_result& operator=(const TCLIService_GetDelegationToken_result&);
   TCLIService_GetDelegationToken_result() {
@@ -2137,7 +1876,6 @@ class TCLIService_GetDelegationToken_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetDelegationToken_result& obj);
 };
 
 typedef struct _TCLIService_GetDelegationToken_presult__isset {
@@ -2148,9 +1886,6 @@ typedef struct _TCLIService_GetDelegationToken_presult__isset {
 class TCLIService_GetDelegationToken_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "58C1B84BE91071117FFEBB0AFA1636F7";
-  static const uint8_t binary_fingerprint[16]; // = {0x58,0xC1,0xB8,0x4B,0xE9,0x10,0x71,0x11,0x7F,0xFE,0xBB,0x0A,0xFA,0x16,0x36,0xF7};
-
 
   virtual ~TCLIService_GetDelegationToken_presult() throw();
   TGetDelegationTokenResp* success;
@@ -2159,7 +1894,6 @@ class TCLIService_GetDelegationToken_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_GetDelegationToken_presult& obj);
 };
 
 typedef struct _TCLIService_CancelDelegationToken_args__isset {
@@ -2170,9 +1904,6 @@ typedef struct _TCLIService_CancelDelegationToken_args__isset {
 class TCLIService_CancelDelegationToken_args {
  public:
 
-  static const char* ascii_fingerprint; // = "9C6A93D9444D84C6B888C2DA30E4CB54";
-  static const uint8_t binary_fingerprint[16]; // = {0x9C,0x6A,0x93,0xD9,0x44,0x4D,0x84,0xC6,0xB8,0x88,0xC2,0xDA,0x30,0xE4,0xCB,0x54};
-
   TCLIService_CancelDelegationToken_args(const TCLIService_CancelDelegationToken_args&);
   TCLIService_CancelDelegationToken_args& operator=(const TCLIService_CancelDelegationToken_args&);
   TCLIService_CancelDelegationToken_args() {
@@ -2200,23 +1931,18 @@ class TCLIService_CancelDelegationToken_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CancelDelegationToken_args& obj);
 };
 
 
 class TCLIService_CancelDelegationToken_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "9C6A93D9444D84C6B888C2DA30E4CB54";
-  static const uint8_t binary_fingerprint[16]; // = {0x9C,0x6A,0x93,0xD9,0x44,0x4D,0x84,0xC6,0xB8,0x88,0xC2,0xDA,0x30,0xE4,0xCB,0x54};
-
 
   virtual ~TCLIService_CancelDelegationToken_pargs() throw();
   const TCancelDelegationTokenReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CancelDelegationToken_pargs& obj);
 };
 
 typedef struct _TCLIService_CancelDelegationToken_result__isset {
@@ -2227,9 +1953,6 @@ typedef struct _TCLIService_CancelDelegationToken_result__isset {
 class TCLIService_CancelDelegationToken_result {
  public:
 
-  static const char* ascii_fingerprint; // = "BDB51A479DCD1EB1DB636FA1B4BE02A7";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0xB5,0x1A,0x47,0x9D,0xCD,0x1E,0xB1,0xDB,0x63,0x6F,0xA1,0xB4,0xBE,0x02,0xA7};
-
   TCLIService_CancelDelegationToken_result(const TCLIService_CancelDelegationToken_result&);
   TCLIService_CancelDelegationToken_result& operator=(const TCLIService_CancelDelegationToken_result&);
   TCLIService_CancelDelegationToken_result() {
@@ -2257,7 +1980,6 @@ class TCLIService_CancelDelegationToken_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CancelDelegationToken_result& obj);
 };
 
 typedef struct _TCLIService_CancelDelegationToken_presult__isset {
@@ -2268,9 +1990,6 @@ typedef struct _TCLIService_CancelDelegationToken_presult__isset {
 class TCLIService_CancelDelegationToken_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "BDB51A479DCD1EB1DB636FA1B4BE02A7";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0xB5,0x1A,0x47,0x9D,0xCD,0x1E,0xB1,0xDB,0x63,0x6F,0xA1,0xB4,0xBE,0x02,0xA7};
-
 
   virtual ~TCLIService_CancelDelegationToken_presult() throw();
   TCancelDelegationTokenResp* success;
@@ -2279,7 +1998,6 @@ class TCLIService_CancelDelegationToken_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_CancelDelegationToken_presult& obj);
 };
 
 typedef struct _TCLIService_RenewDelegationToken_args__isset {
@@ -2290,9 +2008,6 @@ typedef struct _TCLIService_RenewDelegationToken_args__isset {
 class TCLIService_RenewDelegationToken_args {
  public:
 
-  static const char* ascii_fingerprint; // = "9C6A93D9444D84C6B888C2DA30E4CB54";
-  static const uint8_t binary_fingerprint[16]; // = {0x9C,0x6A,0x93,0xD9,0x44,0x4D,0x84,0xC6,0xB8,0x88,0xC2,0xDA,0x30,0xE4,0xCB,0x54};
-
   TCLIService_RenewDelegationToken_args(const TCLIService_RenewDelegationToken_args&);
   TCLIService_RenewDelegationToken_args& operator=(const TCLIService_RenewDelegationToken_args&);
   TCLIService_RenewDelegationToken_args() {
@@ -2320,23 +2035,18 @@ class TCLIService_RenewDelegationToken_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_RenewDelegationToken_args& obj);
 };
 
 
 class TCLIService_RenewDelegationToken_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "9C6A93D9444D84C6B888C2DA30E4CB54";
-  static const uint8_t binary_fingerprint[16]; // = {0x9C,0x6A,0x93,0xD9,0x44,0x4D,0x84,0xC6,0xB8,0x88,0xC2,0xDA,0x30,0xE4,0xCB,0x54};
-
 
   virtual ~TCLIService_RenewDelegationToken_pargs() throw();
   const TRenewDelegationTokenReq* req;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_RenewDelegationToken_pargs& obj);
 };
 
 typedef struct _TCLIService_RenewDelegationToken_result__isset {
@@ -2347,9 +2057,6 @@ typedef struct _TCLIService_RenewDelegationToken_result__isset {
 class TCLIService_RenewDelegationToken_result {
  public:
 
-  static const char* ascii_fingerprint; // = "BDB51A479DCD1EB1DB636FA1B4BE02A7";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0xB5,0x1A,0x47,0x9D,0xCD,0x1E,0xB1,0xDB,0x63,0x6F,0xA1,0xB4,0xBE,0x02,0xA7};
-
   TCLIService_RenewDelegationToken_result(const TCLIService_RenewDelegationToken_result&);
   TCLIService_RenewDelegationToken_result& operator=(const TCLIService_RenewDelegationToken_result&);
   TCLIService_RenewDelegationToken_result() {
@@ -2377,7 +2084,6 @@ class TCLIService_RenewDelegationToken_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_RenewDelegationToken_result& obj);
 };
 
 typedef struct _TCLIService_RenewDelegationToken_presult__isset {
@@ -2388,9 +2094,6 @@ typedef struct _TCLIService_RenewDelegationToken_presult__isset {
 class TCLIService_RenewDelegationToken_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "BDB51A479DCD1EB1DB636FA1B4BE02A7";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0xB5,0x1A,0x47,0x9D,0xCD,0x1E,0xB1,0xDB,0x63,0x6F,0xA1,0xB4,0xBE,0x02,0xA7};
-
 
   virtual ~TCLIService_RenewDelegationToken_presult() throw();
   TRenewDelegationTokenResp* success;
@@ -2399,7 +2102,6 @@ class TCLIService_RenewDelegationToken_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const TCLIService_RenewDelegationToken_presult& obj);
 };
 
 class TCLIServiceClient : virtual public TCLIServiceIf {
@@ -2760,6 +2462,103 @@ class TCLIServiceMultiface : virtual public TCLIServiceIf {
 
 };
 
+// The 'concurrent' client is a thread safe client that correctly handles
+// out of order responses.  It is slower than the regular client, so should
+// only be used when you need to share a connection among multiple threads
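[Editor's illustrative sketch, not part of the generated diff: the new TCLIServiceConcurrentClient declared just below (its declaration continues past this point) can share a single connection across threads. The includes, the apache.hive.service.cli.thrift namespace, the buffered transport/binary protocol choice, and the localhost:10000 endpoint here are assumptions made only for this example.]

    #include <boost/shared_ptr.hpp>
    #include <thrift/transport/TSocket.h>
    #include <thrift/transport/TBufferTransports.h>
    #include <thrift/protocol/TBinaryProtocol.h>
    #include "TCLIService.h"

    using namespace apache::thrift::transport;
    using namespace apache::thrift::protocol;
    using namespace apache::hive::service::cli::thrift;  // cpp namespace assumed from TCLIService.thrift

    int main() {
      // One connection, wrapped in a single protocol object, can be handed to
      // the concurrent client and then used from several threads at once.
      boost::shared_ptr<TTransport> socket(new TSocket("localhost", 10000));   // assumed HiveServer2 endpoint
      boost::shared_ptr<TTransport> transport(new TBufferedTransport(socket));
      boost::shared_ptr<TProtocol>  protocol(new TBinaryProtocol(transport));
      TCLIServiceConcurrentClient client(protocol);

      transport->open();
      TOpenSessionReq  openReq;    // calls look the same as on the plain client; internally
      TOpenSessionResp openResp;   // send_* returns a seqid and recv_* matches the reply to it
      client.OpenSession(openResp, openReq);
      transport->close();
      return 0;
    }
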
+class TCLIServiceConcurrentClient : virtual public TCLIServiceIf {
+ public:
+  TCLIServiceConcurrentClient(boost::shared_ptr< ::apache::thrift::protocol::TProtocol> prot) {
+    setProtocol(prot);
+  }
+  TCLIServiceConcurrentClient(boost::shared_ptr< ::apache::thrift::protocol::TProtocol> iprot, boost::shared_ptr< ::apache::thrift::protocol::TProtocol> oprot) {
+    setProtocol(iprot,oprot);
+  }
+ private:
+  void setProtocol(boost::shared_ptr< ::apache::thrift::protocol::TProtocol> prot) {
+  setProtocol(prot,prot);
+  }
+  void setProtocol(boost::shared_ptr< ::apache::thrift::protocol::TProtocol> iprot, boost::shared_ptr< ::apache::thrift::protocol::TProtocol> oprot) {
+    piprot_=iprot;
+    poprot_=oprot;
+    iprot_ = iprot.get();
+    oprot_ = oprot.get();
+  }
+ public:
+  boost::shared_ptr< ::apache::thrift::protocol::TProtocol> getInputProtocol() {
+    return piprot_;
+  }
+  boost::shared_ptr< ::apache::thrift::protocol::TProtocol> getOutputProtocol() {
+    return poprot_;
+  }
+  void OpenSession(TOpenSessionResp& _return, const TOpenSessionReq& req);
+  int32_t send_OpenSession(const TOpenSessionReq& req);
+  void recv_OpenSession(TOpenSessionResp& _return, const int32_t seqid);
+  void CloseSession(TCloseSessionResp& _return, const TCloseSessionReq& req);
+  int32_t send_CloseSession(const TCloseSessionReq& req);
+  void recv_CloseSession(TCloseSessionResp& _return, const int32_t seqid);
+  void GetInfo(TGetInfoResp& _return, const TGetInfoReq& req);
+  int32_t send_GetInfo(const TGetInfoReq& req);
+  void recv_GetInfo(TGetInfoResp& _return, const int32_t seqid);
+  void ExecuteStatement(TExecuteStatementResp& _return, const TExecuteStatementReq& req);
+  int32_t send_ExecuteStatement(const TExecuteStatementReq& req);
+  void recv_ExecuteStatement(TExecuteStatementResp& _return, const int32_t seqid);
+  void GetTypeInfo(TGetTypeInfoResp& _return, const TGetTypeInfoReq& req);
+  int32_t send_GetTypeInfo(const TGetTypeInfoReq& req);
+  void recv_GetTypeInfo(TGetTypeInfoResp& _return, const int32_t seqid);
+  void GetCatalogs(TGetCatalogsResp& _return, const TGetCatalogsReq& req);
+  int32_t send_GetCatalogs(const TGetCatalogsReq& req);
+  void recv_GetCatalogs(TGetCatalogsResp& _return, const int32_t seqid);
+  void GetSchemas(TGetSchemasResp& _return, const TGetSchemasReq& req);
+  int32_t send_GetSchemas(const TGetSchemasReq& req);
+  void recv_GetSchemas(TGetSchemasResp& _return, const int32_t seqid);
+  void GetTables(TGetTablesResp& _return, const TGetTablesReq& req);
+  int32_t send_GetTables(const TGetTablesReq& req);
+  void recv_GetTables(TGetTablesResp& _return, const int32_t seqid);
+  void GetTableTypes(TGetTableTypesResp& _return, const TGetTableTypesReq& req);
+  int32_t send_GetTableTypes(const TGetTableTypesReq& req);
+  void recv_GetTableTypes(TGetTableTypesResp& _return, const int32_t seqid);
+  void GetColumns(TGetColumnsResp& _return, const TGetColumnsReq& req);
+  int32_t send_GetColumns(const TGetColumnsReq& req);
+  void recv_GetColumns(TGetColumnsResp& _return, const int32_t seqid);
+  void GetFunctions(TGetFunctionsResp& _return, const TGetFunctionsReq& req);
+  int32_t send_GetFunctions(const TGetFunctionsReq& req);
+  void recv_GetFunctions(TGetFunctionsResp& _return, const int32_t seqid);
+  void GetOperationStatus(TGetOperationStatusResp& _return, const TGetOperationStatusReq& req);
+  int32_t send_GetOperationStatus(const TGetOperationStatusReq& req);
+  void recv_GetOperationStatus(TGetOperationStatusResp& _return, const int32_t seqid);
+  void CancelOperation(TCancelOperationResp& _return, const TCancelOperationReq& req);
+  int32_t send_CancelOperation(const TCancelOperationReq& req);
+  void recv_CancelOperation(TCancelOperationResp& _return, const int32_t seqid);
+  void CloseOperation(TCloseOperationResp& _return, const TCloseOperationReq& req);
+  int32_t send_CloseOperation(const TCloseOperationReq& req);
+  void recv_CloseOperation(TCloseOperationResp& _return, const int32_t seqid);
+  void GetResultSetMetadata(TGetResultSetMetadataResp& _return, const TGetResultSetMetadataReq& req);
+  int32_t send_GetResultSetMetadata(const TGetResultSetMetadataReq& req);
+  void recv_GetResultSetMetadata(TGetResultSetMetadataResp& _return, const int32_t seqid);
+  void FetchResults(TFetchResultsResp& _return, const TFetchResultsReq& req);
+  int32_t send_FetchResults(const TFetchResultsReq& req);
+  void recv_FetchResults(TFetchResultsResp& _return, const int32_t seqid);
+  void GetDelegationToken(TGetDelegationTokenResp& _return, const TGetDelegationTokenReq& req);
+  int32_t send_GetDelegationToken(const TGetDelegationTokenReq& req);
+  void recv_GetDelegationToken(TGetDelegationTokenResp& _return, const int32_t seqid);
+  void CancelDelegationToken(TCancelDelegationTokenResp& _return, const TCancelDelegationTokenReq& req);
+  int32_t send_CancelDelegationToken(const TCancelDelegationTokenReq& req);
+  void recv_CancelDelegationToken(TCancelDelegationTokenResp& _return, const int32_t seqid);
+  void RenewDelegationToken(TRenewDelegationTokenResp& _return, const TRenewDelegationTokenReq& req);
+  int32_t send_RenewDelegationToken(const TRenewDelegationTokenReq& req);
+  void recv_RenewDelegationToken(TRenewDelegationTokenResp& _return, const int32_t seqid);
+ protected:
+  boost::shared_ptr< ::apache::thrift::protocol::TProtocol> piprot_;
+  boost::shared_ptr< ::apache::thrift::protocol::TProtocol> poprot_;
+  ::apache::thrift::protocol::TProtocol* iprot_;
+  ::apache::thrift::protocol::TProtocol* oprot_;
+  ::apache::thrift::async::TConcurrentClientSyncInfo sync_;
+};
+
+#ifdef _WIN32
+  #pragma warning( pop )
+#endif
+
 }}}}} // namespace
 
 #endif
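
The generated "concurrent" client above is documented as thread safe because it matches out-of-order responses to callers by sequence id while sharing one connection among threads. Below is a conceptual sketch of that demultiplexing idea, written in Java rather than the generated C++ and with illustrative names only (this is not the Thrift runtime API): each sender registers a future under its seqid, and a single reader thread completes whichever future an incoming frame belongs to. In the generated code, the TConcurrentClientSyncInfo member shown above carries the corresponding bookkeeping.

import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;

public class SeqIdDemux {
  // Callers waiting for a reply, keyed by the sequence id of the request they sent.
  private final Map<Integer, CompletableFuture<byte[]>> pending = new ConcurrentHashMap<>();
  private int nextSeqId = 0;

  // Called while holding the send lock: allocate a seqid and register interest in its reply.
  public synchronized int registerCall(CompletableFuture<byte[]> reply) {
    int seqid = ++nextSeqId;
    pending.put(seqid, reply);
    return seqid;
  }

  // Called by the single reader thread for every response frame pulled off the shared connection.
  public void onResponse(int seqid, byte[] payload) {
    CompletableFuture<byte[]> reply = pending.remove(seqid);
    if (reply != null) {
      reply.complete(payload); // wakes whichever thread issued this seqid, regardless of arrival order
    }
    // An unknown seqid (e.g. the caller already gave up) is simply dropped.
  }
}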

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp b/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
index dcca5c8..60eeff7 100644
--- a/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
+++ b/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/TCLIService_constants.h
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-cpp/TCLIService_constants.h b/service/src/gen/thrift/gen-cpp/TCLIService_constants.h
index 8993394..9310717 100644
--- a/service/src/gen/thrift/gen-cpp/TCLIService_constants.h
+++ b/service/src/gen/thrift/gen-cpp/TCLIService_constants.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated


[34/55] [abbrv] hive git commit: HIVE-12261 : schematool version info exit status should depend on compatibility, not equality (Thejas Nair, reviewed by Sushanth Sowmyan)

Posted by xu...@apache.org.
HIVE-12261 : schematool version info exit status should depend on compatibility, not equality (Thejas Nair, reviewed by Sushanth Sowmyan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1f9556d8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1f9556d8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1f9556d8

Branch: refs/heads/spark
Commit: 1f9556d843ae5bfaf0c701f4f7428dfabc127e7c
Parents: f415ce9
Author: Thejas Nair <th...@hortonworks.com>
Authored: Sun Oct 25 23:47:57 2015 -0700
Committer: Thejas Nair <th...@hortonworks.com>
Committed: Sun Oct 25 23:47:57 2015 -0700

----------------------------------------------------------------------
 .../org/apache/hive/beeline/HiveSchemaTool.java | 12 +++---
 .../hive/metastore/MetaStoreSchemaInfo.java     | 44 ++++++++++++++++++++
 2 files changed, 50 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/1f9556d8/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
----------------------------------------------------------------------
diff --git a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
index 9e72a3a..6f0ba07 100644
--- a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
+++ b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
@@ -129,7 +129,7 @@ public class HiveSchemaTool {
     String dbVersion = getMetaStoreSchemaVersion(metastoreConn);
     System.out.println("Hive distribution version:\t " + hiveVersion);
     System.out.println("Metastore schema version:\t " + dbVersion);
-    assertSameVersion(hiveVersion, dbVersion);
+    assertCompatibleVersion(hiveVersion, dbVersion);
 
   }
 
@@ -185,15 +185,15 @@ public class HiveSchemaTool {
     String newSchemaVersion = getMetaStoreSchemaVersion(
         getConnectionToMetastore(false));
     // verify that the new version is added to schema
-    assertSameVersion(MetaStoreSchemaInfo.getHiveSchemaVersion(), newSchemaVersion);
+    assertCompatibleVersion(MetaStoreSchemaInfo.getHiveSchemaVersion(), newSchemaVersion);
 
   }
 
-  private void assertSameVersion(String hiveSchemaVersion, String dbSchemaVersion)
+  private void assertCompatibleVersion(String hiveSchemaVersion, String dbSchemaVersion)
       throws HiveMetaException {
-    if (!hiveSchemaVersion.equalsIgnoreCase(dbSchemaVersion)) {
-      throw new HiveMetaException("Expected schema version " + hiveSchemaVersion
-          + ", found version " + dbSchemaVersion);
+    if (!MetaStoreSchemaInfo.isVersionCompatible(hiveSchemaVersion, dbSchemaVersion)) {
+      throw new HiveMetaException("Metastore schema version is not compatible. Hive Version: "
+          + hiveSchemaVersion + ", Database Schema Version: " + dbSchemaVersion);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/1f9556d8/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
index d72267d..98798e8 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
@@ -151,6 +151,10 @@ public class MetaStoreSchemaInfo {
 
   public static String getHiveSchemaVersion() {
     String hiveVersion = HiveVersionInfo.getShortVersion();
+    return getEquivalentVersion(hiveVersion);
+  }
+
+  private static String getEquivalentVersion(String hiveVersion) {
     // if there is an equivalent version, return that, else return this version
     String equivalentVersion = EQUIVALENT_VERSIONS.get(hiveVersion);
     if (equivalentVersion != null) {
@@ -160,4 +164,44 @@ public class MetaStoreSchemaInfo {
     }
   }
 
+  /**
+   * A dbVersion is compatible with hive version if it is greater or equal to
+   * the hive version. This is result of the db schema upgrade design principles
+   * followed in hive project.
+   *
+   * @param hiveVersion
+   *          version of hive software
+   * @param dbVersion
+   *          version of metastore rdbms schema
+   * @return true if versions are compatible
+   */
+  public static boolean isVersionCompatible(String hiveVersion, String dbVersion) {
+    hiveVersion = getEquivalentVersion(hiveVersion);
+    dbVersion = getEquivalentVersion(dbVersion);
+    if (hiveVersion.equals(dbVersion)) {
+      return true;
+    }
+    String[] hiveVerParts = hiveVersion.split("\\.");
+    String[] dbVerParts = dbVersion.split("\\.");
+    if (hiveVerParts.length != 3 || dbVerParts.length != 3) {
+      // these are non standard version numbers. can't perform the
+      // comparison on these, so assume that they are incompatible
+      return false;
+    }
+
+    for (int i = 0; i < dbVerParts.length; i++) {
+      Integer dbVerPart = Integer.valueOf(dbVerParts[i]);
+      Integer hiveVerPart = Integer.valueOf(hiveVerParts[i]);
+      if (dbVerPart > hiveVerPart) {
+        return true;
+      } else if (dbVerPart < hiveVerPart) {
+        return false;
+      } else {
+        continue; // compare next part
+      }
+    }
+
+    return true;
+  }
+
 }
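
A minimal sketch of how the new rule behaves, not part of the commit: it re-implements the comparison loop from isVersionCompatible above without the EQUIVALENT_VERSIONS lookup, using made-up version strings, so the expected schematool outcomes can be read off directly.

public class VersionCompatibilityExample {
  public static void main(String[] args) {
    // Same version: compatible.
    System.out.println(isCompat("1.2.0", "1.2.0")); // true
    // Schema newer than the Hive build: compatible (the upgrade was already applied).
    System.out.println(isCompat("1.2.0", "2.0.0")); // true
    // Schema older than the Hive build: incompatible, schematool should exit with an error.
    System.out.println(isCompat("2.0.0", "1.2.0")); // false
    // Non three-part versions cannot be compared and are treated as incompatible.
    System.out.println(isCompat("1.2", "1.2.0"));   // false
  }

  // Mirrors the comparison loop in MetaStoreSchemaInfo.isVersionCompatible (illustration only).
  static boolean isCompat(String hiveVersion, String dbVersion) {
    if (hiveVersion.equals(dbVersion)) {
      return true;
    }
    String[] hiveParts = hiveVersion.split("\\.");
    String[] dbParts = dbVersion.split("\\.");
    if (hiveParts.length != 3 || dbParts.length != 3) {
      return false;
    }
    for (int i = 0; i < 3; i++) {
      int db = Integer.parseInt(dbParts[i]);
      int hive = Integer.parseInt(hiveParts[i]);
      if (db > hive) {
        return true;
      } else if (db < hive) {
        return false;
      }
    }
    return true;
  }
}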


[18/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp b/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp
index f61ac7d..f982bf2 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h b/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h
index c95fec1..ae14bd1 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_constants.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated


[52/55] [abbrv] hive git commit: HIVE-12227 : LLAP: better column vector object pools (Sergey Shelukhin, reviewed by Gopal V)

Posted by xu...@apache.org.
HIVE-12227 : LLAP: better column vector object pools (Sergey Shelukhin, reviewed by Gopal V)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f2ede0e7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f2ede0e7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f2ede0e7

Branch: refs/heads/spark
Commit: f2ede0e728c9740d7f7c37bbc342ada7df11b1aa
Parents: e5b5303
Author: Sergey Shelukhin <se...@apache.org>
Authored: Tue Oct 27 18:26:03 2015 -0700
Committer: Sergey Shelukhin <se...@apache.org>
Committed: Tue Oct 27 18:26:03 2015 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java   | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/f2ede0e7/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java
index 23c2c51..b81e97d 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java
@@ -42,9 +42,8 @@ public abstract class EncodedDataConsumer<BatchKey, BatchType extends EncodedCol
   private final Consumer<ColumnVectorBatch> downstreamConsumer;
   private Callable<Void> readCallable;
   private final LlapDaemonQueueMetrics queueMetrics;
-  // TODO: if we were using Exchanger, pool would not be necessary here - it would be 1/N items
-  private final static int CVB_POOL_SIZE = 8;
   // Note that the pool is per EDC - within EDC, CVBs are expected to have the same schema.
+  private final static int CVB_POOL_SIZE = 128;
   protected final FixedSizedObjectPool<ColumnVectorBatch> cvbPool;
 
   public EncodedDataConsumer(Consumer<ColumnVectorBatch> consumer, final int colCount,
@@ -59,6 +58,7 @@ public abstract class EncodedDataConsumer<BatchKey, BatchType extends EncodedCol
           }
           @Override
           public void resetBeforeOffer(ColumnVectorBatch t) {
+            // Don't reset anything, we are reusing column vectors.
           }
         });
   }
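
A self-contained sketch of the bounded object-pool pattern this consumer relies on; the class and helper names are hypothetical and this is not Hive's FixedSizedObjectPool API. It shows why the two changes above matter: offer() resets nothing, so pooled column-vector batches are reused as-is, and the fixed capacity (raised from 8 to 128 here) bounds how many batches are retained.

import java.util.concurrent.ArrayBlockingQueue;

public class FixedPoolSketch<T> {
  /** Hook object: how to create a fresh instance and what to do before returning one to the pool. */
  public interface Helper<E> {
    E create();
    void resetBeforeOffer(E obj); // the LLAP consumer leaves this empty so column vectors are reused as-is
  }

  private final ArrayBlockingQueue<T> pool;
  private final Helper<T> helper;

  public FixedPoolSketch(int size, Helper<T> helper) {
    this.pool = new ArrayBlockingQueue<>(size);
    this.helper = helper;
  }

  /** Reuse a pooled object if one is available, otherwise create a new one. */
  public T take() {
    T obj = pool.poll();
    return obj != null ? obj : helper.create();
  }

  /** Return an object to the pool; if the pool is full (fixed size), the object is simply dropped. */
  public void offer(T obj) {
    helper.resetBeforeOffer(obj);
    pool.offer(obj);
  }
}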


[25/55] [abbrv] hive git commit: HIVE-11755 : Incorrect method called with Kerberos enabled in AccumuloStorageHandler (Josh Elser via Brock Noland)

Posted by xu...@apache.org.
HIVE-11755 : Incorrect method called with Kerberos enabled in AccumuloStorageHandler (Josh Elser via Brock Noland)

Signed-off-by: Ashutosh Chauhan <ha...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ee2d3189
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ee2d3189
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ee2d3189

Branch: refs/heads/spark
Commit: ee2d3189ff9a7e8bd604b2e036d53632a9b4e616
Parents: 3e0d87f
Author: Josh Elser <jo...@gmail.com>
Authored: Tue Sep 8 14:46:00 2015 -0800
Committer: Ashutosh Chauhan <ha...@apache.org>
Committed: Sat Oct 24 14:30:14 2015 -0700

----------------------------------------------------------------------
 .../hive/accumulo/HiveAccumuloHelper.java       | 55 ++++++++++---
 .../mr/HiveAccumuloTableOutputFormat.java       | 50 ++++++++----
 .../hive/accumulo/TestHiveAccumuloHelper.java   | 69 +++++++++++++++-
 .../mr/TestHiveAccumuloTableOutputFormat.java   | 86 +++++++++++++++++++-
 4 files changed, 229 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/ee2d3189/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java
index dfc5d03..71b8b77 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hive.accumulo;
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.util.Collection;
 
@@ -238,22 +239,56 @@ public class HiveAccumuloHelper {
   public void setZooKeeperInstance(JobConf jobConf, Class<?> inputOrOutputFormatClass, String
       zookeepers, String instanceName, boolean useSasl) throws IOException {
     try {
-      Class<?> clientConfigClass = JavaUtils.loadClass(CLIENT_CONFIGURATION_CLASS_NAME);
-
-      // get the ClientConfiguration
-      Object clientConfig = getClientConfiguration(zookeepers, instanceName, useSasl);
-
-      // AccumuloOutputFormat.setZooKeeperInstance(JobConf, ClientConfiguration) or
-      // AccumuloInputFormat.setZooKeeperInstance(JobConf, ClientConfiguration)
-      Method setZooKeeperMethod = inputOrOutputFormatClass.getMethod(
-          SET_ZOOKEEPER_INSTANCE_METHOD_NAME, JobConf.class, clientConfigClass);
-      setZooKeeperMethod.invoke(null, jobConf, clientConfig);
+      setZooKeeperInstanceWithReflection(jobConf, inputOrOutputFormatClass, zookeepers,
+          instanceName, useSasl);
+    } catch (InvocationTargetException e) {
+      Throwable cause = e.getCause();
+      if (null != cause && cause instanceof IllegalStateException) {
+        throw (IllegalStateException) cause;
+      }
+      throw new IOException("Failed to invoke setZooKeeperInstance method", e);
+    } catch (IllegalStateException e) {
+      // re-throw the ISE so the caller can work around the silly impl that throws this in the
+      // first place.
+      throw e;
     } catch (Exception e) {
       throw new IOException("Failed to invoke setZooKeeperInstance method", e);
     }
   }
 
   /**
+   * Wrap the setZooKeeperInstance reflected-call into its own method for testing
+   *
+   * @param jobConf
+   *          The JobConf
+   * @param inputOrOutputFormatClass
+   *          The InputFormat or OutputFormat class
+   * @param zookeepers
+   *          ZooKeeper hosts
+   * @param instanceName
+   *          Accumulo instance name
+   * @param useSasl
+   *          Is SASL enabled
+   * @throws IOException
+   *           When invocation of the method fails
+   */
+  void setZooKeeperInstanceWithReflection(JobConf jobConf, Class<?> inputOrOutputFormatClass, String
+      zookeepers, String instanceName, boolean useSasl) throws IOException, ClassNotFoundException,
+      NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException,
+      InvocationTargetException {
+    Class<?> clientConfigClass = JavaUtils.loadClass(CLIENT_CONFIGURATION_CLASS_NAME);
+
+    // get the ClientConfiguration
+    Object clientConfig = getClientConfiguration(zookeepers, instanceName, useSasl);
+
+    // AccumuloOutputFormat.setZooKeeperInstance(JobConf, ClientConfiguration) or
+    // AccumuloInputFormat.setZooKeeperInstance(JobConf, ClientConfiguration)
+    Method setZooKeeperMethod = inputOrOutputFormatClass.getMethod(
+        SET_ZOOKEEPER_INSTANCE_METHOD_NAME, JobConf.class, clientConfigClass);
+    setZooKeeperMethod.invoke(null, jobConf, clientConfig);
+  }
+      
+  /**
    * Wrapper around <code>ConfiguratorBase.unwrapAuthenticationToken</code> which only exists in
    * 1.7.0 and new. Uses reflection to not break compat.
    *
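
A minimal sketch of the exception-unwrapping pattern introduced above, using a hypothetical method name rather than Hive's API: the reflected setZooKeeperInstance call can surface the underlying IllegalStateException ("instance already configured") wrapped in an InvocationTargetException, and the fix re-throws the original ISE so callers can still detect and work around that case instead of seeing only a generic IOException.

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

public class ReflectionUnwrapSketch {
  static void invokeStatic(Method method, Object... args) throws IOException {
    try {
      method.invoke(null, args); // static call, as with AccumuloOutputFormat.setZooKeeperInstance
    } catch (InvocationTargetException e) {
      Throwable cause = e.getCause();
      if (cause instanceof IllegalStateException) {
        // Propagate the original ISE so the caller can recognize "already configured".
        throw (IllegalStateException) cause;
      }
      throw new IOException("Failed to invoke " + method.getName(), e);
    } catch (Exception e) {
      throw new IOException("Failed to invoke " + method.getName(), e);
    }
  }
}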

http://git-wip-us.apache.org/repos/asf/hive/blob/ee2d3189/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java
index 0189c07..3ae5431 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java
@@ -61,7 +61,7 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
   }
 
   protected void configureAccumuloOutputFormat(JobConf job) throws IOException {
-    AccumuloConnectionParameters cnxnParams = new AccumuloConnectionParameters(job);
+    AccumuloConnectionParameters cnxnParams = getConnectionParams(job);
 
     final String tableName = job.get(AccumuloSerDeParameters.TABLE_NAME);
 
@@ -72,35 +72,35 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
     // Set the necessary Accumulo information
     try {
       if (cnxnParams.useMockInstance()) {
-        setAccumuloMockInstance(job, cnxnParams.getAccumuloInstanceName());
+        setMockInstanceWithErrorChecking(job, cnxnParams.getAccumuloInstanceName());
       } else {
         // Accumulo instance name with ZK quorum
-        setAccumuloZooKeeperInstance(job, cnxnParams.getAccumuloInstanceName(),
+        setZooKeeperInstanceWithErrorChecking(job, cnxnParams.getAccumuloInstanceName(),
             cnxnParams.getZooKeepers(), cnxnParams.useSasl());
       }
 
       // Extract the delegation Token from the UGI and add it to the job
       // The AccumuloOutputFormat will look for it there.
       if (cnxnParams.useSasl()) {
-        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-        if (!ugi.hasKerberosCredentials()) {
-          helper.addTokenFromUserToJobConf(ugi, job);
+        UserGroupInformation ugi = getCurrentUser();
+        if (!hasKerberosCredentials(ugi)) {
+          getHelper().addTokenFromUserToJobConf(ugi, job);
         } else {
           // Still in the local JVM, can use Kerberos credentials
           try {
             Connector connector = cnxnParams.getConnector();
-            AuthenticationToken token = helper.getDelegationToken(connector);
+            AuthenticationToken token = getHelper().getDelegationToken(connector);
 
             // Send the DelegationToken down to the Configuration for Accumulo to use
-            setConnectorInfo(job, cnxnParams.getAccumuloUserName(), token);
+            setConnectorInfoWithErrorChecking(job, cnxnParams.getAccumuloUserName(), token);
 
             // Convert the Accumulo token in a Hadoop token
-            Token<? extends TokenIdentifier> accumuloToken = helper.getHadoopToken(token);
+            Token<? extends TokenIdentifier> accumuloToken = getHelper().getHadoopToken(token);
 
             log.info("Adding Hadoop Token for Accumulo to Job's Credentials");
 
             // Add the Hadoop token to the JobConf
-            helper.mergeTokenIntoJobConf(job, accumuloToken);
+            getHelper().mergeTokenIntoJobConf(job, accumuloToken);
 
             // Make sure the UGI contains the token too for good measure
             if (!ugi.addToken(accumuloToken)) {
@@ -111,7 +111,7 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
           }
         }
       } else {
-        setAccumuloConnectorInfo(job, cnxnParams.getAccumuloUserName(),
+        setConnectorInfoWithErrorChecking(job, cnxnParams.getAccumuloUserName(),
             new PasswordToken(cnxnParams.getAccumuloPassword()));
       }
 
@@ -125,7 +125,7 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
 
   // Non-static methods to wrap the static AccumuloOutputFormat methods to enable testing
 
-  protected void setAccumuloConnectorInfo(JobConf conf, String username, AuthenticationToken token)
+  protected void setConnectorInfoWithErrorChecking(JobConf conf, String username, AuthenticationToken token)
       throws AccumuloSecurityException {
     try {
       AccumuloOutputFormat.setConnectorInfo(conf, username, token);
@@ -136,14 +136,14 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
   }
 
   @SuppressWarnings("deprecation")
-  protected void setAccumuloZooKeeperInstance(JobConf conf, String instanceName, String zookeepers,
+  protected void setZooKeeperInstanceWithErrorChecking(JobConf conf, String instanceName, String zookeepers,
       boolean isSasl) throws IOException {
     try {
       if (isSasl) {
         // Reflection to support Accumulo 1.5. Remove when Accumulo 1.5 support is dropped
         // 1.6 works with the deprecated 1.5 method, but must use reflection for 1.7-only
         // SASL support
-        helper.setZooKeeperInstance(conf, AccumuloOutputFormat.class, zookeepers, instanceName,
+        getHelper().setZooKeeperInstance(conf, AccumuloOutputFormat.class, zookeepers, instanceName,
             isSasl);
       } else {
         AccumuloOutputFormat.setZooKeeperInstance(conf, instanceName, zookeepers);
@@ -155,7 +155,7 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
     }
   }
 
-  protected void setAccumuloMockInstance(JobConf conf, String instanceName) {
+  protected void setMockInstanceWithErrorChecking(JobConf conf, String instanceName) {
     try {
       AccumuloOutputFormat.setMockInstance(conf, instanceName);
     } catch (IllegalStateException e) {
@@ -167,4 +167,24 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
   protected void setDefaultAccumuloTableName(JobConf conf, String tableName) {
     AccumuloOutputFormat.setDefaultTableName(conf, tableName);
   }
+
+  HiveAccumuloHelper getHelper() {
+    // Allows mocking in testing.
+    return helper;
+  }
+
+  AccumuloConnectionParameters getConnectionParams(JobConf conf) {
+    // Allows mocking in testing.
+    return new AccumuloConnectionParameters(conf);
+  }
+
+  boolean hasKerberosCredentials(UserGroupInformation ugi) {
+    // Allows mocking in testing.
+    return ugi.hasKerberosCredentials();
+  }
+
+  UserGroupInformation getCurrentUser() throws IOException {
+    // Allows mocking in testing.
+    return UserGroupInformation.getCurrentUser();
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/ee2d3189/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestHiveAccumuloHelper.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestHiveAccumuloHelper.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestHiveAccumuloHelper.java
index 88544f0..406768a 100644
--- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestHiveAccumuloHelper.java
+++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestHiveAccumuloHelper.java
@@ -18,18 +18,23 @@ package org.apache.hadoop.hive.accumulo;
 
 import static org.junit.Assert.assertEquals;
 
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
 import java.util.Collection;
 
+import org.apache.accumulo.core.client.mapred.AccumuloInputFormat;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
+import org.apache.log4j.Logger;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
 
 public class TestHiveAccumuloHelper {
+  private static final Logger log = Logger.getLogger(TestHiveAccumuloHelper.class);
 
   private HiveAccumuloHelper helper;
 
@@ -46,7 +51,13 @@ public class TestHiveAccumuloHelper {
 
     Mockito.when(token.getService()).thenReturn(service);
 
-    helper.mergeTokenIntoJobConf(jobConf, token);
+    try {
+      helper.mergeTokenIntoJobConf(jobConf, token);
+    } catch (IOException e) {
+      // Hadoop 1 doesn't support credential merging, so this will fail.
+      log.info("Ignoring exception, likely coming from Hadoop 1", e);
+      return;
+    }
 
     Collection<Token<?>> tokens = jobConf.getCredentials().getAllTokens();
     assertEquals(1, tokens.size());
@@ -66,10 +77,64 @@ public class TestHiveAccumuloHelper {
     Mockito.when(token.getKind()).thenReturn(HiveAccumuloHelper.ACCUMULO_SERVICE);
     Mockito.when(token.getService()).thenReturn(service);
 
-    helper.addTokenFromUserToJobConf(ugi, jobConf);
+    try {
+      helper.addTokenFromUserToJobConf(ugi, jobConf);
+    } catch (IOException e) {
+      // Hadoop 1 doesn't support credential merging, so this will fail.
+      log.info("Ignoring exception, likely coming from Hadoop 1", e);
+      return;
+    }
 
     Collection<Token<?>> credTokens = jobConf.getCredentials().getAllTokens();
     assertEquals(1, credTokens.size());
     assertEquals(service, credTokens.iterator().next().getService());
   }
+
+  @Test(expected = IllegalStateException.class)
+  public void testISEIsPropagated() throws Exception {
+    final HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
+
+    final JobConf jobConf = Mockito.mock(JobConf.class);
+    final Class<?> inputOrOutputFormatClass = AccumuloInputFormat.class;
+    final String zookeepers = "localhost:2181";
+    final String instanceName = "accumulo_instance";
+    final boolean useSasl = false;
+
+    // Call the real "public" method
+    Mockito.doCallRealMethod().when(helper).setZooKeeperInstance(jobConf, inputOrOutputFormatClass,
+        zookeepers, instanceName, useSasl);
+
+    // Mock the private one to throw the ISE
+    Mockito.doThrow(new IllegalStateException()).when(helper).
+        setZooKeeperInstanceWithReflection(jobConf, inputOrOutputFormatClass, zookeepers,
+            instanceName, useSasl);
+
+    // Should throw an IllegalStateException
+    helper.setZooKeeperInstance(jobConf, inputOrOutputFormatClass, zookeepers, instanceName,
+        useSasl);
+  }
+
+  @Test(expected = IllegalStateException.class)
+  public void testISEIsPropagatedWithReflection() throws Exception {
+    final HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
+
+    final JobConf jobConf = Mockito.mock(JobConf.class);
+    final Class<?> inputOrOutputFormatClass = AccumuloInputFormat.class;
+    final String zookeepers = "localhost:2181";
+    final String instanceName = "accumulo_instance";
+    final boolean useSasl = false;
+
+    // Call the real "public" method
+    Mockito.doCallRealMethod().when(helper).setZooKeeperInstance(jobConf, inputOrOutputFormatClass,
+        zookeepers, instanceName, useSasl);
+
+    // Mock the private one to throw the IAE
+    Mockito.doThrow(new InvocationTargetException(new IllegalStateException())).when(helper).
+        setZooKeeperInstanceWithReflection(jobConf, inputOrOutputFormatClass, zookeepers,
+            instanceName, useSasl);
+
+    // Should throw an IllegalStateException
+    helper.setZooKeeperInstance(jobConf, inputOrOutputFormatClass, zookeepers, instanceName,
+        useSasl);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/ee2d3189/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java
index 5d3f15b..5fdab28 100644
--- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java
+++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java
@@ -18,15 +18,18 @@ package org.apache.hadoop.hive.accumulo.mr;
 
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map.Entry;
 import java.util.Properties;
 
+import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.mock.MockInstance;
+import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
@@ -36,6 +39,7 @@ import org.apache.accumulo.core.security.ColumnVisibility;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.accumulo.AccumuloConnectionParameters;
+import org.apache.hadoop.hive.accumulo.HiveAccumuloHelper;
 import org.apache.hadoop.hive.accumulo.columns.ColumnEncoding;
 import org.apache.hadoop.hive.accumulo.serde.AccumuloRowSerializer;
 import org.apache.hadoop.hive.accumulo.serde.AccumuloSerDe;
@@ -54,6 +58,10 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordWriter;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
@@ -87,6 +95,15 @@ public class TestHiveAccumuloTableOutputFormat {
     conf.set(AccumuloConnectionParameters.INSTANCE_NAME, instanceName);
     conf.set(AccumuloConnectionParameters.ZOOKEEPERS, zookeepers);
     conf.set(AccumuloConnectionParameters.TABLE_NAME, outputTable);
+
+    System.setProperty("java.security.krb5.realm", "accumulo");
+    System.setProperty("java.security.krb5.kdc", "fake");
+  }
+
+  @After
+  public void cleanup() {
+    System.setProperty("java.security.krb5.realm", "");
+    System.setProperty("java.security.krb5.kdc", "");
   }
 
   @Test
@@ -94,12 +111,72 @@ public class TestHiveAccumuloTableOutputFormat {
     HiveAccumuloTableOutputFormat outputFormat = Mockito.mock(HiveAccumuloTableOutputFormat.class);
 
     Mockito.doCallRealMethod().when(outputFormat).configureAccumuloOutputFormat(conf);
+    Mockito.doCallRealMethod().when(outputFormat).getConnectionParams(conf);
+
+    outputFormat.configureAccumuloOutputFormat(conf);
+
+    Mockito.verify(outputFormat).setConnectorInfoWithErrorChecking(conf, user, new PasswordToken(password));
+    Mockito.verify(outputFormat).setZooKeeperInstanceWithErrorChecking(conf, instanceName, zookeepers, false);
+    Mockito.verify(outputFormat).setDefaultAccumuloTableName(conf, outputTable);
+  }
+
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  @Test
+  public void testSaslConfiguration() throws IOException, AccumuloException, AccumuloSecurityException {
+    final HiveAccumuloTableOutputFormat outputFormat = Mockito.mock(HiveAccumuloTableOutputFormat.class);
+    final AuthenticationToken authToken = Mockito.mock(AuthenticationToken.class);
+    final Token hadoopToken = Mockito.mock(Token.class);
+    final HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
+    final AccumuloConnectionParameters cnxnParams = Mockito.mock(AccumuloConnectionParameters.class);
+    final Connector connector = Mockito.mock(Connector.class);
+
+    // Set UGI to use Kerberos
+    // Have to use the string constant to support hadoop 1
+    conf.set("hadoop.security.authentication", "kerberos");
+    UserGroupInformation.setConfiguration(conf);
+
+    // Set the current UGI to a fake user
+    UserGroupInformation user1 = UserGroupInformation.createUserForTesting(user, new String[0]);
+    // Use that as the "current user"
+    Mockito.when(outputFormat.getCurrentUser()).thenReturn(user1);
+
+    // Turn off passwords, enable sasl and set a keytab
+    conf.unset(AccumuloConnectionParameters.USER_PASS);
+
+    // Call the real method instead of the mock
+    Mockito.doCallRealMethod().when(outputFormat).configureAccumuloOutputFormat(conf);
+
+    // Return our mocked objects
+    Mockito.when(outputFormat.getHelper()).thenReturn(helper);
+    Mockito.when(outputFormat.getConnectionParams(conf)).thenReturn(cnxnParams);
+    Mockito.when(cnxnParams.getConnector()).thenReturn(connector);
+    Mockito.when(helper.getDelegationToken(connector)).thenReturn(authToken);
+    Mockito.when(helper.getHadoopToken(authToken)).thenReturn(hadoopToken);
 
+    // Stub AccumuloConnectionParameters actions
+    Mockito.when(cnxnParams.useSasl()).thenReturn(true);
+    Mockito.when(cnxnParams.getAccumuloUserName()).thenReturn(user);
+    Mockito.when(cnxnParams.getAccumuloInstanceName()).thenReturn(instanceName);
+    Mockito.when(cnxnParams.getZooKeepers()).thenReturn(zookeepers);
+
+    // Stub OutputFormat actions
+    Mockito.when(outputFormat.hasKerberosCredentials(user1)).thenReturn(true);
+
+    // Invoke the method
     outputFormat.configureAccumuloOutputFormat(conf);
 
-    Mockito.verify(outputFormat).setAccumuloConnectorInfo(conf, user, new PasswordToken(password));
-    Mockito.verify(outputFormat).setAccumuloZooKeeperInstance(conf, instanceName, zookeepers, false);
+    // The AccumuloInputFormat methods
+    Mockito.verify(outputFormat).setZooKeeperInstanceWithErrorChecking(conf, instanceName, zookeepers, true);
+    Mockito.verify(outputFormat).setConnectorInfoWithErrorChecking(conf, user, authToken);
     Mockito.verify(outputFormat).setDefaultAccumuloTableName(conf, outputTable);
+
+    // Other methods we expect
+    Mockito.verify(helper).mergeTokenIntoJobConf(conf, hadoopToken);
+
+    // Make sure the token made it into the UGI
+    Collection<Token<? extends TokenIdentifier>> tokens = user1.getTokens();
+    Assert.assertEquals(1, tokens.size());
+    Assert.assertEquals(hadoopToken, tokens.iterator().next());
   }
 
   @Test
@@ -109,11 +186,12 @@ public class TestHiveAccumuloTableOutputFormat {
     conf.unset(AccumuloConnectionParameters.ZOOKEEPERS);
 
     Mockito.doCallRealMethod().when(outputFormat).configureAccumuloOutputFormat(conf);
+    Mockito.doCallRealMethod().when(outputFormat).getConnectionParams(conf);
 
     outputFormat.configureAccumuloOutputFormat(conf);
 
-    Mockito.verify(outputFormat).setAccumuloConnectorInfo(conf, user, new PasswordToken(password));
-    Mockito.verify(outputFormat).setAccumuloMockInstance(conf, instanceName);
+    Mockito.verify(outputFormat).setConnectorInfoWithErrorChecking(conf, user, new PasswordToken(password));
+    Mockito.verify(outputFormat).setMockInstanceWithErrorChecking(conf, instanceName);
     Mockito.verify(outputFormat).setDefaultAccumuloTableName(conf, outputTable);
   }
 


[50/55] [abbrv] hive git commit: HIVE-12061 : add file type support to file metadata by expr call (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
index 44aa22e..a82c363 100644
--- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
+++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
@@ -1240,14 +1240,14 @@ uint32_t ThriftHiveMetastore_get_databases_result::read(::apache::thrift::protoc
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size713;
-            ::apache::thrift::protocol::TType _etype716;
-            xfer += iprot->readListBegin(_etype716, _size713);
-            this->success.resize(_size713);
-            uint32_t _i717;
-            for (_i717 = 0; _i717 < _size713; ++_i717)
+            uint32_t _size714;
+            ::apache::thrift::protocol::TType _etype717;
+            xfer += iprot->readListBegin(_etype717, _size714);
+            this->success.resize(_size714);
+            uint32_t _i718;
+            for (_i718 = 0; _i718 < _size714; ++_i718)
             {
-              xfer += iprot->readString(this->success[_i717]);
+              xfer += iprot->readString(this->success[_i718]);
             }
             xfer += iprot->readListEnd();
           }
@@ -1286,10 +1286,10 @@ uint32_t ThriftHiveMetastore_get_databases_result::write(::apache::thrift::proto
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter718;
-      for (_iter718 = this->success.begin(); _iter718 != this->success.end(); ++_iter718)
+      std::vector<std::string> ::const_iterator _iter719;
+      for (_iter719 = this->success.begin(); _iter719 != this->success.end(); ++_iter719)
       {
-        xfer += oprot->writeString((*_iter718));
+        xfer += oprot->writeString((*_iter719));
       }
       xfer += oprot->writeListEnd();
     }
@@ -1334,14 +1334,14 @@ uint32_t ThriftHiveMetastore_get_databases_presult::read(::apache::thrift::proto
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size719;
-            ::apache::thrift::protocol::TType _etype722;
-            xfer += iprot->readListBegin(_etype722, _size719);
-            (*(this->success)).resize(_size719);
-            uint32_t _i723;
-            for (_i723 = 0; _i723 < _size719; ++_i723)
+            uint32_t _size720;
+            ::apache::thrift::protocol::TType _etype723;
+            xfer += iprot->readListBegin(_etype723, _size720);
+            (*(this->success)).resize(_size720);
+            uint32_t _i724;
+            for (_i724 = 0; _i724 < _size720; ++_i724)
             {
-              xfer += iprot->readString((*(this->success))[_i723]);
+              xfer += iprot->readString((*(this->success))[_i724]);
             }
             xfer += iprot->readListEnd();
           }
@@ -1458,14 +1458,14 @@ uint32_t ThriftHiveMetastore_get_all_databases_result::read(::apache::thrift::pr
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size724;
-            ::apache::thrift::protocol::TType _etype727;
-            xfer += iprot->readListBegin(_etype727, _size724);
-            this->success.resize(_size724);
-            uint32_t _i728;
-            for (_i728 = 0; _i728 < _size724; ++_i728)
+            uint32_t _size725;
+            ::apache::thrift::protocol::TType _etype728;
+            xfer += iprot->readListBegin(_etype728, _size725);
+            this->success.resize(_size725);
+            uint32_t _i729;
+            for (_i729 = 0; _i729 < _size725; ++_i729)
             {
-              xfer += iprot->readString(this->success[_i728]);
+              xfer += iprot->readString(this->success[_i729]);
             }
             xfer += iprot->readListEnd();
           }
@@ -1504,10 +1504,10 @@ uint32_t ThriftHiveMetastore_get_all_databases_result::write(::apache::thrift::p
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter729;
-      for (_iter729 = this->success.begin(); _iter729 != this->success.end(); ++_iter729)
+      std::vector<std::string> ::const_iterator _iter730;
+      for (_iter730 = this->success.begin(); _iter730 != this->success.end(); ++_iter730)
       {
-        xfer += oprot->writeString((*_iter729));
+        xfer += oprot->writeString((*_iter730));
       }
       xfer += oprot->writeListEnd();
     }
@@ -1552,14 +1552,14 @@ uint32_t ThriftHiveMetastore_get_all_databases_presult::read(::apache::thrift::p
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size730;
-            ::apache::thrift::protocol::TType _etype733;
-            xfer += iprot->readListBegin(_etype733, _size730);
-            (*(this->success)).resize(_size730);
-            uint32_t _i734;
-            for (_i734 = 0; _i734 < _size730; ++_i734)
+            uint32_t _size731;
+            ::apache::thrift::protocol::TType _etype734;
+            xfer += iprot->readListBegin(_etype734, _size731);
+            (*(this->success)).resize(_size731);
+            uint32_t _i735;
+            for (_i735 = 0; _i735 < _size731; ++_i735)
             {
-              xfer += iprot->readString((*(this->success))[_i734]);
+              xfer += iprot->readString((*(this->success))[_i735]);
             }
             xfer += iprot->readListEnd();
           }
@@ -2621,17 +2621,17 @@ uint32_t ThriftHiveMetastore_get_type_all_result::read(::apache::thrift::protoco
         if (ftype == ::apache::thrift::protocol::T_MAP) {
           {
             this->success.clear();
-            uint32_t _size735;
-            ::apache::thrift::protocol::TType _ktype736;
-            ::apache::thrift::protocol::TType _vtype737;
-            xfer += iprot->readMapBegin(_ktype736, _vtype737, _size735);
-            uint32_t _i739;
-            for (_i739 = 0; _i739 < _size735; ++_i739)
+            uint32_t _size736;
+            ::apache::thrift::protocol::TType _ktype737;
+            ::apache::thrift::protocol::TType _vtype738;
+            xfer += iprot->readMapBegin(_ktype737, _vtype738, _size736);
+            uint32_t _i740;
+            for (_i740 = 0; _i740 < _size736; ++_i740)
             {
-              std::string _key740;
-              xfer += iprot->readString(_key740);
-              Type& _val741 = this->success[_key740];
-              xfer += _val741.read(iprot);
+              std::string _key741;
+              xfer += iprot->readString(_key741);
+              Type& _val742 = this->success[_key741];
+              xfer += _val742.read(iprot);
             }
             xfer += iprot->readMapEnd();
           }
@@ -2670,11 +2670,11 @@ uint32_t ThriftHiveMetastore_get_type_all_result::write(::apache::thrift::protoc
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_MAP, 0);
     {
       xfer += oprot->writeMapBegin(::apache::thrift::protocol::T_STRING, ::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::map<std::string, Type> ::const_iterator _iter742;
-      for (_iter742 = this->success.begin(); _iter742 != this->success.end(); ++_iter742)
+      std::map<std::string, Type> ::const_iterator _iter743;
+      for (_iter743 = this->success.begin(); _iter743 != this->success.end(); ++_iter743)
       {
-        xfer += oprot->writeString(_iter742->first);
-        xfer += _iter742->second.write(oprot);
+        xfer += oprot->writeString(_iter743->first);
+        xfer += _iter743->second.write(oprot);
       }
       xfer += oprot->writeMapEnd();
     }
@@ -2719,17 +2719,17 @@ uint32_t ThriftHiveMetastore_get_type_all_presult::read(::apache::thrift::protoc
         if (ftype == ::apache::thrift::protocol::T_MAP) {
           {
             (*(this->success)).clear();
-            uint32_t _size743;
-            ::apache::thrift::protocol::TType _ktype744;
-            ::apache::thrift::protocol::TType _vtype745;
-            xfer += iprot->readMapBegin(_ktype744, _vtype745, _size743);
-            uint32_t _i747;
-            for (_i747 = 0; _i747 < _size743; ++_i747)
+            uint32_t _size744;
+            ::apache::thrift::protocol::TType _ktype745;
+            ::apache::thrift::protocol::TType _vtype746;
+            xfer += iprot->readMapBegin(_ktype745, _vtype746, _size744);
+            uint32_t _i748;
+            for (_i748 = 0; _i748 < _size744; ++_i748)
             {
-              std::string _key748;
-              xfer += iprot->readString(_key748);
-              Type& _val749 = (*(this->success))[_key748];
-              xfer += _val749.read(iprot);
+              std::string _key749;
+              xfer += iprot->readString(_key749);
+              Type& _val750 = (*(this->success))[_key749];
+              xfer += _val750.read(iprot);
             }
             xfer += iprot->readMapEnd();
           }
@@ -2883,14 +2883,14 @@ uint32_t ThriftHiveMetastore_get_fields_result::read(::apache::thrift::protocol:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size750;
-            ::apache::thrift::protocol::TType _etype753;
-            xfer += iprot->readListBegin(_etype753, _size750);
-            this->success.resize(_size750);
-            uint32_t _i754;
-            for (_i754 = 0; _i754 < _size750; ++_i754)
+            uint32_t _size751;
+            ::apache::thrift::protocol::TType _etype754;
+            xfer += iprot->readListBegin(_etype754, _size751);
+            this->success.resize(_size751);
+            uint32_t _i755;
+            for (_i755 = 0; _i755 < _size751; ++_i755)
             {
-              xfer += this->success[_i754].read(iprot);
+              xfer += this->success[_i755].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2945,10 +2945,10 @@ uint32_t ThriftHiveMetastore_get_fields_result::write(::apache::thrift::protocol
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<FieldSchema> ::const_iterator _iter755;
-      for (_iter755 = this->success.begin(); _iter755 != this->success.end(); ++_iter755)
+      std::vector<FieldSchema> ::const_iterator _iter756;
+      for (_iter756 = this->success.begin(); _iter756 != this->success.end(); ++_iter756)
       {
-        xfer += (*_iter755).write(oprot);
+        xfer += (*_iter756).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -3001,14 +3001,14 @@ uint32_t ThriftHiveMetastore_get_fields_presult::read(::apache::thrift::protocol
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size756;
-            ::apache::thrift::protocol::TType _etype759;
-            xfer += iprot->readListBegin(_etype759, _size756);
-            (*(this->success)).resize(_size756);
-            uint32_t _i760;
-            for (_i760 = 0; _i760 < _size756; ++_i760)
+            uint32_t _size757;
+            ::apache::thrift::protocol::TType _etype760;
+            xfer += iprot->readListBegin(_etype760, _size757);
+            (*(this->success)).resize(_size757);
+            uint32_t _i761;
+            for (_i761 = 0; _i761 < _size757; ++_i761)
             {
-              xfer += (*(this->success))[_i760].read(iprot);
+              xfer += (*(this->success))[_i761].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3194,14 +3194,14 @@ uint32_t ThriftHiveMetastore_get_fields_with_environment_context_result::read(::
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size761;
-            ::apache::thrift::protocol::TType _etype764;
-            xfer += iprot->readListBegin(_etype764, _size761);
-            this->success.resize(_size761);
-            uint32_t _i765;
-            for (_i765 = 0; _i765 < _size761; ++_i765)
+            uint32_t _size762;
+            ::apache::thrift::protocol::TType _etype765;
+            xfer += iprot->readListBegin(_etype765, _size762);
+            this->success.resize(_size762);
+            uint32_t _i766;
+            for (_i766 = 0; _i766 < _size762; ++_i766)
             {
-              xfer += this->success[_i765].read(iprot);
+              xfer += this->success[_i766].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3256,10 +3256,10 @@ uint32_t ThriftHiveMetastore_get_fields_with_environment_context_result::write(:
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<FieldSchema> ::const_iterator _iter766;
-      for (_iter766 = this->success.begin(); _iter766 != this->success.end(); ++_iter766)
+      std::vector<FieldSchema> ::const_iterator _iter767;
+      for (_iter767 = this->success.begin(); _iter767 != this->success.end(); ++_iter767)
       {
-        xfer += (*_iter766).write(oprot);
+        xfer += (*_iter767).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -3312,14 +3312,14 @@ uint32_t ThriftHiveMetastore_get_fields_with_environment_context_presult::read(:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size767;
-            ::apache::thrift::protocol::TType _etype770;
-            xfer += iprot->readListBegin(_etype770, _size767);
-            (*(this->success)).resize(_size767);
-            uint32_t _i771;
-            for (_i771 = 0; _i771 < _size767; ++_i771)
+            uint32_t _size768;
+            ::apache::thrift::protocol::TType _etype771;
+            xfer += iprot->readListBegin(_etype771, _size768);
+            (*(this->success)).resize(_size768);
+            uint32_t _i772;
+            for (_i772 = 0; _i772 < _size768; ++_i772)
             {
-              xfer += (*(this->success))[_i771].read(iprot);
+              xfer += (*(this->success))[_i772].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3489,14 +3489,14 @@ uint32_t ThriftHiveMetastore_get_schema_result::read(::apache::thrift::protocol:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size772;
-            ::apache::thrift::protocol::TType _etype775;
-            xfer += iprot->readListBegin(_etype775, _size772);
-            this->success.resize(_size772);
-            uint32_t _i776;
-            for (_i776 = 0; _i776 < _size772; ++_i776)
+            uint32_t _size773;
+            ::apache::thrift::protocol::TType _etype776;
+            xfer += iprot->readListBegin(_etype776, _size773);
+            this->success.resize(_size773);
+            uint32_t _i777;
+            for (_i777 = 0; _i777 < _size773; ++_i777)
             {
-              xfer += this->success[_i776].read(iprot);
+              xfer += this->success[_i777].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3551,10 +3551,10 @@ uint32_t ThriftHiveMetastore_get_schema_result::write(::apache::thrift::protocol
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<FieldSchema> ::const_iterator _iter777;
-      for (_iter777 = this->success.begin(); _iter777 != this->success.end(); ++_iter777)
+      std::vector<FieldSchema> ::const_iterator _iter778;
+      for (_iter778 = this->success.begin(); _iter778 != this->success.end(); ++_iter778)
       {
-        xfer += (*_iter777).write(oprot);
+        xfer += (*_iter778).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -3607,14 +3607,14 @@ uint32_t ThriftHiveMetastore_get_schema_presult::read(::apache::thrift::protocol
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size778;
-            ::apache::thrift::protocol::TType _etype781;
-            xfer += iprot->readListBegin(_etype781, _size778);
-            (*(this->success)).resize(_size778);
-            uint32_t _i782;
-            for (_i782 = 0; _i782 < _size778; ++_i782)
+            uint32_t _size779;
+            ::apache::thrift::protocol::TType _etype782;
+            xfer += iprot->readListBegin(_etype782, _size779);
+            (*(this->success)).resize(_size779);
+            uint32_t _i783;
+            for (_i783 = 0; _i783 < _size779; ++_i783)
             {
-              xfer += (*(this->success))[_i782].read(iprot);
+              xfer += (*(this->success))[_i783].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3800,14 +3800,14 @@ uint32_t ThriftHiveMetastore_get_schema_with_environment_context_result::read(::
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size783;
-            ::apache::thrift::protocol::TType _etype786;
-            xfer += iprot->readListBegin(_etype786, _size783);
-            this->success.resize(_size783);
-            uint32_t _i787;
-            for (_i787 = 0; _i787 < _size783; ++_i787)
+            uint32_t _size784;
+            ::apache::thrift::protocol::TType _etype787;
+            xfer += iprot->readListBegin(_etype787, _size784);
+            this->success.resize(_size784);
+            uint32_t _i788;
+            for (_i788 = 0; _i788 < _size784; ++_i788)
             {
-              xfer += this->success[_i787].read(iprot);
+              xfer += this->success[_i788].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3862,10 +3862,10 @@ uint32_t ThriftHiveMetastore_get_schema_with_environment_context_result::write(:
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<FieldSchema> ::const_iterator _iter788;
-      for (_iter788 = this->success.begin(); _iter788 != this->success.end(); ++_iter788)
+      std::vector<FieldSchema> ::const_iterator _iter789;
+      for (_iter789 = this->success.begin(); _iter789 != this->success.end(); ++_iter789)
       {
-        xfer += (*_iter788).write(oprot);
+        xfer += (*_iter789).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -3918,14 +3918,14 @@ uint32_t ThriftHiveMetastore_get_schema_with_environment_context_presult::read(:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size789;
-            ::apache::thrift::protocol::TType _etype792;
-            xfer += iprot->readListBegin(_etype792, _size789);
-            (*(this->success)).resize(_size789);
-            uint32_t _i793;
-            for (_i793 = 0; _i793 < _size789; ++_i793)
+            uint32_t _size790;
+            ::apache::thrift::protocol::TType _etype793;
+            xfer += iprot->readListBegin(_etype793, _size790);
+            (*(this->success)).resize(_size790);
+            uint32_t _i794;
+            for (_i794 = 0; _i794 < _size790; ++_i794)
             {
-              xfer += (*(this->success))[_i793].read(iprot);
+              xfer += (*(this->success))[_i794].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -5099,14 +5099,14 @@ uint32_t ThriftHiveMetastore_get_tables_result::read(::apache::thrift::protocol:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size794;
-            ::apache::thrift::protocol::TType _etype797;
-            xfer += iprot->readListBegin(_etype797, _size794);
-            this->success.resize(_size794);
-            uint32_t _i798;
-            for (_i798 = 0; _i798 < _size794; ++_i798)
+            uint32_t _size795;
+            ::apache::thrift::protocol::TType _etype798;
+            xfer += iprot->readListBegin(_etype798, _size795);
+            this->success.resize(_size795);
+            uint32_t _i799;
+            for (_i799 = 0; _i799 < _size795; ++_i799)
             {
-              xfer += iprot->readString(this->success[_i798]);
+              xfer += iprot->readString(this->success[_i799]);
             }
             xfer += iprot->readListEnd();
           }
@@ -5145,10 +5145,10 @@ uint32_t ThriftHiveMetastore_get_tables_result::write(::apache::thrift::protocol
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter799;
-      for (_iter799 = this->success.begin(); _iter799 != this->success.end(); ++_iter799)
+      std::vector<std::string> ::const_iterator _iter800;
+      for (_iter800 = this->success.begin(); _iter800 != this->success.end(); ++_iter800)
       {
-        xfer += oprot->writeString((*_iter799));
+        xfer += oprot->writeString((*_iter800));
       }
       xfer += oprot->writeListEnd();
     }
@@ -5193,14 +5193,14 @@ uint32_t ThriftHiveMetastore_get_tables_presult::read(::apache::thrift::protocol
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size800;
-            ::apache::thrift::protocol::TType _etype803;
-            xfer += iprot->readListBegin(_etype803, _size800);
-            (*(this->success)).resize(_size800);
-            uint32_t _i804;
-            for (_i804 = 0; _i804 < _size800; ++_i804)
+            uint32_t _size801;
+            ::apache::thrift::protocol::TType _etype804;
+            xfer += iprot->readListBegin(_etype804, _size801);
+            (*(this->success)).resize(_size801);
+            uint32_t _i805;
+            for (_i805 = 0; _i805 < _size801; ++_i805)
             {
-              xfer += iprot->readString((*(this->success))[_i804]);
+              xfer += iprot->readString((*(this->success))[_i805]);
             }
             xfer += iprot->readListEnd();
           }
@@ -5338,14 +5338,14 @@ uint32_t ThriftHiveMetastore_get_all_tables_result::read(::apache::thrift::proto
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size805;
-            ::apache::thrift::protocol::TType _etype808;
-            xfer += iprot->readListBegin(_etype808, _size805);
-            this->success.resize(_size805);
-            uint32_t _i809;
-            for (_i809 = 0; _i809 < _size805; ++_i809)
+            uint32_t _size806;
+            ::apache::thrift::protocol::TType _etype809;
+            xfer += iprot->readListBegin(_etype809, _size806);
+            this->success.resize(_size806);
+            uint32_t _i810;
+            for (_i810 = 0; _i810 < _size806; ++_i810)
             {
-              xfer += iprot->readString(this->success[_i809]);
+              xfer += iprot->readString(this->success[_i810]);
             }
             xfer += iprot->readListEnd();
           }
@@ -5384,10 +5384,10 @@ uint32_t ThriftHiveMetastore_get_all_tables_result::write(::apache::thrift::prot
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter810;
-      for (_iter810 = this->success.begin(); _iter810 != this->success.end(); ++_iter810)
+      std::vector<std::string> ::const_iterator _iter811;
+      for (_iter811 = this->success.begin(); _iter811 != this->success.end(); ++_iter811)
       {
-        xfer += oprot->writeString((*_iter810));
+        xfer += oprot->writeString((*_iter811));
       }
       xfer += oprot->writeListEnd();
     }
@@ -5432,14 +5432,14 @@ uint32_t ThriftHiveMetastore_get_all_tables_presult::read(::apache::thrift::prot
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size811;
-            ::apache::thrift::protocol::TType _etype814;
-            xfer += iprot->readListBegin(_etype814, _size811);
-            (*(this->success)).resize(_size811);
-            uint32_t _i815;
-            for (_i815 = 0; _i815 < _size811; ++_i815)
+            uint32_t _size812;
+            ::apache::thrift::protocol::TType _etype815;
+            xfer += iprot->readListBegin(_etype815, _size812);
+            (*(this->success)).resize(_size812);
+            uint32_t _i816;
+            for (_i816 = 0; _i816 < _size812; ++_i816)
             {
-              xfer += iprot->readString((*(this->success))[_i815]);
+              xfer += iprot->readString((*(this->success))[_i816]);
             }
             xfer += iprot->readListEnd();
           }
@@ -5749,14 +5749,14 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_args::read(::apache::thri
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->tbl_names.clear();
-            uint32_t _size816;
-            ::apache::thrift::protocol::TType _etype819;
-            xfer += iprot->readListBegin(_etype819, _size816);
-            this->tbl_names.resize(_size816);
-            uint32_t _i820;
-            for (_i820 = 0; _i820 < _size816; ++_i820)
+            uint32_t _size817;
+            ::apache::thrift::protocol::TType _etype820;
+            xfer += iprot->readListBegin(_etype820, _size817);
+            this->tbl_names.resize(_size817);
+            uint32_t _i821;
+            for (_i821 = 0; _i821 < _size817; ++_i821)
             {
-              xfer += iprot->readString(this->tbl_names[_i820]);
+              xfer += iprot->readString(this->tbl_names[_i821]);
             }
             xfer += iprot->readListEnd();
           }
@@ -5789,10 +5789,10 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_args::write(::apache::thr
   xfer += oprot->writeFieldBegin("tbl_names", ::apache::thrift::protocol::T_LIST, 2);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->tbl_names.size()));
-    std::vector<std::string> ::const_iterator _iter821;
-    for (_iter821 = this->tbl_names.begin(); _iter821 != this->tbl_names.end(); ++_iter821)
+    std::vector<std::string> ::const_iterator _iter822;
+    for (_iter822 = this->tbl_names.begin(); _iter822 != this->tbl_names.end(); ++_iter822)
     {
-      xfer += oprot->writeString((*_iter821));
+      xfer += oprot->writeString((*_iter822));
     }
     xfer += oprot->writeListEnd();
   }
@@ -5820,10 +5820,10 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_pargs::write(::apache::th
   xfer += oprot->writeFieldBegin("tbl_names", ::apache::thrift::protocol::T_LIST, 2);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->tbl_names)).size()));
-    std::vector<std::string> ::const_iterator _iter822;
-    for (_iter822 = (*(this->tbl_names)).begin(); _iter822 != (*(this->tbl_names)).end(); ++_iter822)
+    std::vector<std::string> ::const_iterator _iter823;
+    for (_iter823 = (*(this->tbl_names)).begin(); _iter823 != (*(this->tbl_names)).end(); ++_iter823)
     {
-      xfer += oprot->writeString((*_iter822));
+      xfer += oprot->writeString((*_iter823));
     }
     xfer += oprot->writeListEnd();
   }
@@ -5864,14 +5864,14 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_result::read(::apache::th
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size823;
-            ::apache::thrift::protocol::TType _etype826;
-            xfer += iprot->readListBegin(_etype826, _size823);
-            this->success.resize(_size823);
-            uint32_t _i827;
-            for (_i827 = 0; _i827 < _size823; ++_i827)
+            uint32_t _size824;
+            ::apache::thrift::protocol::TType _etype827;
+            xfer += iprot->readListBegin(_etype827, _size824);
+            this->success.resize(_size824);
+            uint32_t _i828;
+            for (_i828 = 0; _i828 < _size824; ++_i828)
             {
-              xfer += this->success[_i827].read(iprot);
+              xfer += this->success[_i828].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -5926,10 +5926,10 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_result::write(::apache::t
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<Table> ::const_iterator _iter828;
-      for (_iter828 = this->success.begin(); _iter828 != this->success.end(); ++_iter828)
+      std::vector<Table> ::const_iterator _iter829;
+      for (_iter829 = this->success.begin(); _iter829 != this->success.end(); ++_iter829)
       {
-        xfer += (*_iter828).write(oprot);
+        xfer += (*_iter829).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -5982,14 +5982,14 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_presult::read(::apache::t
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size829;
-            ::apache::thrift::protocol::TType _etype832;
-            xfer += iprot->readListBegin(_etype832, _size829);
-            (*(this->success)).resize(_size829);
-            uint32_t _i833;
-            for (_i833 = 0; _i833 < _size829; ++_i833)
+            uint32_t _size830;
+            ::apache::thrift::protocol::TType _etype833;
+            xfer += iprot->readListBegin(_etype833, _size830);
+            (*(this->success)).resize(_size830);
+            uint32_t _i834;
+            for (_i834 = 0; _i834 < _size830; ++_i834)
             {
-              xfer += (*(this->success))[_i833].read(iprot);
+              xfer += (*(this->success))[_i834].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -6175,14 +6175,14 @@ uint32_t ThriftHiveMetastore_get_table_names_by_filter_result::read(::apache::th
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size834;
-            ::apache::thrift::protocol::TType _etype837;
-            xfer += iprot->readListBegin(_etype837, _size834);
-            this->success.resize(_size834);
-            uint32_t _i838;
-            for (_i838 = 0; _i838 < _size834; ++_i838)
+            uint32_t _size835;
+            ::apache::thrift::protocol::TType _etype838;
+            xfer += iprot->readListBegin(_etype838, _size835);
+            this->success.resize(_size835);
+            uint32_t _i839;
+            for (_i839 = 0; _i839 < _size835; ++_i839)
             {
-              xfer += iprot->readString(this->success[_i838]);
+              xfer += iprot->readString(this->success[_i839]);
             }
             xfer += iprot->readListEnd();
           }
@@ -6237,10 +6237,10 @@ uint32_t ThriftHiveMetastore_get_table_names_by_filter_result::write(::apache::t
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter839;
-      for (_iter839 = this->success.begin(); _iter839 != this->success.end(); ++_iter839)
+      std::vector<std::string> ::const_iterator _iter840;
+      for (_iter840 = this->success.begin(); _iter840 != this->success.end(); ++_iter840)
       {
-        xfer += oprot->writeString((*_iter839));
+        xfer += oprot->writeString((*_iter840));
       }
       xfer += oprot->writeListEnd();
     }
@@ -6293,14 +6293,14 @@ uint32_t ThriftHiveMetastore_get_table_names_by_filter_presult::read(::apache::t
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size840;
-            ::apache::thrift::protocol::TType _etype843;
-            xfer += iprot->readListBegin(_etype843, _size840);
-            (*(this->success)).resize(_size840);
-            uint32_t _i844;
-            for (_i844 = 0; _i844 < _size840; ++_i844)
+            uint32_t _size841;
+            ::apache::thrift::protocol::TType _etype844;
+            xfer += iprot->readListBegin(_etype844, _size841);
+            (*(this->success)).resize(_size841);
+            uint32_t _i845;
+            for (_i845 = 0; _i845 < _size841; ++_i845)
             {
-              xfer += iprot->readString((*(this->success))[_i844]);
+              xfer += iprot->readString((*(this->success))[_i845]);
             }
             xfer += iprot->readListEnd();
           }
@@ -7634,14 +7634,14 @@ uint32_t ThriftHiveMetastore_add_partitions_args::read(::apache::thrift::protoco
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->new_parts.clear();
-            uint32_t _size845;
-            ::apache::thrift::protocol::TType _etype848;
-            xfer += iprot->readListBegin(_etype848, _size845);
-            this->new_parts.resize(_size845);
-            uint32_t _i849;
-            for (_i849 = 0; _i849 < _size845; ++_i849)
+            uint32_t _size846;
+            ::apache::thrift::protocol::TType _etype849;
+            xfer += iprot->readListBegin(_etype849, _size846);
+            this->new_parts.resize(_size846);
+            uint32_t _i850;
+            for (_i850 = 0; _i850 < _size846; ++_i850)
             {
-              xfer += this->new_parts[_i849].read(iprot);
+              xfer += this->new_parts[_i850].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -7670,10 +7670,10 @@ uint32_t ThriftHiveMetastore_add_partitions_args::write(::apache::thrift::protoc
   xfer += oprot->writeFieldBegin("new_parts", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->new_parts.size()));
-    std::vector<Partition> ::const_iterator _iter850;
-    for (_iter850 = this->new_parts.begin(); _iter850 != this->new_parts.end(); ++_iter850)
+    std::vector<Partition> ::const_iterator _iter851;
+    for (_iter851 = this->new_parts.begin(); _iter851 != this->new_parts.end(); ++_iter851)
     {
-      xfer += (*_iter850).write(oprot);
+      xfer += (*_iter851).write(oprot);
     }
     xfer += oprot->writeListEnd();
   }
@@ -7697,10 +7697,10 @@ uint32_t ThriftHiveMetastore_add_partitions_pargs::write(::apache::thrift::proto
   xfer += oprot->writeFieldBegin("new_parts", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>((*(this->new_parts)).size()));
-    std::vector<Partition> ::const_iterator _iter851;
-    for (_iter851 = (*(this->new_parts)).begin(); _iter851 != (*(this->new_parts)).end(); ++_iter851)
+    std::vector<Partition> ::const_iterator _iter852;
+    for (_iter852 = (*(this->new_parts)).begin(); _iter852 != (*(this->new_parts)).end(); ++_iter852)
     {
-      xfer += (*_iter851).write(oprot);
+      xfer += (*_iter852).write(oprot);
     }
     xfer += oprot->writeListEnd();
   }
@@ -7909,14 +7909,14 @@ uint32_t ThriftHiveMetastore_add_partitions_pspec_args::read(::apache::thrift::p
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->new_parts.clear();
-            uint32_t _size852;
-            ::apache::thrift::protocol::TType _etype855;
-            xfer += iprot->readListBegin(_etype855, _size852);
-            this->new_parts.resize(_size852);
-            uint32_t _i856;
-            for (_i856 = 0; _i856 < _size852; ++_i856)
+            uint32_t _size853;
+            ::apache::thrift::protocol::TType _etype856;
+            xfer += iprot->readListBegin(_etype856, _size853);
+            this->new_parts.resize(_size853);
+            uint32_t _i857;
+            for (_i857 = 0; _i857 < _size853; ++_i857)
             {
-              xfer += this->new_parts[_i856].read(iprot);
+              xfer += this->new_parts[_i857].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -7945,10 +7945,10 @@ uint32_t ThriftHiveMetastore_add_partitions_pspec_args::write(::apache::thrift::
   xfer += oprot->writeFieldBegin("new_parts", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->new_parts.size()));
-    std::vector<PartitionSpec> ::const_iterator _iter857;
-    for (_iter857 = this->new_parts.begin(); _iter857 != this->new_parts.end(); ++_iter857)
+    std::vector<PartitionSpec> ::const_iterator _iter858;
+    for (_iter858 = this->new_parts.begin(); _iter858 != this->new_parts.end(); ++_iter858)
     {
-      xfer += (*_iter857).write(oprot);
+      xfer += (*_iter858).write(oprot);
     }
     xfer += oprot->writeListEnd();
   }
@@ -7972,10 +7972,10 @@ uint32_t ThriftHiveMetastore_add_partitions_pspec_pargs::write(::apache::thrift:
   xfer += oprot->writeFieldBegin("new_parts", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>((*(this->new_parts)).size()));
-    std::vector<PartitionSpec> ::const_iterator _iter858;
-    for (_iter858 = (*(this->new_parts)).begin(); _iter858 != (*(this->new_parts)).end(); ++_iter858)
+    std::vector<PartitionSpec> ::const_iterator _iter859;
+    for (_iter859 = (*(this->new_parts)).begin(); _iter859 != (*(this->new_parts)).end(); ++_iter859)
     {
-      xfer += (*_iter858).write(oprot);
+      xfer += (*_iter859).write(oprot);
     }
     xfer += oprot->writeListEnd();
   }
@@ -8200,14 +8200,14 @@ uint32_t ThriftHiveMetastore_append_partition_args::read(::apache::thrift::proto
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size859;
-            ::apache::thrift::protocol::TType _etype862;
-            xfer += iprot->readListBegin(_etype862, _size859);
-            this->part_vals.resize(_size859);
-            uint32_t _i863;
-            for (_i863 = 0; _i863 < _size859; ++_i863)
+            uint32_t _size860;
+            ::apache::thrift::protocol::TType _etype863;
+            xfer += iprot->readListBegin(_etype863, _size860);
+            this->part_vals.resize(_size860);
+            uint32_t _i864;
+            for (_i864 = 0; _i864 < _size860; ++_i864)
             {
-              xfer += iprot->readString(this->part_vals[_i863]);
+              xfer += iprot->readString(this->part_vals[_i864]);
             }
             xfer += iprot->readListEnd();
           }
@@ -8244,10 +8244,10 @@ uint32_t ThriftHiveMetastore_append_partition_args::write(::apache::thrift::prot
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter864;
-    for (_iter864 = this->part_vals.begin(); _iter864 != this->part_vals.end(); ++_iter864)
+    std::vector<std::string> ::const_iterator _iter865;
+    for (_iter865 = this->part_vals.begin(); _iter865 != this->part_vals.end(); ++_iter865)
     {
-      xfer += oprot->writeString((*_iter864));
+      xfer += oprot->writeString((*_iter865));
     }
     xfer += oprot->writeListEnd();
   }
@@ -8279,10 +8279,10 @@ uint32_t ThriftHiveMetastore_append_partition_pargs::write(::apache::thrift::pro
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter865;
-    for (_iter865 = (*(this->part_vals)).begin(); _iter865 != (*(this->part_vals)).end(); ++_iter865)
+    std::vector<std::string> ::const_iterator _iter866;
+    for (_iter866 = (*(this->part_vals)).begin(); _iter866 != (*(this->part_vals)).end(); ++_iter866)
     {
-      xfer += oprot->writeString((*_iter865));
+      xfer += oprot->writeString((*_iter866));
     }
     xfer += oprot->writeListEnd();
   }
@@ -8754,14 +8754,14 @@ uint32_t ThriftHiveMetastore_append_partition_with_environment_context_args::rea
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size866;
-            ::apache::thrift::protocol::TType _etype869;
-            xfer += iprot->readListBegin(_etype869, _size866);
-            this->part_vals.resize(_size866);
-            uint32_t _i870;
-            for (_i870 = 0; _i870 < _size866; ++_i870)
+            uint32_t _size867;
+            ::apache::thrift::protocol::TType _etype870;
+            xfer += iprot->readListBegin(_etype870, _size867);
+            this->part_vals.resize(_size867);
+            uint32_t _i871;
+            for (_i871 = 0; _i871 < _size867; ++_i871)
             {
-              xfer += iprot->readString(this->part_vals[_i870]);
+              xfer += iprot->readString(this->part_vals[_i871]);
             }
             xfer += iprot->readListEnd();
           }
@@ -8806,10 +8806,10 @@ uint32_t ThriftHiveMetastore_append_partition_with_environment_context_args::wri
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter871;
-    for (_iter871 = this->part_vals.begin(); _iter871 != this->part_vals.end(); ++_iter871)
+    std::vector<std::string> ::const_iterator _iter872;
+    for (_iter872 = this->part_vals.begin(); _iter872 != this->part_vals.end(); ++_iter872)
     {
-      xfer += oprot->writeString((*_iter871));
+      xfer += oprot->writeString((*_iter872));
     }
     xfer += oprot->writeListEnd();
   }
@@ -8845,10 +8845,10 @@ uint32_t ThriftHiveMetastore_append_partition_with_environment_context_pargs::wr
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter872;
-    for (_iter872 = (*(this->part_vals)).begin(); _iter872 != (*(this->part_vals)).end(); ++_iter872)
+    std::vector<std::string> ::const_iterator _iter873;
+    for (_iter873 = (*(this->part_vals)).begin(); _iter873 != (*(this->part_vals)).end(); ++_iter873)
     {
-      xfer += oprot->writeString((*_iter872));
+      xfer += oprot->writeString((*_iter873));
     }
     xfer += oprot->writeListEnd();
   }
@@ -9651,14 +9651,14 @@ uint32_t ThriftHiveMetastore_drop_partition_args::read(::apache::thrift::protoco
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size873;
-            ::apache::thrift::protocol::TType _etype876;
-            xfer += iprot->readListBegin(_etype876, _size873);
-            this->part_vals.resize(_size873);
-            uint32_t _i877;
-            for (_i877 = 0; _i877 < _size873; ++_i877)
+            uint32_t _size874;
+            ::apache::thrift::protocol::TType _etype877;
+            xfer += iprot->readListBegin(_etype877, _size874);
+            this->part_vals.resize(_size874);
+            uint32_t _i878;
+            for (_i878 = 0; _i878 < _size874; ++_i878)
             {
-              xfer += iprot->readString(this->part_vals[_i877]);
+              xfer += iprot->readString(this->part_vals[_i878]);
             }
             xfer += iprot->readListEnd();
           }
@@ -9703,10 +9703,10 @@ uint32_t ThriftHiveMetastore_drop_partition_args::write(::apache::thrift::protoc
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter878;
-    for (_iter878 = this->part_vals.begin(); _iter878 != this->part_vals.end(); ++_iter878)
+    std::vector<std::string> ::const_iterator _iter879;
+    for (_iter879 = this->part_vals.begin(); _iter879 != this->part_vals.end(); ++_iter879)
     {
-      xfer += oprot->writeString((*_iter878));
+      xfer += oprot->writeString((*_iter879));
     }
     xfer += oprot->writeListEnd();
   }
@@ -9742,10 +9742,10 @@ uint32_t ThriftHiveMetastore_drop_partition_pargs::write(::apache::thrift::proto
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter879;
-    for (_iter879 = (*(this->part_vals)).begin(); _iter879 != (*(this->part_vals)).end(); ++_iter879)
+    std::vector<std::string> ::const_iterator _iter880;
+    for (_iter880 = (*(this->part_vals)).begin(); _iter880 != (*(this->part_vals)).end(); ++_iter880)
     {
-      xfer += oprot->writeString((*_iter879));
+      xfer += oprot->writeString((*_iter880));
     }
     xfer += oprot->writeListEnd();
   }
@@ -9954,14 +9954,14 @@ uint32_t ThriftHiveMetastore_drop_partition_with_environment_context_args::read(
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size880;
-            ::apache::thrift::protocol::TType _etype883;
-            xfer += iprot->readListBegin(_etype883, _size880);
-            this->part_vals.resize(_size880);
-            uint32_t _i884;
-            for (_i884 = 0; _i884 < _size880; ++_i884)
+            uint32_t _size881;
+            ::apache::thrift::protocol::TType _etype884;
+            xfer += iprot->readListBegin(_etype884, _size881);
+            this->part_vals.resize(_size881);
+            uint32_t _i885;
+            for (_i885 = 0; _i885 < _size881; ++_i885)
             {
-              xfer += iprot->readString(this->part_vals[_i884]);
+              xfer += iprot->readString(this->part_vals[_i885]);
             }
             xfer += iprot->readListEnd();
           }
@@ -10014,10 +10014,10 @@ uint32_t ThriftHiveMetastore_drop_partition_with_environment_context_args::write
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter885;
-    for (_iter885 = this->part_vals.begin(); _iter885 != this->part_vals.end(); ++_iter885)
+    std::vector<std::string> ::const_iterator _iter886;
+    for (_iter886 = this->part_vals.begin(); _iter886 != this->part_vals.end(); ++_iter886)
     {
-      xfer += oprot->writeString((*_iter885));
+      xfer += oprot->writeString((*_iter886));
     }
     xfer += oprot->writeListEnd();
   }
@@ -10057,10 +10057,10 @@ uint32_t ThriftHiveMetastore_drop_partition_with_environment_context_pargs::writ
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter886;
-    for (_iter886 = (*(this->part_vals)).begin(); _iter886 != (*(this->part_vals)).end(); ++_iter886)
+    std::vector<std::string> ::const_iterator _iter887;
+    for (_iter887 = (*(this->part_vals)).begin(); _iter887 != (*(this->part_vals)).end(); ++_iter887)
     {
-      xfer += oprot->writeString((*_iter886));
+      xfer += oprot->writeString((*_iter887));
     }
     xfer += oprot->writeListEnd();
   }
@@ -11066,14 +11066,14 @@ uint32_t ThriftHiveMetastore_get_partition_args::read(::apache::thrift::protocol
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size887;
-            ::apache::thrift::protocol::TType _etype890;
-            xfer += iprot->readListBegin(_etype890, _size887);
-            this->part_vals.resize(_size887);
-            uint32_t _i891;
-            for (_i891 = 0; _i891 < _size887; ++_i891)
+            uint32_t _size888;
+            ::apache::thrift::protocol::TType _etype891;
+            xfer += iprot->readListBegin(_etype891, _size888);
+            this->part_vals.resize(_size888);
+            uint32_t _i892;
+            for (_i892 = 0; _i892 < _size888; ++_i892)
             {
-              xfer += iprot->readString(this->part_vals[_i891]);
+              xfer += iprot->readString(this->part_vals[_i892]);
             }
             xfer += iprot->readListEnd();
           }
@@ -11110,10 +11110,10 @@ uint32_t ThriftHiveMetastore_get_partition_args::write(::apache::thrift::protoco
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter892;
-    for (_iter892 = this->part_vals.begin(); _iter892 != this->part_vals.end(); ++_iter892)
+    std::vector<std::string> ::const_iterator _iter893;
+    for (_iter893 = this->part_vals.begin(); _iter893 != this->part_vals.end(); ++_iter893)
     {
-      xfer += oprot->writeString((*_iter892));
+      xfer += oprot->writeString((*_iter893));
     }
     xfer += oprot->writeListEnd();
   }
@@ -11145,10 +11145,10 @@ uint32_t ThriftHiveMetastore_get_partition_pargs::write(::apache::thrift::protoc
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter893;
-    for (_iter893 = (*(this->part_vals)).begin(); _iter893 != (*(this->part_vals)).end(); ++_iter893)
+    std::vector<std::string> ::const_iterator _iter894;
+    for (_iter894 = (*(this->part_vals)).begin(); _iter894 != (*(this->part_vals)).end(); ++_iter894)
     {
-      xfer += oprot->writeString((*_iter893));
+      xfer += oprot->writeString((*_iter894));
     }
     xfer += oprot->writeListEnd();
   }
@@ -11337,17 +11337,17 @@ uint32_t ThriftHiveMetastore_exchange_partition_args::read(::apache::thrift::pro
         if (ftype == ::apache::thrift::protocol::T_MAP) {
           {
             this->partitionSpecs.clear();
-            uint32_t _size894;
-            ::apache::thrift::protocol::TType _ktype895;
-            ::apache::thrift::protocol::TType _vtype896;
-            xfer += iprot->readMapBegin(_ktype895, _vtype896, _size894);
-            uint32_t _i898;
-            for (_i898 = 0; _i898 < _size894; ++_i898)
+            uint32_t _size895;
+            ::apache::thrift::protocol::TType _ktype896;
+            ::apache::thrift::protocol::TType _vtype897;
+            xfer += iprot->readMapBegin(_ktype896, _vtype897, _size895);
+            uint32_t _i899;
+            for (_i899 = 0; _i899 < _size895; ++_i899)
             {
-              std::string _key899;
-              xfer += iprot->readString(_key899);
-              std::string& _val900 = this->partitionSpecs[_key899];
-              xfer += iprot->readString(_val900);
+              std::string _key900;
+              xfer += iprot->readString(_key900);
+              std::string& _val901 = this->partitionSpecs[_key900];
+              xfer += iprot->readString(_val901);
             }
             xfer += iprot->readMapEnd();
           }
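
(The map-typed field in this range, partitionSpecs in exchange_partition, gets the analogous treatment: key/value types and the entry count are read first, then each pair is inserted by key. A minimal sketch of that shape, again over an illustrative mock protocol rather than the real TProtocol, only to make the renumbered _ktypeNNN / _vtypeNNN / _keyNNN / _valNNN temporaries above easier to follow.)

#include <cstdint>
#include <map>
#include <string>

// Illustrative stand-in for the Thrift protocol object (assumption, for a
// self-contained sketch only).
struct MockProtocol {
  uint32_t readMapBegin(int& ktype, int& vtype, uint32_t& size) { ktype = 11; vtype = 11; size = 1; return 1; }
  uint32_t readString(std::string& s) { s = "part"; return 1; }
  uint32_t readMapEnd() { return 1; }
};

// Mirrors the generated partitionSpecs loop above: read the key, then read
// the value directly into the map slot for that key.
uint32_t readStringMap(MockProtocol* iprot, std::map<std::string, std::string>& out) {
  uint32_t xfer = 0;
  out.clear();
  uint32_t size = 0;
  int ktype = 0, vtype = 0;
  xfer += iprot->readMapBegin(ktype, vtype, size);
  for (uint32_t i = 0; i < size; ++i) {
    std::string key;
    xfer += iprot->readString(key);
    std::string& val = out[key];
    xfer += iprot->readString(val);
  }
  xfer += iprot->readMapEnd();
  return xfer;
}
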
@@ -11408,11 +11408,11 @@ uint32_t ThriftHiveMetastore_exchange_partition_args::write(::apache::thrift::pr
   xfer += oprot->writeFieldBegin("partitionSpecs", ::apache::thrift::protocol::T_MAP, 1);
   {
     xfer += oprot->writeMapBegin(::apache::thrift::protocol::T_STRING, ::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->partitionSpecs.size()));
-    std::map<std::string, std::string> ::const_iterator _iter901;
-    for (_iter901 = this->partitionSpecs.begin(); _iter901 != this->partitionSpecs.end(); ++_iter901)
+    std::map<std::string, std::string> ::const_iterator _iter902;
+    for (_iter902 = this->partitionSpecs.begin(); _iter902 != this->partitionSpecs.end(); ++_iter902)
     {
-      xfer += oprot->writeString(_iter901->first);
-      xfer += oprot->writeString(_iter901->second);
+      xfer += oprot->writeString(_iter902->first);
+      xfer += oprot->writeString(_iter902->second);
     }
     xfer += oprot->writeMapEnd();
   }
@@ -11452,11 +11452,11 @@ uint32_t ThriftHiveMetastore_exchange_partition_pargs::write(::apache::thrift::p
   xfer += oprot->writeFieldBegin("partitionSpecs", ::apache::thrift::protocol::T_MAP, 1);
   {
     xfer += oprot->writeMapBegin(::apache::thrift::protocol::T_STRING, ::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->partitionSpecs)).size()));
-    std::map<std::string, std::string> ::const_iterator _iter902;
-    for (_iter902 = (*(this->partitionSpecs)).begin(); _iter902 != (*(this->partitionSpecs)).end(); ++_iter902)
+    std::map<std::string, std::string> ::const_iterator _iter903;
+    for (_iter903 = (*(this->partitionSpecs)).begin(); _iter903 != (*(this->partitionSpecs)).end(); ++_iter903)
     {
-      xfer += oprot->writeString(_iter902->first);
-      xfer += oprot->writeString(_iter902->second);
+      xfer += oprot->writeString(_iter903->first);
+      xfer += oprot->writeString(_iter903->second);
     }
     xfer += oprot->writeMapEnd();
   }
@@ -11717,14 +11717,14 @@ uint32_t ThriftHiveMetastore_get_partition_with_auth_args::read(::apache::thrift
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size903;
-            ::apache::thrift::protocol::TType _etype906;
-            xfer += iprot->readListBegin(_etype906, _size903);
-            this->part_vals.resize(_size903);
-            uint32_t _i907;
-            for (_i907 = 0; _i907 < _size903; ++_i907)
+            uint32_t _size904;
+            ::apache::thrift::protocol::TType _etype907;
+            xfer += iprot->readListBegin(_etype907, _size904);
+            this->part_vals.resize(_size904);
+            uint32_t _i908;
+            for (_i908 = 0; _i908 < _size904; ++_i908)
             {
-              xfer += iprot->readString(this->part_vals[_i907]);
+              xfer += iprot->readString(this->part_vals[_i908]);
             }
             xfer += iprot->readListEnd();
           }
@@ -11745,14 +11745,14 @@ uint32_t ThriftHiveMetastore_get_partition_with_auth_args::read(::apache::thrift
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->group_names.clear();
-            uint32_t _size908;
-            ::apache::thrift::protocol::TType _etype911;
-            xfer += iprot->readListBegin(_etype911, _size908);
-            this->group_names.resize(_size908);
-            uint32_t _i912;
-            for (_i912 = 0; _i912 < _size908; ++_i912)
+            uint32_t _size909;
+            ::apache::thrift::protocol::TType _etype912;
+            xfer += iprot->readListBegin(_etype912, _size909);
+            this->group_names.resize(_size909);
+            uint32_t _i913;
+            for (_i913 = 0; _i913 < _size909; ++_i913)
             {
-              xfer += iprot->readString(this->group_names[_i912]);
+              xfer += iprot->readString(this->group_names[_i913]);
             }
             xfer += iprot->readListEnd();
           }
@@ -11789,10 +11789,10 @@ uint32_t ThriftHiveMetastore_get_partition_with_auth_args::write(::apache::thrif
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter913;
-    for (_iter913 = this->part_vals.begin(); _iter913 != this->part_vals.end(); ++_iter913)
+    std::vector<std::string> ::const_iterator _iter914;
+    for (_iter914 = this->part_vals.begin(); _iter914 != this->part_vals.end(); ++_iter914)
     {
-      xfer += oprot->writeString((*_iter913));
+      xfer += oprot->writeString((*_iter914));
     }
     xfer += oprot->writeListEnd();
   }
@@ -11805,10 +11805,10 @@ uint32_t ThriftHiveMetastore_get_partition_with_auth_args::write(::apache::thrif
   xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 5);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->group_names.size()));
-    std::vector<std::string> ::const_iterator _iter914;
-    for (_iter914 = this->group_names.begin(); _iter914 != this->group_names.end(); ++_iter914)
+    std::vector<std::string> ::const_iterator _iter915;
+    for (_iter915 = this->group_names.begin(); _iter915 != this->group_names.end(); ++_iter915)
     {
-      xfer += oprot->writeString((*_iter914));
+      xfer += oprot->writeString((*_iter915));
     }
     xfer += oprot->writeListEnd();
   }
@@ -11840,10 +11840,10 @@ uint32_t ThriftHiveMetastore_get_partition_with_auth_pargs::write(::apache::thri
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter915;
-    for (_iter915 = (*(this->part_vals)).begin(); _iter915 != (*(this->part_vals)).end(); ++_iter915)
+    std::vector<std::string> ::const_iterator _iter916;
+    for (_iter916 = (*(this->part_vals)).begin(); _iter916 != (*(this->part_vals)).end(); ++_iter916)
     {
-      xfer += oprot->writeString((*_iter915));
+      xfer += oprot->writeString((*_iter916));
     }
     xfer += oprot->writeListEnd();
   }
@@ -11856,10 +11856,10 @@ uint32_t ThriftHiveMetastore_get_partition_with_auth_pargs::write(::apache::thri
   xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 5);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->group_names)).size()));
-    std::vector<std::string> ::const_iterator _iter916;
-    for (_iter916 = (*(this->group_names)).begin(); _iter916 != (*(this->group_names)).end(); ++_iter916)
+    std::vector<std::string> ::const_iterator _iter917;
+    for (_iter917 = (*(this->group_names)).begin(); _iter917 != (*(this->group_names)).end(); ++_iter917)
     {
-      xfer += oprot->writeString((*_iter916));
+      xfer += oprot->writeString((*_iter917));
     }
     xfer += oprot->writeListEnd();
   }
@@ -12418,14 +12418,14 @@ uint32_t ThriftHiveMetastore_get_partitions_result::read(::apache::thrift::proto
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size917;
-            ::apache::thrift::protocol::TType _etype920;
-            xfer += iprot->readListBegin(_etype920, _size917);
-            this->success.resize(_size917);
-            uint32_t _i921;
-            for (_i921 = 0; _i921 < _size917; ++_i921)
+            uint32_t _size918;
+            ::apache::thrift::protocol::TType _etype921;
+            xfer += iprot->readListBegin(_etype921, _size918);
+            this->success.resize(_size918);
+            uint32_t _i922;
+            for (_i922 = 0; _i922 < _size918; ++_i922)
             {
-              xfer += this->success[_i921].read(iprot);
+              xfer += this->success[_i922].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -12472,10 +12472,10 @@ uint32_t ThriftHiveMetastore_get_partitions_result::write(::apache::thrift::prot
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<Partition> ::const_iterator _iter922;
-      for (_iter922 = this->success.begin(); _iter922 != this->success.end(); ++_iter922)
+      std::vector<Partition> ::const_iterator _iter923;
+      for (_iter923 = this->success.begin(); _iter923 != this->success.end(); ++_iter923)
       {
-        xfer += (*_iter922).write(oprot);
+        xfer += (*_iter923).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -12524,14 +12524,14 @@ uint32_t ThriftHiveMetastore_get_partitions_presult::read(::apache::thrift::prot
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size923;
-            ::apache::thrift::protocol::TType _etype926;
-            xfer += iprot->readListBegin(_etype926, _size923);
-            (*(this->success)).resize(_size923);
-            uint32_t _i927;
-            for (_i927 = 0; _i927 < _size923; ++_i927)
+            uint32_t _size924;
+            ::apache::thrift::protocol::TType _etype927;
+            xfer += iprot->readListBegin(_etype927, _size924);
+            (*(this->success)).resize(_size924);
+            uint32_t _i928;
+            for (_i928 = 0; _i928 < _size924; ++_i928)
             {
-              xfer += (*(this->success))[_i927].read(iprot);
+              xfer += (*(this->success))[_i928].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -12630,14 +12630,14 @@ uint32_t ThriftHiveMetastore_get_partitions_with_auth_args::read(::apache::thrif
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->group_names.clear();
-            uint32_t _size928;
-            ::apache::thrift::protocol::TType _etype931;
-            xfer += iprot->readListBegin(_etype931, _size928);
-            this->group_names.resize(_size928);
-            uint32_t _i932;
-            for (_i932 = 0; _i932 < _size928; ++_i932)
+            uint32_t _size929;
+            ::apache::thrift::protocol::TType _etype932;
+            xfer += iprot->readListBegin(_etype932, _size929);
+            this->group_names.resize(_size929);
+            uint32_t _i933;
+            for (_i933 = 0; _i933 < _size929; ++_i933)
             {
-              xfer += iprot->readString(this->group_names[_i932]);
+              xfer += iprot->readString(this->group_names[_i933]);
             }
             xfer += iprot->readListEnd();
           }
@@ -12682,10 +12682,10 @@ uint32_t ThriftHiveMetastore_get_partitions_with_auth_args::write(::apache::thri
   xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 5);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->group_names.size()));
-    std::vector<std::string> ::const_iterator _iter933;
-    for (_iter933 = this->group_names.begin(); _iter933 != this->group_names.end(); ++_iter933)
+    std::vector<std::string> ::const_iterator _iter934;
+    for (_iter934 = this->group_names.begin(); _iter934 != this->group_names.end(); ++_iter934)
     {
-      xfer += oprot->writeString((*_iter933));
+      xfer += oprot->writeString((*_iter934));
     }
     xfer += oprot->writeListEnd();
   }
@@ -12725,10 +12725,10 @@ uint32_t ThriftHiveMetastore_get_partitions_with_auth_pargs::write(::apache::thr
   xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 5);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->group_names)).size()));
-    std::vector<std::string> ::const_iterator _iter934;
-    for (_iter934 = (*(this->group_names)).begin(); _iter934 != (*(this->group_names)).end(); ++_iter934)
+    std::vector<std::string> ::const_iterator _iter935;
+    for (_iter935 = (*(this->group_names)).begin(); _iter935 != (*(this->group_names)).end(); ++_iter935)
     {
-      xfer += oprot->writeString((*_iter934));
+      xfer += oprot->writeString((*_iter935));
     }
     xfer += oprot->writeListEnd();
   }
@@ -12769,14 +12769,14 @@ uint32_t ThriftHiveMetastore_get_partitions_with_auth_result::read(::apache::thr
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size935;
-            ::apache::thrift::protocol::TType _etype938;
-            xfer += iprot->readListBegin(_etype938, _size935);
-            this->success.resize(_size935);
-            uint32_t _i939;
-            for (_i939 = 0; _i939 < _size935; ++_i939)
+            uint32_t _size936;
+            ::apache::thrift::protocol::TType _etype939;
+            xfer += iprot->readListBegin(_etype939, _size936);
+            this->success.resize(_size936);
+            uint32_t _i940;
+            for (_i940 = 0; _i940 < _size936; ++_i940)
             {
-              xfer += this->success[_i939].read(iprot);
+              xfer += this->success[_i940].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -12823,10 +12823,10 @@ uint32_t ThriftHiveMetastore_get_partitions_with_auth_result::write(::apache::th
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<Partition> ::const_iterator _iter940;
-      for (_iter940 = this->success.begin(); _iter940 != this->success.end(); ++_iter940)
+      std::vector<Partition> ::const_iterator _iter941;
+      for (_iter941 = this->success.begin(); _iter941 != this->success.end(); ++_iter941)
       {
-        xfer += (*_iter940).write(oprot);
+        xfer += (*_iter941).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -12875,14 +12875,14 @@ uint32_t ThriftHiveMetastore_get_partitions_with_auth_presult::read(::apache::th
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size941;
-            ::apache::thrift::protocol::TType _etype944;
-            xfer += iprot->readListBegin(_etype944, _size941);
-            (*(this->success)).resize(_size941);
-            uint32_t _i945;
-            for (_i945 = 0; _i945 < _size941; ++_i945)
+            uint32_t _size942;
+            ::apache::thrift::protocol::TType _etype945;
+            xfer += iprot->readListBegin(_etype945, _size942);
+            (*(this->success)).resize(_size942);
+            uint32_t _i946;
+            for (_i946 = 0; _i946 < _size942; ++_i946)
             {
-              xfer += (*(this->success))[_i945].read(iprot);
+              xfer += (*(this->success))[_i946].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -13060,14 +13060,14 @@ uint32_t ThriftHiveMetastore_get_partitions_pspec_result::read(::apache::thrift:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size946;
-            ::apache::thrift::protocol::TType _etype949;
-            xfer += iprot->readListBegin(_etype949, _size946);
-            this->success.resize(_size946);
-            uint32_t _i950;
-            for (_i950 = 0; _i950 < _size946; ++_i950)
+            uint32_t _size947;
+            ::apache::thrift::protocol::TType _etype950;
+            xfer += iprot->readListBegin(_etype950, _size947);
+            this->success.resize(_size947);
+            uint32_t _i951;
+            for (_i951 = 0; _i951 < _size947; ++_i951)
             {
-              xfer += this->success[_i950].read(iprot);
+              xfer += this->success[_i951].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -13114,10 +13114,10 @@ uint32_t ThriftHiveMetastore_get_partitions_pspec_result::write(::apache::thrift
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<PartitionSpec> ::const_iterator _iter951;
-      for (_iter951 = this->success.begin(); _iter951 != this->success.end(); ++_iter951)
+      std::vector<PartitionSpec> ::const_iterator _iter952;
+      for (_iter952 = this->success.begin(); _iter952 != this->success.end(); ++_iter952)
       {
-        xfer += (*_iter951).write(oprot);
+        xfer += (*_iter952).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -13166,14 +13166,14 @@ uint32_t ThriftHiveMetastore_get_partitions_pspec_presult::read(::apache::thrift
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size952;
-            ::apache::thrift::protocol::TType _etype955;
-            xfer += iprot->readListBegin(_etype955, _size952);
-            (*(this->success)).resize(_size952);
-            uint32_t _i956;
-            for (_i956 = 0; _i956 < _size952; ++_i956)
+            uint32_t _size953;
+            ::apache::thrift::protocol::TType _etype956;
+            xfer += iprot->readListBegin(_etype956, _size953);
+            (*(this->success)).resize(_size953);
+            uint32_t _i957;
+            for (_i957 = 0; _i957 < _size953; ++_i957)
             {
-              xfer += (*(this->success))[_i956].read(iprot);
+              xfer += (*(this->success))[_i957].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -13351,14 +13351,14 @@ uint32_t ThriftHiveMetastore_get_partition_names_result::read(::apache::thrift::
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size957;
-            ::apache::thrift::protocol::TType _etype960;
-            xfer += iprot->readListBegin(_etype960, _size957);
-            this->success.resize(_size957);
-            uint32_t _i961;
-            for (_i961 = 0; _i961 < _size957; ++_i961)
+            uint32_t _size958;
+            ::apache::thrift::protocol::TType _etype961;
+            xfer += iprot->readListBegin(_etype961, _size958);
+            this->success.resize(_size958);
+            uint32_t _i962;
+            for (_i962 = 0; _i962 < _size958; ++_i962)
             {
-              xfer += iprot->readString(this->success[_i961]);
+              xfer += iprot->readString(this->success[_i962]);
             }
             xfer += iprot->readListEnd();
           }
@@ -13397,10 +13397,10 @@ uint32_t ThriftHiveMetastore_get_partition_names_result::write(::apache::thrift:
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter962;
-      for (_iter962 = this->success.begin(); _iter962 != this->success.end(); ++_iter962)
+      std::vector<std::string> ::const_iterator _iter963;
+      for (_iter963 = this->success.begin(); _iter963 != this->success.end(); ++_iter963)
       {
-        xfer += oprot->writeString((*_iter962));
+        xfer += oprot->writeString((*_iter963));
       }
       xfer += oprot->writeListEnd();
     }
@@ -13445,14 +13445,14 @@ uint32_t ThriftHiveMetastore_get_partition_names_presult::read(::apache::thrift:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size963;
-            ::apache::thrift::protocol::TType _etype966;
-            xfer += iprot->readListBegin(_etype966, _size963);
-            (*(this->success)).resize(_size963);
-            uint32_t _i967;
-            for (_i967 = 0; _i967 < _size963; ++_i967)
+            uint32_t _size964;
+            ::apache::thrift::protocol::TType _etype967;
+            xfer += iprot->readListBegin(_etype967, _size964);
+            (*(this->success)).resize(_size964);
+            uint32_t _i968;
+            for (_i968 = 0; _i968 < _size964; ++_i968)
             {
-              xfer += iprot->readString((*(this->success))[_i967]);
+              xfer += iprot->readString((*(this->success))[_i968]);
             }
             xfer += iprot->readListEnd();
           }
@@ -13527,14 +13527,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_args::read(::apache::thrift::prot
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size968;
-            ::apache::thrift::protocol::TType _etype971;
-            xfer += iprot->readListBegin(_etype971, _size968);
-            this->part_vals.resize(_size968);
-            uint32_t _i972;
-            for (_i972 = 0; _i972 < _size968; ++_i972)
+            uint32_t _size969;
+            ::apache::thrift::protocol::TType _etype972;
+            xfer += iprot->readListBegin(_etype972, _size969);
+            this->part_vals.resize(_size969);
+            uint32_t _i973;
+            for (_i973 = 0; _i973 < _size969; ++_i973)
             {
-              xfer += iprot->readString(this->part_vals[_i972]);
+              xfer += iprot->readString(this->part_vals[_i973]);
             }
             xfer += iprot->readListEnd();
           }
@@ -13579,10 +13579,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_args::write(::apache::thrift::pro
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter973;
-    for (_iter973 = this->part_vals.begin(); _iter973 != this->part_vals.end(); ++_iter973)
+    std::vector<std::string> ::const_iterator _iter974;
+    for (_iter974 = this->part_vals.begin(); _iter974 != this->part_vals.end(); ++_iter974)
     {
-      xfer += oprot->writeString((*_iter973));
+      xfer += oprot->writeString((*_iter974));
     }
     xfer += oprot->writeListEnd();
   }
@@ -13618,10 +13618,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_pargs::write(::apache::thrift::pr
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter974;
-    for (_iter974 = (*(this->part_vals)).begin(); _iter974 != (*(this->part_vals)).end(); ++_iter974)
+    std::vector<std::string> ::const_iterator _iter975;
+    for (_iter975 = (*(this->part_vals)).begin(); _iter975 != (*(this->part_vals)).end(); ++_iter975)
     {
-      xfer += oprot->writeString((*_iter974));
+      xfer += oprot->writeString((*_iter975));
     }
     xfer += oprot->writeListEnd();
   }
@@ -13666,14 +13666,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_result::read(::apache::thrift::pr
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size975;
-            ::apache::thrift::protocol::TType _etype978;
-            xfer += iprot->readListBegin(_etype978, _size975);
-            this->success.resize(_size975);
-            uint32_t _i979;
-            for (_i979 = 0; _i979 < _size975; ++_i979)
+            uint32_t _size976;
+            ::apache::thrift::protocol::TType _etype979;
+            xfer += iprot->readListBegin(_etype979, _size976);
+            this->success.resize(_size976);
+            uint32_t _i980;
+            for (_i980 = 0; _i980 < _size976; ++_i980)
             {
-              xfer += this->success[_i979].read(iprot);
+              xfer += this->success[_i980].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -13720,10 +13720,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_result::write(::apache::thrift::p
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<Partition> ::const_iterator _iter980;
-      for (_iter980 = this->success.begin(); _iter980 != this->success.end(); ++_iter980)
+      std::vector<Partition> ::const_iterator _iter981;
+      for (_iter981 = this->success.begin(); _iter981 != this->success.end(); ++_iter981)
       {
-        xfer += (*_iter980).write(oprot);
+        xfer += (*_iter981).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -13772,14 +13772,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_presult::read(::apache::thrift::p
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size981;
-            ::apache::thrift::protocol::TType _etype984;
-            xfer += iprot->readListBegin(_etype984, _size981);
-            (*(this->success)).resize(_size981);
-            uint32_t _i985;
-            for (_i985 = 0; _i985 < _size981; ++_i985)
+            uint32_t _size982;
+            ::apache::thrift::protocol::TType _etype985;
+            xfer += iprot->readListBegin(_etype985, _size982);
+            (*(this->success)).resize(_size982);
+            uint32_t _i986;
+            for (_i986 = 0; _i986 < _size982; ++_i986)
             {
-              xfer += (*(this->success))[_i985].read(iprot);
+              xfer += (*(this->success))[_i986].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -13862,14 +13862,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_args::read(::apache::th
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size986;
-            ::apache::thrift::protocol::TType _etype989;
-            xfer += iprot->readListBegin(_etype989, _size986);
-            this->part_vals.resize(_size986);
-            uint32_t _i990;
-            for (_i990 = 0; _i990 < _size986; ++_i990)
+            uint32_t _size987;
+            ::apache::thrift::protocol::TType _etype990;
+            xfer += iprot->readListBegin(_etype990, _size987);
+            this->part_vals.resize(_size987);
+            uint32_t _i991;
+            for (_i991 = 0; _i991 < _size987; ++_i991)
             {
-              xfer += iprot->readString(this->part_vals[_i990]);
+              xfer += iprot->readString(this->part_vals[_i991]);
             }
             xfer += iprot->readListEnd();
           }
@@ -13898,14 +13898,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_args::read(::apache::th
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->group_names.clear();
-            uint32_t _size991;
-            ::apache::thrift::protocol::TType _etype994;
-            xfer += iprot->readListBegin(_etype994, _size991);
-            this->group_names.resize(_size991);
-            uint32_t _i995;
-            for (_i995 = 0; _i995 < _size991; ++_i995)
+            uint32_t _size992;
+            ::apache::thrift::protocol::TType _etype995;
+            xfer += iprot->readListBegin(_etype995, _size992);
+            this->group_names.resize(_size992);
+            uint32_t _i996;
+            for (_i996 = 0; _i996 < _size992; ++_i996)
             {
-              xfer += iprot->readString(this->group_names[_i995]);
+              xfer += iprot->readString(this->group_names[_i996]);
             }
             xfer += iprot->readListEnd();
           }
@@ -13942,10 +13942,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_args::write(::apache::t
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter996;
-    for (_iter996 = this->part_vals.begin(); _iter996 != this->part_vals.end(); ++_iter996)
+    std::vector<std::string> ::const_iterator _iter997;
+    for (_iter997 = this->part_vals.begin(); _iter997 != this->part_vals.end(); ++_iter997)
     {
-      xfer += oprot->writeString((*_iter996));
+      xfer += oprot->writeString((*_iter997));
     }
     xfer += oprot->writeListEnd();
   }
@@ -13962,10 +13962,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_args::write(::apache::t
   xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 6);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->group_names.size()));
-    std::vector<std::string> ::const_iterator _iter997;
-    for (_iter997 = this->group_names.begin(); _iter997 != this->group_names.end(); ++_iter997)
+    std::vector<std::string> ::const_iterator _iter998;
+    for (_iter998 = this->group_names.begin(); _iter998 != this->group_names.end(); ++_iter998)
     {
-      xfer += oprot->writeString((*_iter997));
+      xfer += oprot->writeString((*_iter998));
     }
     xfer += oprot->writeListEnd();
   }
@@ -13997,10 +13997,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_pargs::write(::apache::
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter998;
-    for (_iter998 = (*(this->part_vals)).begin(); _iter998 != (*(this->part_vals)).end(); ++_iter998)
+    std::vector<std::string> ::const_iterator _iter999;
+    for (_iter999 = (*(this->part_vals)).begin(); _iter999 != (*(this->part_vals)).end(); ++_iter999)
     {
-      xfer += oprot->writeString((*_iter998));
+      xfer += oprot->writeString((*_iter999));
     }
     xfer += oprot->writeListEnd();
   }
@@ -14017,10 +14017,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_pargs::write(::apache::
   xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 6);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->group_names)).size()));
-    std::vector<std::string> ::const_iterator _iter999;
-    for (_iter999 = (*(this->group_names)).begin(); _iter999 != (*(this->group_names)).end(); ++_iter999)
+    std::vector<std::string> ::const_iterator _iter1000;
+    for (_iter1000 = (*(this->group_names)).begin(); _iter1000 != (*(this->group_names)).end(); ++_iter1000)
     {
-      xfer += oprot->writeString((*_iter999));
+      xfer += oprot->writeString((*_iter1000));
     }
     xfer += oprot->writeListEnd();
   }
@@ -14061,14 +14061,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_result::read(::apache::
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size1000;
-            ::apache::thrift::protocol::TType _etype1003;
-            xfer += iprot->readListBegin(_etype1003, _size1000);
-            this->success.resize(_size1000);
-            uint32_t _i1004;
-            for (_i1004 = 0; _i1004 < _size1000; ++_i1004)
+            uint32_t _size1001;
+            ::apache::thrift::protocol::TType _etype1004;
+            xfer += iprot->readListBegin(_etype1004, _size1001);
+            this->success.resize(_size1001);
+            uint32_t _i1005;
+            for (_i1005 = 0; _i1005 < _size1001; ++_i1005)
             {
-              xfer += this->success[_i1004].read(iprot);
+              xfer += this->success[_i1005].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -14115,10 +14115,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_result::write(::apache:
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<Partition> ::const_iterator _iter1005;
-      for (_iter1005 = this->success.begin(); _iter1005 != this->success.end(); ++_iter1005)
+      std::vector<Partition> ::const_iterator _iter1006;
+      for (_iter1006 = this->success.begin(); _iter1006 != this->success.end(); ++_iter1006)
       {
-        xfer += (*_iter1005).write(oprot);
+        xfer += (*_iter1006).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -14167,14 +14167,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_presult::read(::apache:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size1006;
-            ::apache::thrift::protocol::TType _etype1009;
-            xfer += iprot->readListBegin(_etype1009, _size1006);
-            (*(this->success)).resize(_size1006);
-            uint32_t _i1010;
-            for (_i1010 = 0; _i1010 < _size1006; ++_i1010)
+            uint32_t _size1007;
+            ::apache::thrift::protocol::TType _etype1010;
+            xfer += iprot->readListBegin(_etype1010, _size1007);
+            (*(this->success)).resize(_size1007);
+            uint32_t _i1011;
+            for (_i1011 = 0; _i1011 < _size1007; ++_i1011)
             {
-              xfer += (*(this->success))[_i1010].read(iprot);
+              xfer += (*(this->success))[_i1011].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -14257,14 +14257,14 @@ uint32_t ThriftHiveMetastore_get_partition_names_ps_args::read(::apache::thrift:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size1011;
-            ::apache::thrift::protocol::TType _etype1014;
-            xfer += iprot->readListBegin(_etype1014, _size1011);
-            this->part_vals.resize(_size1011);
-            uint32_t _i1015;
-            for (_i1015 = 0; _i1015 < _size1011; ++_i1015)
+            uint32_t _size1012;
+            ::apache::thrift::protocol::TType _etype1015;
+            xfer += iprot->readListBegin(_etype1015, _size1012);
+            this->part_vals.resize(_size1012);
+            uint32_t _i1016;
+            for (_i1016 = 0; _i1016 < _size1012; ++_i1016)
             {
-              xfer += iprot->readString(this->part_vals[_i1015]);
+              xfer += iprot->readString(this->part_vals[_i1016]);
             }
             xfer += iprot->readListEnd();
           }
@@ -14309,10 +14309,10 @@ uint32_t ThriftHiveMetastore_get_partition_names_ps_args::write(::apache::thrift
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter1016;
-    for (_iter1016 = this->part_vals.begin(); _iter1016 != this->part_vals.end(); ++_iter1016)
+    std::vector<std::string> ::const_iterator _iter1017;
+    for (_iter1017 = this->part_vals.begin(); _iter1017 != this->part_vals.end(); ++_iter1017)
     {
-      xfer += oprot->writeString((*_iter1016));
+      xfer

<TRUNCATED>
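
A note on the hunks above: every change in this regenerated ThriftHiveMetastore.cpp fragment is a renumbering of the compiler-generated temporaries (_size951 -> _size952, _iter996 -> _iter997, and so on); the shape of the serialization code itself is unchanged. The following is a minimal, self-contained sketch of the loop the Thrift compiler emits for reading a list<string> field such as part_vals. FakeProtocol is a hypothetical stand-in for apache::thrift::protocol::TProtocol, used only so the example compiles on its own; the numeric suffixes on _size/_etype/_i are arbitrary unique names picked by the generator, which is why they shift between the 0.9.2 and 0.9.3 output.

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-in for the Thrift protocol object, serving canned data so the
// example is self-contained; the real generated code calls same-named TProtocol methods.
struct FakeProtocol {
  std::vector<std::string> data{"2015", "10", "28"};
  std::size_t pos = 0;
  void readListBegin(int& etype, uint32_t& size) { etype = 11 /* T_STRING */; size = static_cast<uint32_t>(data.size()); }
  void readString(std::string& out) { out = data[pos++]; }
  void readListEnd() {}
};

int main() {
  FakeProtocol iprot;
  std::vector<std::string> part_vals;

  // Shape of the generated read loop for a list<string> field (e.g. part_vals).
  // The _size/_etype/_i temporaries carry generator-assigned numeric suffixes,
  // which is all that changes between the 0.9.2 and 0.9.3 output shown above.
  part_vals.clear();
  uint32_t _size0;
  int _etype0;
  iprot.readListBegin(_etype0, _size0);
  part_vals.resize(_size0);
  for (uint32_t _i0 = 0; _i0 < _size0; ++_i0) {
    iprot.readString(part_vals[_i0]);
  }
  iprot.readListEnd();

  for (const std::string& v : part_vals) {
    std::cout << v << "\n";
  }
  return 0;
}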

[37/55] [abbrv] hive git commit: HIVE-12260: Fix TestColumnStatistics and TestJsonFileDump test failures in master (Prasanth Jayachandran reviewed by Thejas Nair)

Posted by xu...@apache.org.
HIVE-12260: Fix TestColumnStatistics and TestJsonFileDump test failures in master (Prasanth Jayachandran reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0808741c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0808741c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0808741c

Branch: refs/heads/spark
Commit: 0808741c69d3ec6739e47bd10f89695c28d460c0
Parents: 383d1cc
Author: Prasanth Jayachandran <j....@gmail.com>
Authored: Mon Oct 26 11:45:10 2015 -0500
Committer: Prasanth Jayachandran <j....@gmail.com>
Committed: Mon Oct 26 11:45:10 2015 -0500

----------------------------------------------------------------------
 ql/src/test/resources/orc-file-dump.json    | 184 +++++++++++------------
 ql/src/test/resources/orc-file-has-null.out |  80 +++++-----
 2 files changed, 132 insertions(+), 132 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/0808741c/ql/src/test/resources/orc-file-dump.json
----------------------------------------------------------------------
diff --git a/ql/src/test/resources/orc-file-dump.json b/ql/src/test/resources/orc-file-dump.json
index 14cf962..646dfe5 100644
--- a/ql/src/test/resources/orc-file-dump.json
+++ b/ql/src/test/resources/orc-file-dump.json
@@ -4,7 +4,7 @@
   "writerVersion": "HIVE_4243",
   "numberOfRows": 21000,
   "compression": "ZLIB",
-  "compressionBufferSize": 10000,
+  "compressionBufferSize": 4096,
   "schemaString": "struct<i:int,l:bigint,s:string>",
   "schema": [
     {
@@ -254,8 +254,8 @@
       "stripeNumber": 1,
       "stripeInformation": {
         "offset": 3,
-        "indexLength": 863,
-        "dataLength": 63749,
+        "indexLength": 970,
+        "dataLength": 63770,
         "footerLength": 90,
         "rowCount": 5000
       },
@@ -270,60 +270,60 @@
           "columnId": 1,
           "section": "ROW_INDEX",
           "startOffset": 20,
-          "length": 165
+          "length": 167
         },
         {
           "columnId": 2,
           "section": "ROW_INDEX",
-          "startOffset": 185,
-          "length": 174
+          "startOffset": 187,
+          "length": 171
         },
         {
           "columnId": 3,
           "section": "ROW_INDEX",
-          "startOffset": 359,
+          "startOffset": 358,
           "length": 103
         },
         {
           "columnId": 3,
           "section": "BLOOM_FILTER",
-          "startOffset": 462,
-          "length": 404
+          "startOffset": 461,
+          "length": 512
         },
         {
           "columnId": 1,
           "section": "DATA",
-          "startOffset": 866,
-          "length": 20029
+          "startOffset": 973,
+          "length": 20035
         },
         {
           "columnId": 2,
           "section": "DATA",
-          "startOffset": 20895,
-          "length": 40035
+          "startOffset": 21008,
+          "length": 40050
         },
         {
           "columnId": 3,
           "section": "PRESENT",
-          "startOffset": 60930,
+          "startOffset": 61058,
           "length": 17
         },
         {
           "columnId": 3,
           "section": "DATA",
-          "startOffset": 60947,
+          "startOffset": 61075,
           "length": 3510
         },
         {
           "columnId": 3,
           "section": "LENGTH",
-          "startOffset": 64457,
+          "startOffset": 64585,
           "length": 25
         },
         {
           "columnId": 3,
           "section": "DICTIONARY_DATA",
-          "startOffset": 64482,
+          "startOffset": 64610,
           "length": 133
         }
       ],
@@ -494,77 +494,77 @@
     {
       "stripeNumber": 2,
       "stripeInformation": {
-        "offset": 64705,
-        "indexLength": 854,
-        "dataLength": 63742,
-        "footerLength": 90,
+        "offset": 64833,
+        "indexLength": 961,
+        "dataLength": 63763,
+        "footerLength": 88,
         "rowCount": 5000
       },
       "streams": [
         {
           "columnId": 0,
           "section": "ROW_INDEX",
-          "startOffset": 64705,
+          "startOffset": 64833,
           "length": 17
         },
         {
           "columnId": 1,
           "section": "ROW_INDEX",
-          "startOffset": 64722,
-          "length": 164
+          "startOffset": 64850,
+          "length": 166
         },
         {
           "columnId": 2,
           "section": "ROW_INDEX",
-          "startOffset": 64886,
-          "length": 169
+          "startOffset": 65016,
+          "length": 166
         },
         {
           "columnId": 3,
           "section": "ROW_INDEX",
-          "startOffset": 65055,
+          "startOffset": 65182,
           "length": 100
         },
         {
           "columnId": 3,
           "section": "BLOOM_FILTER",
-          "startOffset": 65155,
-          "length": 404
+          "startOffset": 65282,
+          "length": 512
         },
         {
           "columnId": 1,
           "section": "DATA",
-          "startOffset": 65559,
-          "length": 20029
+          "startOffset": 65794,
+          "length": 20035
         },
         {
           "columnId": 2,
           "section": "DATA",
-          "startOffset": 85588,
-          "length": 40035
+          "startOffset": 85829,
+          "length": 40050
         },
         {
           "columnId": 3,
           "section": "PRESENT",
-          "startOffset": 125623,
+          "startOffset": 125879,
           "length": 17
         },
         {
           "columnId": 3,
           "section": "DATA",
-          "startOffset": 125640,
+          "startOffset": 125896,
           "length": 3503
         },
         {
           "columnId": 3,
           "section": "LENGTH",
-          "startOffset": 129143,
+          "startOffset": 129399,
           "length": 25
         },
         {
           "columnId": 3,
           "section": "DICTIONARY_DATA",
-          "startOffset": 129168,
+          "startOffset": 129424,
           "length": 133
         }
       ],
@@ -735,77 +735,77 @@
     {
       "stripeNumber": 3,
       "stripeInformation": {
-        "offset": 129391,
-        "indexLength": 853,
-        "dataLength": 63749,
-        "footerLength": 90,
+        "offset": 129645,
+        "indexLength": 962,
+        "dataLength": 63770,
+        "footerLength": 91,
         "rowCount": 5000
       },
       "streams": [
         {
           "columnId": 0,
           "section": "ROW_INDEX",
-          "startOffset": 129391,
+          "startOffset": 129645,
           "length": 17
         },
         {
           "columnId": 1,
           "section": "ROW_INDEX",
-          "startOffset": 129408,
-          "length": 160
+          "startOffset": 129662,
+          "length": 164
         },
         {
           "columnId": 2,
           "section": "ROW_INDEX",
-          "startOffset": 129568,
-          "length": 170
+          "startOffset": 129826,
+          "length": 167
         },
         {
           "columnId": 3,
           "section": "ROW_INDEX",
-          "startOffset": 129738,
+          "startOffset": 129993,
           "length": 102
         },
         {
           "columnId": 3,
           "section": "BLOOM_FILTER",
-          "startOffset": 129840,
-          "length": 404
+          "startOffset": 130095,
+          "length": 512
         },
         {
           "columnId": 1,
           "section": "DATA",
-          "startOffset": 130244,
-          "length": 20029
+          "startOffset": 130607,
+          "length": 20035
         },
         {
           "columnId": 2,
           "section": "DATA",
-          "startOffset": 150273,
-          "length": 40035
+          "startOffset": 150642,
+          "length": 40050
         },
         {
           "columnId": 3,
           "section": "PRESENT",
-          "startOffset": 190308,
+          "startOffset": 190692,
           "length": 17
         },
         {
           "columnId": 3,
           "section": "DATA",
-          "startOffset": 190325,
+          "startOffset": 190709,
           "length": 3510
         },
         {
           "columnId": 3,
           "section": "LENGTH",
-          "startOffset": 193835,
+          "startOffset": 194219,
           "length": 25
         },
         {
           "columnId": 3,
           "section": "DICTIONARY_DATA",
-          "startOffset": 193860,
+          "startOffset": 194244,
           "length": 133
         }
       ],
@@ -976,77 +976,77 @@
     {
       "stripeNumber": 4,
       "stripeInformation": {
-        "offset": 194083,
-        "indexLength": 866,
-        "dataLength": 63735,
-        "footerLength": 90,
+        "offset": 194468,
+        "indexLength": 973,
+        "dataLength": 63756,
+        "footerLength": 91,
         "rowCount": 5000
       },
       "streams": [
         {
           "columnId": 0,
           "section": "ROW_INDEX",
-          "startOffset": 194083,
+          "startOffset": 194468,
           "length": 17
         },
         {
           "columnId": 1,
           "section": "ROW_INDEX",
-          "startOffset": 194100,
-          "length": 164
+          "startOffset": 194485,
+          "length": 166
         },
         {
           "columnId": 2,
           "section": "ROW_INDEX",
-          "startOffset": 194264,
-          "length": 174
+          "startOffset": 194651,
+          "length": 171
         },
         {
           "columnId": 3,
           "section": "ROW_INDEX",
-          "startOffset": 194438,
+          "startOffset": 194822,
           "length": 107
         },
         {
           "columnId": 3,
           "section": "BLOOM_FILTER",
-          "startOffset": 194545,
-          "length": 404
+          "startOffset": 194929,
+          "length": 512
         },
         {
           "columnId": 1,
           "section": "DATA",
-          "startOffset": 194949,
-          "length": 20029
+          "startOffset": 195441,
+          "length": 20035
         },
         {
           "columnId": 2,
           "section": "DATA",
-          "startOffset": 214978,
-          "length": 40035
+          "startOffset": 215476,
+          "length": 40050
         },
         {
           "columnId": 3,
           "section": "PRESENT",
-          "startOffset": 255013,
+          "startOffset": 255526,
           "length": 17
         },
         {
           "columnId": 3,
           "section": "DATA",
-          "startOffset": 255030,
+          "startOffset": 255543,
           "length": 3496
         },
         {
           "columnId": 3,
           "section": "LENGTH",
-          "startOffset": 258526,
+          "startOffset": 259039,
           "length": 25
         },
         {
           "columnId": 3,
           "section": "DICTIONARY_DATA",
-          "startOffset": 258551,
+          "startOffset": 259064,
           "length": 133
         }
       ],
@@ -1217,9 +1217,9 @@
     {
       "stripeNumber": 5,
       "stripeInformation": {
-        "offset": 258774,
+        "offset": 259288,
         "indexLength": 433,
-        "dataLength": 12940,
+        "dataLength": 12943,
         "footerLength": 83,
         "rowCount": 1000
       },
@@ -1227,67 +1227,67 @@
         {
           "columnId": 0,
           "section": "ROW_INDEX",
-          "startOffset": 258774,
+          "startOffset": 259288,
           "length": 12
         },
         {
           "columnId": 1,
           "section": "ROW_INDEX",
-          "startOffset": 258786,
+          "startOffset": 259300,
           "length": 38
         },
         {
           "columnId": 2,
           "section": "ROW_INDEX",
-          "startOffset": 258824,
+          "startOffset": 259338,
           "length": 41
         },
         {
           "columnId": 3,
           "section": "ROW_INDEX",
-          "startOffset": 258865,
+          "startOffset": 259379,
           "length": 41
         },
         {
           "columnId": 3,
           "section": "BLOOM_FILTER",
-          "startOffset": 258906,
+          "startOffset": 259420,
           "length": 301
         },
         {
           "columnId": 1,
           "section": "DATA",
-          "startOffset": 259207,
+          "startOffset": 259721,
           "length": 4007
         },
         {
           "columnId": 2,
           "section": "DATA",
-          "startOffset": 263214,
-          "length": 8007
+          "startOffset": 263728,
+          "length": 8010
         },
         {
           "columnId": 3,
           "section": "PRESENT",
-          "startOffset": 271221,
+          "startOffset": 271738,
           "length": 16
         },
         {
           "columnId": 3,
           "section": "DATA",
-          "startOffset": 271237,
+          "startOffset": 271754,
           "length": 752
         },
         {
           "columnId": 3,
           "section": "LENGTH",
-          "startOffset": 271989,
+          "startOffset": 272506,
           "length": 25
         },
         {
           "columnId": 3,
           "section": "DICTIONARY_DATA",
-          "startOffset": 272014,
+          "startOffset": 272531,
           "length": 133
         }
       ],
@@ -1348,7 +1348,7 @@
       }]
     }
   ],
-  "fileLength": 272779,
+  "fileLength": 273300,
   "paddingLength": 0,
   "paddingRatio": 0
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/0808741c/ql/src/test/resources/orc-file-has-null.out
----------------------------------------------------------------------
diff --git a/ql/src/test/resources/orc-file-has-null.out b/ql/src/test/resources/orc-file-has-null.out
index 9c4e83c..bef44a5 100644
--- a/ql/src/test/resources/orc-file-has-null.out
+++ b/ql/src/test/resources/orc-file-has-null.out
@@ -2,7 +2,7 @@ Structure for TestOrcFile.testHasNull.orc
 File Version: 0.12 with HIVE_4243
 Rows: 20000
 Compression: ZLIB
-Compression size: 10000
+Compression size: 4096
 Type: struct<bytes1:binary,string1:string>
 
 Stripe Statistics:
@@ -29,16 +29,16 @@ File Statistics:
   Column 2: count: 7000 hasNull: true min: RG1 max: STRIPE-3 sum: 46000
 
 Stripes:
-  Stripe: offset: 3 data: 195 rows: 5000 tail: 64 index: 162
+  Stripe: offset: 3 data: 241 rows: 5000 tail: 67 index: 163
     Stream: column 0 section ROW_INDEX start: 3 length 17
-    Stream: column 1 section ROW_INDEX start: 20 length 63
-    Stream: column 2 section ROW_INDEX start: 83 length 82
-    Stream: column 1 section DATA start: 165 length 113
-    Stream: column 1 section LENGTH start: 278 length 32
-    Stream: column 2 section PRESENT start: 310 length 13
-    Stream: column 2 section DATA start: 323 length 22
-    Stream: column 2 section LENGTH start: 345 length 6
-    Stream: column 2 section DICTIONARY_DATA start: 351 length 9
+    Stream: column 1 section ROW_INDEX start: 20 length 64
+    Stream: column 2 section ROW_INDEX start: 84 length 82
+    Stream: column 1 section DATA start: 166 length 159
+    Stream: column 1 section LENGTH start: 325 length 32
+    Stream: column 2 section PRESENT start: 357 length 13
+    Stream: column 2 section DATA start: 370 length 22
+    Stream: column 2 section LENGTH start: 392 length 6
+    Stream: column 2 section DICTIONARY_DATA start: 398 length 9
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DICTIONARY_V2[2]
@@ -48,16 +48,16 @@ Stripes:
       Entry 2: count: 1000 hasNull: false min: RG3 max: RG3 sum: 3000 positions: 0,2,125,0,0,66,488
       Entry 3: count: 0 hasNull: true positions: 0,4,125,0,0,136,488
       Entry 4: count: 0 hasNull: true positions: 0,6,125,0,0,136,488
-  Stripe: offset: 424 data: 156 rows: 5000 tail: 60 index: 119
-    Stream: column 0 section ROW_INDEX start: 424 length 17
-    Stream: column 1 section ROW_INDEX start: 441 length 63
-    Stream: column 2 section ROW_INDEX start: 504 length 39
-    Stream: column 1 section DATA start: 543 length 113
-    Stream: column 1 section LENGTH start: 656 length 32
-    Stream: column 2 section PRESENT start: 688 length 11
-    Stream: column 2 section DATA start: 699 length 0
-    Stream: column 2 section LENGTH start: 699 length 0
-    Stream: column 2 section DICTIONARY_DATA start: 699 length 0
+  Stripe: offset: 474 data: 202 rows: 5000 tail: 64 index: 120
+    Stream: column 0 section ROW_INDEX start: 474 length 17
+    Stream: column 1 section ROW_INDEX start: 491 length 64
+    Stream: column 2 section ROW_INDEX start: 555 length 39
+    Stream: column 1 section DATA start: 594 length 159
+    Stream: column 1 section LENGTH start: 753 length 32
+    Stream: column 2 section PRESENT start: 785 length 11
+    Stream: column 2 section DATA start: 796 length 0
+    Stream: column 2 section LENGTH start: 796 length 0
+    Stream: column 2 section DICTIONARY_DATA start: 796 length 0
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DICTIONARY_V2[0]
@@ -67,15 +67,15 @@ Stripes:
       Entry 2: count: 0 hasNull: true positions: 0,2,120,0,0,0,0
       Entry 3: count: 0 hasNull: true positions: 0,4,115,0,0,0,0
       Entry 4: count: 0 hasNull: true positions: 0,6,110,0,0,0,0
-  Stripe: offset: 759 data: 186 rows: 5000 tail: 60 index: 148
-    Stream: column 0 section ROW_INDEX start: 759 length 17
-    Stream: column 1 section ROW_INDEX start: 776 length 63
-    Stream: column 2 section ROW_INDEX start: 839 length 68
-    Stream: column 1 section DATA start: 907 length 113
-    Stream: column 1 section LENGTH start: 1020 length 32
-    Stream: column 2 section DATA start: 1052 length 24
-    Stream: column 2 section LENGTH start: 1076 length 6
-    Stream: column 2 section DICTIONARY_DATA start: 1082 length 11
+  Stripe: offset: 860 data: 232 rows: 5000 tail: 63 index: 149
+    Stream: column 0 section ROW_INDEX start: 860 length 17
+    Stream: column 1 section ROW_INDEX start: 877 length 64
+    Stream: column 2 section ROW_INDEX start: 941 length 68
+    Stream: column 1 section DATA start: 1009 length 159
+    Stream: column 1 section LENGTH start: 1168 length 32
+    Stream: column 2 section DATA start: 1200 length 24
+    Stream: column 2 section LENGTH start: 1224 length 6
+    Stream: column 2 section DICTIONARY_DATA start: 1230 length 11
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DICTIONARY_V2[1]
@@ -85,16 +85,16 @@ Stripes:
       Entry 2: count: 1000 hasNull: false min: STRIPE-3 max: STRIPE-3 sum: 8000 positions: 0,198,464
       Entry 3: count: 1000 hasNull: false min: STRIPE-3 max: STRIPE-3 sum: 8000 positions: 0,330,440
       Entry 4: count: 1000 hasNull: false min: STRIPE-3 max: STRIPE-3 sum: 8000 positions: 0,462,416
-  Stripe: offset: 1153 data: 156 rows: 5000 tail: 60 index: 119
-    Stream: column 0 section ROW_INDEX start: 1153 length 17
-    Stream: column 1 section ROW_INDEX start: 1170 length 63
-    Stream: column 2 section ROW_INDEX start: 1233 length 39
-    Stream: column 1 section DATA start: 1272 length 113
-    Stream: column 1 section LENGTH start: 1385 length 32
-    Stream: column 2 section PRESENT start: 1417 length 11
-    Stream: column 2 section DATA start: 1428 length 0
-    Stream: column 2 section LENGTH start: 1428 length 0
-    Stream: column 2 section DICTIONARY_DATA start: 1428 length 0
+  Stripe: offset: 1304 data: 202 rows: 5000 tail: 64 index: 120
+    Stream: column 0 section ROW_INDEX start: 1304 length 17
+    Stream: column 1 section ROW_INDEX start: 1321 length 64
+    Stream: column 2 section ROW_INDEX start: 1385 length 39
+    Stream: column 1 section DATA start: 1424 length 159
+    Stream: column 1 section LENGTH start: 1583 length 32
+    Stream: column 2 section PRESENT start: 1615 length 11
+    Stream: column 2 section DATA start: 1626 length 0
+    Stream: column 2 section LENGTH start: 1626 length 0
+    Stream: column 2 section DICTIONARY_DATA start: 1626 length 0
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DICTIONARY_V2[0]
@@ -105,6 +105,6 @@ Stripes:
       Entry 3: count: 0 hasNull: true positions: 0,4,115,0,0,0,0
       Entry 4: count: 0 hasNull: true positions: 0,6,110,0,0,0,0
 
-File length: 1736 bytes
+File length: 1940 bytes
 Padding length: 0 bytes
 Padding ratio: 0%


[10/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp b/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp
index a1fb60b..e92d776 100644
--- a/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp
+++ b/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -150,11 +150,9 @@ void Adjacency::__set_adjacencyType(const AdjacencyType::type val) {
   this->adjacencyType = val;
 }
 
-const char* Adjacency::ascii_fingerprint = "BC4F8C394677A1003AA9F56ED26D8204";
-const uint8_t Adjacency::binary_fingerprint[16] = {0xBC,0x4F,0x8C,0x39,0x46,0x77,0xA1,0x00,0x3A,0xA9,0xF5,0x6E,0xD2,0x6D,0x82,0x04};
-
 uint32_t Adjacency::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -225,7 +223,7 @@ uint32_t Adjacency::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Adjacency::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Adjacency");
 
   xfer += oprot->writeFieldBegin("node", ::apache::thrift::protocol::T_STRING, 1);
@@ -250,7 +248,6 @@ uint32_t Adjacency::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -275,14 +272,13 @@ Adjacency& Adjacency::operator=(const Adjacency& other8) {
   __isset = other8.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Adjacency& obj) {
-  using apache::thrift::to_string;
+void Adjacency::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Adjacency(";
-  out << "node=" << to_string(obj.node);
-  out << ", " << "children=" << to_string(obj.children);
-  out << ", " << "adjacencyType=" << to_string(obj.adjacencyType);
+  out << "node=" << to_string(node);
+  out << ", " << "children=" << to_string(children);
+  out << ", " << "adjacencyType=" << to_string(adjacencyType);
   out << ")";
-  return out;
 }
 
 
@@ -302,11 +298,9 @@ void Graph::__set_adjacencyList(const std::vector<Adjacency> & val) {
   this->adjacencyList = val;
 }
 
-const char* Graph::ascii_fingerprint = "1F7FB604B3EF8F7AFB5DEAD15F2FC0B5";
-const uint8_t Graph::binary_fingerprint[16] = {0x1F,0x7F,0xB6,0x04,0xB3,0xEF,0x8F,0x7A,0xFB,0x5D,0xEA,0xD1,0x5F,0x2F,0xC0,0xB5};
-
 uint32_t Graph::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -389,7 +383,7 @@ uint32_t Graph::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Graph::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Graph");
 
   xfer += oprot->writeFieldBegin("nodeType", ::apache::thrift::protocol::T_I32, 1);
@@ -422,7 +416,6 @@ uint32_t Graph::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -447,14 +440,13 @@ Graph& Graph::operator=(const Graph& other23) {
   __isset = other23.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Graph& obj) {
-  using apache::thrift::to_string;
+void Graph::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Graph(";
-  out << "nodeType=" << to_string(obj.nodeType);
-  out << ", " << "roots=" << to_string(obj.roots);
-  out << ", " << "adjacencyList=" << to_string(obj.adjacencyList);
+  out << "nodeType=" << to_string(nodeType);
+  out << ", " << "roots=" << to_string(roots);
+  out << ", " << "adjacencyList=" << to_string(adjacencyList);
   out << ")";
-  return out;
 }
 
 
@@ -486,11 +478,9 @@ void Operator::__set_started(const bool val) {
   this->started = val;
 }
 
-const char* Operator::ascii_fingerprint = "30917C758A752485AF223B697479DE6C";
-const uint8_t Operator::binary_fingerprint[16] = {0x30,0x91,0x7C,0x75,0x8A,0x75,0x24,0x85,0xAF,0x22,0x3B,0x69,0x74,0x79,0xDE,0x6C};
-
 uint32_t Operator::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -603,7 +593,7 @@ uint32_t Operator::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Operator::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Operator");
 
   xfer += oprot->writeFieldBegin("operatorId", ::apache::thrift::protocol::T_STRING, 1);
@@ -650,7 +640,6 @@ uint32_t Operator::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -684,17 +673,16 @@ Operator& Operator::operator=(const Operator& other42) {
   __isset = other42.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Operator& obj) {
-  using apache::thrift::to_string;
+void Operator::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Operator(";
-  out << "operatorId=" << to_string(obj.operatorId);
-  out << ", " << "operatorType=" << to_string(obj.operatorType);
-  out << ", " << "operatorAttributes=" << to_string(obj.operatorAttributes);
-  out << ", " << "operatorCounters=" << to_string(obj.operatorCounters);
-  out << ", " << "done=" << to_string(obj.done);
-  out << ", " << "started=" << to_string(obj.started);
+  out << "operatorId=" << to_string(operatorId);
+  out << ", " << "operatorType=" << to_string(operatorType);
+  out << ", " << "operatorAttributes=" << to_string(operatorAttributes);
+  out << ", " << "operatorCounters=" << to_string(operatorCounters);
+  out << ", " << "done=" << to_string(done);
+  out << ", " << "started=" << to_string(started);
   out << ")";
-  return out;
 }
 
 
@@ -736,11 +724,9 @@ void Task::__set_started(const bool val) {
   this->started = val;
 }
 
-const char* Task::ascii_fingerprint = "AC741A136EFA51843AFC3A12F6A793D1";
-const uint8_t Task::binary_fingerprint[16] = {0xAC,0x74,0x1A,0x13,0x6E,0xFA,0x51,0x84,0x3A,0xFC,0x3A,0x12,0xF6,0xA7,0x93,0xD1};
-
 uint32_t Task::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -881,7 +867,7 @@ uint32_t Task::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Task::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Task");
 
   xfer += oprot->writeFieldBegin("taskId", ::apache::thrift::protocol::T_STRING, 1);
@@ -946,7 +932,6 @@ uint32_t Task::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -986,19 +971,18 @@ Task& Task::operator=(const Task& other67) {
   __isset = other67.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Task& obj) {
-  using apache::thrift::to_string;
+void Task::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Task(";
-  out << "taskId=" << to_string(obj.taskId);
-  out << ", " << "taskType=" << to_string(obj.taskType);
-  out << ", " << "taskAttributes=" << to_string(obj.taskAttributes);
-  out << ", " << "taskCounters=" << to_string(obj.taskCounters);
-  out << ", " << "operatorGraph="; (obj.__isset.operatorGraph ? (out << to_string(obj.operatorGraph)) : (out << "<null>"));
-  out << ", " << "operatorList="; (obj.__isset.operatorList ? (out << to_string(obj.operatorList)) : (out << "<null>"));
-  out << ", " << "done=" << to_string(obj.done);
-  out << ", " << "started=" << to_string(obj.started);
+  out << "taskId=" << to_string(taskId);
+  out << ", " << "taskType=" << to_string(taskType);
+  out << ", " << "taskAttributes=" << to_string(taskAttributes);
+  out << ", " << "taskCounters=" << to_string(taskCounters);
+  out << ", " << "operatorGraph="; (__isset.operatorGraph ? (out << to_string(operatorGraph)) : (out << "<null>"));
+  out << ", " << "operatorList="; (__isset.operatorList ? (out << to_string(operatorList)) : (out << "<null>"));
+  out << ", " << "done=" << to_string(done);
+  out << ", " << "started=" << to_string(started);
   out << ")";
-  return out;
 }
 
 
@@ -1034,11 +1018,9 @@ void Stage::__set_started(const bool val) {
   this->started = val;
 }
 
-const char* Stage::ascii_fingerprint = "86EA3C7B0690AFED21A3D479E2B32378";
-const uint8_t Stage::binary_fingerprint[16] = {0x86,0xEA,0x3C,0x7B,0x06,0x90,0xAF,0xED,0x21,0xA3,0xD4,0x79,0xE2,0xB3,0x23,0x78};
-
 uint32_t Stage::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1171,7 +1153,7 @@ uint32_t Stage::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Stage::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Stage");
 
   xfer += oprot->writeFieldBegin("stageId", ::apache::thrift::protocol::T_STRING, 1);
@@ -1230,7 +1212,6 @@ uint32_t Stage::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1267,18 +1248,17 @@ Stage& Stage::operator=(const Stage& other92) {
   __isset = other92.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Stage& obj) {
-  using apache::thrift::to_string;
+void Stage::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Stage(";
-  out << "stageId=" << to_string(obj.stageId);
-  out << ", " << "stageType=" << to_string(obj.stageType);
-  out << ", " << "stageAttributes=" << to_string(obj.stageAttributes);
-  out << ", " << "stageCounters=" << to_string(obj.stageCounters);
-  out << ", " << "taskList=" << to_string(obj.taskList);
-  out << ", " << "done=" << to_string(obj.done);
-  out << ", " << "started=" << to_string(obj.started);
+  out << "stageId=" << to_string(stageId);
+  out << ", " << "stageType=" << to_string(stageType);
+  out << ", " << "stageAttributes=" << to_string(stageAttributes);
+  out << ", " << "stageCounters=" << to_string(stageCounters);
+  out << ", " << "taskList=" << to_string(taskList);
+  out << ", " << "done=" << to_string(done);
+  out << ", " << "started=" << to_string(started);
   out << ")";
-  return out;
 }
 
 
@@ -1318,11 +1298,9 @@ void Query::__set_started(const bool val) {
   this->started = val;
 }
 
-const char* Query::ascii_fingerprint = "68300D63A5D40F2D17B9A9440FF626C1";
-const uint8_t Query::binary_fingerprint[16] = {0x68,0x30,0x0D,0x63,0xA5,0xD4,0x0F,0x2D,0x17,0xB9,0xA9,0x44,0x0F,0xF6,0x26,0xC1};
-
 uint32_t Query::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1461,7 +1439,7 @@ uint32_t Query::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Query::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Query");
 
   xfer += oprot->writeFieldBegin("queryId", ::apache::thrift::protocol::T_STRING, 1);
@@ -1524,7 +1502,6 @@ uint32_t Query::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1564,19 +1541,18 @@ Query& Query::operator=(const Query& other116) {
   __isset = other116.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Query& obj) {
-  using apache::thrift::to_string;
+void Query::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Query(";
-  out << "queryId=" << to_string(obj.queryId);
-  out << ", " << "queryType=" << to_string(obj.queryType);
-  out << ", " << "queryAttributes=" << to_string(obj.queryAttributes);
-  out << ", " << "queryCounters=" << to_string(obj.queryCounters);
-  out << ", " << "stageGraph=" << to_string(obj.stageGraph);
-  out << ", " << "stageList=" << to_string(obj.stageList);
-  out << ", " << "done=" << to_string(obj.done);
-  out << ", " << "started=" << to_string(obj.started);
+  out << "queryId=" << to_string(queryId);
+  out << ", " << "queryType=" << to_string(queryType);
+  out << ", " << "queryAttributes=" << to_string(queryAttributes);
+  out << ", " << "queryCounters=" << to_string(queryCounters);
+  out << ", " << "stageGraph=" << to_string(stageGraph);
+  out << ", " << "stageList=" << to_string(stageList);
+  out << ", " << "done=" << to_string(done);
+  out << ", " << "started=" << to_string(started);
   out << ")";
-  return out;
 }
 
 
@@ -1596,11 +1572,9 @@ void QueryPlan::__set_started(const bool val) {
   this->started = val;
 }
 
-const char* QueryPlan::ascii_fingerprint = "3418D1B0C20C288C8406186700B772E3";
-const uint8_t QueryPlan::binary_fingerprint[16] = {0x34,0x18,0xD1,0xB0,0xC2,0x0C,0x28,0x8C,0x84,0x06,0x18,0x67,0x00,0xB7,0x72,0xE3};
-
 uint32_t QueryPlan::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1669,7 +1643,7 @@ uint32_t QueryPlan::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t QueryPlan::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("QueryPlan");
 
   xfer += oprot->writeFieldBegin("queries", ::apache::thrift::protocol::T_LIST, 1);
@@ -1694,7 +1668,6 @@ uint32_t QueryPlan::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1719,14 +1692,13 @@ QueryPlan& QueryPlan::operator=(const QueryPlan& other124) {
   __isset = other124.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const QueryPlan& obj) {
-  using apache::thrift::to_string;
+void QueryPlan::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "QueryPlan(";
-  out << "queries=" << to_string(obj.queries);
-  out << ", " << "done=" << to_string(obj.done);
-  out << ", " << "started=" << to_string(obj.started);
+  out << "queries=" << to_string(queries);
+  out << ", " << "done=" << to_string(done);
+  out << ", " << "started=" << to_string(started);
   out << ")";
-  return out;
 }
 
 }}} // namespace
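
The queryplan_types.cpp diff above shows the recurring changes the 0.9.3 compiler makes to every generated struct: the ascii_fingerprint/binary_fingerprint members are dropped, the manual incrementRecursionDepth()/decrementRecursionDepth() calls become scope-bound TInputRecursionTracker/TOutputRecursionTracker objects, and the friend operator<< becomes a virtual printTo() plus an inline operator<< that forwards to it (the header side of that change appears in the queryplan_types.h diff below). The sketch that follows is a standalone illustration of the RAII-tracker and printTo idioms; Protocol, RecursionTracker, and AdjacencySketch are hypothetical types invented for the example, not the real Thrift or Hive classes.

#include <cstdint>
#include <iostream>
#include <ostream>
#include <string>

// Hypothetical protocol with a recursion-depth counter, standing in for TProtocol.
struct Protocol {
  int depth = 0;
};

// RAII tracker: the constructor increments the depth and the destructor decrements it,
// so any early return still balances the counter -- the role played by
// TInputRecursionTracker/TOutputRecursionTracker in the regenerated read()/write().
struct RecursionTracker {
  explicit RecursionTracker(Protocol& p) : prot(p) { ++prot.depth; }
  ~RecursionTracker() { --prot.depth; }
  Protocol& prot;
};

// 0.9.3-style printable struct: a virtual printTo() does the formatting and a free
// inline operator<< simply forwards to it, replacing the old friend operator<<.
struct AdjacencySketch {
  std::string node;
  int adjacencyType = 0;

  virtual ~AdjacencySketch() = default;
  virtual void printTo(std::ostream& out) const {
    out << "Adjacency(node=" << node << ", adjacencyType=" << adjacencyType << ")";
  }

  uint32_t write(Protocol& oprot) const {
    RecursionTracker tracker(oprot);  // replaces incrementRecursionDepth()/decrementRecursionDepth()
    // ... field serialization would happen here ...
    return 0;
  }
};

inline std::ostream& operator<<(std::ostream& out, const AdjacencySketch& obj) {
  obj.printTo(out);
  return out;
}

int main() {
  Protocol oprot;
  AdjacencySketch a;
  a.node = "op_1";
  a.adjacencyType = 1;
  a.write(oprot);
  std::cout << a << " depth=" << oprot.depth << "\n";  // depth is back to 0 after write()
  return 0;
}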

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-cpp/queryplan_types.h
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-cpp/queryplan_types.h b/ql/src/gen/thrift/gen-cpp/queryplan_types.h
index 932804d..ce37b2e 100644
--- a/ql/src/gen/thrift/gen-cpp/queryplan_types.h
+++ b/ql/src/gen/thrift/gen-cpp/queryplan_types.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -123,9 +123,6 @@ typedef struct _Adjacency__isset {
 class Adjacency {
  public:
 
-  static const char* ascii_fingerprint; // = "BC4F8C394677A1003AA9F56ED26D8204";
-  static const uint8_t binary_fingerprint[16]; // = {0xBC,0x4F,0x8C,0x39,0x46,0x77,0xA1,0x00,0x3A,0xA9,0xF5,0x6E,0xD2,0x6D,0x82,0x04};
-
   Adjacency(const Adjacency&);
   Adjacency& operator=(const Adjacency&);
   Adjacency() : node(), adjacencyType((AdjacencyType::type)0) {
@@ -163,11 +160,17 @@ class Adjacency {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Adjacency& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Adjacency &a, Adjacency &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Adjacency& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Graph__isset {
   _Graph__isset() : nodeType(false), roots(false), adjacencyList(false) {}
   bool nodeType :1;
@@ -178,9 +181,6 @@ typedef struct _Graph__isset {
 class Graph {
  public:
 
-  static const char* ascii_fingerprint; // = "1F7FB604B3EF8F7AFB5DEAD15F2FC0B5";
-  static const uint8_t binary_fingerprint[16]; // = {0x1F,0x7F,0xB6,0x04,0xB3,0xEF,0x8F,0x7A,0xFB,0x5D,0xEA,0xD1,0x5F,0x2F,0xC0,0xB5};
-
   Graph(const Graph&);
   Graph& operator=(const Graph&);
   Graph() : nodeType((NodeType::type)0) {
@@ -218,11 +218,17 @@ class Graph {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Graph& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Graph &a, Graph &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Graph& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Operator__isset {
   _Operator__isset() : operatorId(false), operatorType(false), operatorAttributes(false), operatorCounters(false), done(false), started(false) {}
   bool operatorId :1;
@@ -236,9 +242,6 @@ typedef struct _Operator__isset {
 class Operator {
  public:
 
-  static const char* ascii_fingerprint; // = "30917C758A752485AF223B697479DE6C";
-  static const uint8_t binary_fingerprint[16]; // = {0x30,0x91,0x7C,0x75,0x8A,0x75,0x24,0x85,0xAF,0x22,0x3B,0x69,0x74,0x79,0xDE,0x6C};
-
   Operator(const Operator&);
   Operator& operator=(const Operator&);
   Operator() : operatorId(), operatorType((OperatorType::type)0), done(0), started(0) {
@@ -291,11 +294,17 @@ class Operator {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Operator& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Operator &a, Operator &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Operator& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Task__isset {
   _Task__isset() : taskId(false), taskType(false), taskAttributes(false), taskCounters(false), operatorGraph(false), operatorList(false), done(false), started(false) {}
   bool taskId :1;
@@ -311,9 +320,6 @@ typedef struct _Task__isset {
 class Task {
  public:
 
-  static const char* ascii_fingerprint; // = "AC741A136EFA51843AFC3A12F6A793D1";
-  static const uint8_t binary_fingerprint[16]; // = {0xAC,0x74,0x1A,0x13,0x6E,0xFA,0x51,0x84,0x3A,0xFC,0x3A,0x12,0xF6,0xA7,0x93,0xD1};
-
   Task(const Task&);
   Task& operator=(const Task&);
   Task() : taskId(), taskType((TaskType::type)0), done(0), started(0) {
@@ -380,11 +386,17 @@ class Task {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Task& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Task &a, Task &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Task& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Stage__isset {
   _Stage__isset() : stageId(false), stageType(false), stageAttributes(false), stageCounters(false), taskList(false), done(false), started(false) {}
   bool stageId :1;
@@ -399,9 +411,6 @@ typedef struct _Stage__isset {
 class Stage {
  public:
 
-  static const char* ascii_fingerprint; // = "86EA3C7B0690AFED21A3D479E2B32378";
-  static const uint8_t binary_fingerprint[16]; // = {0x86,0xEA,0x3C,0x7B,0x06,0x90,0xAF,0xED,0x21,0xA3,0xD4,0x79,0xE2,0xB3,0x23,0x78};
-
   Stage(const Stage&);
   Stage& operator=(const Stage&);
   Stage() : stageId(), stageType((StageType::type)0), done(0), started(0) {
@@ -459,11 +468,17 @@ class Stage {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Stage& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Stage &a, Stage &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Stage& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Query__isset {
   _Query__isset() : queryId(false), queryType(false), queryAttributes(false), queryCounters(false), stageGraph(false), stageList(false), done(false), started(false) {}
   bool queryId :1;
@@ -479,9 +494,6 @@ typedef struct _Query__isset {
 class Query {
  public:
 
-  static const char* ascii_fingerprint; // = "68300D63A5D40F2D17B9A9440FF626C1";
-  static const uint8_t binary_fingerprint[16]; // = {0x68,0x30,0x0D,0x63,0xA5,0xD4,0x0F,0x2D,0x17,0xB9,0xA9,0x44,0x0F,0xF6,0x26,0xC1};
-
   Query(const Query&);
   Query& operator=(const Query&);
   Query() : queryId(), queryType(), done(0), started(0) {
@@ -544,11 +556,17 @@ class Query {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Query& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Query &a, Query &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Query& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _QueryPlan__isset {
   _QueryPlan__isset() : queries(false), done(false), started(false) {}
   bool queries :1;
@@ -559,9 +577,6 @@ typedef struct _QueryPlan__isset {
 class QueryPlan {
  public:
 
-  static const char* ascii_fingerprint; // = "3418D1B0C20C288C8406186700B772E3";
-  static const uint8_t binary_fingerprint[16]; // = {0x34,0x18,0xD1,0xB0,0xC2,0x0C,0x28,0x8C,0x84,0x06,0x18,0x67,0x00,0xB7,0x72,0xE3};
-
   QueryPlan(const QueryPlan&);
   QueryPlan& operator=(const QueryPlan&);
   QueryPlan() : done(0), started(0) {
@@ -599,11 +614,17 @@ class QueryPlan {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const QueryPlan& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(QueryPlan &a, QueryPlan &b);
 
+inline std::ostream& operator<<(std::ostream& out, const QueryPlan& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 }}} // namespace
 
 #endif

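Note on the header churn above: Thrift 0.9.3 stops emitting the per-struct ascii_fingerprint/binary_fingerprint constants, and it replaces the friend operator<< declaration with a virtual printTo() member plus a non-member inline operator<< that forwards to it. The following is a minimal hand-written sketch of that new pattern, not the generated code itself; the struct and field names are illustrative and do not come from queryplan.thrift.

#include <iostream>
#include <string>

// Illustrative stand-in for a 0.9.3-generated struct: printTo() does the
// formatting, and the free operator<< simply delegates to it.
class Example {            // hypothetical name, not part of the Hive IDL
 public:
  std::string node;
  int adjacencyType = 0;

  virtual ~Example() {}

  virtual void printTo(std::ostream& out) const {
    out << "Example(node=" << node
        << ", adjacencyType=" << adjacencyType << ")";
  }
};

inline std::ostream& operator<<(std::ostream& out, const Example& obj) {
  obj.printTo(out);   // subclasses can override printTo and still stream
  return out;         // correctly through this one operator<<
}

int main() {
  Example e;
  e.node = "n1";
  std::cout << e << std::endl;   // prints: Example(node=n1, adjacencyType=0)
  return 0;
}

Making printTo() virtual presumably lets code that wraps or extends the generated structs customize the textual form without re-declaring the stream operator, which is why the friend declaration disappears from every class in this header.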
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Adjacency.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Adjacency.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Adjacency.java
index 53b70d5..37edf2a 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Adjacency.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Adjacency.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Adjacency implements org.apache.thrift.TBase<Adjacency, Adjacency._Fields>, java.io.Serializable, Cloneable, Comparable<Adjacency> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Adjacency");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/AdjacencyType.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/AdjacencyType.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/AdjacencyType.java
index 470cad2..7a716f8 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/AdjacencyType.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/AdjacencyType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java
index aeadd19..0b454df 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Graph implements org.apache.thrift.TBase<Graph, Graph._Fields>, java.io.Serializable, Cloneable, Comparable<Graph> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Graph");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/NodeType.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/NodeType.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/NodeType.java
index 5fdbfa3..c3ce662 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/NodeType.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/NodeType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
index 407bdbe..05dbb2d 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Operator implements org.apache.thrift.TBase<Operator, Operator._Fields>, java.io.Serializable, Cloneable, Comparable<Operator> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Operator");
 
@@ -457,10 +457,10 @@ public class Operator implements org.apache.thrift.TBase<Operator, Operator._Fie
       return getOperatorCounters();
 
     case DONE:
-      return Boolean.valueOf(isDone());
+      return isDone();
 
     case STARTED:
-      return Boolean.valueOf(isStarted());
+      return isStarted();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
index 1d17dcb..a002348 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
index e111cda..de553e1 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Query implements org.apache.thrift.TBase<Query, Query._Fields>, java.io.Serializable, Cloneable, Comparable<Query> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Query");
 
@@ -559,10 +559,10 @@ public class Query implements org.apache.thrift.TBase<Query, Query._Fields>, jav
       return getStageList();
 
     case DONE:
-      return Boolean.valueOf(isDone());
+      return isDone();
 
     case STARTED:
-      return Boolean.valueOf(isStarted());
+      return isStarted();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java
index 920aac3..0e12279 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class QueryPlan implements org.apache.thrift.TBase<QueryPlan, QueryPlan._Fields>, java.io.Serializable, Cloneable, Comparable<QueryPlan> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("QueryPlan");
 
@@ -296,10 +296,10 @@ public class QueryPlan implements org.apache.thrift.TBase<QueryPlan, QueryPlan._
       return getQueries();
 
     case DONE:
-      return Boolean.valueOf(isDone());
+      return isDone();
 
     case STARTED:
-      return Boolean.valueOf(isStarted());
+      return isStarted();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java
index 8a0d537..7f86eeb 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Stage implements org.apache.thrift.TBase<Stage, Stage._Fields>, java.io.Serializable, Cloneable, Comparable<Stage> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Stage");
 
@@ -524,10 +524,10 @@ public class Stage implements org.apache.thrift.TBase<Stage, Stage._Fields>, jav
       return getTaskList();
 
     case DONE:
-      return Boolean.valueOf(isDone());
+      return isDone();
 
     case STARTED:
-      return Boolean.valueOf(isStarted());
+      return isStarted();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/StageType.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/StageType.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/StageType.java
index c7b0ff7..f20174c 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/StageType.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/StageType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java
index 720599b..f978e42 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class Task implements org.apache.thrift.TBase<Task, Task._Fields>, java.io.Serializable, Cloneable, Comparable<Task> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Task");
 
@@ -568,10 +568,10 @@ public class Task implements org.apache.thrift.TBase<Task, Task._Fields>, java.i
       return getOperatorList();
 
     case DONE:
-      return Boolean.valueOf(isDone());
+      return isDone();
 
     case STARTED:
-      return Boolean.valueOf(isStarted());
+      return isStarted();
 
     }
     throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/TaskType.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/TaskType.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/TaskType.java
index 8aad073..d77f008 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/TaskType.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/TaskType.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-php/Types.php
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-php/Types.php b/ql/src/gen/thrift/gen-php/Types.php
index 3dfff1d..e1693f3 100644
--- a/ql/src/gen/thrift/gen-php/Types.php
+++ b/ql/src/gen/thrift/gen-php/Types.php
@@ -1,8 +1,6 @@
 <?php
-namespace ;
-
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-py/queryplan/constants.py
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-py/queryplan/constants.py b/ql/src/gen/thrift/gen-py/queryplan/constants.py
index 99717a9..4a6492b 100644
--- a/ql/src/gen/thrift/gen-py/queryplan/constants.py
+++ b/ql/src/gen/thrift/gen-py/queryplan/constants.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-py/queryplan/ttypes.py
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-py/queryplan/ttypes.py b/ql/src/gen/thrift/gen-py/queryplan/ttypes.py
index 6deb850..2073959 100644
--- a/ql/src/gen/thrift/gen-py/queryplan/ttypes.py
+++ b/ql/src/gen/thrift/gen-py/queryplan/ttypes.py
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #
@@ -223,7 +223,7 @@ class Adjacency:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.node = iprot.readString();
+          self.node = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
@@ -231,14 +231,14 @@ class Adjacency:
           self.children = []
           (_etype3, _size0) = iprot.readListBegin()
           for _i4 in xrange(_size0):
-            _elem5 = iprot.readString();
+            _elem5 = iprot.readString()
             self.children.append(_elem5)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.I32:
-          self.adjacencyType = iprot.readI32();
+          self.adjacencyType = iprot.readI32()
         else:
           iprot.skip(ftype)
       else:
@@ -322,7 +322,7 @@ class Graph:
         break
       if fid == 1:
         if ftype == TType.I32:
-          self.nodeType = iprot.readI32();
+          self.nodeType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 2:
@@ -330,7 +330,7 @@ class Graph:
           self.roots = []
           (_etype10, _size7) = iprot.readListBegin()
           for _i11 in xrange(_size7):
-            _elem12 = iprot.readString();
+            _elem12 = iprot.readString()
             self.roots.append(_elem12)
           iprot.readListEnd()
         else:
@@ -439,12 +439,12 @@ class Operator:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.operatorId = iprot.readString();
+          self.operatorId = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.operatorType = iprot.readI32();
+          self.operatorType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -452,8 +452,8 @@ class Operator:
           self.operatorAttributes = {}
           (_ktype22, _vtype23, _size21 ) = iprot.readMapBegin()
           for _i25 in xrange(_size21):
-            _key26 = iprot.readString();
-            _val27 = iprot.readString();
+            _key26 = iprot.readString()
+            _val27 = iprot.readString()
             self.operatorAttributes[_key26] = _val27
           iprot.readMapEnd()
         else:
@@ -463,20 +463,20 @@ class Operator:
           self.operatorCounters = {}
           (_ktype29, _vtype30, _size28 ) = iprot.readMapBegin()
           for _i32 in xrange(_size28):
-            _key33 = iprot.readString();
-            _val34 = iprot.readI64();
+            _key33 = iprot.readString()
+            _val34 = iprot.readI64()
             self.operatorCounters[_key33] = _val34
           iprot.readMapEnd()
         else:
           iprot.skip(ftype)
       elif fid == 5:
         if ftype == TType.BOOL:
-          self.done = iprot.readBool();
+          self.done = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.BOOL:
-          self.started = iprot.readBool();
+          self.started = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -595,12 +595,12 @@ class Task:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.taskId = iprot.readString();
+          self.taskId = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.taskType = iprot.readI32();
+          self.taskType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -608,8 +608,8 @@ class Task:
           self.taskAttributes = {}
           (_ktype40, _vtype41, _size39 ) = iprot.readMapBegin()
           for _i43 in xrange(_size39):
-            _key44 = iprot.readString();
-            _val45 = iprot.readString();
+            _key44 = iprot.readString()
+            _val45 = iprot.readString()
             self.taskAttributes[_key44] = _val45
           iprot.readMapEnd()
         else:
@@ -619,8 +619,8 @@ class Task:
           self.taskCounters = {}
           (_ktype47, _vtype48, _size46 ) = iprot.readMapBegin()
           for _i50 in xrange(_size46):
-            _key51 = iprot.readString();
-            _val52 = iprot.readI64();
+            _key51 = iprot.readString()
+            _val52 = iprot.readI64()
             self.taskCounters[_key51] = _val52
           iprot.readMapEnd()
         else:
@@ -644,12 +644,12 @@ class Task:
           iprot.skip(ftype)
       elif fid == 7:
         if ftype == TType.BOOL:
-          self.done = iprot.readBool();
+          self.done = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 8:
         if ftype == TType.BOOL:
-          self.started = iprot.readBool();
+          self.started = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -778,12 +778,12 @@ class Stage:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.stageId = iprot.readString();
+          self.stageId = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.I32:
-          self.stageType = iprot.readI32();
+          self.stageType = iprot.readI32()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -791,8 +791,8 @@ class Stage:
           self.stageAttributes = {}
           (_ktype65, _vtype66, _size64 ) = iprot.readMapBegin()
           for _i68 in xrange(_size64):
-            _key69 = iprot.readString();
-            _val70 = iprot.readString();
+            _key69 = iprot.readString()
+            _val70 = iprot.readString()
             self.stageAttributes[_key69] = _val70
           iprot.readMapEnd()
         else:
@@ -802,8 +802,8 @@ class Stage:
           self.stageCounters = {}
           (_ktype72, _vtype73, _size71 ) = iprot.readMapBegin()
           for _i75 in xrange(_size71):
-            _key76 = iprot.readString();
-            _val77 = iprot.readI64();
+            _key76 = iprot.readString()
+            _val77 = iprot.readI64()
             self.stageCounters[_key76] = _val77
           iprot.readMapEnd()
         else:
@@ -821,12 +821,12 @@ class Stage:
           iprot.skip(ftype)
       elif fid == 6:
         if ftype == TType.BOOL:
-          self.done = iprot.readBool();
+          self.done = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 7:
         if ftype == TType.BOOL:
-          self.started = iprot.readBool();
+          self.started = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -953,12 +953,12 @@ class Query:
         break
       if fid == 1:
         if ftype == TType.STRING:
-          self.queryId = iprot.readString();
+          self.queryId = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRING:
-          self.queryType = iprot.readString();
+          self.queryType = iprot.readString()
         else:
           iprot.skip(ftype)
       elif fid == 3:
@@ -966,8 +966,8 @@ class Query:
           self.queryAttributes = {}
           (_ktype90, _vtype91, _size89 ) = iprot.readMapBegin()
           for _i93 in xrange(_size89):
-            _key94 = iprot.readString();
-            _val95 = iprot.readString();
+            _key94 = iprot.readString()
+            _val95 = iprot.readString()
             self.queryAttributes[_key94] = _val95
           iprot.readMapEnd()
         else:
@@ -977,8 +977,8 @@ class Query:
           self.queryCounters = {}
           (_ktype97, _vtype98, _size96 ) = iprot.readMapBegin()
           for _i100 in xrange(_size96):
-            _key101 = iprot.readString();
-            _val102 = iprot.readI64();
+            _key101 = iprot.readString()
+            _val102 = iprot.readI64()
             self.queryCounters[_key101] = _val102
           iprot.readMapEnd()
         else:
@@ -1002,12 +1002,12 @@ class Query:
           iprot.skip(ftype)
       elif fid == 7:
         if ftype == TType.BOOL:
-          self.done = iprot.readBool();
+          self.done = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 8:
         if ftype == TType.BOOL:
-          self.started = iprot.readBool();
+          self.started = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:
@@ -1135,12 +1135,12 @@ class QueryPlan:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.BOOL:
-          self.done = iprot.readBool();
+          self.done = iprot.readBool()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.BOOL:
-          self.started = iprot.readBool();
+          self.started = iprot.readBool()
         else:
           iprot.skip(ftype)
       else:

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-rb/queryplan_constants.rb
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-rb/queryplan_constants.rb b/ql/src/gen/thrift/gen-rb/queryplan_constants.rb
index 428185e..e61608b 100644
--- a/ql/src/gen/thrift/gen-rb/queryplan_constants.rb
+++ b/ql/src/gen/thrift/gen-rb/queryplan_constants.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/ql/src/gen/thrift/gen-rb/queryplan_types.rb
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-rb/queryplan_types.rb b/ql/src/gen/thrift/gen-rb/queryplan_types.rb
index 1a22f07..f8b4034 100644
--- a/ql/src/gen/thrift/gen-rb/queryplan_types.rb
+++ b/ql/src/gen/thrift/gen-rb/queryplan_types.rb
@@ -1,5 +1,5 @@
 #
-# Autogenerated by Thrift Compiler (0.9.2)
+# Autogenerated by Thrift Compiler (0.9.3)
 #
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 #

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/complex_constants.cpp
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/complex_constants.cpp b/serde/src/gen/thrift/gen-cpp/complex_constants.cpp
index aa146c6..a6df39c 100644
--- a/serde/src/gen/thrift/gen-cpp/complex_constants.cpp
+++ b/serde/src/gen/thrift/gen-cpp/complex_constants.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/complex_constants.h
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/complex_constants.h b/serde/src/gen/thrift/gen-cpp/complex_constants.h
index 971cd2c..ee0bdca 100644
--- a/serde/src/gen/thrift/gen-cpp/complex_constants.h
+++ b/serde/src/gen/thrift/gen-cpp/complex_constants.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/complex_types.cpp
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/complex_types.cpp b/serde/src/gen/thrift/gen-cpp/complex_types.cpp
index fb6fe25..36c1514 100644
--- a/serde/src/gen/thrift/gen-cpp/complex_types.cpp
+++ b/serde/src/gen/thrift/gen-cpp/complex_types.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -51,11 +51,9 @@ void PropValueUnion::__set_unionMStringString(const std::map<std::string, std::s
   this->unionMStringString = val;
 }
 
-const char* PropValueUnion::ascii_fingerprint = "123CD9D82D5B5054B5054EFD63FC8590";
-const uint8_t PropValueUnion::binary_fingerprint[16] = {0x12,0x3C,0xD9,0xD8,0x2D,0x5B,0x50,0x54,0xB5,0x05,0x4E,0xFD,0x63,0xFC,0x85,0x90};
-
 uint32_t PropValueUnion::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -171,7 +169,7 @@ uint32_t PropValueUnion::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t PropValueUnion::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("PropValueUnion");
 
   if (this->__isset.intValue) {
@@ -226,7 +224,6 @@ uint32_t PropValueUnion::write(::apache::thrift::protocol::TProtocol* oprot) con
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -263,18 +260,17 @@ PropValueUnion& PropValueUnion::operator=(const PropValueUnion& other15) {
   __isset = other15.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const PropValueUnion& obj) {
-  using apache::thrift::to_string;
+void PropValueUnion::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "PropValueUnion(";
-  out << "intValue="; (obj.__isset.intValue ? (out << to_string(obj.intValue)) : (out << "<null>"));
-  out << ", " << "longValue="; (obj.__isset.longValue ? (out << to_string(obj.longValue)) : (out << "<null>"));
-  out << ", " << "stringValue="; (obj.__isset.stringValue ? (out << to_string(obj.stringValue)) : (out << "<null>"));
-  out << ", " << "doubleValue="; (obj.__isset.doubleValue ? (out << to_string(obj.doubleValue)) : (out << "<null>"));
-  out << ", " << "flag="; (obj.__isset.flag ? (out << to_string(obj.flag)) : (out << "<null>"));
-  out << ", " << "lString=" << to_string(obj.lString);
-  out << ", " << "unionMStringString=" << to_string(obj.unionMStringString);
+  out << "intValue="; (__isset.intValue ? (out << to_string(intValue)) : (out << "<null>"));
+  out << ", " << "longValue="; (__isset.longValue ? (out << to_string(longValue)) : (out << "<null>"));
+  out << ", " << "stringValue="; (__isset.stringValue ? (out << to_string(stringValue)) : (out << "<null>"));
+  out << ", " << "doubleValue="; (__isset.doubleValue ? (out << to_string(doubleValue)) : (out << "<null>"));
+  out << ", " << "flag="; (__isset.flag ? (out << to_string(flag)) : (out << "<null>"));
+  out << ", " << "lString=" << to_string(lString);
+  out << ", " << "unionMStringString=" << to_string(unionMStringString);
   out << ")";
-  return out;
 }
 
 
@@ -294,11 +290,9 @@ void IntString::__set_underscore_int(const int32_t val) {
   this->underscore_int = val;
 }
 
-const char* IntString::ascii_fingerprint = "52C6DAB6CF51AF617111F6D3964C6503";
-const uint8_t IntString::binary_fingerprint[16] = {0x52,0xC6,0xDA,0xB6,0xCF,0x51,0xAF,0x61,0x71,0x11,0xF6,0xD3,0x96,0x4C,0x65,0x03};
-
 uint32_t IntString::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -355,7 +349,7 @@ uint32_t IntString::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t IntString::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("IntString");
 
   xfer += oprot->writeFieldBegin("myint", ::apache::thrift::protocol::T_I32, 1);
@@ -372,7 +366,6 @@ uint32_t IntString::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -397,14 +390,13 @@ IntString& IntString::operator=(const IntString& other17) {
   __isset = other17.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const IntString& obj) {
-  using apache::thrift::to_string;
+void IntString::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "IntString(";
-  out << "myint=" << to_string(obj.myint);
-  out << ", " << "myString=" << to_string(obj.myString);
-  out << ", " << "underscore_int=" << to_string(obj.underscore_int);
+  out << "myint=" << to_string(myint);
+  out << ", " << "myString=" << to_string(myString);
+  out << ", " << "underscore_int=" << to_string(underscore_int);
   out << ")";
-  return out;
 }
 
 
@@ -452,11 +444,9 @@ void Complex::__set_unionField3(const PropValueUnion& val) {
   this->unionField3 = val;
 }
 
-const char* Complex::ascii_fingerprint = "FFA84FEA7037F5858F2BFEDA73AD679A";
-const uint8_t Complex::binary_fingerprint[16] = {0xFF,0xA8,0x4F,0xEA,0x70,0x37,0xF5,0x85,0x8F,0x2B,0xFE,0xDA,0x73,0xAD,0x67,0x9A};
-
 uint32_t Complex::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -665,7 +655,7 @@ uint32_t Complex::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t Complex::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("Complex");
 
   xfer += oprot->writeFieldBegin("aint", ::apache::thrift::protocol::T_I32, 1);
@@ -770,7 +760,6 @@ uint32_t Complex::write(::apache::thrift::protocol::TProtocol* oprot) const {
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -816,21 +805,20 @@ Complex& Complex::operator=(const Complex& other69) {
   __isset = other69.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const Complex& obj) {
-  using apache::thrift::to_string;
+void Complex::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "Complex(";
-  out << "aint=" << to_string(obj.aint);
-  out << ", " << "aString=" << to_string(obj.aString);
-  out << ", " << "lint=" << to_string(obj.lint);
-  out << ", " << "lString=" << to_string(obj.lString);
-  out << ", " << "lintString=" << to_string(obj.lintString);
-  out << ", " << "mStringString=" << to_string(obj.mStringString);
-  out << ", " << "attributes=" << to_string(obj.attributes);
-  out << ", " << "unionField1=" << to_string(obj.unionField1);
-  out << ", " << "unionField2=" << to_string(obj.unionField2);
-  out << ", " << "unionField3=" << to_string(obj.unionField3);
+  out << "aint=" << to_string(aint);
+  out << ", " << "aString=" << to_string(aString);
+  out << ", " << "lint=" << to_string(lint);
+  out << ", " << "lString=" << to_string(lString);
+  out << ", " << "lintString=" << to_string(lintString);
+  out << ", " << "mStringString=" << to_string(mStringString);
+  out << ", " << "attributes=" << to_string(attributes);
+  out << ", " << "unionField1=" << to_string(unionField1);
+  out << ", " << "unionField2=" << to_string(unionField2);
+  out << ", " << "unionField3=" << to_string(unionField3);
   out << ")";
-  return out;
 }
 
 
@@ -846,11 +834,9 @@ void SetIntString::__set_aString(const std::string& val) {
   this->aString = val;
 }
 
-const char* SetIntString::ascii_fingerprint = "842B41C940D05DFB16183142A90DFC54";
-const uint8_t SetIntString::binary_fingerprint[16] = {0x84,0x2B,0x41,0xC9,0x40,0xD0,0x5D,0xFB,0x16,0x18,0x31,0x42,0xA9,0x0D,0xFC,0x54};
-
 uint32_t SetIntString::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -912,7 +898,7 @@ uint32_t SetIntString::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t SetIntString::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("SetIntString");
 
   xfer += oprot->writeFieldBegin("sIntString", ::apache::thrift::protocol::T_SET, 1);
@@ -933,7 +919,6 @@ uint32_t SetIntString::write(::apache::thrift::protocol::TProtocol* oprot) const
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -955,13 +940,12 @@ SetIntString& SetIntString::operator=(const SetIntString& other78) {
   __isset = other78.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const SetIntString& obj) {
-  using apache::thrift::to_string;
+void SetIntString::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "SetIntString(";
-  out << "sIntString=" << to_string(obj.sIntString);
-  out << ", " << "aString=" << to_string(obj.aString);
+  out << "sIntString=" << to_string(sIntString);
+  out << ", " << "aString=" << to_string(aString);
   out << ")";
-  return out;
 }
 
 

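The .cpp diffs above also swap the manual oprot->incrementRecursionDepth()/decrementRecursionDepth() pair for RAII-style TInputRecursionTracker/TOutputRecursionTracker guards, so the depth counter is unwound even when read()/write() exits early through an exception. Below is a minimal sketch of the same RAII idea; the FakeProtocol and RecursionTracker types are hypothetical stand-ins, not the real Thrift classes.

#include <cstdint>
#include <iostream>
#include <stdexcept>

// Hypothetical stand-in for TProtocol's recursion-depth bookkeeping.
struct FakeProtocol {
  int depth = 0;
  void incrementRecursionDepth() { ++depth; }
  void decrementRecursionDepth() { --depth; }
};

// RAII guard in the spirit of TOutputRecursionTracker: the destructor
// guarantees the matching decrement on every exit path.
class RecursionTracker {
 public:
  explicit RecursionTracker(FakeProtocol& p) : prot_(p) {
    prot_.incrementRecursionDepth();
  }
  ~RecursionTracker() { prot_.decrementRecursionDepth(); }
 private:
  FakeProtocol& prot_;
};

uint32_t writeStruct(FakeProtocol& prot, bool fail) {
  RecursionTracker tracker(prot);     // replaces the explicit increment
  if (fail) {
    throw std::runtime_error("serialization error");
  }
  return 0;                           // no explicit decrement needed here
}

int main() {
  FakeProtocol prot;
  try {
    writeStruct(prot, true);
  } catch (const std::exception&) {
    // even on the exceptional path the depth is back to zero
  }
  std::cout << "depth after exception: " << prot.depth << std::endl;  // 0
  return 0;
}

Under the old generated pattern, an exception thrown between the increment and the trailing decrement would leave the protocol's depth counter permanently inflated; the tracker objects emitted by 0.9.3 avoid that by construction.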
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/complex_types.h
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/complex_types.h b/serde/src/gen/thrift/gen-cpp/complex_types.h
index 2637720..38fa559 100644
--- a/serde/src/gen/thrift/gen-cpp/complex_types.h
+++ b/serde/src/gen/thrift/gen-cpp/complex_types.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -41,9 +41,6 @@ typedef struct _PropValueUnion__isset {
 class PropValueUnion {
  public:
 
-  static const char* ascii_fingerprint; // = "123CD9D82D5B5054B5054EFD63FC8590";
-  static const uint8_t binary_fingerprint[16]; // = {0x12,0x3C,0xD9,0xD8,0x2D,0x5B,0x50,0x54,0xB5,0x05,0x4E,0xFD,0x63,0xFC,0x85,0x90};
-
   PropValueUnion(const PropValueUnion&);
   PropValueUnion& operator=(const PropValueUnion&);
   PropValueUnion() : intValue(0), longValue(0), stringValue(), doubleValue(0), flag(0) {
@@ -111,11 +108,17 @@ class PropValueUnion {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const PropValueUnion& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(PropValueUnion &a, PropValueUnion &b);
 
+inline std::ostream& operator<<(std::ostream& out, const PropValueUnion& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _IntString__isset {
   _IntString__isset() : myint(false), myString(false), underscore_int(false) {}
   bool myint :1;
@@ -126,9 +129,6 @@ typedef struct _IntString__isset {
 class IntString {
  public:
 
-  static const char* ascii_fingerprint; // = "52C6DAB6CF51AF617111F6D3964C6503";
-  static const uint8_t binary_fingerprint[16]; // = {0x52,0xC6,0xDA,0xB6,0xCF,0x51,0xAF,0x61,0x71,0x11,0xF6,0xD3,0x96,0x4C,0x65,0x03};
-
   IntString(const IntString&);
   IntString& operator=(const IntString&);
   IntString() : myint(0), myString(), underscore_int(0) {
@@ -166,11 +166,17 @@ class IntString {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const IntString& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(IntString &a, IntString &b);
 
+inline std::ostream& operator<<(std::ostream& out, const IntString& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Complex__isset {
   _Complex__isset() : aint(false), aString(false), lint(false), lString(false), lintString(false), mStringString(false), attributes(false), unionField1(false), unionField2(false), unionField3(false) {}
   bool aint :1;
@@ -188,9 +194,6 @@ typedef struct _Complex__isset {
 class Complex {
  public:
 
-  static const char* ascii_fingerprint; // = "FFA84FEA7037F5858F2BFEDA73AD679A";
-  static const uint8_t binary_fingerprint[16]; // = {0xFF,0xA8,0x4F,0xEA,0x70,0x37,0xF5,0x85,0x8F,0x2B,0xFE,0xDA,0x73,0xAD,0x67,0x9A};
-
   Complex(const Complex&);
   Complex& operator=(const Complex&);
   Complex() : aint(0), aString() {
@@ -263,11 +266,17 @@ class Complex {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const Complex& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(Complex &a, Complex &b);
 
+inline std::ostream& operator<<(std::ostream& out, const Complex& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _SetIntString__isset {
   _SetIntString__isset() : sIntString(false), aString(false) {}
   bool sIntString :1;
@@ -277,9 +286,6 @@ typedef struct _SetIntString__isset {
 class SetIntString {
  public:
 
-  static const char* ascii_fingerprint; // = "842B41C940D05DFB16183142A90DFC54";
-  static const uint8_t binary_fingerprint[16]; // = {0x84,0x2B,0x41,0xC9,0x40,0xD0,0x5D,0xFB,0x16,0x18,0x31,0x42,0xA9,0x0D,0xFC,0x54};
-
   SetIntString(const SetIntString&);
   SetIntString& operator=(const SetIntString&);
   SetIntString() : aString() {
@@ -312,11 +318,17 @@ class SetIntString {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const SetIntString& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(SetIntString &a, SetIntString &b);
 
+inline std::ostream& operator<<(std::ostream& out, const SetIntString& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 
 #endif

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/megastruct_constants.cpp
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/megastruct_constants.cpp b/serde/src/gen/thrift/gen-cpp/megastruct_constants.cpp
index 9a18a48..c3e3794 100644
--- a/serde/src/gen/thrift/gen-cpp/megastruct_constants.cpp
+++ b/serde/src/gen/thrift/gen-cpp/megastruct_constants.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/megastruct_constants.h
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/megastruct_constants.h b/serde/src/gen/thrift/gen-cpp/megastruct_constants.h
index 1b0558f..e08f8a3 100644
--- a/serde/src/gen/thrift/gen-cpp/megastruct_constants.h
+++ b/serde/src/gen/thrift/gen-cpp/megastruct_constants.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/megastruct_types.cpp
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/megastruct_types.cpp b/serde/src/gen/thrift/gen-cpp/megastruct_types.cpp
index 6123f7d..42f7e31 100644
--- a/serde/src/gen/thrift/gen-cpp/megastruct_types.cpp
+++ b/serde/src/gen/thrift/gen-cpp/megastruct_types.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -38,11 +38,9 @@ void MiniStruct::__set_my_enum(const MyEnum::type val) {
 __isset.my_enum = true;
 }
 
-const char* MiniStruct::ascii_fingerprint = "4ED2B10931906B61ED0B1592EE860A37";
-const uint8_t MiniStruct::binary_fingerprint[16] = {0x4E,0xD2,0xB1,0x09,0x31,0x90,0x6B,0x61,0xED,0x0B,0x15,0x92,0xEE,0x86,0x0A,0x37};
-
 uint32_t MiniStruct::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -93,7 +91,7 @@ uint32_t MiniStruct::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t MiniStruct::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("MiniStruct");
 
   if (this->__isset.my_string) {
@@ -108,7 +106,6 @@ uint32_t MiniStruct::write(::apache::thrift::protocol::TProtocol* oprot) const {
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -130,13 +127,12 @@ MiniStruct& MiniStruct::operator=(const MiniStruct& other2) {
   __isset = other2.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const MiniStruct& obj) {
-  using apache::thrift::to_string;
+void MiniStruct::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "MiniStruct(";
-  out << "my_string="; (obj.__isset.my_string ? (out << to_string(obj.my_string)) : (out << "<null>"));
-  out << ", " << "my_enum="; (obj.__isset.my_enum ? (out << to_string(obj.my_enum)) : (out << "<null>"));
+  out << "my_string="; (__isset.my_string ? (out << to_string(my_string)) : (out << "<null>"));
+  out << ", " << "my_enum="; (__isset.my_enum ? (out << to_string(my_enum)) : (out << "<null>"));
   out << ")";
-  return out;
 }
 
 
@@ -244,11 +240,9 @@ void MegaStruct::__set_my_structset(const std::set<MiniStruct> & val) {
 __isset.my_structset = true;
 }
 
-const char* MegaStruct::ascii_fingerprint = "9979EEF0CA19988228E64220A3AA9120";
-const uint8_t MegaStruct::binary_fingerprint[16] = {0x99,0x79,0xEE,0xF0,0xCA,0x19,0x98,0x82,0x28,0xE6,0x42,0x20,0xA3,0xAA,0x91,0x20};
-
 uint32_t MegaStruct::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -644,7 +638,7 @@ uint32_t MegaStruct::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t MegaStruct::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("MegaStruct");
 
   if (this->__isset.my_bool) {
@@ -867,7 +861,6 @@ uint32_t MegaStruct::write(::apache::thrift::protocol::TProtocol* oprot) const {
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -943,31 +936,30 @@ MegaStruct& MegaStruct::operator=(const MegaStruct& other110) {
   __isset = other110.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const MegaStruct& obj) {
-  using apache::thrift::to_string;
+void MegaStruct::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "MegaStruct(";
-  out << "my_bool="; (obj.__isset.my_bool ? (out << to_string(obj.my_bool)) : (out << "<null>"));
-  out << ", " << "my_byte="; (obj.__isset.my_byte ? (out << to_string(obj.my_byte)) : (out << "<null>"));
-  out << ", " << "my_16bit_int="; (obj.__isset.my_16bit_int ? (out << to_string(obj.my_16bit_int)) : (out << "<null>"));
-  out << ", " << "my_32bit_int="; (obj.__isset.my_32bit_int ? (out << to_string(obj.my_32bit_int)) : (out << "<null>"));
-  out << ", " << "my_64bit_int="; (obj.__isset.my_64bit_int ? (out << to_string(obj.my_64bit_int)) : (out << "<null>"));
-  out << ", " << "my_double="; (obj.__isset.my_double ? (out << to_string(obj.my_double)) : (out << "<null>"));
-  out << ", " << "my_string="; (obj.__isset.my_string ? (out << to_string(obj.my_string)) : (out << "<null>"));
-  out << ", " << "my_binary="; (obj.__isset.my_binary ? (out << to_string(obj.my_binary)) : (out << "<null>"));
-  out << ", " << "my_string_string_map="; (obj.__isset.my_string_string_map ? (out << to_string(obj.my_string_string_map)) : (out << "<null>"));
-  out << ", " << "my_string_enum_map="; (obj.__isset.my_string_enum_map ? (out << to_string(obj.my_string_enum_map)) : (out << "<null>"));
-  out << ", " << "my_enum_string_map="; (obj.__isset.my_enum_string_map ? (out << to_string(obj.my_enum_string_map)) : (out << "<null>"));
-  out << ", " << "my_enum_struct_map="; (obj.__isset.my_enum_struct_map ? (out << to_string(obj.my_enum_struct_map)) : (out << "<null>"));
-  out << ", " << "my_enum_stringlist_map="; (obj.__isset.my_enum_stringlist_map ? (out << to_string(obj.my_enum_stringlist_map)) : (out << "<null>"));
-  out << ", " << "my_enum_structlist_map="; (obj.__isset.my_enum_structlist_map ? (out << to_string(obj.my_enum_structlist_map)) : (out << "<null>"));
-  out << ", " << "my_stringlist="; (obj.__isset.my_stringlist ? (out << to_string(obj.my_stringlist)) : (out << "<null>"));
-  out << ", " << "my_structlist="; (obj.__isset.my_structlist ? (out << to_string(obj.my_structlist)) : (out << "<null>"));
-  out << ", " << "my_enumlist="; (obj.__isset.my_enumlist ? (out << to_string(obj.my_enumlist)) : (out << "<null>"));
-  out << ", " << "my_stringset="; (obj.__isset.my_stringset ? (out << to_string(obj.my_stringset)) : (out << "<null>"));
-  out << ", " << "my_enumset="; (obj.__isset.my_enumset ? (out << to_string(obj.my_enumset)) : (out << "<null>"));
-  out << ", " << "my_structset="; (obj.__isset.my_structset ? (out << to_string(obj.my_structset)) : (out << "<null>"));
+  out << "my_bool="; (__isset.my_bool ? (out << to_string(my_bool)) : (out << "<null>"));
+  out << ", " << "my_byte="; (__isset.my_byte ? (out << to_string(my_byte)) : (out << "<null>"));
+  out << ", " << "my_16bit_int="; (__isset.my_16bit_int ? (out << to_string(my_16bit_int)) : (out << "<null>"));
+  out << ", " << "my_32bit_int="; (__isset.my_32bit_int ? (out << to_string(my_32bit_int)) : (out << "<null>"));
+  out << ", " << "my_64bit_int="; (__isset.my_64bit_int ? (out << to_string(my_64bit_int)) : (out << "<null>"));
+  out << ", " << "my_double="; (__isset.my_double ? (out << to_string(my_double)) : (out << "<null>"));
+  out << ", " << "my_string="; (__isset.my_string ? (out << to_string(my_string)) : (out << "<null>"));
+  out << ", " << "my_binary="; (__isset.my_binary ? (out << to_string(my_binary)) : (out << "<null>"));
+  out << ", " << "my_string_string_map="; (__isset.my_string_string_map ? (out << to_string(my_string_string_map)) : (out << "<null>"));
+  out << ", " << "my_string_enum_map="; (__isset.my_string_enum_map ? (out << to_string(my_string_enum_map)) : (out << "<null>"));
+  out << ", " << "my_enum_string_map="; (__isset.my_enum_string_map ? (out << to_string(my_enum_string_map)) : (out << "<null>"));
+  out << ", " << "my_enum_struct_map="; (__isset.my_enum_struct_map ? (out << to_string(my_enum_struct_map)) : (out << "<null>"));
+  out << ", " << "my_enum_stringlist_map="; (__isset.my_enum_stringlist_map ? (out << to_string(my_enum_stringlist_map)) : (out << "<null>"));
+  out << ", " << "my_enum_structlist_map="; (__isset.my_enum_structlist_map ? (out << to_string(my_enum_structlist_map)) : (out << "<null>"));
+  out << ", " << "my_stringlist="; (__isset.my_stringlist ? (out << to_string(my_stringlist)) : (out << "<null>"));
+  out << ", " << "my_structlist="; (__isset.my_structlist ? (out << to_string(my_structlist)) : (out << "<null>"));
+  out << ", " << "my_enumlist="; (__isset.my_enumlist ? (out << to_string(my_enumlist)) : (out << "<null>"));
+  out << ", " << "my_stringset="; (__isset.my_stringset ? (out << to_string(my_stringset)) : (out << "<null>"));
+  out << ", " << "my_enumset="; (__isset.my_enumset ? (out << to_string(my_enumset)) : (out << "<null>"));
+  out << ", " << "my_structset="; (__isset.my_structset ? (out << to_string(my_structset)) : (out << "<null>"));
   out << ")";
-  return out;
 }
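
A note for readers skimming the generated diff above: the 0.9.3 generator moves struct printing out of the old friend operator<< and into a member printTo(), guarding every optional field with its __isset flag so unset fields render as "<null>". Below is a minimal sketch of that pattern, assuming a hypothetical struct rather than the generated MegaStruct, and using plain stream insertion in place of apache::thrift::to_string:

#include <cstdint>
#include <iostream>
#include <string>

// Hypothetical stand-in for a Thrift-generated struct; only the 0.9.3
// printing pattern is reproduced (no read()/write() machinery).
struct ExampleStruct {
  struct Isset {
    bool my_string = false;
    bool my_int = false;
  } __isset;
  std::string my_string;
  int32_t my_int = 0;

  void printTo(std::ostream& out) const {
    out << "ExampleStruct(";
    out << "my_string="; (__isset.my_string ? (out << my_string) : (out << "<null>"));
    out << ", " << "my_int="; (__isset.my_int ? (out << my_int) : (out << "<null>"));
    out << ")";
  }
};

int main() {
  ExampleStruct s;
  s.printTo(std::cout);    // ExampleStruct(my_string=<null>, my_int=<null>)
  std::cout << '\n';
  s.my_string = "hello";
  s.__isset.my_string = true;
  s.printTo(std::cout);    // ExampleStruct(my_string=hello, my_int=<null>)
  std::cout << '\n';
  return 0;
}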
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/megastruct_types.h
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/megastruct_types.h b/serde/src/gen/thrift/gen-cpp/megastruct_types.h
index e4985dc..d04a814 100644
--- a/serde/src/gen/thrift/gen-cpp/megastruct_types.h
+++ b/serde/src/gen/thrift/gen-cpp/megastruct_types.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -41,9 +41,6 @@ typedef struct _MiniStruct__isset {
 class MiniStruct {
  public:
 
-  static const char* ascii_fingerprint; // = "4ED2B10931906B61ED0B1592EE860A37";
-  static const uint8_t binary_fingerprint[16]; // = {0x4E,0xD2,0xB1,0x09,0x31,0x90,0x6B,0x61,0xED,0x0B,0x15,0x92,0xEE,0x86,0x0A,0x37};
-
   MiniStruct(const MiniStruct&);
   MiniStruct& operator=(const MiniStruct&);
   MiniStruct() : my_string(), my_enum((MyEnum::type)0) {
@@ -80,11 +77,17 @@ class MiniStruct {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const MiniStruct& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(MiniStruct &a, MiniStruct &b);
 
+inline std::ostream& operator<<(std::ostream& out, const MiniStruct& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _MegaStruct__isset {
   _MegaStruct__isset() : my_bool(false), my_byte(false), my_16bit_int(false), my_32bit_int(false), my_64bit_int(false), my_double(false), my_string(false), my_binary(false), my_string_string_map(false), my_string_enum_map(false), my_enum_string_map(false), my_enum_struct_map(false), my_enum_stringlist_map(false), my_enum_structlist_map(false), my_stringlist(false), my_structlist(false), my_enumlist(false), my_stringset(false), my_enumset(false), my_structset(false) {}
   bool my_bool :1;
@@ -112,9 +115,6 @@ typedef struct _MegaStruct__isset {
 class MegaStruct {
  public:
 
-  static const char* ascii_fingerprint; // = "9979EEF0CA19988228E64220A3AA9120";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x79,0xEE,0xF0,0xCA,0x19,0x98,0x82,0x28,0xE6,0x42,0x20,0xA3,0xAA,0x91,0x20};
-
   MegaStruct(const MegaStruct&);
   MegaStruct& operator=(const MegaStruct&);
   MegaStruct() : my_bool(0), my_byte(0), my_16bit_int(0), my_32bit_int(0), my_64bit_int(0), my_double(0), my_string(), my_binary() {
@@ -277,11 +277,17 @@ class MegaStruct {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const MegaStruct& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(MegaStruct &a, MegaStruct &b);
 
+inline std::ostream& operator<<(std::ostream& out, const MegaStruct& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 
 #endif
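
The header-side counterpart above drops the per-type fingerprint constants and replaces the friend operator<< declaration with a public virtual printTo() plus a non-member inline operator<< that simply delegates to it. The sketch below (hypothetical Printable/Wrapper types, not the generated MiniStruct or MegaStruct) shows why that delegation is convenient: the usual streaming syntax is preserved without any friend access, and because printTo() is virtual, a type deriving from a generated struct could customize how it prints:

#include <iostream>

// Hypothetical types showing the shape of the new printing hooks: a public
// virtual printTo() plus a non-member operator<< that just delegates to it.
// The generator emits one such inline operator<< per struct; a single base
// class is used here only to make the virtual dispatch visible.
struct Printable {
  virtual ~Printable() = default;
  virtual void printTo(std::ostream& out) const { out << "Printable()"; }
};

struct Wrapper : Printable {
  int payload = 42;
  void printTo(std::ostream& out) const override {
    out << "Wrapper(payload=" << payload << ")";
  }
};

inline std::ostream& operator<<(std::ostream& out, const Printable& obj) {
  obj.printTo(out);   // same two lines as the generated inline operator<<
  return out;
}

int main() {
  Wrapper w;
  const Printable& p = w;
  std::cout << p << '\n';   // prints Wrapper(payload=42) through the virtual call
  return 0;
}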

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
index d5d6999..243d3b8 100644
--- a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
+++ b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/serde_constants.h
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.h b/serde/src/gen/thrift/gen-cpp/serde_constants.h
index cd48926..3566ead 100644
--- a/serde/src/gen/thrift/gen-cpp/serde_constants.h
+++ b/serde/src/gen/thrift/gen-cpp/serde_constants.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/serde_types.cpp
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/serde_types.cpp b/serde/src/gen/thrift/gen-cpp/serde_types.cpp
index a37ff7c..c05f86d 100644
--- a/serde/src/gen/thrift/gen-cpp/serde_types.cpp
+++ b/serde/src/gen/thrift/gen-cpp/serde_types.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/serde_types.h
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/serde_types.h b/serde/src/gen/thrift/gen-cpp/serde_types.h
index eb7659a..bdaab33 100644
--- a/serde/src/gen/thrift/gen-cpp/serde_types.h
+++ b/serde/src/gen/thrift/gen-cpp/serde_types.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/testthrift_constants.cpp
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/testthrift_constants.cpp b/serde/src/gen/thrift/gen-cpp/testthrift_constants.cpp
index 0fa57e7..9538dae 100644
--- a/serde/src/gen/thrift/gen-cpp/testthrift_constants.cpp
+++ b/serde/src/gen/thrift/gen-cpp/testthrift_constants.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/testthrift_constants.h
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/testthrift_constants.h b/serde/src/gen/thrift/gen-cpp/testthrift_constants.h
index cba3296..45fdc28 100644
--- a/serde/src/gen/thrift/gen-cpp/testthrift_constants.h
+++ b/serde/src/gen/thrift/gen-cpp/testthrift_constants.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/serde/src/gen/thrift/gen-cpp/testthrift_types.cpp
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/testthrift_types.cpp b/serde/src/gen/thrift/gen-cpp/testthrift_types.cpp
index 0cd19b9..c76e2fd 100644
--- a/serde/src/gen/thrift/gen-cpp/testthrift_types.cpp
+++ b/serde/src/gen/thrift/gen-cpp/testthrift_types.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -22,11 +22,9 @@ void InnerStruct::__set_field0(const int32_t val) {
   this->field0 = val;
 }
 
-const char* InnerStruct::ascii_fingerprint = "E86CACEB22240450EDCBEFC3A83970E4";
-const uint8_t InnerStruct::binary_fingerprint[16] = {0xE8,0x6C,0xAC,0xEB,0x22,0x24,0x04,0x50,0xED,0xCB,0xEF,0xC3,0xA8,0x39,0x70,0xE4};
-
 uint32_t InnerStruct::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -67,7 +65,7 @@ uint32_t InnerStruct::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t InnerStruct::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("InnerStruct");
 
   xfer += oprot->writeFieldBegin("field0", ::apache::thrift::protocol::T_I32, 1);
@@ -76,7 +74,6 @@ uint32_t InnerStruct::write(::apache::thrift::protocol::TProtocol* oprot) const
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -95,12 +92,11 @@ InnerStruct& InnerStruct::operator=(const InnerStruct& other1) {
   __isset = other1.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const InnerStruct& obj) {
-  using apache::thrift::to_string;
+void InnerStruct::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "InnerStruct(";
-  out << "field0=" << to_string(obj.field0);
+  out << "field0=" << to_string(field0);
   out << ")";
-  return out;
 }
 
 
@@ -120,11 +116,9 @@ void ThriftTestObj::__set_field3(const std::vector<InnerStruct> & val) {
   this->field3 = val;
 }
 
-const char* ThriftTestObj::ascii_fingerprint = "2BA5D8DAACFBBE6599779830A6185706";
-const uint8_t ThriftTestObj::binary_fingerprint[16] = {0x2B,0xA5,0xD8,0xDA,0xAC,0xFB,0xBE,0x65,0x99,0x77,0x98,0x30,0xA6,0x18,0x57,0x06};
-
 uint32_t ThriftTestObj::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -193,7 +187,7 @@ uint32_t ThriftTestObj::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t ThriftTestObj::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftTestObj");
 
   xfer += oprot->writeFieldBegin("field1", ::apache::thrift::protocol::T_I32, 1);
@@ -218,7 +212,6 @@ uint32_t ThriftTestObj::write(::apache::thrift::protocol::TProtocol* oprot) cons
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -243,14 +236,13 @@ ThriftTestObj& ThriftTestObj::operator=(const ThriftTestObj& other9) {
   __isset = other9.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const ThriftTestObj& obj) {
-  using apache::thrift::to_string;
+void ThriftTestObj::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "ThriftTestObj(";
-  out << "field1=" << to_string(obj.field1);
-  out << ", " << "field2=" << to_string(obj.field2);
-  out << ", " << "field3=" << to_string(obj.field3);
+  out << "field1=" << to_string(field1);
+  out << ", " << "field2=" << to_string(field2);
+  out << ", " << "field3=" << to_string(field3);
   out << ")";
-  return out;
 }
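
One behavioural detail worth noticing in the testthrift_types.cpp hunks above: read() now constructs a TInputRecursionTracker, and write() replaces the manual incrementRecursionDepth()/decrementRecursionDepth() pair with a TOutputRecursionTracker. These are stack-constructed guards, so the depth counter is restored on every exit path, including when serialization throws partway through. A minimal sketch of that guard idiom, assuming a hypothetical Protocol class in place of Thrift's TProtocol:

#include <iostream>
#include <stdexcept>

// Hypothetical protocol with a recursion-depth limit, standing in for
// Thrift's TProtocol; only the depth bookkeeping is sketched here.
struct Protocol {
  int depth = 0;
  void incrementRecursionDepth() {
    if (++depth > 64) throw std::runtime_error("recursion limit exceeded");
  }
  void decrementRecursionDepth() { --depth; }
};

// RAII guard in the spirit of TInputRecursionTracker/TOutputRecursionTracker:
// the destructor performs the decrement on every exit path, including
// exceptions thrown while a struct is half-written.
class RecursionTracker {
 public:
  explicit RecursionTracker(Protocol& p) : p_(p) { p_.incrementRecursionDepth(); }
  ~RecursionTracker() { p_.decrementRecursionDepth(); }
  RecursionTracker(const RecursionTracker&) = delete;
  RecursionTracker& operator=(const RecursionTracker&) = delete;
 private:
  Protocol& p_;
};

void writeStruct(Protocol& prot, bool fail) {
  RecursionTracker tracker(prot);  // replaces the manual increment at the top...
  if (fail) throw std::runtime_error("write failed mid-struct");
  // ...and no explicit decrementRecursionDepth() is needed before returning.
}

int main() {
  Protocol prot;
  try {
    writeStruct(prot, true);
  } catch (const std::exception& e) {
    std::cout << e.what() << ", depth after unwind = " << prot.depth << '\n';  // depth = 0
  }
  writeStruct(prot, false);
  std::cout << "depth after normal return = " << prot.depth << '\n';           // depth = 0
  return 0;
}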
 
 


[19/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
index 49e6143..c8f16a7 100644
--- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
+++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -8,11 +8,17 @@
 #define ThriftHiveMetastore_H
 
 #include <thrift/TDispatchProcessor.h>
+#include <thrift/async/TConcurrentClientSyncInfo.h>
 #include "hive_metastore_types.h"
 #include "FacebookService.h"
 
 namespace Apache { namespace Hadoop { namespace Hive {
 
+#ifdef _WIN32
+  #pragma warning( push )
+  #pragma warning (disable : 4250 ) //inheriting methods via dominance 
+#endif
+
 class ThriftHiveMetastoreIf : virtual public  ::facebook::fb303::FacebookServiceIf {
  public:
   virtual ~ThriftHiveMetastoreIf() {}
@@ -594,9 +600,6 @@ typedef struct _ThriftHiveMetastore_getMetaConf_args__isset {
 class ThriftHiveMetastore_getMetaConf_args {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
   ThriftHiveMetastore_getMetaConf_args(const ThriftHiveMetastore_getMetaConf_args&);
   ThriftHiveMetastore_getMetaConf_args& operator=(const ThriftHiveMetastore_getMetaConf_args&);
   ThriftHiveMetastore_getMetaConf_args() : key() {
@@ -624,23 +627,18 @@ class ThriftHiveMetastore_getMetaConf_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_getMetaConf_args& obj);
 };
 
 
 class ThriftHiveMetastore_getMetaConf_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
 
   virtual ~ThriftHiveMetastore_getMetaConf_pargs() throw();
   const std::string* key;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_getMetaConf_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_getMetaConf_result__isset {
@@ -652,9 +650,6 @@ typedef struct _ThriftHiveMetastore_getMetaConf_result__isset {
 class ThriftHiveMetastore_getMetaConf_result {
  public:
 
-  static const char* ascii_fingerprint; // = "FB3D66E547AE5D7F8EB046D752ECF5C1";
-  static const uint8_t binary_fingerprint[16]; // = {0xFB,0x3D,0x66,0xE5,0x47,0xAE,0x5D,0x7F,0x8E,0xB0,0x46,0xD7,0x52,0xEC,0xF5,0xC1};
-
   ThriftHiveMetastore_getMetaConf_result(const ThriftHiveMetastore_getMetaConf_result&);
   ThriftHiveMetastore_getMetaConf_result& operator=(const ThriftHiveMetastore_getMetaConf_result&);
   ThriftHiveMetastore_getMetaConf_result() : success() {
@@ -687,7 +682,6 @@ class ThriftHiveMetastore_getMetaConf_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_getMetaConf_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_getMetaConf_presult__isset {
@@ -699,9 +693,6 @@ typedef struct _ThriftHiveMetastore_getMetaConf_presult__isset {
 class ThriftHiveMetastore_getMetaConf_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "FB3D66E547AE5D7F8EB046D752ECF5C1";
-  static const uint8_t binary_fingerprint[16]; // = {0xFB,0x3D,0x66,0xE5,0x47,0xAE,0x5D,0x7F,0x8E,0xB0,0x46,0xD7,0x52,0xEC,0xF5,0xC1};
-
 
   virtual ~ThriftHiveMetastore_getMetaConf_presult() throw();
   std::string* success;
@@ -711,7 +702,6 @@ class ThriftHiveMetastore_getMetaConf_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_getMetaConf_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_setMetaConf_args__isset {
@@ -723,9 +713,6 @@ typedef struct _ThriftHiveMetastore_setMetaConf_args__isset {
 class ThriftHiveMetastore_setMetaConf_args {
  public:
 
-  static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
-  static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
   ThriftHiveMetastore_setMetaConf_args(const ThriftHiveMetastore_setMetaConf_args&);
   ThriftHiveMetastore_setMetaConf_args& operator=(const ThriftHiveMetastore_setMetaConf_args&);
   ThriftHiveMetastore_setMetaConf_args() : key(), value() {
@@ -758,16 +745,12 @@ class ThriftHiveMetastore_setMetaConf_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_setMetaConf_args& obj);
 };
 
 
 class ThriftHiveMetastore_setMetaConf_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
-  static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
 
   virtual ~ThriftHiveMetastore_setMetaConf_pargs() throw();
   const std::string* key;
@@ -775,7 +758,6 @@ class ThriftHiveMetastore_setMetaConf_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_setMetaConf_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_setMetaConf_result__isset {
@@ -786,9 +768,6 @@ typedef struct _ThriftHiveMetastore_setMetaConf_result__isset {
 class ThriftHiveMetastore_setMetaConf_result {
  public:
 
-  static const char* ascii_fingerprint; // = "771E7EF40B572D2BFAB12C49547ADCBF";
-  static const uint8_t binary_fingerprint[16]; // = {0x77,0x1E,0x7E,0xF4,0x0B,0x57,0x2D,0x2B,0xFA,0xB1,0x2C,0x49,0x54,0x7A,0xDC,0xBF};
-
   ThriftHiveMetastore_setMetaConf_result(const ThriftHiveMetastore_setMetaConf_result&);
   ThriftHiveMetastore_setMetaConf_result& operator=(const ThriftHiveMetastore_setMetaConf_result&);
   ThriftHiveMetastore_setMetaConf_result() {
@@ -816,7 +795,6 @@ class ThriftHiveMetastore_setMetaConf_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_setMetaConf_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_setMetaConf_presult__isset {
@@ -827,9 +805,6 @@ typedef struct _ThriftHiveMetastore_setMetaConf_presult__isset {
 class ThriftHiveMetastore_setMetaConf_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "771E7EF40B572D2BFAB12C49547ADCBF";
-  static const uint8_t binary_fingerprint[16]; // = {0x77,0x1E,0x7E,0xF4,0x0B,0x57,0x2D,0x2B,0xFA,0xB1,0x2C,0x49,0x54,0x7A,0xDC,0xBF};
-
 
   virtual ~ThriftHiveMetastore_setMetaConf_presult() throw();
   MetaException o1;
@@ -838,7 +813,6 @@ class ThriftHiveMetastore_setMetaConf_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_setMetaConf_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_create_database_args__isset {
@@ -849,9 +823,6 @@ typedef struct _ThriftHiveMetastore_create_database_args__isset {
 class ThriftHiveMetastore_create_database_args {
  public:
 
-  static const char* ascii_fingerprint; // = "BAA173319AAB1B660ECE7D895D8F1A31";
-  static const uint8_t binary_fingerprint[16]; // = {0xBA,0xA1,0x73,0x31,0x9A,0xAB,0x1B,0x66,0x0E,0xCE,0x7D,0x89,0x5D,0x8F,0x1A,0x31};
-
   ThriftHiveMetastore_create_database_args(const ThriftHiveMetastore_create_database_args&);
   ThriftHiveMetastore_create_database_args& operator=(const ThriftHiveMetastore_create_database_args&);
   ThriftHiveMetastore_create_database_args() {
@@ -879,23 +850,18 @@ class ThriftHiveMetastore_create_database_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_database_args& obj);
 };
 
 
 class ThriftHiveMetastore_create_database_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "BAA173319AAB1B660ECE7D895D8F1A31";
-  static const uint8_t binary_fingerprint[16]; // = {0xBA,0xA1,0x73,0x31,0x9A,0xAB,0x1B,0x66,0x0E,0xCE,0x7D,0x89,0x5D,0x8F,0x1A,0x31};
-
 
   virtual ~ThriftHiveMetastore_create_database_pargs() throw();
   const Database* database;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_database_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_create_database_result__isset {
@@ -908,9 +874,6 @@ typedef struct _ThriftHiveMetastore_create_database_result__isset {
 class ThriftHiveMetastore_create_database_result {
  public:
 
-  static const char* ascii_fingerprint; // = "2E72786EE0F17DA00FC5BDED362FE255";
-  static const uint8_t binary_fingerprint[16]; // = {0x2E,0x72,0x78,0x6E,0xE0,0xF1,0x7D,0xA0,0x0F,0xC5,0xBD,0xED,0x36,0x2F,0xE2,0x55};
-
   ThriftHiveMetastore_create_database_result(const ThriftHiveMetastore_create_database_result&);
   ThriftHiveMetastore_create_database_result& operator=(const ThriftHiveMetastore_create_database_result&);
   ThriftHiveMetastore_create_database_result() {
@@ -948,7 +911,6 @@ class ThriftHiveMetastore_create_database_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_database_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_create_database_presult__isset {
@@ -961,9 +923,6 @@ typedef struct _ThriftHiveMetastore_create_database_presult__isset {
 class ThriftHiveMetastore_create_database_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "2E72786EE0F17DA00FC5BDED362FE255";
-  static const uint8_t binary_fingerprint[16]; // = {0x2E,0x72,0x78,0x6E,0xE0,0xF1,0x7D,0xA0,0x0F,0xC5,0xBD,0xED,0x36,0x2F,0xE2,0x55};
-
 
   virtual ~ThriftHiveMetastore_create_database_presult() throw();
   AlreadyExistsException o1;
@@ -974,7 +933,6 @@ class ThriftHiveMetastore_create_database_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_database_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_database_args__isset {
@@ -985,9 +943,6 @@ typedef struct _ThriftHiveMetastore_get_database_args__isset {
 class ThriftHiveMetastore_get_database_args {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
   ThriftHiveMetastore_get_database_args(const ThriftHiveMetastore_get_database_args&);
   ThriftHiveMetastore_get_database_args& operator=(const ThriftHiveMetastore_get_database_args&);
   ThriftHiveMetastore_get_database_args() : name() {
@@ -1015,23 +970,18 @@ class ThriftHiveMetastore_get_database_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_database_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_database_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
 
   virtual ~ThriftHiveMetastore_get_database_pargs() throw();
   const std::string* name;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_database_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_database_result__isset {
@@ -1044,9 +994,6 @@ typedef struct _ThriftHiveMetastore_get_database_result__isset {
 class ThriftHiveMetastore_get_database_result {
  public:
 
-  static const char* ascii_fingerprint; // = "C73ECEC4971924DCA9053333CB4A89A0";
-  static const uint8_t binary_fingerprint[16]; // = {0xC7,0x3E,0xCE,0xC4,0x97,0x19,0x24,0xDC,0xA9,0x05,0x33,0x33,0xCB,0x4A,0x89,0xA0};
-
   ThriftHiveMetastore_get_database_result(const ThriftHiveMetastore_get_database_result&);
   ThriftHiveMetastore_get_database_result& operator=(const ThriftHiveMetastore_get_database_result&);
   ThriftHiveMetastore_get_database_result() {
@@ -1084,7 +1031,6 @@ class ThriftHiveMetastore_get_database_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_database_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_database_presult__isset {
@@ -1097,9 +1043,6 @@ typedef struct _ThriftHiveMetastore_get_database_presult__isset {
 class ThriftHiveMetastore_get_database_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "C73ECEC4971924DCA9053333CB4A89A0";
-  static const uint8_t binary_fingerprint[16]; // = {0xC7,0x3E,0xCE,0xC4,0x97,0x19,0x24,0xDC,0xA9,0x05,0x33,0x33,0xCB,0x4A,0x89,0xA0};
-
 
   virtual ~ThriftHiveMetastore_get_database_presult() throw();
   Database* success;
@@ -1110,7 +1053,6 @@ class ThriftHiveMetastore_get_database_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_database_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_drop_database_args__isset {
@@ -1123,9 +1065,6 @@ typedef struct _ThriftHiveMetastore_drop_database_args__isset {
 class ThriftHiveMetastore_drop_database_args {
  public:
 
-  static const char* ascii_fingerprint; // = "B8E1E91B0E4D1772237E387EEB23B1B0";
-  static const uint8_t binary_fingerprint[16]; // = {0xB8,0xE1,0xE9,0x1B,0x0E,0x4D,0x17,0x72,0x23,0x7E,0x38,0x7E,0xEB,0x23,0xB1,0xB0};
-
   ThriftHiveMetastore_drop_database_args(const ThriftHiveMetastore_drop_database_args&);
   ThriftHiveMetastore_drop_database_args& operator=(const ThriftHiveMetastore_drop_database_args&);
   ThriftHiveMetastore_drop_database_args() : name(), deleteData(0), cascade(0) {
@@ -1163,16 +1102,12 @@ class ThriftHiveMetastore_drop_database_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_database_args& obj);
 };
 
 
 class ThriftHiveMetastore_drop_database_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "B8E1E91B0E4D1772237E387EEB23B1B0";
-  static const uint8_t binary_fingerprint[16]; // = {0xB8,0xE1,0xE9,0x1B,0x0E,0x4D,0x17,0x72,0x23,0x7E,0x38,0x7E,0xEB,0x23,0xB1,0xB0};
-
 
   virtual ~ThriftHiveMetastore_drop_database_pargs() throw();
   const std::string* name;
@@ -1181,7 +1116,6 @@ class ThriftHiveMetastore_drop_database_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_database_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_drop_database_result__isset {
@@ -1194,9 +1128,6 @@ typedef struct _ThriftHiveMetastore_drop_database_result__isset {
 class ThriftHiveMetastore_drop_database_result {
  public:
 
-  static const char* ascii_fingerprint; // = "2E72786EE0F17DA00FC5BDED362FE255";
-  static const uint8_t binary_fingerprint[16]; // = {0x2E,0x72,0x78,0x6E,0xE0,0xF1,0x7D,0xA0,0x0F,0xC5,0xBD,0xED,0x36,0x2F,0xE2,0x55};
-
   ThriftHiveMetastore_drop_database_result(const ThriftHiveMetastore_drop_database_result&);
   ThriftHiveMetastore_drop_database_result& operator=(const ThriftHiveMetastore_drop_database_result&);
   ThriftHiveMetastore_drop_database_result() {
@@ -1234,7 +1165,6 @@ class ThriftHiveMetastore_drop_database_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_database_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_drop_database_presult__isset {
@@ -1247,9 +1177,6 @@ typedef struct _ThriftHiveMetastore_drop_database_presult__isset {
 class ThriftHiveMetastore_drop_database_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "2E72786EE0F17DA00FC5BDED362FE255";
-  static const uint8_t binary_fingerprint[16]; // = {0x2E,0x72,0x78,0x6E,0xE0,0xF1,0x7D,0xA0,0x0F,0xC5,0xBD,0xED,0x36,0x2F,0xE2,0x55};
-
 
   virtual ~ThriftHiveMetastore_drop_database_presult() throw();
   NoSuchObjectException o1;
@@ -1260,7 +1187,6 @@ class ThriftHiveMetastore_drop_database_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_database_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_databases_args__isset {
@@ -1271,9 +1197,6 @@ typedef struct _ThriftHiveMetastore_get_databases_args__isset {
 class ThriftHiveMetastore_get_databases_args {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
   ThriftHiveMetastore_get_databases_args(const ThriftHiveMetastore_get_databases_args&);
   ThriftHiveMetastore_get_databases_args& operator=(const ThriftHiveMetastore_get_databases_args&);
   ThriftHiveMetastore_get_databases_args() : pattern() {
@@ -1301,23 +1224,18 @@ class ThriftHiveMetastore_get_databases_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_databases_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_databases_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
 
   virtual ~ThriftHiveMetastore_get_databases_pargs() throw();
   const std::string* pattern;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_databases_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_databases_result__isset {
@@ -1329,9 +1247,6 @@ typedef struct _ThriftHiveMetastore_get_databases_result__isset {
 class ThriftHiveMetastore_get_databases_result {
  public:
 
-  static const char* ascii_fingerprint; // = "96F383CF9CB8BE09061ECB825FE717B6";
-  static const uint8_t binary_fingerprint[16]; // = {0x96,0xF3,0x83,0xCF,0x9C,0xB8,0xBE,0x09,0x06,0x1E,0xCB,0x82,0x5F,0xE7,0x17,0xB6};
-
   ThriftHiveMetastore_get_databases_result(const ThriftHiveMetastore_get_databases_result&);
   ThriftHiveMetastore_get_databases_result& operator=(const ThriftHiveMetastore_get_databases_result&);
   ThriftHiveMetastore_get_databases_result() {
@@ -1364,7 +1279,6 @@ class ThriftHiveMetastore_get_databases_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_databases_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_databases_presult__isset {
@@ -1376,9 +1290,6 @@ typedef struct _ThriftHiveMetastore_get_databases_presult__isset {
 class ThriftHiveMetastore_get_databases_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "96F383CF9CB8BE09061ECB825FE717B6";
-  static const uint8_t binary_fingerprint[16]; // = {0x96,0xF3,0x83,0xCF,0x9C,0xB8,0xBE,0x09,0x06,0x1E,0xCB,0x82,0x5F,0xE7,0x17,0xB6};
-
 
   virtual ~ThriftHiveMetastore_get_databases_presult() throw();
   std::vector<std::string> * success;
@@ -1388,16 +1299,12 @@ class ThriftHiveMetastore_get_databases_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_databases_presult& obj);
 };
 
 
 class ThriftHiveMetastore_get_all_databases_args {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
   ThriftHiveMetastore_get_all_databases_args(const ThriftHiveMetastore_get_all_databases_args&);
   ThriftHiveMetastore_get_all_databases_args& operator=(const ThriftHiveMetastore_get_all_databases_args&);
   ThriftHiveMetastore_get_all_databases_args() {
@@ -1418,22 +1325,17 @@ class ThriftHiveMetastore_get_all_databases_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_all_databases_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_all_databases_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
 
   virtual ~ThriftHiveMetastore_get_all_databases_pargs() throw();
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_all_databases_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_all_databases_result__isset {
@@ -1445,9 +1347,6 @@ typedef struct _ThriftHiveMetastore_get_all_databases_result__isset {
 class ThriftHiveMetastore_get_all_databases_result {
  public:
 
-  static const char* ascii_fingerprint; // = "96F383CF9CB8BE09061ECB825FE717B6";
-  static const uint8_t binary_fingerprint[16]; // = {0x96,0xF3,0x83,0xCF,0x9C,0xB8,0xBE,0x09,0x06,0x1E,0xCB,0x82,0x5F,0xE7,0x17,0xB6};
-
   ThriftHiveMetastore_get_all_databases_result(const ThriftHiveMetastore_get_all_databases_result&);
   ThriftHiveMetastore_get_all_databases_result& operator=(const ThriftHiveMetastore_get_all_databases_result&);
   ThriftHiveMetastore_get_all_databases_result() {
@@ -1480,7 +1379,6 @@ class ThriftHiveMetastore_get_all_databases_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_all_databases_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_all_databases_presult__isset {
@@ -1492,9 +1390,6 @@ typedef struct _ThriftHiveMetastore_get_all_databases_presult__isset {
 class ThriftHiveMetastore_get_all_databases_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "96F383CF9CB8BE09061ECB825FE717B6";
-  static const uint8_t binary_fingerprint[16]; // = {0x96,0xF3,0x83,0xCF,0x9C,0xB8,0xBE,0x09,0x06,0x1E,0xCB,0x82,0x5F,0xE7,0x17,0xB6};
-
 
   virtual ~ThriftHiveMetastore_get_all_databases_presult() throw();
   std::vector<std::string> * success;
@@ -1504,7 +1399,6 @@ class ThriftHiveMetastore_get_all_databases_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_all_databases_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_alter_database_args__isset {
@@ -1516,9 +1410,6 @@ typedef struct _ThriftHiveMetastore_alter_database_args__isset {
 class ThriftHiveMetastore_alter_database_args {
  public:
 
-  static const char* ascii_fingerprint; // = "B314C1FCA7295CFE5872A037139333A5";
-  static const uint8_t binary_fingerprint[16]; // = {0xB3,0x14,0xC1,0xFC,0xA7,0x29,0x5C,0xFE,0x58,0x72,0xA0,0x37,0x13,0x93,0x33,0xA5};
-
   ThriftHiveMetastore_alter_database_args(const ThriftHiveMetastore_alter_database_args&);
   ThriftHiveMetastore_alter_database_args& operator=(const ThriftHiveMetastore_alter_database_args&);
   ThriftHiveMetastore_alter_database_args() : dbname() {
@@ -1551,16 +1442,12 @@ class ThriftHiveMetastore_alter_database_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_alter_database_args& obj);
 };
 
 
 class ThriftHiveMetastore_alter_database_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "B314C1FCA7295CFE5872A037139333A5";
-  static const uint8_t binary_fingerprint[16]; // = {0xB3,0x14,0xC1,0xFC,0xA7,0x29,0x5C,0xFE,0x58,0x72,0xA0,0x37,0x13,0x93,0x33,0xA5};
-
 
   virtual ~ThriftHiveMetastore_alter_database_pargs() throw();
   const std::string* dbname;
@@ -1568,7 +1455,6 @@ class ThriftHiveMetastore_alter_database_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_alter_database_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_alter_database_result__isset {
@@ -1580,9 +1466,6 @@ typedef struct _ThriftHiveMetastore_alter_database_result__isset {
 class ThriftHiveMetastore_alter_database_result {
  public:
 
-  static const char* ascii_fingerprint; // = "A963880F55F6F22FCC67AD7BA0E1894D";
-  static const uint8_t binary_fingerprint[16]; // = {0xA9,0x63,0x88,0x0F,0x55,0xF6,0xF2,0x2F,0xCC,0x67,0xAD,0x7B,0xA0,0xE1,0x89,0x4D};
-
   ThriftHiveMetastore_alter_database_result(const ThriftHiveMetastore_alter_database_result&);
   ThriftHiveMetastore_alter_database_result& operator=(const ThriftHiveMetastore_alter_database_result&);
   ThriftHiveMetastore_alter_database_result() {
@@ -1615,7 +1498,6 @@ class ThriftHiveMetastore_alter_database_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_alter_database_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_alter_database_presult__isset {
@@ -1627,9 +1509,6 @@ typedef struct _ThriftHiveMetastore_alter_database_presult__isset {
 class ThriftHiveMetastore_alter_database_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "A963880F55F6F22FCC67AD7BA0E1894D";
-  static const uint8_t binary_fingerprint[16]; // = {0xA9,0x63,0x88,0x0F,0x55,0xF6,0xF2,0x2F,0xCC,0x67,0xAD,0x7B,0xA0,0xE1,0x89,0x4D};
-
 
   virtual ~ThriftHiveMetastore_alter_database_presult() throw();
   MetaException o1;
@@ -1639,7 +1518,6 @@ class ThriftHiveMetastore_alter_database_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_alter_database_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_type_args__isset {
@@ -1650,9 +1528,6 @@ typedef struct _ThriftHiveMetastore_get_type_args__isset {
 class ThriftHiveMetastore_get_type_args {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
   ThriftHiveMetastore_get_type_args(const ThriftHiveMetastore_get_type_args&);
   ThriftHiveMetastore_get_type_args& operator=(const ThriftHiveMetastore_get_type_args&);
   ThriftHiveMetastore_get_type_args() : name() {
@@ -1680,23 +1555,18 @@ class ThriftHiveMetastore_get_type_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_type_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_type_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
 
   virtual ~ThriftHiveMetastore_get_type_pargs() throw();
   const std::string* name;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_type_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_type_result__isset {
@@ -1709,9 +1579,6 @@ typedef struct _ThriftHiveMetastore_get_type_result__isset {
 class ThriftHiveMetastore_get_type_result {
  public:
 
-  static const char* ascii_fingerprint; // = "FB3C6A03581A125ACD392DBF486E3393";
-  static const uint8_t binary_fingerprint[16]; // = {0xFB,0x3C,0x6A,0x03,0x58,0x1A,0x12,0x5A,0xCD,0x39,0x2D,0xBF,0x48,0x6E,0x33,0x93};
-
   ThriftHiveMetastore_get_type_result(const ThriftHiveMetastore_get_type_result&);
   ThriftHiveMetastore_get_type_result& operator=(const ThriftHiveMetastore_get_type_result&);
   ThriftHiveMetastore_get_type_result() {
@@ -1749,7 +1616,6 @@ class ThriftHiveMetastore_get_type_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_type_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_type_presult__isset {
@@ -1762,9 +1628,6 @@ typedef struct _ThriftHiveMetastore_get_type_presult__isset {
 class ThriftHiveMetastore_get_type_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "FB3C6A03581A125ACD392DBF486E3393";
-  static const uint8_t binary_fingerprint[16]; // = {0xFB,0x3C,0x6A,0x03,0x58,0x1A,0x12,0x5A,0xCD,0x39,0x2D,0xBF,0x48,0x6E,0x33,0x93};
-
 
   virtual ~ThriftHiveMetastore_get_type_presult() throw();
   Type* success;
@@ -1775,7 +1638,6 @@ class ThriftHiveMetastore_get_type_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_type_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_create_type_args__isset {
@@ -1786,9 +1648,6 @@ typedef struct _ThriftHiveMetastore_create_type_args__isset {
 class ThriftHiveMetastore_create_type_args {
  public:
 
-  static const char* ascii_fingerprint; // = "9A5D4BC8553C4C253DA189914EB026F0";
-  static const uint8_t binary_fingerprint[16]; // = {0x9A,0x5D,0x4B,0xC8,0x55,0x3C,0x4C,0x25,0x3D,0xA1,0x89,0x91,0x4E,0xB0,0x26,0xF0};
-
   ThriftHiveMetastore_create_type_args(const ThriftHiveMetastore_create_type_args&);
   ThriftHiveMetastore_create_type_args& operator=(const ThriftHiveMetastore_create_type_args&);
   ThriftHiveMetastore_create_type_args() {
@@ -1816,23 +1675,18 @@ class ThriftHiveMetastore_create_type_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_type_args& obj);
 };
 
 
 class ThriftHiveMetastore_create_type_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "9A5D4BC8553C4C253DA189914EB026F0";
-  static const uint8_t binary_fingerprint[16]; // = {0x9A,0x5D,0x4B,0xC8,0x55,0x3C,0x4C,0x25,0x3D,0xA1,0x89,0x91,0x4E,0xB0,0x26,0xF0};
-
 
   virtual ~ThriftHiveMetastore_create_type_pargs() throw();
   const Type* type;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_type_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_create_type_result__isset {
@@ -1846,9 +1700,6 @@ typedef struct _ThriftHiveMetastore_create_type_result__isset {
 class ThriftHiveMetastore_create_type_result {
  public:
 
-  static const char* ascii_fingerprint; // = "BEFA80ACD0D12B069F160582E1F44215";
-  static const uint8_t binary_fingerprint[16]; // = {0xBE,0xFA,0x80,0xAC,0xD0,0xD1,0x2B,0x06,0x9F,0x16,0x05,0x82,0xE1,0xF4,0x42,0x15};
-
   ThriftHiveMetastore_create_type_result(const ThriftHiveMetastore_create_type_result&);
   ThriftHiveMetastore_create_type_result& operator=(const ThriftHiveMetastore_create_type_result&);
   ThriftHiveMetastore_create_type_result() : success(0) {
@@ -1891,7 +1742,6 @@ class ThriftHiveMetastore_create_type_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_type_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_create_type_presult__isset {
@@ -1905,9 +1755,6 @@ typedef struct _ThriftHiveMetastore_create_type_presult__isset {
 class ThriftHiveMetastore_create_type_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "BEFA80ACD0D12B069F160582E1F44215";
-  static const uint8_t binary_fingerprint[16]; // = {0xBE,0xFA,0x80,0xAC,0xD0,0xD1,0x2B,0x06,0x9F,0x16,0x05,0x82,0xE1,0xF4,0x42,0x15};
-
 
   virtual ~ThriftHiveMetastore_create_type_presult() throw();
   bool* success;
@@ -1919,7 +1766,6 @@ class ThriftHiveMetastore_create_type_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_type_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_drop_type_args__isset {
@@ -1930,9 +1776,6 @@ typedef struct _ThriftHiveMetastore_drop_type_args__isset {
 class ThriftHiveMetastore_drop_type_args {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
   ThriftHiveMetastore_drop_type_args(const ThriftHiveMetastore_drop_type_args&);
   ThriftHiveMetastore_drop_type_args& operator=(const ThriftHiveMetastore_drop_type_args&);
   ThriftHiveMetastore_drop_type_args() : type() {
@@ -1960,23 +1803,18 @@ class ThriftHiveMetastore_drop_type_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_type_args& obj);
 };
 
 
 class ThriftHiveMetastore_drop_type_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
 
   virtual ~ThriftHiveMetastore_drop_type_pargs() throw();
   const std::string* type;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_type_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_drop_type_result__isset {
@@ -1989,9 +1827,6 @@ typedef struct _ThriftHiveMetastore_drop_type_result__isset {
 class ThriftHiveMetastore_drop_type_result {
  public:
 
-  static const char* ascii_fingerprint; // = "3499095F5992356B85332339F1B55814";
-  static const uint8_t binary_fingerprint[16]; // = {0x34,0x99,0x09,0x5F,0x59,0x92,0x35,0x6B,0x85,0x33,0x23,0x39,0xF1,0xB5,0x58,0x14};
-
   ThriftHiveMetastore_drop_type_result(const ThriftHiveMetastore_drop_type_result&);
   ThriftHiveMetastore_drop_type_result& operator=(const ThriftHiveMetastore_drop_type_result&);
   ThriftHiveMetastore_drop_type_result() : success(0) {
@@ -2029,7 +1864,6 @@ class ThriftHiveMetastore_drop_type_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_type_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_drop_type_presult__isset {
@@ -2042,9 +1876,6 @@ typedef struct _ThriftHiveMetastore_drop_type_presult__isset {
 class ThriftHiveMetastore_drop_type_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "3499095F5992356B85332339F1B55814";
-  static const uint8_t binary_fingerprint[16]; // = {0x34,0x99,0x09,0x5F,0x59,0x92,0x35,0x6B,0x85,0x33,0x23,0x39,0xF1,0xB5,0x58,0x14};
-
 
   virtual ~ThriftHiveMetastore_drop_type_presult() throw();
   bool* success;
@@ -2055,7 +1886,6 @@ class ThriftHiveMetastore_drop_type_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_type_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_type_all_args__isset {
@@ -2066,9 +1896,6 @@ typedef struct _ThriftHiveMetastore_get_type_all_args__isset {
 class ThriftHiveMetastore_get_type_all_args {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
   ThriftHiveMetastore_get_type_all_args(const ThriftHiveMetastore_get_type_all_args&);
   ThriftHiveMetastore_get_type_all_args& operator=(const ThriftHiveMetastore_get_type_all_args&);
   ThriftHiveMetastore_get_type_all_args() : name() {
@@ -2096,23 +1923,18 @@ class ThriftHiveMetastore_get_type_all_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_type_all_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_type_all_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
 
   virtual ~ThriftHiveMetastore_get_type_all_pargs() throw();
   const std::string* name;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_type_all_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_type_all_result__isset {
@@ -2124,9 +1946,6 @@ typedef struct _ThriftHiveMetastore_get_type_all_result__isset {
 class ThriftHiveMetastore_get_type_all_result {
  public:
 
-  static const char* ascii_fingerprint; // = "4AD9FFB783428C996DEB713D38DA38C4";
-  static const uint8_t binary_fingerprint[16]; // = {0x4A,0xD9,0xFF,0xB7,0x83,0x42,0x8C,0x99,0x6D,0xEB,0x71,0x3D,0x38,0xDA,0x38,0xC4};
-
   ThriftHiveMetastore_get_type_all_result(const ThriftHiveMetastore_get_type_all_result&);
   ThriftHiveMetastore_get_type_all_result& operator=(const ThriftHiveMetastore_get_type_all_result&);
   ThriftHiveMetastore_get_type_all_result() {
@@ -2159,7 +1978,6 @@ class ThriftHiveMetastore_get_type_all_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_type_all_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_type_all_presult__isset {
@@ -2171,9 +1989,6 @@ typedef struct _ThriftHiveMetastore_get_type_all_presult__isset {
 class ThriftHiveMetastore_get_type_all_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "4AD9FFB783428C996DEB713D38DA38C4";
-  static const uint8_t binary_fingerprint[16]; // = {0x4A,0xD9,0xFF,0xB7,0x83,0x42,0x8C,0x99,0x6D,0xEB,0x71,0x3D,0x38,0xDA,0x38,0xC4};
-
 
   virtual ~ThriftHiveMetastore_get_type_all_presult() throw();
   std::map<std::string, Type> * success;
@@ -2183,7 +1998,6 @@ class ThriftHiveMetastore_get_type_all_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_type_all_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_fields_args__isset {
@@ -2195,9 +2009,6 @@ typedef struct _ThriftHiveMetastore_get_fields_args__isset {
 class ThriftHiveMetastore_get_fields_args {
  public:
 
-  static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
-  static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
   ThriftHiveMetastore_get_fields_args(const ThriftHiveMetastore_get_fields_args&);
   ThriftHiveMetastore_get_fields_args& operator=(const ThriftHiveMetastore_get_fields_args&);
   ThriftHiveMetastore_get_fields_args() : db_name(), table_name() {
@@ -2230,16 +2041,12 @@ class ThriftHiveMetastore_get_fields_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_fields_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_fields_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
-  static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
 
   virtual ~ThriftHiveMetastore_get_fields_pargs() throw();
   const std::string* db_name;
@@ -2247,7 +2054,6 @@ class ThriftHiveMetastore_get_fields_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_fields_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_fields_result__isset {
@@ -2261,9 +2067,6 @@ typedef struct _ThriftHiveMetastore_get_fields_result__isset {
 class ThriftHiveMetastore_get_fields_result {
  public:
 
-  static const char* ascii_fingerprint; // = "B736CA8B9EACE5EE093DF8D460EE0F4E";
-  static const uint8_t binary_fingerprint[16]; // = {0xB7,0x36,0xCA,0x8B,0x9E,0xAC,0xE5,0xEE,0x09,0x3D,0xF8,0xD4,0x60,0xEE,0x0F,0x4E};
-
   ThriftHiveMetastore_get_fields_result(const ThriftHiveMetastore_get_fields_result&);
   ThriftHiveMetastore_get_fields_result& operator=(const ThriftHiveMetastore_get_fields_result&);
   ThriftHiveMetastore_get_fields_result() {
@@ -2306,7 +2109,6 @@ class ThriftHiveMetastore_get_fields_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_fields_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_fields_presult__isset {
@@ -2320,9 +2122,6 @@ typedef struct _ThriftHiveMetastore_get_fields_presult__isset {
 class ThriftHiveMetastore_get_fields_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "B736CA8B9EACE5EE093DF8D460EE0F4E";
-  static const uint8_t binary_fingerprint[16]; // = {0xB7,0x36,0xCA,0x8B,0x9E,0xAC,0xE5,0xEE,0x09,0x3D,0xF8,0xD4,0x60,0xEE,0x0F,0x4E};
-
 
   virtual ~ThriftHiveMetastore_get_fields_presult() throw();
   std::vector<FieldSchema> * success;
@@ -2334,7 +2133,6 @@ class ThriftHiveMetastore_get_fields_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_fields_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_fields_with_environment_context_args__isset {
@@ -2347,9 +2145,6 @@ typedef struct _ThriftHiveMetastore_get_fields_with_environment_context_args__is
 class ThriftHiveMetastore_get_fields_with_environment_context_args {
  public:
 
-  static const char* ascii_fingerprint; // = "AE0D6195F9A2D9B00B409A0B58C4F0AE";
-  static const uint8_t binary_fingerprint[16]; // = {0xAE,0x0D,0x61,0x95,0xF9,0xA2,0xD9,0xB0,0x0B,0x40,0x9A,0x0B,0x58,0xC4,0xF0,0xAE};
-
   ThriftHiveMetastore_get_fields_with_environment_context_args(const ThriftHiveMetastore_get_fields_with_environment_context_args&);
   ThriftHiveMetastore_get_fields_with_environment_context_args& operator=(const ThriftHiveMetastore_get_fields_with_environment_context_args&);
   ThriftHiveMetastore_get_fields_with_environment_context_args() : db_name(), table_name() {
@@ -2387,16 +2182,12 @@ class ThriftHiveMetastore_get_fields_with_environment_context_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_fields_with_environment_context_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_fields_with_environment_context_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "AE0D6195F9A2D9B00B409A0B58C4F0AE";
-  static const uint8_t binary_fingerprint[16]; // = {0xAE,0x0D,0x61,0x95,0xF9,0xA2,0xD9,0xB0,0x0B,0x40,0x9A,0x0B,0x58,0xC4,0xF0,0xAE};
-
 
   virtual ~ThriftHiveMetastore_get_fields_with_environment_context_pargs() throw();
   const std::string* db_name;
@@ -2405,7 +2196,6 @@ class ThriftHiveMetastore_get_fields_with_environment_context_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_fields_with_environment_context_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_fields_with_environment_context_result__isset {
@@ -2419,9 +2209,6 @@ typedef struct _ThriftHiveMetastore_get_fields_with_environment_context_result__
 class ThriftHiveMetastore_get_fields_with_environment_context_result {
  public:
 
-  static const char* ascii_fingerprint; // = "B736CA8B9EACE5EE093DF8D460EE0F4E";
-  static const uint8_t binary_fingerprint[16]; // = {0xB7,0x36,0xCA,0x8B,0x9E,0xAC,0xE5,0xEE,0x09,0x3D,0xF8,0xD4,0x60,0xEE,0x0F,0x4E};
-
   ThriftHiveMetastore_get_fields_with_environment_context_result(const ThriftHiveMetastore_get_fields_with_environment_context_result&);
   ThriftHiveMetastore_get_fields_with_environment_context_result& operator=(const ThriftHiveMetastore_get_fields_with_environment_context_result&);
   ThriftHiveMetastore_get_fields_with_environment_context_result() {
@@ -2464,7 +2251,6 @@ class ThriftHiveMetastore_get_fields_with_environment_context_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_fields_with_environment_context_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_fields_with_environment_context_presult__isset {
@@ -2478,9 +2264,6 @@ typedef struct _ThriftHiveMetastore_get_fields_with_environment_context_presult_
 class ThriftHiveMetastore_get_fields_with_environment_context_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "B736CA8B9EACE5EE093DF8D460EE0F4E";
-  static const uint8_t binary_fingerprint[16]; // = {0xB7,0x36,0xCA,0x8B,0x9E,0xAC,0xE5,0xEE,0x09,0x3D,0xF8,0xD4,0x60,0xEE,0x0F,0x4E};
-
 
   virtual ~ThriftHiveMetastore_get_fields_with_environment_context_presult() throw();
   std::vector<FieldSchema> * success;
@@ -2492,7 +2275,6 @@ class ThriftHiveMetastore_get_fields_with_environment_context_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_fields_with_environment_context_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_schema_args__isset {
@@ -2504,9 +2286,6 @@ typedef struct _ThriftHiveMetastore_get_schema_args__isset {
 class ThriftHiveMetastore_get_schema_args {
  public:
 
-  static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
-  static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
   ThriftHiveMetastore_get_schema_args(const ThriftHiveMetastore_get_schema_args&);
   ThriftHiveMetastore_get_schema_args& operator=(const ThriftHiveMetastore_get_schema_args&);
   ThriftHiveMetastore_get_schema_args() : db_name(), table_name() {
@@ -2539,16 +2318,12 @@ class ThriftHiveMetastore_get_schema_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_schema_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_schema_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
-  static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
 
   virtual ~ThriftHiveMetastore_get_schema_pargs() throw();
   const std::string* db_name;
@@ -2556,7 +2331,6 @@ class ThriftHiveMetastore_get_schema_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_schema_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_schema_result__isset {
@@ -2570,9 +2344,6 @@ typedef struct _ThriftHiveMetastore_get_schema_result__isset {
 class ThriftHiveMetastore_get_schema_result {
  public:
 
-  static const char* ascii_fingerprint; // = "B736CA8B9EACE5EE093DF8D460EE0F4E";
-  static const uint8_t binary_fingerprint[16]; // = {0xB7,0x36,0xCA,0x8B,0x9E,0xAC,0xE5,0xEE,0x09,0x3D,0xF8,0xD4,0x60,0xEE,0x0F,0x4E};
-
   ThriftHiveMetastore_get_schema_result(const ThriftHiveMetastore_get_schema_result&);
   ThriftHiveMetastore_get_schema_result& operator=(const ThriftHiveMetastore_get_schema_result&);
   ThriftHiveMetastore_get_schema_result() {
@@ -2615,7 +2386,6 @@ class ThriftHiveMetastore_get_schema_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_schema_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_schema_presult__isset {
@@ -2629,9 +2399,6 @@ typedef struct _ThriftHiveMetastore_get_schema_presult__isset {
 class ThriftHiveMetastore_get_schema_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "B736CA8B9EACE5EE093DF8D460EE0F4E";
-  static const uint8_t binary_fingerprint[16]; // = {0xB7,0x36,0xCA,0x8B,0x9E,0xAC,0xE5,0xEE,0x09,0x3D,0xF8,0xD4,0x60,0xEE,0x0F,0x4E};
-
 
   virtual ~ThriftHiveMetastore_get_schema_presult() throw();
   std::vector<FieldSchema> * success;
@@ -2643,7 +2410,6 @@ class ThriftHiveMetastore_get_schema_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_schema_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_schema_with_environment_context_args__isset {
@@ -2656,9 +2422,6 @@ typedef struct _ThriftHiveMetastore_get_schema_with_environment_context_args__is
 class ThriftHiveMetastore_get_schema_with_environment_context_args {
  public:
 
-  static const char* ascii_fingerprint; // = "AE0D6195F9A2D9B00B409A0B58C4F0AE";
-  static const uint8_t binary_fingerprint[16]; // = {0xAE,0x0D,0x61,0x95,0xF9,0xA2,0xD9,0xB0,0x0B,0x40,0x9A,0x0B,0x58,0xC4,0xF0,0xAE};
-
   ThriftHiveMetastore_get_schema_with_environment_context_args(const ThriftHiveMetastore_get_schema_with_environment_context_args&);
   ThriftHiveMetastore_get_schema_with_environment_context_args& operator=(const ThriftHiveMetastore_get_schema_with_environment_context_args&);
   ThriftHiveMetastore_get_schema_with_environment_context_args() : db_name(), table_name() {
@@ -2696,16 +2459,12 @@ class ThriftHiveMetastore_get_schema_with_environment_context_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_schema_with_environment_context_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_schema_with_environment_context_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "AE0D6195F9A2D9B00B409A0B58C4F0AE";
-  static const uint8_t binary_fingerprint[16]; // = {0xAE,0x0D,0x61,0x95,0xF9,0xA2,0xD9,0xB0,0x0B,0x40,0x9A,0x0B,0x58,0xC4,0xF0,0xAE};
-
 
   virtual ~ThriftHiveMetastore_get_schema_with_environment_context_pargs() throw();
   const std::string* db_name;
@@ -2714,7 +2473,6 @@ class ThriftHiveMetastore_get_schema_with_environment_context_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_schema_with_environment_context_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_schema_with_environment_context_result__isset {
@@ -2728,9 +2486,6 @@ typedef struct _ThriftHiveMetastore_get_schema_with_environment_context_result__
 class ThriftHiveMetastore_get_schema_with_environment_context_result {
  public:
 
-  static const char* ascii_fingerprint; // = "B736CA8B9EACE5EE093DF8D460EE0F4E";
-  static const uint8_t binary_fingerprint[16]; // = {0xB7,0x36,0xCA,0x8B,0x9E,0xAC,0xE5,0xEE,0x09,0x3D,0xF8,0xD4,0x60,0xEE,0x0F,0x4E};
-
   ThriftHiveMetastore_get_schema_with_environment_context_result(const ThriftHiveMetastore_get_schema_with_environment_context_result&);
   ThriftHiveMetastore_get_schema_with_environment_context_result& operator=(const ThriftHiveMetastore_get_schema_with_environment_context_result&);
   ThriftHiveMetastore_get_schema_with_environment_context_result() {
@@ -2773,7 +2528,6 @@ class ThriftHiveMetastore_get_schema_with_environment_context_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_schema_with_environment_context_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_schema_with_environment_context_presult__isset {
@@ -2787,9 +2541,6 @@ typedef struct _ThriftHiveMetastore_get_schema_with_environment_context_presult_
 class ThriftHiveMetastore_get_schema_with_environment_context_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "B736CA8B9EACE5EE093DF8D460EE0F4E";
-  static const uint8_t binary_fingerprint[16]; // = {0xB7,0x36,0xCA,0x8B,0x9E,0xAC,0xE5,0xEE,0x09,0x3D,0xF8,0xD4,0x60,0xEE,0x0F,0x4E};
-
 
   virtual ~ThriftHiveMetastore_get_schema_with_environment_context_presult() throw();
   std::vector<FieldSchema> * success;
@@ -2801,7 +2552,6 @@ class ThriftHiveMetastore_get_schema_with_environment_context_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_schema_with_environment_context_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_create_table_args__isset {
@@ -2812,9 +2562,6 @@ typedef struct _ThriftHiveMetastore_create_table_args__isset {
 class ThriftHiveMetastore_create_table_args {
  public:
 
-  static const char* ascii_fingerprint; // = "BD1951DCA22A23C9A7B1CD46ACAE54DB";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0x19,0x51,0xDC,0xA2,0x2A,0x23,0xC9,0xA7,0xB1,0xCD,0x46,0xAC,0xAE,0x54,0xDB};
-
   ThriftHiveMetastore_create_table_args(const ThriftHiveMetastore_create_table_args&);
   ThriftHiveMetastore_create_table_args& operator=(const ThriftHiveMetastore_create_table_args&);
   ThriftHiveMetastore_create_table_args() {
@@ -2842,23 +2589,18 @@ class ThriftHiveMetastore_create_table_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_table_args& obj);
 };
 
 
 class ThriftHiveMetastore_create_table_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "BD1951DCA22A23C9A7B1CD46ACAE54DB";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0x19,0x51,0xDC,0xA2,0x2A,0x23,0xC9,0xA7,0xB1,0xCD,0x46,0xAC,0xAE,0x54,0xDB};
-
 
   virtual ~ThriftHiveMetastore_create_table_pargs() throw();
   const Table* tbl;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_table_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_create_table_result__isset {
@@ -2872,9 +2614,6 @@ typedef struct _ThriftHiveMetastore_create_table_result__isset {
 class ThriftHiveMetastore_create_table_result {
  public:
 
-  static const char* ascii_fingerprint; // = "F9EF8E19A6AC2C4E08D282524B9828EB";
-  static const uint8_t binary_fingerprint[16]; // = {0xF9,0xEF,0x8E,0x19,0xA6,0xAC,0x2C,0x4E,0x08,0xD2,0x82,0x52,0x4B,0x98,0x28,0xEB};
-
   ThriftHiveMetastore_create_table_result(const ThriftHiveMetastore_create_table_result&);
   ThriftHiveMetastore_create_table_result& operator=(const ThriftHiveMetastore_create_table_result&);
   ThriftHiveMetastore_create_table_result() {
@@ -2917,7 +2656,6 @@ class ThriftHiveMetastore_create_table_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_table_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_create_table_presult__isset {
@@ -2931,9 +2669,6 @@ typedef struct _ThriftHiveMetastore_create_table_presult__isset {
 class ThriftHiveMetastore_create_table_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "F9EF8E19A6AC2C4E08D282524B9828EB";
-  static const uint8_t binary_fingerprint[16]; // = {0xF9,0xEF,0x8E,0x19,0xA6,0xAC,0x2C,0x4E,0x08,0xD2,0x82,0x52,0x4B,0x98,0x28,0xEB};
-
 
   virtual ~ThriftHiveMetastore_create_table_presult() throw();
   AlreadyExistsException o1;
@@ -2945,7 +2680,6 @@ class ThriftHiveMetastore_create_table_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_table_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_create_table_with_environment_context_args__isset {
@@ -2957,9 +2691,6 @@ typedef struct _ThriftHiveMetastore_create_table_with_environment_context_args__
 class ThriftHiveMetastore_create_table_with_environment_context_args {
  public:
 
-  static const char* ascii_fingerprint; // = "095040B2E2E4564E171FB653FDCE0B95";
-  static const uint8_t binary_fingerprint[16]; // = {0x09,0x50,0x40,0xB2,0xE2,0xE4,0x56,0x4E,0x17,0x1F,0xB6,0x53,0xFD,0xCE,0x0B,0x95};
-
   ThriftHiveMetastore_create_table_with_environment_context_args(const ThriftHiveMetastore_create_table_with_environment_context_args&);
   ThriftHiveMetastore_create_table_with_environment_context_args& operator=(const ThriftHiveMetastore_create_table_with_environment_context_args&);
   ThriftHiveMetastore_create_table_with_environment_context_args() {
@@ -2992,16 +2723,12 @@ class ThriftHiveMetastore_create_table_with_environment_context_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_table_with_environment_context_args& obj);
 };
 
 
 class ThriftHiveMetastore_create_table_with_environment_context_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "095040B2E2E4564E171FB653FDCE0B95";
-  static const uint8_t binary_fingerprint[16]; // = {0x09,0x50,0x40,0xB2,0xE2,0xE4,0x56,0x4E,0x17,0x1F,0xB6,0x53,0xFD,0xCE,0x0B,0x95};
-
 
   virtual ~ThriftHiveMetastore_create_table_with_environment_context_pargs() throw();
   const Table* tbl;
@@ -3009,7 +2736,6 @@ class ThriftHiveMetastore_create_table_with_environment_context_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_table_with_environment_context_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_create_table_with_environment_context_result__isset {
@@ -3023,9 +2749,6 @@ typedef struct _ThriftHiveMetastore_create_table_with_environment_context_result
 class ThriftHiveMetastore_create_table_with_environment_context_result {
  public:
 
-  static const char* ascii_fingerprint; // = "F9EF8E19A6AC2C4E08D282524B9828EB";
-  static const uint8_t binary_fingerprint[16]; // = {0xF9,0xEF,0x8E,0x19,0xA6,0xAC,0x2C,0x4E,0x08,0xD2,0x82,0x52,0x4B,0x98,0x28,0xEB};
-
   ThriftHiveMetastore_create_table_with_environment_context_result(const ThriftHiveMetastore_create_table_with_environment_context_result&);
   ThriftHiveMetastore_create_table_with_environment_context_result& operator=(const ThriftHiveMetastore_create_table_with_environment_context_result&);
   ThriftHiveMetastore_create_table_with_environment_context_result() {
@@ -3068,7 +2791,6 @@ class ThriftHiveMetastore_create_table_with_environment_context_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_table_with_environment_context_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_create_table_with_environment_context_presult__isset {
@@ -3082,9 +2804,6 @@ typedef struct _ThriftHiveMetastore_create_table_with_environment_context_presul
 class ThriftHiveMetastore_create_table_with_environment_context_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "F9EF8E19A6AC2C4E08D282524B9828EB";
-  static const uint8_t binary_fingerprint[16]; // = {0xF9,0xEF,0x8E,0x19,0xA6,0xAC,0x2C,0x4E,0x08,0xD2,0x82,0x52,0x4B,0x98,0x28,0xEB};
-
 
   virtual ~ThriftHiveMetastore_create_table_with_environment_context_presult() throw();
   AlreadyExistsException o1;
@@ -3096,7 +2815,6 @@ class ThriftHiveMetastore_create_table_with_environment_context_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_create_table_with_environment_context_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_drop_table_args__isset {
@@ -3109,9 +2827,6 @@ typedef struct _ThriftHiveMetastore_drop_table_args__isset {
 class ThriftHiveMetastore_drop_table_args {
  public:
 
-  static const char* ascii_fingerprint; // = "7F21FB535884165D6350077C7B970E93";
-  static const uint8_t binary_fingerprint[16]; // = {0x7F,0x21,0xFB,0x53,0x58,0x84,0x16,0x5D,0x63,0x50,0x07,0x7C,0x7B,0x97,0x0E,0x93};
-
   ThriftHiveMetastore_drop_table_args(const ThriftHiveMetastore_drop_table_args&);
   ThriftHiveMetastore_drop_table_args& operator=(const ThriftHiveMetastore_drop_table_args&);
   ThriftHiveMetastore_drop_table_args() : dbname(), name(), deleteData(0) {
@@ -3149,16 +2864,12 @@ class ThriftHiveMetastore_drop_table_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_table_args& obj);
 };
 
 
 class ThriftHiveMetastore_drop_table_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "7F21FB535884165D6350077C7B970E93";
-  static const uint8_t binary_fingerprint[16]; // = {0x7F,0x21,0xFB,0x53,0x58,0x84,0x16,0x5D,0x63,0x50,0x07,0x7C,0x7B,0x97,0x0E,0x93};
-
 
   virtual ~ThriftHiveMetastore_drop_table_pargs() throw();
   const std::string* dbname;
@@ -3167,7 +2878,6 @@ class ThriftHiveMetastore_drop_table_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_table_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_drop_table_result__isset {
@@ -3179,9 +2889,6 @@ typedef struct _ThriftHiveMetastore_drop_table_result__isset {
 class ThriftHiveMetastore_drop_table_result {
  public:
 
-  static const char* ascii_fingerprint; // = "A963880F55F6F22FCC67AD7BA0E1894D";
-  static const uint8_t binary_fingerprint[16]; // = {0xA9,0x63,0x88,0x0F,0x55,0xF6,0xF2,0x2F,0xCC,0x67,0xAD,0x7B,0xA0,0xE1,0x89,0x4D};
-
   ThriftHiveMetastore_drop_table_result(const ThriftHiveMetastore_drop_table_result&);
   ThriftHiveMetastore_drop_table_result& operator=(const ThriftHiveMetastore_drop_table_result&);
   ThriftHiveMetastore_drop_table_result() {
@@ -3214,7 +2921,6 @@ class ThriftHiveMetastore_drop_table_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_table_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_drop_table_presult__isset {
@@ -3226,9 +2932,6 @@ typedef struct _ThriftHiveMetastore_drop_table_presult__isset {
 class ThriftHiveMetastore_drop_table_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "A963880F55F6F22FCC67AD7BA0E1894D";
-  static const uint8_t binary_fingerprint[16]; // = {0xA9,0x63,0x88,0x0F,0x55,0xF6,0xF2,0x2F,0xCC,0x67,0xAD,0x7B,0xA0,0xE1,0x89,0x4D};
-
 
   virtual ~ThriftHiveMetastore_drop_table_presult() throw();
   NoSuchObjectException o1;
@@ -3238,7 +2941,6 @@ class ThriftHiveMetastore_drop_table_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_table_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_drop_table_with_environment_context_args__isset {
@@ -3252,9 +2954,6 @@ typedef struct _ThriftHiveMetastore_drop_table_with_environment_context_args__is
 class ThriftHiveMetastore_drop_table_with_environment_context_args {
  public:
 
-  static const char* ascii_fingerprint; // = "0ED75AC6CEC5A207447A701578264DA4";
-  static const uint8_t binary_fingerprint[16]; // = {0x0E,0xD7,0x5A,0xC6,0xCE,0xC5,0xA2,0x07,0x44,0x7A,0x70,0x15,0x78,0x26,0x4D,0xA4};
-
   ThriftHiveMetastore_drop_table_with_environment_context_args(const ThriftHiveMetastore_drop_table_with_environment_context_args&);
   ThriftHiveMetastore_drop_table_with_environment_context_args& operator=(const ThriftHiveMetastore_drop_table_with_environment_context_args&);
   ThriftHiveMetastore_drop_table_with_environment_context_args() : dbname(), name(), deleteData(0) {
@@ -3297,16 +2996,12 @@ class ThriftHiveMetastore_drop_table_with_environment_context_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_table_with_environment_context_args& obj);
 };
 
 
 class ThriftHiveMetastore_drop_table_with_environment_context_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "0ED75AC6CEC5A207447A701578264DA4";
-  static const uint8_t binary_fingerprint[16]; // = {0x0E,0xD7,0x5A,0xC6,0xCE,0xC5,0xA2,0x07,0x44,0x7A,0x70,0x15,0x78,0x26,0x4D,0xA4};
-
 
   virtual ~ThriftHiveMetastore_drop_table_with_environment_context_pargs() throw();
   const std::string* dbname;
@@ -3316,7 +3011,6 @@ class ThriftHiveMetastore_drop_table_with_environment_context_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_table_with_environment_context_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_drop_table_with_environment_context_result__isset {
@@ -3328,9 +3022,6 @@ typedef struct _ThriftHiveMetastore_drop_table_with_environment_context_result__
 class ThriftHiveMetastore_drop_table_with_environment_context_result {
  public:
 
-  static const char* ascii_fingerprint; // = "A963880F55F6F22FCC67AD7BA0E1894D";
-  static const uint8_t binary_fingerprint[16]; // = {0xA9,0x63,0x88,0x0F,0x55,0xF6,0xF2,0x2F,0xCC,0x67,0xAD,0x7B,0xA0,0xE1,0x89,0x4D};
-
   ThriftHiveMetastore_drop_table_with_environment_context_result(const ThriftHiveMetastore_drop_table_with_environment_context_result&);
   ThriftHiveMetastore_drop_table_with_environment_context_result& operator=(const ThriftHiveMetastore_drop_table_with_environment_context_result&);
   ThriftHiveMetastore_drop_table_with_environment_context_result() {
@@ -3363,7 +3054,6 @@ class ThriftHiveMetastore_drop_table_with_environment_context_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_table_with_environment_context_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_drop_table_with_environment_context_presult__isset {
@@ -3375,9 +3065,6 @@ typedef struct _ThriftHiveMetastore_drop_table_with_environment_context_presult_
 class ThriftHiveMetastore_drop_table_with_environment_context_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "A963880F55F6F22FCC67AD7BA0E1894D";
-  static const uint8_t binary_fingerprint[16]; // = {0xA9,0x63,0x88,0x0F,0x55,0xF6,0xF2,0x2F,0xCC,0x67,0xAD,0x7B,0xA0,0xE1,0x89,0x4D};
-
 
   virtual ~ThriftHiveMetastore_drop_table_with_environment_context_presult() throw();
   NoSuchObjectException o1;
@@ -3387,7 +3074,6 @@ class ThriftHiveMetastore_drop_table_with_environment_context_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_drop_table_with_environment_context_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_tables_args__isset {
@@ -3399,9 +3085,6 @@ typedef struct _ThriftHiveMetastore_get_tables_args__isset {
 class ThriftHiveMetastore_get_tables_args {
  public:
 
-  static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
-  static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
   ThriftHiveMetastore_get_tables_args(const ThriftHiveMetastore_get_tables_args&);
   ThriftHiveMetastore_get_tables_args& operator=(const ThriftHiveMetastore_get_tables_args&);
   ThriftHiveMetastore_get_tables_args() : db_name(), pattern() {
@@ -3434,16 +3117,12 @@ class ThriftHiveMetastore_get_tables_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_tables_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_tables_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
-  static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
 
   virtual ~ThriftHiveMetastore_get_tables_pargs() throw();
   const std::string* db_name;
@@ -3451,7 +3130,6 @@ class ThriftHiveMetastore_get_tables_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_tables_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_tables_result__isset {
@@ -3463,9 +3141,6 @@ typedef struct _ThriftHiveMetastore_get_tables_result__isset {
 class ThriftHiveMetastore_get_tables_result {
  public:
 
-  static const char* ascii_fingerprint; // = "96F383CF9CB8BE09061ECB825FE717B6";
-  static const uint8_t binary_fingerprint[16]; // = {0x96,0xF3,0x83,0xCF,0x9C,0xB8,0xBE,0x09,0x06,0x1E,0xCB,0x82,0x5F,0xE7,0x17,0xB6};
-
   ThriftHiveMetastore_get_tables_result(const ThriftHiveMetastore_get_tables_result&);
   ThriftHiveMetastore_get_tables_result& operator=(const ThriftHiveMetastore_get_tables_result&);
   ThriftHiveMetastore_get_tables_result() {
@@ -3498,7 +3173,6 @@ class ThriftHiveMetastore_get_tables_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_tables_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_tables_presult__isset {
@@ -3510,9 +3184,6 @@ typedef struct _ThriftHiveMetastore_get_tables_presult__isset {
 class ThriftHiveMetastore_get_tables_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "96F383CF9CB8BE09061ECB825FE717B6";
-  static const uint8_t binary_fingerprint[16]; // = {0x96,0xF3,0x83,0xCF,0x9C,0xB8,0xBE,0x09,0x06,0x1E,0xCB,0x82,0x5F,0xE7,0x17,0xB6};
-
 
   virtual ~ThriftHiveMetastore_get_tables_presult() throw();
   std::vector<std::string> * success;
@@ -3522,7 +3193,6 @@ class ThriftHiveMetastore_get_tables_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_tables_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_all_tables_args__isset {
@@ -3533,9 +3203,6 @@ typedef struct _ThriftHiveMetastore_get_all_tables_args__isset {
 class ThriftHiveMetastore_get_all_tables_args {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
   ThriftHiveMetastore_get_all_tables_args(const ThriftHiveMetastore_get_all_tables_args&);
   ThriftHiveMetastore_get_all_tables_args& operator=(const ThriftHiveMetastore_get_all_tables_args&);
   ThriftHiveMetastore_get_all_tables_args() : db_name() {
@@ -3563,23 +3230,18 @@ class ThriftHiveMetastore_get_all_tables_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_all_tables_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_all_tables_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
 
   virtual ~ThriftHiveMetastore_get_all_tables_pargs() throw();
   const std::string* db_name;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_all_tables_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_all_tables_result__isset {
@@ -3591,9 +3253,6 @@ typedef struct _ThriftHiveMetastore_get_all_tables_result__isset {
 class ThriftHiveMetastore_get_all_tables_result {
  public:
 
-  static const char* ascii_fingerprint; // = "96F383CF9CB8BE09061ECB825FE717B6";
-  static const uint8_t binary_fingerprint[16]; // = {0x96,0xF3,0x83,0xCF,0x9C,0xB8,0xBE,0x09,0x06,0x1E,0xCB,0x82,0x5F,0xE7,0x17,0xB6};
-
   ThriftHiveMetastore_get_all_tables_result(const ThriftHiveMetastore_get_all_tables_result&);
   ThriftHiveMetastore_get_all_tables_result& operator=(const ThriftHiveMetastore_get_all_tables_result&);
   ThriftHiveMetastore_get_all_tables_result() {
@@ -3626,7 +3285,6 @@ class ThriftHiveMetastore_get_all_tables_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_all_tables_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_all_tables_presult__isset {
@@ -3638,9 +3296,6 @@ typedef struct _ThriftHiveMetastore_get_all_tables_presult__isset {
 class ThriftHiveMetastore_get_all_tables_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "96F383CF9CB8BE09061ECB825FE717B6";
-  static const uint8_t binary_fingerprint[16]; // = {0x96,0xF3,0x83,0xCF,0x9C,0xB8,0xBE,0x09,0x06,0x1E,0xCB,0x82,0x5F,0xE7,0x17,0xB6};
-
 
   virtual ~ThriftHiveMetastore_get_all_tables_presult() throw();
   std::vector<std::string> * success;
@@ -3650,7 +3305,6 @@ class ThriftHiveMetastore_get_all_tables_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_all_tables_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_table_args__isset {
@@ -3662,9 +3316,6 @@ typedef struct _ThriftHiveMetastore_get_table_args__isset {
 class ThriftHiveMetastore_get_table_args {
  public:
 
-  static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
-  static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
   ThriftHiveMetastore_get_table_args(const ThriftHiveMetastore_get_table_args&);
   ThriftHiveMetastore_get_table_args& operator=(const ThriftHiveMetastore_get_table_args&);
   ThriftHiveMetastore_get_table_args() : dbname(), tbl_name() {
@@ -3697,16 +3348,12 @@ class ThriftHiveMetastore_get_table_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_table_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_table_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
-  static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
 
   virtual ~ThriftHiveMetastore_get_table_pargs() throw();
   const std::string* dbname;
@@ -3714,7 +3361,6 @@ class ThriftHiveMetastore_get_table_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_table_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_table_result__isset {
@@ -3727,9 +3373,6 @@ typedef struct _ThriftHiveMetastore_get_table_result__isset {
 class ThriftHiveMetastore_get_table_result {
  public:
 
-  static const char* ascii_fingerprint; // = "5A4F58BE0D70BCECE999677432BFE5A4";
-  static const uint8_t binary_fingerprint[16]; // = {0x5A,0x4F,0x58,0xBE,0x0D,0x70,0xBC,0xEC,0xE9,0x99,0x67,0x74,0x32,0xBF,0xE5,0xA4};
-
   ThriftHiveMetastore_get_table_result(const ThriftHiveMetastore_get_table_result&);
   ThriftHiveMetastore_get_table_result& operator=(const ThriftHiveMetastore_get_table_result&);
   ThriftHiveMetastore_get_table_result() {
@@ -3767,7 +3410,6 @@ class ThriftHiveMetastore_get_table_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_table_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_table_presult__isset {
@@ -3780,9 +3422,6 @@ typedef struct _ThriftHiveMetastore_get_table_presult__isset {
 class ThriftHiveMetastore_get_table_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "5A4F58BE0D70BCECE999677432BFE5A4";
-  static const uint8_t binary_fingerprint[16]; // = {0x5A,0x4F,0x58,0xBE,0x0D,0x70,0xBC,0xEC,0xE9,0x99,0x67,0x74,0x32,0xBF,0xE5,0xA4};
-
 
   virtual ~ThriftHiveMetastore_get_table_presult() throw();
   Table* success;
@@ -3793,7 +3432,6 @@ class ThriftHiveMetastore_get_table_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_table_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_table_objects_by_name_args__isset {
@@ -3805,9 +3443,6 @@ typedef struct _ThriftHiveMetastore_get_table_objects_by_name_args__isset {
 class ThriftHiveMetastore_get_table_objects_by_name_args {
  public:
 
-  static const char* ascii_fingerprint; // = "25702B8D5E28AA39160F267DABBC8446";
-  static const uint8_t binary_fingerprint[16]; // = {0x25,0x70,0x2B,0x8D,0x5E,0x28,0xAA,0x39,0x16,0x0F,0x26,0x7D,0xAB,0xBC,0x84,0x46};
-
   ThriftHiveMetastore_get_table_objects_by_name_args(const ThriftHiveMetastore_get_table_objects_by_name_args&);
   ThriftHiveMetastore_get_table_objects_by_name_args& operator=(const ThriftHiveMetastore_get_table_objects_by_name_args&);
   ThriftHiveMetastore_get_table_objects_by_name_args() : dbname() {
@@ -3840,16 +3475,12 @@ class ThriftHiveMetastore_get_table_objects_by_name_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_table_objects_by_name_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_table_objects_by_name_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "25702B8D5E28AA39160F267DABBC8446";
-  static const uint8_t binary_fingerprint[16]; // = {0x25,0x70,0x2B,0x8D,0x5E,0x28,0xAA,0x39,0x16,0x0F,0x26,0x7D,0xAB,0xBC,0x84,0x46};
-
 
   virtual ~ThriftHiveMetastore_get_table_objects_by_name_pargs() throw();
   const std::string* dbname;
@@ -3857,7 +3488,6 @@ class ThriftHiveMetastore_get_table_objects_by_name_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_table_objects_by_name_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_table_objects_by_name_result__isset {
@@ -3871,9 +3501,6 @@ typedef struct _ThriftHiveMetastore_get_table_objects_by_name_result__isset {
 class ThriftHiveMetastore_get_table_objects_by_name_result {
  public:
 
-  static const char* ascii_fingerprint; // = "CE94435B55EBFD9CDCC30B61FBCA4E92";
-  static const uint8_t binary_fingerprint[16]; // = {0xCE,0x94,0x43,0x5B,0x55,0xEB,0xFD,0x9C,0xDC,0xC3,0x0B,0x61,0xFB,0xCA,0x4E,0x92};
-
   ThriftHiveMetastore_get_table_objects_by_name_result(const ThriftHiveMetastore_get_table_objects_by_name_result&);
   ThriftHiveMetastore_get_table_objects_by_name_result& operator=(const ThriftHiveMetastore_get_table_objects_by_name_result&);
   ThriftHiveMetastore_get_table_objects_by_name_result() {
@@ -3916,7 +3543,6 @@ class ThriftHiveMetastore_get_table_objects_by_name_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_table_objects_by_name_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_table_objects_by_name_presult__isset {
@@ -3930,9 +3556,6 @@ typedef struct _ThriftHiveMetastore_get_table_objects_by_name_presult__isset {
 class ThriftHiveMetastore_get_table_objects_by_name_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "CE94435B55EBFD9CDCC30B61FBCA4E92";
-  static const uint8_t binary_fingerprint[16]; // = {0xCE,0x94,0x43,0x5B,0x55,0xEB,0xFD,0x9C,0xDC,0xC3,0x0B,0x61,0xFB,0xCA,0x4E,0x92};
-
 
   virtual ~ThriftHiveMetastore_get_table_objects_by_name_presult() throw();
   std::vector<Table> * success;
@@ -3944,7 +3567,6 @@ class ThriftHiveMetastore_get_table_objects_by_name_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_table_objects_by_name_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_table_names_by_filter_args__isset {
@@ -3957,9 +3579,6 @@ typedef struct _ThriftHiveMetastore_get_table_names_by_filter_args__isset {
 class ThriftHiveMetastore_get_table_names_by_filter_args {
  public:
 
-  static const char* ascii_fingerprint; // = "52F7D5E8217C4B8FC14F1F30BF2EB41C";
-  static const uint8_t binary_fingerprint[16]; // = {0x52,0xF7,0xD5,0xE8,0x21,0x7C,0x4B,0x8F,0xC1,0x4F,0x1F,0x30,0xBF,0x2E,0xB4,0x1C};
-
   ThriftHiveMetastore_get_table_names_by_filter_args(const ThriftHiveMetastore_get_table_names_by_filter_args&);
   ThriftHiveMetastore_get_table_names_by_filter_args& operator=(const ThriftHiveMetastore_get_table_names_by_filter_args&);
   ThriftHiveMetastore_get_table_names_by_filter_args() : dbname(), filter(), max_tables(-1) {
@@ -3997,16 +3616,12 @@ class ThriftHiveMetastore_get_table_names_by_filter_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_table_names_by_filter_args& obj);
 };
 
 
 class ThriftHiveMetastore_get_table_names_by_filter_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "52F7D5E8217C4B8FC14F1F30BF2EB41C";
-  static const uint8_t binary_fingerprint[16]; // = {0x52,0xF7,0xD5,0xE8,0x21,0x7C,0x4B,0x8F,0xC1,0x4F,0x1F,0x30,0xBF,0x2E,0xB4,0x1C};
-
 
   virtual ~ThriftHiveMetastore_get_table_names_by_filter_pargs() throw();
   const std::string* dbname;
@@ -4015,7 +3630,6 @@ class ThriftHiveMetastore_get_table_names_by_filter_pargs {
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_table_names_by_filter_pargs& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_table_names_by_filter_result__isset {
@@ -4029,9 +3643,6 @@ typedef struct _ThriftHiveMetastore_get_table_names_by_filter_result__isset {
 class ThriftHiveMetastore_get_table_names_by_filter_result {
  public:
 
-  static const char* ascii_fingerprint; // = "1A22B93AE2F8EC65600D067A19CEEE95";
-  static const uint8_t binary_fingerprint[16]; // = {0x1A,0x22,0xB9,0x3A,0xE2,0xF8,0xEC,0x65,0x60,0x0D,0x06,0x7A,0x19,0xCE,0xEE,0x95};
-
   ThriftHiveMetastore_get_table_names_by_filter_result(const ThriftHiveMetastore_get_table_names_by_filter_result&);
   ThriftHiveMetastore_get_table_names_by_filter_result& operator=(const ThriftHiveMetastore_get_table_names_by_filter_result&);
   ThriftHiveMetastore_get_table_names_by_filter_result() {
@@ -4074,7 +3685,6 @@ class ThriftHiveMetastore_get_table_names_by_filter_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_table_names_by_filter_result& obj);
 };
 
 typedef struct _ThriftHiveMetastore_get_table_names_by_filter_presult__isset {
@@ -4088,9 +3698,6 @@ typedef struct _ThriftHiveMetastore_get_table_names_by_filter_presult__isset {
 class ThriftHiveMetastore_get_table_names_by_filter_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "1A22B93AE2F8EC65600D067A19CEEE95";
-  static const uint8_t binary_fingerprint[16]; // = {0x1A,0x22,0xB9,0x3A,0xE2,0xF8,0xEC,0x65,0x60,0x0D,0x06,0x7A,0x19,0xCE,0xEE,0x95};
-
 
   virtual ~ThriftHiveMetastore_get_table_names_by_filter_presult() throw();
   std::vector<std::string> * success;
@@ -4102,7 +3709,6 @@ class ThriftHiveMetastore_get_table_names_by_filter_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHiveMetastore_get_table_names_by_filter_presult& obj);
 };
 
 typedef struct _ThriftHiveMetastore_alter_table_args__isset {
@@ -4115,9 +3721,6 @@ typedef struct _ThriftHiveMetastore_alter_table_args__isset {
 class ThriftHiveMetastore_alter_table_args {
  public:
 
-  static const char* ascii_fingerprint; // = "45DE8DCE89D06DFF84837CFEB78A1302";
-  static const uint8_t binary_fingerprint[16]; // = {0x45,0xDE,0x8D,0xCE,0x89,0xD0,0x6D,0xFF,0x84,0x83,0x7C,0xFE,0xB7,0x8A,0x13,0x02};
-
   ThriftHiveMetastore_alter_table_args(const ThriftHiveMetastore_alter_table_args&);
   ThriftHiveMetastore_alter_table_args& operator=(const ThriftHiveMetastore_alter_table_args&);
   ThriftHiveMetastore_alter_table_args() : dbname(), tbl_name() {
@@ -4155,16 +3758,12 @@ class ThriftHiveMetastore_alter_table_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apa

<TRUNCATED>
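
Every hunk in this excerpt (and in the TCLIService_types.h diff in the next message) applies the same mechanical change from regenerating with the Thrift 0.9.3 compiler: the per-class ascii_fingerprint/binary_fingerprint statics are dropped, and the friend std::ostream& operator<< declaration is replaced by a virtual printTo(std::ostream&) member plus a free inline operator<< defined after the class. A minimal, self-contained sketch of that new printing shape follows; "ExampleArgs" and its fields are hypothetical stand-ins, not any of the generated classes.

    #include <iostream>
    #include <string>

    // Sketch of the 0.9.3-style pattern seen in the hunks above:
    // the struct exposes a virtual printTo(), and operator<< is a
    // non-friend inline function that forwards to it.
    class ExampleArgs {
     public:
      std::string db_name;
      std::string table_name;

      virtual ~ExampleArgs() {}

      virtual void printTo(std::ostream& out) const {
        out << "ExampleArgs(db_name=" << db_name
            << ", table_name=" << table_name << ")";
      }
    };

    inline std::ostream& operator<<(std::ostream& out, const ExampleArgs& obj)
    {
      obj.printTo(out);
      return out;
    }

    int main() {
      ExampleArgs a;
      a.db_name = "default";
      a.table_name = "sample_table";
      std::cout << a << std::endl;  // streams via printTo()
      return 0;
    }

Because operator<< only calls the public printTo(), the friend declaration and the fingerprint constants are no longer needed, which is why every generated class in these diffs loses those three lines.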

[05/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/TCLIService_types.h
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-cpp/TCLIService_types.h b/service/src/gen/thrift/gen-cpp/TCLIService_types.h
index 4536b41..b078c99 100644
--- a/service/src/gen/thrift/gen-cpp/TCLIService_types.h
+++ b/service/src/gen/thrift/gen-cpp/TCLIService_types.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -338,9 +338,6 @@ typedef struct _TTypeQualifierValue__isset {
 class TTypeQualifierValue {
  public:
 
-  static const char* ascii_fingerprint; // = "A7801670116150C65ACA43E6F679BA79";
-  static const uint8_t binary_fingerprint[16]; // = {0xA7,0x80,0x16,0x70,0x11,0x61,0x50,0xC6,0x5A,0xCA,0x43,0xE6,0xF6,0x79,0xBA,0x79};
-
   TTypeQualifierValue(const TTypeQualifierValue&);
   TTypeQualifierValue& operator=(const TTypeQualifierValue&);
   TTypeQualifierValue() : i32Value(0), stringValue() {
@@ -377,18 +374,21 @@ class TTypeQualifierValue {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TTypeQualifierValue& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TTypeQualifierValue &a, TTypeQualifierValue &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TTypeQualifierValue& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TTypeQualifiers {
  public:
 
-  static const char* ascii_fingerprint; // = "6C72981CFA989214285648FA8C196C47";
-  static const uint8_t binary_fingerprint[16]; // = {0x6C,0x72,0x98,0x1C,0xFA,0x98,0x92,0x14,0x28,0x56,0x48,0xFA,0x8C,0x19,0x6C,0x47};
-
   TTypeQualifiers(const TTypeQualifiers&);
   TTypeQualifiers& operator=(const TTypeQualifiers&);
   TTypeQualifiers() {
@@ -414,11 +414,17 @@ class TTypeQualifiers {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TTypeQualifiers& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TTypeQualifiers &a, TTypeQualifiers &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TTypeQualifiers& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TPrimitiveTypeEntry__isset {
   _TPrimitiveTypeEntry__isset() : typeQualifiers(false) {}
   bool typeQualifiers :1;
@@ -427,9 +433,6 @@ typedef struct _TPrimitiveTypeEntry__isset {
 class TPrimitiveTypeEntry {
  public:
 
-  static const char* ascii_fingerprint; // = "755674F6A5C8EB47868686AE386FBC1C";
-  static const uint8_t binary_fingerprint[16]; // = {0x75,0x56,0x74,0xF6,0xA5,0xC8,0xEB,0x47,0x86,0x86,0x86,0xAE,0x38,0x6F,0xBC,0x1C};
-
   TPrimitiveTypeEntry(const TPrimitiveTypeEntry&);
   TPrimitiveTypeEntry& operator=(const TPrimitiveTypeEntry&);
   TPrimitiveTypeEntry() : type((TTypeId::type)0) {
@@ -464,18 +467,21 @@ class TPrimitiveTypeEntry {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TPrimitiveTypeEntry& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TPrimitiveTypeEntry &a, TPrimitiveTypeEntry &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TPrimitiveTypeEntry& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TArrayTypeEntry {
  public:
 
-  static const char* ascii_fingerprint; // = "E86CACEB22240450EDCBEFC3A83970E4";
-  static const uint8_t binary_fingerprint[16]; // = {0xE8,0x6C,0xAC,0xEB,0x22,0x24,0x04,0x50,0xED,0xCB,0xEF,0xC3,0xA8,0x39,0x70,0xE4};
-
   TArrayTypeEntry(const TArrayTypeEntry&);
   TArrayTypeEntry& operator=(const TArrayTypeEntry&);
   TArrayTypeEntry() : objectTypePtr(0) {
@@ -501,18 +507,21 @@ class TArrayTypeEntry {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TArrayTypeEntry& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TArrayTypeEntry &a, TArrayTypeEntry &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TArrayTypeEntry& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TMapTypeEntry {
  public:
 
-  static const char* ascii_fingerprint; // = "989D1F1AE8D148D5E2119FFEC4BBBEE3";
-  static const uint8_t binary_fingerprint[16]; // = {0x98,0x9D,0x1F,0x1A,0xE8,0xD1,0x48,0xD5,0xE2,0x11,0x9F,0xFE,0xC4,0xBB,0xBE,0xE3};
-
   TMapTypeEntry(const TMapTypeEntry&);
   TMapTypeEntry& operator=(const TMapTypeEntry&);
   TMapTypeEntry() : keyTypePtr(0), valueTypePtr(0) {
@@ -543,18 +552,21 @@ class TMapTypeEntry {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TMapTypeEntry& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TMapTypeEntry &a, TMapTypeEntry &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TMapTypeEntry& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TStructTypeEntry {
  public:
 
-  static const char* ascii_fingerprint; // = "91F548CA159B4AB4291F5741AC161402";
-  static const uint8_t binary_fingerprint[16]; // = {0x91,0xF5,0x48,0xCA,0x15,0x9B,0x4A,0xB4,0x29,0x1F,0x57,0x41,0xAC,0x16,0x14,0x02};
-
   TStructTypeEntry(const TStructTypeEntry&);
   TStructTypeEntry& operator=(const TStructTypeEntry&);
   TStructTypeEntry() {
@@ -580,18 +592,21 @@ class TStructTypeEntry {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TStructTypeEntry& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TStructTypeEntry &a, TStructTypeEntry &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TStructTypeEntry& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TUnionTypeEntry {
  public:
 
-  static const char* ascii_fingerprint; // = "91F548CA159B4AB4291F5741AC161402";
-  static const uint8_t binary_fingerprint[16]; // = {0x91,0xF5,0x48,0xCA,0x15,0x9B,0x4A,0xB4,0x29,0x1F,0x57,0x41,0xAC,0x16,0x14,0x02};
-
   TUnionTypeEntry(const TUnionTypeEntry&);
   TUnionTypeEntry& operator=(const TUnionTypeEntry&);
   TUnionTypeEntry() {
@@ -617,18 +632,21 @@ class TUnionTypeEntry {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TUnionTypeEntry& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TUnionTypeEntry &a, TUnionTypeEntry &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TUnionTypeEntry& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TUserDefinedTypeEntry {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
   TUserDefinedTypeEntry(const TUserDefinedTypeEntry&);
   TUserDefinedTypeEntry& operator=(const TUserDefinedTypeEntry&);
   TUserDefinedTypeEntry() : typeClassName() {
@@ -654,11 +672,17 @@ class TUserDefinedTypeEntry {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TUserDefinedTypeEntry& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TUserDefinedTypeEntry &a, TUserDefinedTypeEntry &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TUserDefinedTypeEntry& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TTypeEntry__isset {
   _TTypeEntry__isset() : primitiveEntry(false), arrayEntry(false), mapEntry(false), structEntry(false), unionEntry(false), userDefinedTypeEntry(false) {}
   bool primitiveEntry :1;
@@ -672,9 +696,6 @@ typedef struct _TTypeEntry__isset {
 class TTypeEntry {
  public:
 
-  static const char* ascii_fingerprint; // = "2FE56D9097E325DAA7E933738C6D325F";
-  static const uint8_t binary_fingerprint[16]; // = {0x2F,0xE5,0x6D,0x90,0x97,0xE3,0x25,0xDA,0xA7,0xE9,0x33,0x73,0x8C,0x6D,0x32,0x5F};
-
   TTypeEntry(const TTypeEntry&);
   TTypeEntry& operator=(const TTypeEntry&);
   TTypeEntry() {
@@ -727,18 +748,21 @@ class TTypeEntry {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TTypeEntry& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TTypeEntry &a, TTypeEntry &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TTypeEntry& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TTypeDesc {
  public:
 
-  static const char* ascii_fingerprint; // = "90B3C5A0B73419A84E85E0E48C452AA5";
-  static const uint8_t binary_fingerprint[16]; // = {0x90,0xB3,0xC5,0xA0,0xB7,0x34,0x19,0xA8,0x4E,0x85,0xE0,0xE4,0x8C,0x45,0x2A,0xA5};
-
   TTypeDesc(const TTypeDesc&);
   TTypeDesc& operator=(const TTypeDesc&);
   TTypeDesc() {
@@ -764,11 +788,17 @@ class TTypeDesc {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TTypeDesc& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TTypeDesc &a, TTypeDesc &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TTypeDesc& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TColumnDesc__isset {
   _TColumnDesc__isset() : comment(false) {}
   bool comment :1;
@@ -777,9 +807,6 @@ typedef struct _TColumnDesc__isset {
 class TColumnDesc {
  public:
 
-  static const char* ascii_fingerprint; // = "EABED9009D5FCABFCA65612069F2A849";
-  static const uint8_t binary_fingerprint[16]; // = {0xEA,0xBE,0xD9,0x00,0x9D,0x5F,0xCA,0xBF,0xCA,0x65,0x61,0x20,0x69,0xF2,0xA8,0x49};
-
   TColumnDesc(const TColumnDesc&);
   TColumnDesc& operator=(const TColumnDesc&);
   TColumnDesc() : columnName(), position(0), comment() {
@@ -824,18 +851,21 @@ class TColumnDesc {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TColumnDesc& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TColumnDesc &a, TColumnDesc &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TColumnDesc& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TTableSchema {
  public:
 
-  static const char* ascii_fingerprint; // = "7A1811E49313E5977107FC667B20E39D";
-  static const uint8_t binary_fingerprint[16]; // = {0x7A,0x18,0x11,0xE4,0x93,0x13,0xE5,0x97,0x71,0x07,0xFC,0x66,0x7B,0x20,0xE3,0x9D};
-
   TTableSchema(const TTableSchema&);
   TTableSchema& operator=(const TTableSchema&);
   TTableSchema() {
@@ -861,11 +891,17 @@ class TTableSchema {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TTableSchema& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TTableSchema &a, TTableSchema &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TTableSchema& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TBoolValue__isset {
   _TBoolValue__isset() : value(false) {}
   bool value :1;
@@ -874,9 +910,6 @@ typedef struct _TBoolValue__isset {
 class TBoolValue {
  public:
 
-  static const char* ascii_fingerprint; // = "BF054652DEF86253C2BEE7D947F167DD";
-  static const uint8_t binary_fingerprint[16]; // = {0xBF,0x05,0x46,0x52,0xDE,0xF8,0x62,0x53,0xC2,0xBE,0xE7,0xD9,0x47,0xF1,0x67,0xDD};
-
   TBoolValue(const TBoolValue&);
   TBoolValue& operator=(const TBoolValue&);
   TBoolValue() : value(0) {
@@ -906,11 +939,17 @@ class TBoolValue {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TBoolValue& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TBoolValue &a, TBoolValue &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TBoolValue& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TByteValue__isset {
   _TByteValue__isset() : value(false) {}
   bool value :1;
@@ -919,9 +958,6 @@ typedef struct _TByteValue__isset {
 class TByteValue {
  public:
 
-  static const char* ascii_fingerprint; // = "9C15298ACB5D04AEA9B52D5DDE6F9208";
-  static const uint8_t binary_fingerprint[16]; // = {0x9C,0x15,0x29,0x8A,0xCB,0x5D,0x04,0xAE,0xA9,0xB5,0x2D,0x5D,0xDE,0x6F,0x92,0x08};
-
   TByteValue(const TByteValue&);
   TByteValue& operator=(const TByteValue&);
   TByteValue() : value(0) {
@@ -951,11 +987,17 @@ class TByteValue {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TByteValue& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TByteValue &a, TByteValue &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TByteValue& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TI16Value__isset {
   _TI16Value__isset() : value(false) {}
   bool value :1;
@@ -964,9 +1006,6 @@ typedef struct _TI16Value__isset {
 class TI16Value {
  public:
 
-  static const char* ascii_fingerprint; // = "5DAC9C51C7E1106BF936FC71860BE9D5";
-  static const uint8_t binary_fingerprint[16]; // = {0x5D,0xAC,0x9C,0x51,0xC7,0xE1,0x10,0x6B,0xF9,0x36,0xFC,0x71,0x86,0x0B,0xE9,0xD5};
-
   TI16Value(const TI16Value&);
   TI16Value& operator=(const TI16Value&);
   TI16Value() : value(0) {
@@ -996,11 +1035,17 @@ class TI16Value {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TI16Value& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TI16Value &a, TI16Value &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TI16Value& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TI32Value__isset {
   _TI32Value__isset() : value(false) {}
   bool value :1;
@@ -1009,9 +1054,6 @@ typedef struct _TI32Value__isset {
 class TI32Value {
  public:
 
-  static const char* ascii_fingerprint; // = "E7A96B151330359E84C0A3AC91BCBACD";
-  static const uint8_t binary_fingerprint[16]; // = {0xE7,0xA9,0x6B,0x15,0x13,0x30,0x35,0x9E,0x84,0xC0,0xA3,0xAC,0x91,0xBC,0xBA,0xCD};
-
   TI32Value(const TI32Value&);
   TI32Value& operator=(const TI32Value&);
   TI32Value() : value(0) {
@@ -1041,11 +1083,17 @@ class TI32Value {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TI32Value& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TI32Value &a, TI32Value &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TI32Value& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TI64Value__isset {
   _TI64Value__isset() : value(false) {}
   bool value :1;
@@ -1054,9 +1102,6 @@ typedef struct _TI64Value__isset {
 class TI64Value {
  public:
 
-  static const char* ascii_fingerprint; // = "148F3AAAC1D9859963D5E800D187BF26";
-  static const uint8_t binary_fingerprint[16]; // = {0x14,0x8F,0x3A,0xAA,0xC1,0xD9,0x85,0x99,0x63,0xD5,0xE8,0x00,0xD1,0x87,0xBF,0x26};
-
   TI64Value(const TI64Value&);
   TI64Value& operator=(const TI64Value&);
   TI64Value() : value(0) {
@@ -1086,11 +1131,17 @@ class TI64Value {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TI64Value& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TI64Value &a, TI64Value &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TI64Value& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TDoubleValue__isset {
   _TDoubleValue__isset() : value(false) {}
   bool value :1;
@@ -1099,9 +1150,6 @@ typedef struct _TDoubleValue__isset {
 class TDoubleValue {
  public:
 
-  static const char* ascii_fingerprint; // = "3586E570A474C4A8603B4FF74903B3A6";
-  static const uint8_t binary_fingerprint[16]; // = {0x35,0x86,0xE5,0x70,0xA4,0x74,0xC4,0xA8,0x60,0x3B,0x4F,0xF7,0x49,0x03,0xB3,0xA6};
-
   TDoubleValue(const TDoubleValue&);
   TDoubleValue& operator=(const TDoubleValue&);
   TDoubleValue() : value(0) {
@@ -1131,11 +1179,17 @@ class TDoubleValue {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TDoubleValue& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TDoubleValue &a, TDoubleValue &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TDoubleValue& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TStringValue__isset {
   _TStringValue__isset() : value(false) {}
   bool value :1;
@@ -1144,9 +1198,6 @@ typedef struct _TStringValue__isset {
 class TStringValue {
  public:
 
-  static const char* ascii_fingerprint; // = "66E694018C17E5B65A59AE8F55CCA3CD";
-  static const uint8_t binary_fingerprint[16]; // = {0x66,0xE6,0x94,0x01,0x8C,0x17,0xE5,0xB6,0x5A,0x59,0xAE,0x8F,0x55,0xCC,0xA3,0xCD};
-
   TStringValue(const TStringValue&);
   TStringValue& operator=(const TStringValue&);
   TStringValue() : value() {
@@ -1176,11 +1227,17 @@ class TStringValue {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TStringValue& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TStringValue &a, TStringValue &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TStringValue& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TColumnValue__isset {
   _TColumnValue__isset() : boolVal(false), byteVal(false), i16Val(false), i32Val(false), i64Val(false), doubleVal(false), stringVal(false) {}
   bool boolVal :1;
@@ -1195,9 +1252,6 @@ typedef struct _TColumnValue__isset {
 class TColumnValue {
  public:
 
-  static const char* ascii_fingerprint; // = "C2DDD988447EA7999A8285AA38AAE9AD";
-  static const uint8_t binary_fingerprint[16]; // = {0xC2,0xDD,0xD9,0x88,0x44,0x7E,0xA7,0x99,0x9A,0x82,0x85,0xAA,0x38,0xAA,0xE9,0xAD};
-
   TColumnValue(const TColumnValue&);
   TColumnValue& operator=(const TColumnValue&);
   TColumnValue() {
@@ -1255,18 +1309,21 @@ class TColumnValue {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TColumnValue& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TColumnValue &a, TColumnValue &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TColumnValue& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TRow {
  public:
 
-  static const char* ascii_fingerprint; // = "E73FD1FCA0CA58A669FC3E02FB68D534";
-  static const uint8_t binary_fingerprint[16]; // = {0xE7,0x3F,0xD1,0xFC,0xA0,0xCA,0x58,0xA6,0x69,0xFC,0x3E,0x02,0xFB,0x68,0xD5,0x34};
-
   TRow(const TRow&);
   TRow& operator=(const TRow&);
   TRow() {
@@ -1292,18 +1349,21 @@ class TRow {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TRow& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TRow &a, TRow &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TRow& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TBoolColumn {
  public:
 
-  static const char* ascii_fingerprint; // = "F9058324D96DB7F974D8ACDC01C54219";
-  static const uint8_t binary_fingerprint[16]; // = {0xF9,0x05,0x83,0x24,0xD9,0x6D,0xB7,0xF9,0x74,0xD8,0xAC,0xDC,0x01,0xC5,0x42,0x19};
-
   TBoolColumn(const TBoolColumn&);
   TBoolColumn& operator=(const TBoolColumn&);
   TBoolColumn() : nulls() {
@@ -1334,18 +1394,21 @@ class TBoolColumn {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TBoolColumn& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TBoolColumn &a, TBoolColumn &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TBoolColumn& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TByteColumn {
  public:
 
-  static const char* ascii_fingerprint; // = "1CB300106BAA463A70BB2A2395900F48";
-  static const uint8_t binary_fingerprint[16]; // = {0x1C,0xB3,0x00,0x10,0x6B,0xAA,0x46,0x3A,0x70,0xBB,0x2A,0x23,0x95,0x90,0x0F,0x48};
-
   TByteColumn(const TByteColumn&);
   TByteColumn& operator=(const TByteColumn&);
   TByteColumn() : nulls() {
@@ -1376,18 +1439,21 @@ class TByteColumn {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TByteColumn& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TByteColumn &a, TByteColumn &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TByteColumn& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TI16Column {
  public:
 
-  static const char* ascii_fingerprint; // = "6574CDB1F121C8DB47FB257A3F104BDB";
-  static const uint8_t binary_fingerprint[16]; // = {0x65,0x74,0xCD,0xB1,0xF1,0x21,0xC8,0xDB,0x47,0xFB,0x25,0x7A,0x3F,0x10,0x4B,0xDB};
-
   TI16Column(const TI16Column&);
   TI16Column& operator=(const TI16Column&);
   TI16Column() : nulls() {
@@ -1418,18 +1484,21 @@ class TI16Column {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TI16Column& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TI16Column &a, TI16Column &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TI16Column& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TI32Column {
  public:
 
-  static const char* ascii_fingerprint; // = "CCCCE89C7E9DA10280F5663700677313";
-  static const uint8_t binary_fingerprint[16]; // = {0xCC,0xCC,0xE8,0x9C,0x7E,0x9D,0xA1,0x02,0x80,0xF5,0x66,0x37,0x00,0x67,0x73,0x13};
-
   TI32Column(const TI32Column&);
   TI32Column& operator=(const TI32Column&);
   TI32Column() : nulls() {
@@ -1460,18 +1529,21 @@ class TI32Column {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TI32Column& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TI32Column &a, TI32Column &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TI32Column& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TI64Column {
  public:
 
-  static const char* ascii_fingerprint; // = "925353917FC0AF87976A2338011F5A31";
-  static const uint8_t binary_fingerprint[16]; // = {0x92,0x53,0x53,0x91,0x7F,0xC0,0xAF,0x87,0x97,0x6A,0x23,0x38,0x01,0x1F,0x5A,0x31};
-
   TI64Column(const TI64Column&);
   TI64Column& operator=(const TI64Column&);
   TI64Column() : nulls() {
@@ -1502,18 +1574,21 @@ class TI64Column {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TI64Column& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TI64Column &a, TI64Column &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TI64Column& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TDoubleColumn {
  public:
 
-  static const char* ascii_fingerprint; // = "8FF1C050A8D7FD247AEB23CD71539C09";
-  static const uint8_t binary_fingerprint[16]; // = {0x8F,0xF1,0xC0,0x50,0xA8,0xD7,0xFD,0x24,0x7A,0xEB,0x23,0xCD,0x71,0x53,0x9C,0x09};
-
   TDoubleColumn(const TDoubleColumn&);
   TDoubleColumn& operator=(const TDoubleColumn&);
   TDoubleColumn() : nulls() {
@@ -1544,18 +1619,21 @@ class TDoubleColumn {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TDoubleColumn& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TDoubleColumn &a, TDoubleColumn &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TDoubleColumn& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TStringColumn {
  public:
 
-  static const char* ascii_fingerprint; // = "BE556BF7091B2DABBA1863D5E458B15F";
-  static const uint8_t binary_fingerprint[16]; // = {0xBE,0x55,0x6B,0xF7,0x09,0x1B,0x2D,0xAB,0xBA,0x18,0x63,0xD5,0xE4,0x58,0xB1,0x5F};
-
   TStringColumn(const TStringColumn&);
   TStringColumn& operator=(const TStringColumn&);
   TStringColumn() : nulls() {
@@ -1586,18 +1664,21 @@ class TStringColumn {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TStringColumn& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TStringColumn &a, TStringColumn &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TStringColumn& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TBinaryColumn {
  public:
 
-  static const char* ascii_fingerprint; // = "BE556BF7091B2DABBA1863D5E458B15F";
-  static const uint8_t binary_fingerprint[16]; // = {0xBE,0x55,0x6B,0xF7,0x09,0x1B,0x2D,0xAB,0xBA,0x18,0x63,0xD5,0xE4,0x58,0xB1,0x5F};
-
   TBinaryColumn(const TBinaryColumn&);
   TBinaryColumn& operator=(const TBinaryColumn&);
   TBinaryColumn() : nulls() {
@@ -1628,11 +1709,17 @@ class TBinaryColumn {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TBinaryColumn& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TBinaryColumn &a, TBinaryColumn &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TBinaryColumn& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TColumn__isset {
   _TColumn__isset() : boolVal(false), byteVal(false), i16Val(false), i32Val(false), i64Val(false), doubleVal(false), stringVal(false), binaryVal(false) {}
   bool boolVal :1;
@@ -1648,9 +1735,6 @@ typedef struct _TColumn__isset {
 class TColumn {
  public:
 
-  static const char* ascii_fingerprint; // = "E6ADD10B4CDDE61A19E8878CC7039A17";
-  static const uint8_t binary_fingerprint[16]; // = {0xE6,0xAD,0xD1,0x0B,0x4C,0xDD,0xE6,0x1A,0x19,0xE8,0x87,0x8C,0xC7,0x03,0x9A,0x17};
-
   TColumn(const TColumn&);
   TColumn& operator=(const TColumn&);
   TColumn() {
@@ -1713,11 +1797,17 @@ class TColumn {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TColumn& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TColumn &a, TColumn &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TColumn& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TRowSet__isset {
   _TRowSet__isset() : columns(false) {}
   bool columns :1;
@@ -1726,9 +1816,6 @@ typedef struct _TRowSet__isset {
 class TRowSet {
  public:
 
-  static const char* ascii_fingerprint; // = "46DA30A870489C7A58105AE0080DAEBF";
-  static const uint8_t binary_fingerprint[16]; // = {0x46,0xDA,0x30,0xA8,0x70,0x48,0x9C,0x7A,0x58,0x10,0x5A,0xE0,0x08,0x0D,0xAE,0xBF};
-
   TRowSet(const TRowSet&);
   TRowSet& operator=(const TRowSet&);
   TRowSet() : startRowOffset(0) {
@@ -1768,11 +1855,17 @@ class TRowSet {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TRowSet& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TRowSet &a, TRowSet &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TRowSet& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TStatus__isset {
   _TStatus__isset() : infoMessages(false), sqlState(false), errorCode(false), errorMessage(false) {}
   bool infoMessages :1;
@@ -1784,9 +1877,6 @@ typedef struct _TStatus__isset {
 class TStatus {
  public:
 
-  static const char* ascii_fingerprint; // = "D5DEF49634A59C615C1B3A6F7D0DADB5";
-  static const uint8_t binary_fingerprint[16]; // = {0xD5,0xDE,0xF4,0x96,0x34,0xA5,0x9C,0x61,0x5C,0x1B,0x3A,0x6F,0x7D,0x0D,0xAD,0xB5};
-
   TStatus(const TStatus&);
   TStatus& operator=(const TStatus&);
   TStatus() : statusCode((TStatusCode::type)0), sqlState(), errorCode(0), errorMessage() {
@@ -1842,18 +1932,21 @@ class TStatus {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TStatus& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TStatus &a, TStatus &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TStatus& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class THandleIdentifier {
  public:
 
-  static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
-  static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
   THandleIdentifier(const THandleIdentifier&);
   THandleIdentifier& operator=(const THandleIdentifier&);
   THandleIdentifier() : guid(), secret() {
@@ -1884,18 +1977,21 @@ class THandleIdentifier {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const THandleIdentifier& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(THandleIdentifier &a, THandleIdentifier &b);
 
+inline std::ostream& operator<<(std::ostream& out, const THandleIdentifier& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TSessionHandle {
  public:
 
-  static const char* ascii_fingerprint; // = "A756D3DBE614FB13F70BF7F7B6EB3D73";
-  static const uint8_t binary_fingerprint[16]; // = {0xA7,0x56,0xD3,0xDB,0xE6,0x14,0xFB,0x13,0xF7,0x0B,0xF7,0xF7,0xB6,0xEB,0x3D,0x73};
-
   TSessionHandle(const TSessionHandle&);
   TSessionHandle& operator=(const TSessionHandle&);
   TSessionHandle() {
@@ -1921,11 +2017,17 @@ class TSessionHandle {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TSessionHandle& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TSessionHandle &a, TSessionHandle &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TSessionHandle& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TOperationHandle__isset {
   _TOperationHandle__isset() : modifiedRowCount(false) {}
   bool modifiedRowCount :1;
@@ -1934,9 +2036,6 @@ typedef struct _TOperationHandle__isset {
 class TOperationHandle {
  public:
 
-  static const char* ascii_fingerprint; // = "29FD80F4F96804A30FCC59C23D2E5349";
-  static const uint8_t binary_fingerprint[16]; // = {0x29,0xFD,0x80,0xF4,0xF9,0x68,0x04,0xA3,0x0F,0xCC,0x59,0xC2,0x3D,0x2E,0x53,0x49};
-
   TOperationHandle(const TOperationHandle&);
   TOperationHandle& operator=(const TOperationHandle&);
   TOperationHandle() : operationType((TOperationType::type)0), hasResultSet(0), modifiedRowCount(0) {
@@ -1981,11 +2080,17 @@ class TOperationHandle {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TOperationHandle& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TOperationHandle &a, TOperationHandle &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TOperationHandle& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TOpenSessionReq__isset {
   _TOpenSessionReq__isset() : username(false), password(false), configuration(false) {}
   bool username :1;
@@ -1996,9 +2101,6 @@ typedef struct _TOpenSessionReq__isset {
 class TOpenSessionReq {
  public:
 
-  static const char* ascii_fingerprint; // = "C8FD0F306A16C16BDA7B57F58BFAE5B2";
-  static const uint8_t binary_fingerprint[16]; // = {0xC8,0xFD,0x0F,0x30,0x6A,0x16,0xC1,0x6B,0xDA,0x7B,0x57,0xF5,0x8B,0xFA,0xE5,0xB2};
-
   TOpenSessionReq(const TOpenSessionReq&);
   TOpenSessionReq& operator=(const TOpenSessionReq&);
   TOpenSessionReq() : client_protocol((TProtocolVersion::type)7), username(), password() {
@@ -2049,11 +2151,17 @@ class TOpenSessionReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TOpenSessionReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TOpenSessionReq &a, TOpenSessionReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TOpenSessionReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TOpenSessionResp__isset {
   _TOpenSessionResp__isset() : sessionHandle(false), configuration(false) {}
   bool sessionHandle :1;
@@ -2063,9 +2171,6 @@ typedef struct _TOpenSessionResp__isset {
 class TOpenSessionResp {
  public:
 
-  static const char* ascii_fingerprint; // = "CFE7D7F4E9EC671F2518ED74FEE9F163";
-  static const uint8_t binary_fingerprint[16]; // = {0xCF,0xE7,0xD7,0xF4,0xE9,0xEC,0x67,0x1F,0x25,0x18,0xED,0x74,0xFE,0xE9,0xF1,0x63};
-
   TOpenSessionResp(const TOpenSessionResp&);
   TOpenSessionResp& operator=(const TOpenSessionResp&);
   TOpenSessionResp() : serverProtocolVersion((TProtocolVersion::type)7) {
@@ -2114,18 +2219,21 @@ class TOpenSessionResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TOpenSessionResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TOpenSessionResp &a, TOpenSessionResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TOpenSessionResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TCloseSessionReq {
  public:
 
-  static const char* ascii_fingerprint; // = "82377107F8BD0526960537D5A112E6EF";
-  static const uint8_t binary_fingerprint[16]; // = {0x82,0x37,0x71,0x07,0xF8,0xBD,0x05,0x26,0x96,0x05,0x37,0xD5,0xA1,0x12,0xE6,0xEF};
-
   TCloseSessionReq(const TCloseSessionReq&);
   TCloseSessionReq& operator=(const TCloseSessionReq&);
   TCloseSessionReq() {
@@ -2151,18 +2259,21 @@ class TCloseSessionReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCloseSessionReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TCloseSessionReq &a, TCloseSessionReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TCloseSessionReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TCloseSessionResp {
  public:
 
-  static const char* ascii_fingerprint; // = "7142E89F09DC7C5F6FA916C7393F46C2";
-  static const uint8_t binary_fingerprint[16]; // = {0x71,0x42,0xE8,0x9F,0x09,0xDC,0x7C,0x5F,0x6F,0xA9,0x16,0xC7,0x39,0x3F,0x46,0xC2};
-
   TCloseSessionResp(const TCloseSessionResp&);
   TCloseSessionResp& operator=(const TCloseSessionResp&);
   TCloseSessionResp() {
@@ -2188,11 +2299,17 @@ class TCloseSessionResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCloseSessionResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TCloseSessionResp &a, TCloseSessionResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TCloseSessionResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetInfoValue__isset {
   _TGetInfoValue__isset() : stringValue(false), smallIntValue(false), integerBitmask(false), integerFlag(false), binaryValue(false), lenValue(false) {}
   bool stringValue :1;
@@ -2206,9 +2323,6 @@ typedef struct _TGetInfoValue__isset {
 class TGetInfoValue {
  public:
 
-  static const char* ascii_fingerprint; // = "057FED11279FD7248CFE73EE82ED579E";
-  static const uint8_t binary_fingerprint[16]; // = {0x05,0x7F,0xED,0x11,0x27,0x9F,0xD7,0x24,0x8C,0xFE,0x73,0xEE,0x82,0xED,0x57,0x9E};
-
   TGetInfoValue(const TGetInfoValue&);
   TGetInfoValue& operator=(const TGetInfoValue&);
   TGetInfoValue() : stringValue(), smallIntValue(0), integerBitmask(0), integerFlag(0), binaryValue(0), lenValue(0) {
@@ -2261,18 +2375,21 @@ class TGetInfoValue {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetInfoValue& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetInfoValue &a, TGetInfoValue &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetInfoValue& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TGetInfoReq {
  public:
 
-  static const char* ascii_fingerprint; // = "95675B1A0BADE5F7EDE323809DB679B2";
-  static const uint8_t binary_fingerprint[16]; // = {0x95,0x67,0x5B,0x1A,0x0B,0xAD,0xE5,0xF7,0xED,0xE3,0x23,0x80,0x9D,0xB6,0x79,0xB2};
-
   TGetInfoReq(const TGetInfoReq&);
   TGetInfoReq& operator=(const TGetInfoReq&);
   TGetInfoReq() : infoType((TGetInfoType::type)0) {
@@ -2303,18 +2420,21 @@ class TGetInfoReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetInfoReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetInfoReq &a, TGetInfoReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetInfoReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TGetInfoResp {
  public:
 
-  static const char* ascii_fingerprint; // = "72AFA10A82728B51FDE91092012868DE";
-  static const uint8_t binary_fingerprint[16]; // = {0x72,0xAF,0xA1,0x0A,0x82,0x72,0x8B,0x51,0xFD,0xE9,0x10,0x92,0x01,0x28,0x68,0xDE};
-
   TGetInfoResp(const TGetInfoResp&);
   TGetInfoResp& operator=(const TGetInfoResp&);
   TGetInfoResp() {
@@ -2345,11 +2465,17 @@ class TGetInfoResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetInfoResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetInfoResp &a, TGetInfoResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetInfoResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TExecuteStatementReq__isset {
   _TExecuteStatementReq__isset() : confOverlay(false), runAsync(true) {}
   bool confOverlay :1;
@@ -2359,9 +2485,6 @@ typedef struct _TExecuteStatementReq__isset {
 class TExecuteStatementReq {
  public:
 
-  static const char* ascii_fingerprint; // = "FED75DB77E66D76EC1939A51FB0D96FA";
-  static const uint8_t binary_fingerprint[16]; // = {0xFE,0xD7,0x5D,0xB7,0x7E,0x66,0xD7,0x6E,0xC1,0x93,0x9A,0x51,0xFB,0x0D,0x96,0xFA};
-
   TExecuteStatementReq(const TExecuteStatementReq&);
   TExecuteStatementReq& operator=(const TExecuteStatementReq&);
   TExecuteStatementReq() : statement(), runAsync(false) {
@@ -2408,11 +2531,17 @@ class TExecuteStatementReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TExecuteStatementReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TExecuteStatementReq &a, TExecuteStatementReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TExecuteStatementReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TExecuteStatementResp__isset {
   _TExecuteStatementResp__isset() : operationHandle(false) {}
   bool operationHandle :1;
@@ -2421,9 +2550,6 @@ typedef struct _TExecuteStatementResp__isset {
 class TExecuteStatementResp {
  public:
 
-  static const char* ascii_fingerprint; // = "02A075A0FF88D3A172916D8F23C7B286";
-  static const uint8_t binary_fingerprint[16]; // = {0x02,0xA0,0x75,0xA0,0xFF,0x88,0xD3,0xA1,0x72,0x91,0x6D,0x8F,0x23,0xC7,0xB2,0x86};
-
   TExecuteStatementResp(const TExecuteStatementResp&);
   TExecuteStatementResp& operator=(const TExecuteStatementResp&);
   TExecuteStatementResp() {
@@ -2458,18 +2584,21 @@ class TExecuteStatementResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TExecuteStatementResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TExecuteStatementResp &a, TExecuteStatementResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TExecuteStatementResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TGetTypeInfoReq {
  public:
 
-  static const char* ascii_fingerprint; // = "82377107F8BD0526960537D5A112E6EF";
-  static const uint8_t binary_fingerprint[16]; // = {0x82,0x37,0x71,0x07,0xF8,0xBD,0x05,0x26,0x96,0x05,0x37,0xD5,0xA1,0x12,0xE6,0xEF};
-
   TGetTypeInfoReq(const TGetTypeInfoReq&);
   TGetTypeInfoReq& operator=(const TGetTypeInfoReq&);
   TGetTypeInfoReq() {
@@ -2495,11 +2624,17 @@ class TGetTypeInfoReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetTypeInfoReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetTypeInfoReq &a, TGetTypeInfoReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetTypeInfoReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetTypeInfoResp__isset {
   _TGetTypeInfoResp__isset() : operationHandle(false) {}
   bool operationHandle :1;
@@ -2508,9 +2643,6 @@ typedef struct _TGetTypeInfoResp__isset {
 class TGetTypeInfoResp {
  public:
 
-  static const char* ascii_fingerprint; // = "02A075A0FF88D3A172916D8F23C7B286";
-  static const uint8_t binary_fingerprint[16]; // = {0x02,0xA0,0x75,0xA0,0xFF,0x88,0xD3,0xA1,0x72,0x91,0x6D,0x8F,0x23,0xC7,0xB2,0x86};
-
   TGetTypeInfoResp(const TGetTypeInfoResp&);
   TGetTypeInfoResp& operator=(const TGetTypeInfoResp&);
   TGetTypeInfoResp() {
@@ -2545,18 +2677,21 @@ class TGetTypeInfoResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetTypeInfoResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetTypeInfoResp &a, TGetTypeInfoResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetTypeInfoResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TGetCatalogsReq {
  public:
 
-  static const char* ascii_fingerprint; // = "82377107F8BD0526960537D5A112E6EF";
-  static const uint8_t binary_fingerprint[16]; // = {0x82,0x37,0x71,0x07,0xF8,0xBD,0x05,0x26,0x96,0x05,0x37,0xD5,0xA1,0x12,0xE6,0xEF};
-
   TGetCatalogsReq(const TGetCatalogsReq&);
   TGetCatalogsReq& operator=(const TGetCatalogsReq&);
   TGetCatalogsReq() {
@@ -2582,11 +2717,17 @@ class TGetCatalogsReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetCatalogsReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetCatalogsReq &a, TGetCatalogsReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetCatalogsReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetCatalogsResp__isset {
   _TGetCatalogsResp__isset() : operationHandle(false) {}
   bool operationHandle :1;
@@ -2595,9 +2736,6 @@ typedef struct _TGetCatalogsResp__isset {
 class TGetCatalogsResp {
  public:
 
-  static const char* ascii_fingerprint; // = "02A075A0FF88D3A172916D8F23C7B286";
-  static const uint8_t binary_fingerprint[16]; // = {0x02,0xA0,0x75,0xA0,0xFF,0x88,0xD3,0xA1,0x72,0x91,0x6D,0x8F,0x23,0xC7,0xB2,0x86};
-
   TGetCatalogsResp(const TGetCatalogsResp&);
   TGetCatalogsResp& operator=(const TGetCatalogsResp&);
   TGetCatalogsResp() {
@@ -2632,11 +2770,17 @@ class TGetCatalogsResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetCatalogsResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetCatalogsResp &a, TGetCatalogsResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetCatalogsResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetSchemasReq__isset {
   _TGetSchemasReq__isset() : catalogName(false), schemaName(false) {}
   bool catalogName :1;
@@ -2646,9 +2790,6 @@ typedef struct _TGetSchemasReq__isset {
 class TGetSchemasReq {
  public:
 
-  static const char* ascii_fingerprint; // = "28A9D12DE8393DD3E73FC1E5AE6E113B";
-  static const uint8_t binary_fingerprint[16]; // = {0x28,0xA9,0xD1,0x2D,0xE8,0x39,0x3D,0xD3,0xE7,0x3F,0xC1,0xE5,0xAE,0x6E,0x11,0x3B};
-
   TGetSchemasReq(const TGetSchemasReq&);
   TGetSchemasReq& operator=(const TGetSchemasReq&);
   TGetSchemasReq() : catalogName(), schemaName() {
@@ -2690,11 +2831,17 @@ class TGetSchemasReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetSchemasReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetSchemasReq &a, TGetSchemasReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetSchemasReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetSchemasResp__isset {
   _TGetSchemasResp__isset() : operationHandle(false) {}
   bool operationHandle :1;
@@ -2703,9 +2850,6 @@ typedef struct _TGetSchemasResp__isset {
 class TGetSchemasResp {
  public:
 
-  static const char* ascii_fingerprint; // = "02A075A0FF88D3A172916D8F23C7B286";
-  static const uint8_t binary_fingerprint[16]; // = {0x02,0xA0,0x75,0xA0,0xFF,0x88,0xD3,0xA1,0x72,0x91,0x6D,0x8F,0x23,0xC7,0xB2,0x86};
-
   TGetSchemasResp(const TGetSchemasResp&);
   TGetSchemasResp& operator=(const TGetSchemasResp&);
   TGetSchemasResp() {
@@ -2740,11 +2884,17 @@ class TGetSchemasResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetSchemasResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetSchemasResp &a, TGetSchemasResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetSchemasResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetTablesReq__isset {
   _TGetTablesReq__isset() : catalogName(false), schemaName(false), tableName(false), tableTypes(false) {}
   bool catalogName :1;
@@ -2756,9 +2906,6 @@ typedef struct _TGetTablesReq__isset {
 class TGetTablesReq {
  public:
 
-  static const char* ascii_fingerprint; // = "C80DFEE06850052F5A445BE81ED763DB";
-  static const uint8_t binary_fingerprint[16]; // = {0xC8,0x0D,0xFE,0xE0,0x68,0x50,0x05,0x2F,0x5A,0x44,0x5B,0xE8,0x1E,0xD7,0x63,0xDB};
-
   TGetTablesReq(const TGetTablesReq&);
   TGetTablesReq& operator=(const TGetTablesReq&);
   TGetTablesReq() : catalogName(), schemaName(), tableName() {
@@ -2814,11 +2961,17 @@ class TGetTablesReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetTablesReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetTablesReq &a, TGetTablesReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetTablesReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetTablesResp__isset {
   _TGetTablesResp__isset() : operationHandle(false) {}
   bool operationHandle :1;
@@ -2827,9 +2980,6 @@ typedef struct _TGetTablesResp__isset {
 class TGetTablesResp {
  public:
 
-  static const char* ascii_fingerprint; // = "02A075A0FF88D3A172916D8F23C7B286";
-  static const uint8_t binary_fingerprint[16]; // = {0x02,0xA0,0x75,0xA0,0xFF,0x88,0xD3,0xA1,0x72,0x91,0x6D,0x8F,0x23,0xC7,0xB2,0x86};
-
   TGetTablesResp(const TGetTablesResp&);
   TGetTablesResp& operator=(const TGetTablesResp&);
   TGetTablesResp() {
@@ -2864,18 +3014,21 @@ class TGetTablesResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetTablesResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetTablesResp &a, TGetTablesResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetTablesResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TGetTableTypesReq {
  public:
 
-  static const char* ascii_fingerprint; // = "82377107F8BD0526960537D5A112E6EF";
-  static const uint8_t binary_fingerprint[16]; // = {0x82,0x37,0x71,0x07,0xF8,0xBD,0x05,0x26,0x96,0x05,0x37,0xD5,0xA1,0x12,0xE6,0xEF};
-
   TGetTableTypesReq(const TGetTableTypesReq&);
   TGetTableTypesReq& operator=(const TGetTableTypesReq&);
   TGetTableTypesReq() {
@@ -2901,11 +3054,17 @@ class TGetTableTypesReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetTableTypesReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetTableTypesReq &a, TGetTableTypesReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetTableTypesReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetTableTypesResp__isset {
   _TGetTableTypesResp__isset() : operationHandle(false) {}
   bool operationHandle :1;
@@ -2914,9 +3073,6 @@ typedef struct _TGetTableTypesResp__isset {
 class TGetTableTypesResp {
  public:
 
-  static const char* ascii_fingerprint; // = "02A075A0FF88D3A172916D8F23C7B286";
-  static const uint8_t binary_fingerprint[16]; // = {0x02,0xA0,0x75,0xA0,0xFF,0x88,0xD3,0xA1,0x72,0x91,0x6D,0x8F,0x23,0xC7,0xB2,0x86};
-
   TGetTableTypesResp(const TGetTableTypesResp&);
   TGetTableTypesResp& operator=(const TGetTableTypesResp&);
   TGetTableTypesResp() {
@@ -2951,11 +3107,17 @@ class TGetTableTypesResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetTableTypesResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetTableTypesResp &a, TGetTableTypesResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetTableTypesResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetColumnsReq__isset {
   _TGetColumnsReq__isset() : catalogName(false), schemaName(false), tableName(false), columnName(false) {}
   bool catalogName :1;
@@ -2967,9 +3129,6 @@ typedef struct _TGetColumnsReq__isset {
 class TGetColumnsReq {
  public:
 
-  static const char* ascii_fingerprint; // = "72D0F28ED33A49B7306DF63EEE956C28";
-  static const uint8_t binary_fingerprint[16]; // = {0x72,0xD0,0xF2,0x8E,0xD3,0x3A,0x49,0xB7,0x30,0x6D,0xF6,0x3E,0xEE,0x95,0x6C,0x28};
-
   TGetColumnsReq(const TGetColumnsReq&);
   TGetColumnsReq& operator=(const TGetColumnsReq&);
   TGetColumnsReq() : catalogName(), schemaName(), tableName(), columnName() {
@@ -3025,11 +3184,17 @@ class TGetColumnsReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetColumnsReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetColumnsReq &a, TGetColumnsReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetColumnsReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetColumnsResp__isset {
   _TGetColumnsResp__isset() : operationHandle(false) {}
   bool operationHandle :1;
@@ -3038,9 +3203,6 @@ typedef struct _TGetColumnsResp__isset {
 class TGetColumnsResp {
  public:
 
-  static const char* ascii_fingerprint; // = "02A075A0FF88D3A172916D8F23C7B286";
-  static const uint8_t binary_fingerprint[16]; // = {0x02,0xA0,0x75,0xA0,0xFF,0x88,0xD3,0xA1,0x72,0x91,0x6D,0x8F,0x23,0xC7,0xB2,0x86};
-
   TGetColumnsResp(const TGetColumnsResp&);
   TGetColumnsResp& operator=(const TGetColumnsResp&);
   TGetColumnsResp() {
@@ -3075,11 +3237,17 @@ class TGetColumnsResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetColumnsResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetColumnsResp &a, TGetColumnsResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetColumnsResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetFunctionsReq__isset {
   _TGetFunctionsReq__isset() : catalogName(false), schemaName(false) {}
   bool catalogName :1;
@@ -3089,9 +3257,6 @@ typedef struct _TGetFunctionsReq__isset {
 class TGetFunctionsReq {
  public:
 
-  static const char* ascii_fingerprint; // = "0887E0916ADE4521BF6017B534493138";
-  static const uint8_t binary_fingerprint[16]; // = {0x08,0x87,0xE0,0x91,0x6A,0xDE,0x45,0x21,0xBF,0x60,0x17,0xB5,0x34,0x49,0x31,0x38};
-
   TGetFunctionsReq(const TGetFunctionsReq&);
   TGetFunctionsReq& operator=(const TGetFunctionsReq&);
   TGetFunctionsReq() : catalogName(), schemaName(), functionName() {
@@ -3138,11 +3303,17 @@ class TGetFunctionsReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetFunctionsReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetFunctionsReq &a, TGetFunctionsReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetFunctionsReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetFunctionsResp__isset {
   _TGetFunctionsResp__isset() : operationHandle(false) {}
   bool operationHandle :1;
@@ -3151,9 +3322,6 @@ typedef struct _TGetFunctionsResp__isset {
 class TGetFunctionsResp {
  public:
 
-  static const char* ascii_fingerprint; // = "02A075A0FF88D3A172916D8F23C7B286";
-  static const uint8_t binary_fingerprint[16]; // = {0x02,0xA0,0x75,0xA0,0xFF,0x88,0xD3,0xA1,0x72,0x91,0x6D,0x8F,0x23,0xC7,0xB2,0x86};
-
   TGetFunctionsResp(const TGetFunctionsResp&);
   TGetFunctionsResp& operator=(const TGetFunctionsResp&);
   TGetFunctionsResp() {
@@ -3188,18 +3356,21 @@ class TGetFunctionsResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetFunctionsResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetFunctionsResp &a, TGetFunctionsResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetFunctionsResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TGetOperationStatusReq {
  public:
 
-  static const char* ascii_fingerprint; // = "414FA38522AE6B9CEC1438B56CA1DE5A";
-  static const uint8_t binary_fingerprint[16]; // = {0x41,0x4F,0xA3,0x85,0x22,0xAE,0x6B,0x9C,0xEC,0x14,0x38,0xB5,0x6C,0xA1,0xDE,0x5A};
-
   TGetOperationStatusReq(const TGetOperationStatusReq&);
   TGetOperationStatusReq& operator=(const TGetOperationStatusReq&);
   TGetOperationStatusReq() {
@@ -3225,11 +3396,17 @@ class TGetOperationStatusReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetOperationStatusReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetOperationStatusReq &a, TGetOperationStatusReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetOperationStatusReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetOperationStatusResp__isset {
   _TGetOperationStatusResp__isset() : operationState(false), sqlState(false), errorCode(false), errorMessage(false) {}
   bool operationState :1;
@@ -3241,9 +3418,6 @@ typedef struct _TGetOperationStatusResp__isset {
 class TGetOperationStatusResp {
  public:
 
-  static const char* ascii_fingerprint; // = "BD124DB87A5A2E7D11945BD1B17F013D";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0x12,0x4D,0xB8,0x7A,0x5A,0x2E,0x7D,0x11,0x94,0x5B,0xD1,0xB1,0x7F,0x01,0x3D};
-
   TGetOperationStatusResp(const TGetOperationStatusResp&);
   TGetOperationStatusResp& operator=(const TGetOperationStatusResp&);
   TGetOperationStatusResp() : operationState((TOperationState::type)0), sqlState(), errorCode(0), errorMessage() {
@@ -3299,18 +3473,21 @@ class TGetOperationStatusResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetOperationStatusResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetOperationStatusResp &a, TGetOperationStatusResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetOperationStatusResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TCancelOperationReq {
  public:
 
-  static const char* ascii_fingerprint; // = "414FA38522AE6B9CEC1438B56CA1DE5A";
-  static const uint8_t binary_fingerprint[16]; // = {0x41,0x4F,0xA3,0x85,0x22,0xAE,0x6B,0x9C,0xEC,0x14,0x38,0xB5,0x6C,0xA1,0xDE,0x5A};
-
   TCancelOperationReq(const TCancelOperationReq&);
   TCancelOperationReq& operator=(const TCancelOperationReq&);
   TCancelOperationReq() {
@@ -3336,18 +3513,21 @@ class TCancelOperationReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCancelOperationReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TCancelOperationReq &a, TCancelOperationReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TCancelOperationReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TCancelOperationResp {
  public:
 
-  static const char* ascii_fingerprint; // = "7142E89F09DC7C5F6FA916C7393F46C2";
-  static const uint8_t binary_fingerprint[16]; // = {0x71,0x42,0xE8,0x9F,0x09,0xDC,0x7C,0x5F,0x6F,0xA9,0x16,0xC7,0x39,0x3F,0x46,0xC2};
-
   TCancelOperationResp(const TCancelOperationResp&);
   TCancelOperationResp& operator=(const TCancelOperationResp&);
   TCancelOperationResp() {
@@ -3373,18 +3553,21 @@ class TCancelOperationResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCancelOperationResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TCancelOperationResp &a, TCancelOperationResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TCancelOperationResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TCloseOperationReq {
  public:
 
-  static const char* ascii_fingerprint; // = "414FA38522AE6B9CEC1438B56CA1DE5A";
-  static const uint8_t binary_fingerprint[16]; // = {0x41,0x4F,0xA3,0x85,0x22,0xAE,0x6B,0x9C,0xEC,0x14,0x38,0xB5,0x6C,0xA1,0xDE,0x5A};
-
   TCloseOperationReq(const TCloseOperationReq&);
   TCloseOperationReq& operator=(const TCloseOperationReq&);
   TCloseOperationReq() {
@@ -3410,18 +3593,21 @@ class TCloseOperationReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCloseOperationReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TCloseOperationReq &a, TCloseOperationReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TCloseOperationReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TCloseOperationResp {
  public:
 
-  static const char* ascii_fingerprint; // = "7142E89F09DC7C5F6FA916C7393F46C2";
-  static const uint8_t binary_fingerprint[16]; // = {0x71,0x42,0xE8,0x9F,0x09,0xDC,0x7C,0x5F,0x6F,0xA9,0x16,0xC7,0x39,0x3F,0x46,0xC2};
-
   TCloseOperationResp(const TCloseOperationResp&);
   TCloseOperationResp& operator=(const TCloseOperationResp&);
   TCloseOperationResp() {
@@ -3447,18 +3633,21 @@ class TCloseOperationResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCloseOperationResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TCloseOperationResp &a, TCloseOperationResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TCloseOperationResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TGetResultSetMetadataReq {
  public:
 
-  static const char* ascii_fingerprint; // = "414FA38522AE6B9CEC1438B56CA1DE5A";
-  static const uint8_t binary_fingerprint[16]; // = {0x41,0x4F,0xA3,0x85,0x22,0xAE,0x6B,0x9C,0xEC,0x14,0x38,0xB5,0x6C,0xA1,0xDE,0x5A};
-
   TGetResultSetMetadataReq(const TGetResultSetMetadataReq&);
   TGetResultSetMetadataReq& operator=(const TGetResultSetMetadataReq&);
   TGetResultSetMetadataReq() {
@@ -3484,11 +3673,17 @@ class TGetResultSetMetadataReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetResultSetMetadataReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetResultSetMetadataReq &a, TGetResultSetMetadataReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetResultSetMetadataReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetResultSetMetadataResp__isset {
   _TGetResultSetMetadataResp__isset() : schema(false) {}
   bool schema :1;
@@ -3497,9 +3692,6 @@ typedef struct _TGetResultSetMetadataResp__isset {
 class TGetResultSetMetadataResp {
  public:
 
-  static const char* ascii_fingerprint; // = "42CD49B7F49CC1B6D4E6F5FA2D7BE812";
-  static const uint8_t binary_fingerprint[16]; // = {0x42,0xCD,0x49,0xB7,0xF4,0x9C,0xC1,0xB6,0xD4,0xE6,0xF5,0xFA,0x2D,0x7B,0xE8,0x12};
-
   TGetResultSetMetadataResp(const TGetResultSetMetadataResp&);
   TGetResultSetMetadataResp& operator=(const TGetResultSetMetadataResp&);
   TGetResultSetMetadataResp() {
@@ -3534,11 +3726,17 @@ class TGetResultSetMetadataResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetResultSetMetadataResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetResultSetMetadataResp &a, TGetResultSetMetadataResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetResultSetMetadataResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TFetchResultsReq__isset {
   _TFetchResultsReq__isset() : fetchType(true) {}
   bool fetchType :1;
@@ -3547,9 +3745,6 @@ typedef struct _TFetchResultsReq__isset {
 class TFetchResultsReq {
  public:
 
-  static const char* ascii_fingerprint; // = "B4CB1E4F8F8F4D50183DD372AD11753A";
-  static const uint8_t binary_fingerprint[16]; // = {0xB4,0xCB,0x1E,0x4F,0x8F,0x8F,0x4D,0x50,0x18,0x3D,0xD3,0x72,0xAD,0x11,0x75,0x3A};
-
   TFetchResultsReq(const TFetchResultsReq&);
   TFetchResultsReq& operator=(const TFetchResultsReq&);
   TFetchResultsReq() : orientation((TFetchOrientation::type)0), maxRows(0), fetchType(0) {
@@ -3596,11 +3791,17 @@ class TFetchResultsReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TFetchResultsReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TFetchResultsReq &a, TFetchResultsReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TFetchResultsReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TFetchResultsResp__isset {
   _TFetchResultsResp__isset() : hasMoreRows(false), results(false) {}
   bool hasMoreRows :1;
@@ -3610,9 +3811,6 @@ typedef struct _TFetchResultsResp__isset {
 class TFetchResultsResp {
  public:
 
-  static const char* ascii_fingerprint; // = "FC43BC2D6F3B76D4DB0F34226A745C8E";
-  static const uint8_t binary_fingerprint[16]; // = {0xFC,0x43,0xBC,0x2D,0x6F,0x3B,0x76,0xD4,0xDB,0x0F,0x34,0x22,0x6A,0x74,0x5C,0x8E};
-
   TFetchResultsResp(const TFetchResultsResp&);
   TFetchResultsResp& operator=(const TFetchResultsResp&);
   TFetchResultsResp() : hasMoreRows(0) {
@@ -3654,18 +3852,21 @@ class TFetchResultsResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TFetchResultsResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TFetchResultsResp &a, TFetchResultsResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TFetchResultsResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TGetDelegationTokenReq {
  public:
 
-  static const char* ascii_fingerprint; // = "07EA0311716A27924914E4354ED22D6C";
-  static const uint8_t binary_fingerprint[16]; // = {0x07,0xEA,0x03,0x11,0x71,0x6A,0x27,0x92,0x49,0x14,0xE4,0x35,0x4E,0xD2,0x2D,0x6C};
-
   TGetDelegationTokenReq(const TGetDelegationTokenReq&);
   TGetDelegationTokenReq& operator=(const TGetDelegationTokenReq&);
   TGetDelegationTokenReq() : owner(), renewer() {
@@ -3701,11 +3902,17 @@ class TGetDelegationTokenReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetDelegationTokenReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetDelegationTokenReq &a, TGetDelegationTokenReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetDelegationTokenReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _TGetDelegationTokenResp__isset {
   _TGetDelegationTokenResp__isset() : delegationToken(false) {}
   bool delegationToken :1;
@@ -3714,9 +3921,6 @@ typedef struct _TGetDelegationTokenResp__isset {
 class TGetDelegationTokenResp {
  public:
 
-  static const char* ascii_fingerprint; // = "C0E132DC412CEA08D771EAC38CEA1DA6";
-  static const uint8_t binary_fingerprint[16]; // = {0xC0,0xE1,0x32,0xDC,0x41,0x2C,0xEA,0x08,0xD7,0x71,0xEA,0xC3,0x8C,0xEA,0x1D,0xA6};
-
   TGetDelegationTokenResp(const TGetDelegationTokenResp&);
   TGetDelegationTokenResp& operator=(const TGetDelegationTokenResp&);
   TGetDelegationTokenResp() : delegationToken() {
@@ -3751,18 +3955,21 @@ class TGetDelegationTokenResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TGetDelegationTokenResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TGetDelegationTokenResp &a, TGetDelegationTokenResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TGetDelegationTokenResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TCancelDelegationTokenReq {
  public:
 
-  static const char* ascii_fingerprint; // = "1A3D66269336B7EC66998BFE1BECDE75";
-  static const uint8_t binary_fingerprint[16]; // = {0x1A,0x3D,0x66,0x26,0x93,0x36,0xB7,0xEC,0x66,0x99,0x8B,0xFE,0x1B,0xEC,0xDE,0x75};
-
   TCancelDelegationTokenReq(const TCancelDelegationTokenReq&);
   TCancelDelegationTokenReq& operator=(const TCancelDelegationTokenReq&);
   TCancelDelegationTokenReq() : delegationToken() {
@@ -3793,18 +4000,21 @@ class TCancelDelegationTokenReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCancelDelegationTokenReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TCancelDelegationTokenReq &a, TCancelDelegationTokenReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TCancelDelegationTokenReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TCancelDelegationTokenResp {
  public:
 
-  static const char* ascii_fingerprint; // = "7142E89F09DC7C5F6FA916C7393F46C2";
-  static const uint8_t binary_fingerprint[16]; // = {0x71,0x42,0xE8,0x9F,0x09,0xDC,0x7C,0x5F,0x6F,0xA9,0x16,0xC7,0x39,0x3F,0x46,0xC2};
-
   TCancelDelegationTokenResp(const TCancelDelegationTokenResp&);
   TCancelDelegationTokenResp& operator=(const TCancelDelegationTokenResp&);
   TCancelDelegationTokenResp() {
@@ -3830,18 +4040,21 @@ class TCancelDelegationTokenResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TCancelDelegationTokenResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TCancelDelegationTokenResp &a, TCancelDelegationTokenResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TCancelDelegationTokenResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TRenewDelegationTokenReq {
  public:
 
-  static const char* ascii_fingerprint; // = "1A3D66269336B7EC66998BFE1BECDE75";
-  static const uint8_t binary_fingerprint[16]; // = {0x1A,0x3D,0x66,0x26,0x93,0x36,0xB7,0xEC,0x66,0x99,0x8B,0xFE,0x1B,0xEC,0xDE,0x75};
-
   TRenewDelegationTokenReq(const TRenewDelegationTokenReq&);
   TRenewDelegationTokenReq& operator=(const TRenewDelegationTokenReq&);
   TRenewDelegationTokenReq() : delegationToken() {
@@ -3872,18 +4085,21 @@ class TRenewDelegationTokenReq {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TRenewDelegationTokenReq& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TRenewDelegationTokenReq &a, TRenewDelegationTokenReq &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TRenewDelegationTokenReq& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 
 class TRenewDelegationTokenResp {
  public:
 
-  static const char* ascii_fingerprint; // = "7142E89F09DC7C5F6FA916C7393F46C2";
-  static const uint8_t binary_fingerprint[16]; // = {0x71,0x42,0xE8,0x9F,0x09,0xDC,0x7C,0x5F,0x6F,0xA9,0x16,0xC7,0x39,0x3F,0x46,0xC2};
-
   TRenewDelegationTokenResp(const TRenewDelegationTokenResp&);
   TRenewDelegationTokenResp& operator=(const TRenewDelegationTokenResp&);
   TRenewDelegationTokenResp() {
@@ -3909,11 +4125,17 @@ class TRenewDelegationTokenResp {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const TRenewDelegationTokenResp& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(TRenewDelegationTokenResp &a, TRenewDelegationTokenResp &b);
 
+inline std::ostream& operator<<(std::ostream& out, const TRenewDelegationTokenResp& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 }}}}} // namespace
 
 #endif


[30/55] [abbrv] hive git commit: HIVE-11540 - Too many delta files during Compaction - OOM (Eugene Koifman, reviewed by Alan Gates)

Posted by xu...@apache.org.
HIVE-11540 - Too many delta files during Compaction - OOM (Eugene Koifman, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e3ef96f2
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e3ef96f2
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e3ef96f2

Branch: refs/heads/spark
Commit: e3ef96f2b83ffa932dd59fc3df79dff8747309ba
Parents: 24ec6be
Author: Eugene Koifman <ek...@hortonworks.com>
Authored: Sat Oct 24 18:44:05 2015 -0700
Committer: Eugene Koifman <ek...@hortonworks.com>
Committed: Sat Oct 24 18:44:05 2015 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   2 +
 .../org/apache/hadoop/hive/ql/io/AcidUtils.java |  15 ++-
 .../hive/ql/txn/compactor/CompactorMR.java      |  96 ++++++++++-----
 .../hadoop/hive/ql/txn/compactor/Worker.java    |   6 +-
 .../hive/ql/txn/compactor/CompactorTest.java    |   4 +
 .../hive/ql/txn/compactor/TestWorker.java       | 120 +++++++++++++++++--
 6 files changed, 201 insertions(+), 42 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/e3ef96f2/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index f065048..dc79415 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1551,6 +1551,8 @@ public class HiveConf extends Configuration {
     HIVE_COMPACTOR_DELTA_PCT_THRESHOLD("hive.compactor.delta.pct.threshold", 0.1f,
         "Percentage (fractional) size of the delta files relative to the base that will trigger\n" +
         "a major compaction. (1.0 = 100%, so the default 0.1 = 10%.)"),
+    COMPACTOR_MAX_NUM_DELTA("hive.compactor.max.num.delta", 500, "Maximum number of delta files that " +
+      "the compactor will attempt to handle in a single job."),
 
     HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD("hive.compactor.abortedtxn.threshold", 1000,
         "Number of aborted transactions involving a given table or partition that will trigger\n" +

http://git-wip-us.apache.org/repos/asf/hive/blob/e3ef96f2/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
index 30db513..e8d070c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
@@ -132,6 +132,9 @@ public class AcidUtils {
     return deltaSubdir(min, max) + "_" + String.format(STATEMENT_DIGITS, statementId);
   }
 
+  public static String baseDir(long txnId) {
+    return BASE_PREFIX + String.format(DELTA_DIGITS, txnId);
+  }
   /**
    * Create a filename for a bucket file.
    * @param directory the partition directory
@@ -221,14 +224,16 @@ public class AcidUtils {
     Path getBaseDirectory();
 
     /**
-     * Get the list of original files.
+     * Get the list of original files.  Not {@code null}.
      * @return the list of original files (eg. 000000_0)
      */
     List<HdfsFileStatusWithId> getOriginalFiles();
 
     /**
      * Get the list of base and delta directories that are valid and not
-     * obsolete.
+     * obsolete.  Not {@code null}.  List must be sorted in a specific way.
+     * See {@link org.apache.hadoop.hive.ql.io.AcidUtils.ParsedDelta#compareTo(org.apache.hadoop.hive.ql.io.AcidUtils.ParsedDelta)}
+     * for details.
      * @return the minimal list of current directories
      */
     List<ParsedDelta> getCurrentDirectories();
@@ -237,7 +242,7 @@ public class AcidUtils {
      * Get the list of obsolete directories. After filtering out bases and
      * deltas that are not selected by the valid transaction list, return the
      * list of original files, bases, and deltas that have been replaced by
-     * more up to date ones.
+     * more up to date ones.  Not {@code null}.
      */
     List<FileStatus> getObsolete();
   }
@@ -284,6 +289,7 @@ public class AcidUtils {
      * happens in a different process; thus it's possible to have bases/deltas with
      * overlapping txnId boundaries.  The sort order helps figure out the "best" set of files
      * to use to get data.
+     * This sorts "wider" delta before "narrower" i.e. delta_5_20 sorts before delta_5_10 (and delta_11_20)
      */
     @Override
     public int compareTo(ParsedDelta parsedDelta) {
@@ -499,6 +505,9 @@ public class AcidUtils {
     }
 
     Collections.sort(working);
+    //so now, 'working' should be sorted like delta_5_20 delta_5_10 delta_11_20 delta_51_60 for example
+    //and we want to end up with the best set containing all relevant data: delta_5_20 delta_51_60,
+    //subject to the list of 'exceptions' in 'txnList' (not shown in the above example).
     long current = bestBase.txn;
     int lastStmtId = -1;
     for(ParsedDelta next: working) {
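
Illustration (not part of this commit): the compareTo contract documented above orders deltas by minimum transaction id, with the "wider" range winning ties. Below is a standalone sketch of that ordering on plain directory names; it is not the real ParsedDelta.compareTo, and the helper class is hypothetical.

    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.List;

    public class DeltaOrderSketch {
      // Parse the min/max txn ids out of a name such as "delta_5_20".
      static long minTxn(String d) { return Long.parseLong(d.split("_")[1]); }
      static long maxTxn(String d) { return Long.parseLong(d.split("_")[2]); }

      public static void main(String[] args) {
        List<String> working = Arrays.asList(
            "delta_11_20", "delta_5_10", "delta_51_60", "delta_5_20");
        working.sort(Comparator.<String>comparingLong(DeltaOrderSketch::minTxn)
            .thenComparing(Comparator.<String>comparingLong(DeltaOrderSketch::maxTxn).reversed()));
        // Prints [delta_5_20, delta_5_10, delta_11_20, delta_51_60],
        // i.e. the wider delta_5_20 sorts before delta_5_10 and delta_11_20.
        System.out.println(working);
      }
    }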

http://git-wip-us.apache.org/repos/asf/hive/blob/e3ef96f2/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
index 391f99a..bab01a9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hive.metastore.api.CompactionType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.txn.CompactionInfo;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
 import org.apache.hadoop.hive.ql.io.AcidInputFormat;
 import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
@@ -94,18 +95,8 @@ public class CompactorMR {
   public CompactorMR() {
   }
 
-  /**
-   * Run a compactor job.
-   * @param conf Hive configuration file
-   * @param jobName name to run this job with
-   * @param t metastore table
-   * @param sd metastore storage descriptor
-   * @param txns list of valid transactions
-   * @param isMajor is this a major compaction?
-   * @throws java.io.IOException if the job fails
-   */
-  void run(HiveConf conf, String jobName, Table t, StorageDescriptor sd,
-           ValidTxnList txns, boolean isMajor, Worker.StatsUpdater su) throws IOException {
+  private JobConf createBaseJobConf(HiveConf conf, String jobName, Table t, StorageDescriptor sd,
+                                    ValidTxnList txns) {
     JobConf job = new JobConf(conf);
     job.setJobName(jobName);
     job.setOutputKeyClass(NullWritable.class);
@@ -117,7 +108,7 @@ public class CompactorMR {
     job.setInputFormat(CompactorInputFormat.class);
     job.setOutputFormat(NullOutputFormat.class);
     job.setOutputCommitter(CompactorOutputCommitter.class);
-    
+
     String queueName = conf.getVar(HiveConf.ConfVars.COMPACTOR_JOB_QUEUE);
     if(queueName != null && queueName.length() > 0) {
       job.setQueueName(queueName);
@@ -127,23 +118,63 @@ public class CompactorMR {
     job.set(TMP_LOCATION, sd.getLocation() + "/" + TMPDIR + "_" + UUID.randomUUID().toString());
     job.set(INPUT_FORMAT_CLASS_NAME, sd.getInputFormat());
     job.set(OUTPUT_FORMAT_CLASS_NAME, sd.getOutputFormat());
-    job.setBoolean(IS_MAJOR, isMajor);
     job.setBoolean(IS_COMPRESSED, sd.isCompressed());
     job.set(TABLE_PROPS, new StringableMap(t.getParameters()).toString());
     job.setInt(NUM_BUCKETS, sd.getNumBuckets());
     job.set(ValidTxnList.VALID_TXNS_KEY, txns.toString());
     setColumnTypes(job, sd.getCols());
+    return job;
+  }
+  /**
+   * Run Compaction which may consist of several jobs on the cluster.
+   * @param conf Hive configuration file
+   * @param jobName name to run this job with
+   * @param t metastore table
+   * @param sd metastore storage descriptor
+   * @param txns list of valid transactions
+   * @param ci CompactionInfo
+   * @throws java.io.IOException if the job fails
+   */
+  void run(HiveConf conf, String jobName, Table t, StorageDescriptor sd,
+           ValidTxnList txns, CompactionInfo ci, Worker.StatsUpdater su) throws IOException {
+    JobConf job = createBaseJobConf(conf, jobName, t, sd, txns);
 
     // Figure out and encode what files we need to read.  We do this here (rather than in
     // getSplits below) because as part of this we discover our minimum and maximum transactions,
     // and discovering that in getSplits is too late as we then have no way to pass it to our
     // mapper.
 
-    AcidUtils.Directory dir = AcidUtils.getAcidState(
-        new Path(sd.getLocation()), conf, txns, false);
+    AcidUtils.Directory dir = AcidUtils.getAcidState(new Path(sd.getLocation()), conf, txns, false);
+    List<AcidUtils.ParsedDelta> parsedDeltas = dir.getCurrentDirectories();
+    int maxDeltastoHandle = conf.getIntVar(HiveConf.ConfVars.COMPACTOR_MAX_NUM_DELTA);
+    if(parsedDeltas.size() > maxDeltastoHandle) {
+      /**
+       * if here, that means we have very high number of delta files.  This may be sign of a temporary
+       * glitch or a real issue.  For example, if transaction batch size or transaction size is set too
+       * low for the event flow rate in Streaming API, it may generate lots of delta files very
+       * quickly.  Another possibility is that Compaction is repeatedly failing and not actually compacting.
+       * Thus, force N minor compactions first to reduce number of deltas and then follow up with
+       * the compaction actually requested in {@link ci} which now needs to compact a lot fewer deltas
+       */
+      LOG.warn(parsedDeltas.size() + " delta files found for " + ci.getFullPartitionName()
+        + " located at " + sd.getLocation() + "! This is likely a sign of misconfiguration, " +
+        "especially if this message repeats.  Check that compaction is running properly.  Check for any " +
+        "runaway/mis-configured process writing to ACID tables, especially using Streaming Ingest API.");
+      int numMinorCompactions = parsedDeltas.size() / maxDeltastoHandle;
+      for(int jobSubId = 0; jobSubId < numMinorCompactions; jobSubId++) {
+        JobConf jobMinorCompact = createBaseJobConf(conf, jobName + "_" + jobSubId, t, sd, txns);
+        launchCompactionJob(jobMinorCompact,
+          null, CompactionType.MINOR, null,
+          parsedDeltas.subList(jobSubId * maxDeltastoHandle, (jobSubId + 1) * maxDeltastoHandle),
+          maxDeltastoHandle, -1);
+      }
+      //now recompute state since we've done minor compactions and have different 'best' set of deltas
+      dir = AcidUtils.getAcidState(new Path(sd.getLocation()), conf, txns);
+    }
+
     StringableList dirsToSearch = new StringableList();
     Path baseDir = null;
-    if (isMajor) {
+    if (ci.isMajorCompaction()) {
       // There may not be a base dir if the partition was empty before inserts or if this
       // partition is just now being converted to ACID.
       baseDir = dir.getBaseDirectory();
@@ -166,14 +197,26 @@ public class CompactorMR {
       }
     }
 
-    List<AcidUtils.ParsedDelta> parsedDeltas = dir.getCurrentDirectories();
-
-    if (parsedDeltas == null || parsedDeltas.size() == 0) {
+    if (parsedDeltas.size() == 0) {
       // Seriously, no deltas?  Can't compact that.
       LOG.error(  "No delta files found to compact in " + sd.getLocation());
+      //couldn't someone want to run a Major compaction to convert old table to ACID?
       return;
     }
 
+    launchCompactionJob(job, baseDir, ci.type, dirsToSearch, dir.getCurrentDirectories(),
+      dir.getCurrentDirectories().size(), dir.getObsolete().size());
+
+    su.gatherStats();
+  }
+  private void launchCompactionJob(JobConf job, Path baseDir, CompactionType compactionType,
+                                   StringableList dirsToSearch,
+                                   List<AcidUtils.ParsedDelta> parsedDeltas,
+                                   int curDirNumber, int obsoleteDirNumber) throws IOException {
+    job.setBoolean(IS_MAJOR, compactionType == CompactionType.MAJOR);
+    if(dirsToSearch == null) {
+      dirsToSearch = new StringableList();
+    }
     StringableList deltaDirs = new StringableList();
     long minTxn = Long.MAX_VALUE;
     long maxTxn = Long.MIN_VALUE;
@@ -190,18 +233,15 @@ public class CompactorMR {
     job.set(DIRS_TO_SEARCH, dirsToSearch.toString());
     job.setLong(MIN_TXN, minTxn);
     job.setLong(MAX_TXN, maxTxn);
-    LOG.debug("Setting minimum transaction to " + minTxn);
-    LOG.debug("Setting maximume transaction to " + maxTxn);
 
+    LOG.info("Submitting " + compactionType + " compaction job '" +
+      job.getJobName() + "' to " + job.getQueueName() + " queue.  " +
+      "(current delta dirs count=" + curDirNumber +
+      ", obsolete delta dirs count=" + obsoleteDirNumber + ". TxnIdRange[" + minTxn + "," + maxTxn + "]");
     RunningJob rj = JobClient.runJob(job);
-    LOG.info("Submitted " + (isMajor ? CompactionType.MAJOR : CompactionType.MINOR) + " compaction job '" +
-      jobName + "' with jobID=" + rj.getID() + " to " + job.getQueueName() + " queue.  " +
-      "(current delta dirs count=" + dir.getCurrentDirectories().size() +
-      ", obsolete delta dirs count=" + dir.getObsolete());
+    LOG.info("Submitted compaction job '" + job.getJobName() + "' with jobID=" + rj.getID());
     rj.waitForCompletion();
-    su.gatherStats();
   }
-
   /**
    * Set the column names and types into the job conf for the input format
    * to use.
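
Illustration (not part of this commit): the sub-list arithmetic in run() above splits an oversized delta list into full batches of COMPACTOR_MAX_NUM_DELTA deltas; whatever does not fill a complete batch is left for the originally requested compaction, which reruns getAcidState() and therefore also picks up the freshly produced compacted deltas. A rough sketch with made-up directory names and a cap of 2:

    import java.util.Arrays;
    import java.util.List;

    public class DeltaBatchSketch {
      public static void main(String[] args) {
        List<String> parsedDeltas = Arrays.asList(
            "delta_21_21", "delta_23_23", "delta_25_29", "delta_31_33", "delta_35_35");
        int maxDeltasToHandle = 2;   // stand-in for COMPACTOR_MAX_NUM_DELTA
        int numMinorCompactions = parsedDeltas.size() / maxDeltasToHandle;   // 5 / 2 = 2
        for (int jobSubId = 0; jobSubId < numMinorCompactions; jobSubId++) {
          List<String> batch = parsedDeltas.subList(
              jobSubId * maxDeltasToHandle, (jobSubId + 1) * maxDeltasToHandle);
          System.out.println("preliminary minor compaction " + jobSubId + ": " + batch);
        }
        // delta_35_35 (the incomplete batch) plus the two new compacted deltas are
        // then handled by the compaction that was actually requested.
      }
    }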

http://git-wip-us.apache.org/repos/asf/hive/blob/e3ef96f2/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
index 0548117..cc7441a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
@@ -77,7 +77,7 @@ public class Worker extends CompactorThread {
       // Make sure nothing escapes this run method and kills the metastore at large,
       // so wrap it in a big catch Throwable statement.
       try {
-        CompactionInfo ci = txnHandler.findNextToCompact(name);
+        final CompactionInfo ci = txnHandler.findNextToCompact(name);
 
         if (ci == null && !stop.get()) {
           try {
@@ -158,14 +158,14 @@ public class Worker extends CompactorThread {
         launchedJob = true;
         try {
           if (runJobAsSelf(runAs)) {
-            mr.run(conf, jobName.toString(), t, sd, txns, isMajor, su);
+            mr.run(conf, jobName.toString(), t, sd, txns, ci, su);
           } else {
             UserGroupInformation ugi = UserGroupInformation.createProxyUser(t.getOwner(),
               UserGroupInformation.getLoginUser());
             ugi.doAs(new PrivilegedExceptionAction<Object>() {
               @Override
               public Object run() throws Exception {
-                mr.run(conf, jobName.toString(), t, sd, txns, isMajor, su);
+                mr.run(conf, jobName.toString(), t, sd, txns, ci, su);
                 return null;
               }
             });

http://git-wip-us.apache.org/repos/asf/hive/blob/e3ef96f2/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java
index 5a8c932..39c0571 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java
@@ -516,6 +516,10 @@ public abstract class CompactorTest {
   abstract boolean useHive130DeltaDirName();
 
   String makeDeltaDirName(long minTxnId, long maxTxnId) {
+    if(minTxnId != maxTxnId) {
+      //covers both streaming api and post compaction style.
+      return makeDeltaDirNameCompacted(minTxnId, maxTxnId);
+    }
     return useHive130DeltaDirName() ?
       AcidUtils.deltaSubdir(minTxnId, maxTxnId, 0) : AcidUtils.deltaSubdir(minTxnId, maxTxnId);
   }
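
Illustration (not part of this commit): the directory-name helpers these tests lean on. The exact zero padding comes from AcidUtils' internal format strings (7-digit transaction ids, 4-digit statement ids in this branch), so the expected outputs in the comments are best-effort rather than guaranteed.

    import org.apache.hadoop.hive.ql.io.AcidUtils;

    public class DeltaNameSketch {
      public static void main(String[] args) {
        // pre-1.3.0 / post-compaction style, no statement suffix
        System.out.println(AcidUtils.deltaSubdir(21, 22));     // e.g. delta_0000021_0000022
        // 1.3.0+ style written by a single statement
        System.out.println(AcidUtils.deltaSubdir(21, 22, 0));  // e.g. delta_0000021_0000022_0000
        // baseDir() is new in this patch: base directory after a major compaction up to txn 35
        System.out.println(AcidUtils.baseDir(35));             // e.g. base_0000035
      }
    }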

http://git-wip-us.apache.org/repos/asf/hive/blob/e3ef96f2/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
index 11e5333..245e839 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
@@ -22,6 +22,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.*;
+import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
@@ -29,6 +30,7 @@ import org.junit.Test;
 import java.io.*;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.BitSet;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -37,6 +39,10 @@ import java.util.Map;
 
 /**
  * Tests for the worker thread and its MR jobs.
+ * todo: most delta files in this test suite use txn id range, i.e. [N,N+M]
+ * That means that they all look like they were created by compaction or by streaming api.
+ * Delta files created by SQL should have [N,N] range (and a suffix in v1.3 and later)
+ * Need to change some of these to have better test coverage.
  */
 public class TestWorker extends CompactorTest {
   static final private String CLASS_NAME = TestWorker.class.getName();
@@ -325,18 +331,14 @@ public class TestWorker extends CompactorTest {
     // There should still now be 5 directories in the location
     FileSystem fs = FileSystem.get(conf);
     FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
-    boolean is130 = this instanceof TestWorker2;
-    Assert.assertEquals(is130 ? 5 : 4, stat.length);
+    Assert.assertEquals(4, stat.length);
 
     // Find the new delta file and make sure it has the right contents
     Arrays.sort(stat);
     Assert.assertEquals("base_20", stat[0].getPath().getName());
-    if(is130) {//in1.3.0 orig delta is delta_00021_00022_0000 and compacted one is delta_00021_00022...
-      Assert.assertEquals(makeDeltaDirNameCompacted(21, 22), stat[1].getPath().getName());
-    }
-    Assert.assertEquals(makeDeltaDirName(21, 22), stat[1 + (is130 ? 1 : 0)].getPath().getName());
-    Assert.assertEquals(makeDeltaDirName(23, 25), stat[2 + (is130 ? 1 : 0)].getPath().getName());
-    Assert.assertEquals(makeDeltaDirName(26, 27), stat[3 + (is130 ? 1 : 0)].getPath().getName());
+    Assert.assertEquals(makeDeltaDirNameCompacted(21, 22), stat[1].getPath().getName());
+    Assert.assertEquals(makeDeltaDirName(23, 25), stat[2].getPath().getName());
+    Assert.assertEquals(makeDeltaDirName(26, 27), stat[3].getPath().getName());
   }
 
   @Test
@@ -508,6 +510,108 @@ public class TestWorker extends CompactorTest {
   }
 
   @Test
+  public void minorNoBaseLotsOfDeltas() throws Exception {
+    compactNoBaseLotsOfDeltas(CompactionType.MINOR);
+  }
+  @Test
+  public void majorNoBaseLotsOfDeltas() throws Exception {
+    compactNoBaseLotsOfDeltas(CompactionType.MAJOR);
+  }
+  private void compactNoBaseLotsOfDeltas(CompactionType type) throws Exception {
+    conf.setIntVar(HiveConf.ConfVars.COMPACTOR_MAX_NUM_DELTA, 2);
+    Table t = newTable("default", "mapwb", true);
+    Partition p = newPartition(t, "today");
+
+//    addBaseFile(t, p, 20L, 20);
+    addDeltaFile(t, p, 21L, 21L, 2);
+    addDeltaFile(t, p, 23L, 23L, 2);
+    //make it look like streaming API use case
+    addDeltaFile(t, p, 25L, 29L, 2);
+    addDeltaFile(t, p, 31L, 32L, 3);
+    //make it look like 31-32 has been compacted, but not cleaned
+    addDeltaFile(t, p, 31L, 33L, 5);
+    addDeltaFile(t, p, 35L, 35L, 1);
+
+    /*since COMPACTOR_MAX_NUM_DELTA=2,
+    we expect files 1,2 to be minor compacted by 1 job to produce delta_21_23
+    * 3,5 to be minor compacted by 2nd job (file 4 is obsolete) to make delta_25_33 (4th is skipped)
+    *
+    * and then the 'requested'
+    * minor compaction to combine delta_21_23, delta_25_33 and delta_35_35 to make delta_21_35
+    * or major compaction to create base_35*/
+    burnThroughTransactions(35);
+    CompactionRequest rqst = new CompactionRequest("default", "mapwb", type);
+    rqst.setPartitionname("ds=today");
+    txnHandler.compact(rqst);
+
+    startWorker();
+
+    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
+    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
+    Assert.assertEquals(1, compacts.size());
+    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());
+
+    FileSystem fs = FileSystem.get(conf);
+    FileStatus[] stat = fs.listStatus(new Path(p.getSd().getLocation()));
+    Assert.assertEquals(9, stat.length);
+
+    // Find the new delta file and make sure it has the right contents
+    BitSet matchesFound = new BitSet(9);
+    for (int i = 0; i < stat.length; i++) {
+      if(stat[i].getPath().getName().equals(makeDeltaDirName(21,21))) {
+        matchesFound.set(0);
+      }
+      else if(stat[i].getPath().getName().equals(makeDeltaDirName(23, 23))) {
+        matchesFound.set(1);
+      }
+      else if(stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(25, 29))) {
+        matchesFound.set(2);
+      }
+      else if(stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(31, 32))) {
+        matchesFound.set(3);
+      }
+      else if(stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(31, 33))) {
+        matchesFound.set(4);
+      }
+      else if(stat[i].getPath().getName().equals(makeDeltaDirName(35, 35))) {
+        matchesFound.set(5);
+      }
+      else if(stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21,23))) {
+        matchesFound.set(6);
+      }
+      else if(stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(25,33))) {
+        matchesFound.set(7);
+      }
+      switch (type) {
+        //yes, both do set(8)
+        case MINOR:
+          if(stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21,35))) {
+            matchesFound.set(8);
+          }
+          break;
+        case MAJOR:
+          if(stat[i].getPath().getName().equals(AcidUtils.baseDir(35))) {
+            matchesFound.set(8);
+          }
+          break;
+        default:
+          throw new IllegalStateException();
+      }
+    }
+    StringBuilder sb = null;
+    for(int i = 0; i < stat.length; i++) {
+      if(!matchesFound.get(i)) {
+        if(sb == null) {
+          sb = new StringBuilder("Some files are missing at index: ");
+        }
+        sb.append(i).append(",");
+      }
+    }
+    if (sb != null) {
+      Assert.assertTrue(sb.toString(), false);
+    }
+  }
+  @Test
   public void majorPartitionWithBase() throws Exception {
     LOG.debug("Starting majorPartitionWithBase");
     Table t = newTable("default", "mapwb", true);


[26/55] [abbrv] hive git commit: HIVE-12039 : Fix TestSSL#testSSLVersion (Vaibhav Gumashta via Ashutosh Chauhan)

Posted by xu...@apache.org.
HIVE-12039 : Fix TestSSL#testSSLVersion (Vaibhav Gumashta via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan <ha...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e8f71f4d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e8f71f4d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e8f71f4d

Branch: refs/heads/spark
Commit: e8f71f4d2bfcf6534fbbf3413a8b3c80698fcc46
Parents: ee2d318
Author: Vaibhav Gumashta <vg...@apache.org>
Authored: Thu Oct 8 12:43:00 2015 -0800
Committer: Ashutosh Chauhan <ha...@apache.org>
Committed: Sat Oct 24 14:41:06 2015 -0700

----------------------------------------------------------------------
 .../test/java/org/apache/hive/jdbc/TestSSL.java | 44 +++++++++-----------
 1 file changed, 20 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/e8f71f4d/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
index 28a3777..b66ffda 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
@@ -55,15 +55,15 @@ public class TestSSL {
   private static final String HS2_HTTP_MODE = "http";
   private static final String HS2_HTTP_ENDPOINT = "cliservice";
   private static final String HS2_BINARY_AUTH_MODE = "NONE";
-  private static final String HS2_HTTP_AUTH_MODE = "NOSASL";
 
   private MiniHS2 miniHS2 = null;
   private static HiveConf conf = new HiveConf();
   private Connection hs2Conn = null;
   private String dataFileDir = conf.get("test.data.files");
   private Map<String, String> confOverlay;
-  private final String SSL_CONN_PARAMS = ";ssl=true;sslTrustStore=" + URLEncoder.encode(dataFileDir + File.separator +
-      TRUST_STORE_NAME) + ";trustStorePassword=" + KEY_STORE_PASSWORD;
+  private final String SSL_CONN_PARAMS = ";ssl=true;sslTrustStore="
+      + URLEncoder.encode(dataFileDir + File.separator + TRUST_STORE_NAME) + ";trustStorePassword="
+      + KEY_STORE_PASSWORD;
 
   @BeforeClass
   public static void beforeTest() throws Exception {
@@ -111,9 +111,10 @@ public class TestSSL {
    */
   @Test
   public void testSSLVersion() throws Exception {
-    Assume.assumeTrue(execCommand("which openssl") == 0); // we need openssl
-    Assume.assumeTrue(System.getProperty("os.name").toLowerCase()
-      .contains("linux")); // we depend on linux openssl exit codes
+    // we need openssl
+    Assume.assumeTrue(execCommand("which openssl") == 0);
+    // we depend on linux openssl exit codes
+    Assume.assumeTrue(System.getProperty("os.name").toLowerCase().contains("linux"));
 
     setSslConfOverlay(confOverlay);
     // Test in binary mode
@@ -122,16 +123,15 @@ public class TestSSL {
     miniHS2.start(confOverlay);
 
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" +
-        dataFileDir + File.separator + TRUST_STORE_NAME + ";trustStorePassword=" +
-        KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar");
+    hs2Conn =
+        DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" + dataFileDir
+            + File.separator + TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD,
+            System.getProperty("user.name"), "bar");
     hs2Conn.close();
-    Assert.assertEquals("Expected exit code of 1", 1,
-      execCommand("openssl s_client -connect " + miniHS2.getHost() + ":" + miniHS2.getBinaryPort()
-      + " -ssl2 < /dev/null"));
-    Assert.assertEquals("Expected exit code of 1", 1,
-      execCommand("openssl s_client -connect " + miniHS2.getHost() + ":" + miniHS2.getBinaryPort()
-      + " -ssl3 < /dev/null"));
+    Assert.assertEquals("Expected exit code of 1", 1, execCommand("openssl s_client -connect "
+        + miniHS2.getHost() + ":" + miniHS2.getBinaryPort() + " -ssl2 < /dev/null"));
+    Assert.assertEquals("Expected exit code of 1", 1, execCommand("openssl s_client -connect "
+        + miniHS2.getHost() + ":" + miniHS2.getBinaryPort() + " -ssl3 < /dev/null"));
     miniHS2.stop();
 
     // Test in http mode
@@ -139,12 +139,10 @@ public class TestSSL {
     miniHS2.start(confOverlay);
     // make SSL connection
     try {
-      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() +
-          ";ssl=true;sslTrustStore=" + dataFileDir + File.separator +
-          TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD +
-          "?hive.server2.transport.mode=" + HS2_HTTP_MODE +
-          ";hive.server2.thrift.http.path=" + HS2_HTTP_ENDPOINT,
-          System.getProperty("user.name"), "bar");
+      hs2Conn =
+          DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore="
+              + dataFileDir + File.separator + TRUST_STORE_NAME + ";trustStorePassword="
+              + KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar");
       Assert.fail("Expected SQLException during connect");
     } catch (SQLException e) {
       LOG.info("Expected exception: " + e, e);
@@ -402,12 +400,10 @@ public class TestSSL {
     confOverlay.put(ConfVars.HIVE_SERVER2_USE_SSL.varname, "false");
   }
 
-  // Currently http mode works with server in NOSASL auth mode & doesn't support doAs
   private void setHttpConfOverlay(Map<String, String> confOverlay) {
     confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, HS2_HTTP_MODE);
     confOverlay.put(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname, HS2_HTTP_ENDPOINT);
-    confOverlay.put(ConfVars.HIVE_SERVER2_AUTHENTICATION.varname,  HS2_HTTP_AUTH_MODE);
-    confOverlay.put(ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "false");
+    confOverlay.put(ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "true");
   }
 
   private void setBinaryConfOverlay(Map<String, String> confOverlay) {
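
Illustration (not part of this commit): the JDBC parameters the SSL tests exercise. A minimal sketch assuming the Hive JDBC driver is on the classpath; host, port, truststore path and passwords are placeholders, not values from the test.

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class SslJdbcSketch {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        String url = "jdbc:hive2://localhost:10000/default"
            + ";ssl=true"
            + ";sslTrustStore=/path/to/truststore.jks"
            + ";trustStorePassword=changeit";
        try (Connection conn = DriverManager.getConnection(
            url, System.getProperty("user.name"), "")) {
          // A successful connect here means the TLS handshake and trust check passed.
          System.out.println("connected: " + !conn.isClosed());
        }
      }
    }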


[36/55] [abbrv] hive git commit: HIVE-12246: Orc FileDump fails with Missing CLI jar (Prasanth Jayachandran reviewed by Ferdinand Xu)

Posted by xu...@apache.org.
HIVE-12246: Orc FileDump fails with Missing CLI jar (Prasanth Jayachandran reviewed by Ferdinand Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/383d1ccc
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/383d1ccc
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/383d1ccc

Branch: refs/heads/spark
Commit: 383d1ccc8343168189e8aca97b7e247bdb000cd3
Parents: 2a0ea58
Author: Prasanth Jayachandran <j....@gmail.com>
Authored: Mon Oct 26 11:37:27 2015 -0500
Committer: Prasanth Jayachandran <j....@gmail.com>
Committed: Mon Oct 26 11:37:27 2015 -0500

----------------------------------------------------------------------
 bin/ext/util/execHiveCmd.sh | 19 ++++++++++++++++---
 1 file changed, 16 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/383d1ccc/bin/ext/util/execHiveCmd.sh
----------------------------------------------------------------------
diff --git a/bin/ext/util/execHiveCmd.sh b/bin/ext/util/execHiveCmd.sh
index e46ec3c..9a06ce0 100644
--- a/bin/ext/util/execHiveCmd.sh
+++ b/bin/ext/util/execHiveCmd.sh
@@ -13,15 +13,28 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+CLI_JAR="hive-cli-*.jar"
+BEELINE_JAR="hive-beeline-*.jar"
+
 execHiveCmd () {
   CLASS=$1;
   shift;
-  JAR=$1
-  shift;
+
+  # if jar is not passed as parameter use corresponding cli jar
+  if [ "$1" == "$CLI_JAR" ] || [ "$1" == "$BEELINE_JAR" ]; then
+    JAR="$1"
+    shift;
+  else
+    if [ "$USE_DEPRECATED_CLI" == "true" ]; then
+      JAR="$CLI_JAR"
+    else
+      JAR="$BEELINE_JAR"
+    fi
+  fi
 
   # cli specific code
   if [ ! -f ${HIVE_LIB}/$JAR ]; then
-    echo "Missing Hive CLI Jar"
+    echo "Missing $JAR Jar"
     exit 3;
   fi
 


[21/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/26535378
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/26535378
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/26535378

Branch: refs/heads/spark
Commit: 26535378f9ece1543d1a7200f6514a85a7f8090e
Parents: e9cdea9
Author: Sergey Shelukhin <se...@apache.org>
Authored: Fri Oct 23 14:29:23 2015 -0700
Committer: Sergey Shelukhin <se...@apache.org>
Committed: Fri Oct 23 14:31:04 2015 -0700

----------------------------------------------------------------------
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp  | 12960 ++++++++++++++++-
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.h    |  2479 +---
 .../thrift/gen-cpp/hive_metastore_constants.cpp |     2 +-
 .../thrift/gen-cpp/hive_metastore_constants.h   |     2 +-
 .../gen/thrift/gen-cpp/hive_metastore_types.cpp |  2269 ++-
 .../gen/thrift/gen-cpp/hive_metastore_types.h   |  1332 +-
 .../hive/metastore/api/AbortTxnRequest.java     |     6 +-
 .../metastore/api/AddDynamicPartitions.java     |     6 +-
 .../metastore/api/AddPartitionsRequest.java     |     8 +-
 .../hive/metastore/api/AddPartitionsResult.java |     4 +-
 .../hadoop/hive/metastore/api/AggrStats.java    |     6 +-
 .../metastore/api/AlreadyExistsException.java   |     4 +-
 .../metastore/api/BinaryColumnStatsData.java    |    10 +-
 .../metastore/api/BooleanColumnStatsData.java   |    10 +-
 .../hive/metastore/api/CheckLockRequest.java    |     6 +-
 .../metastore/api/ClearFileMetadataRequest.java |     4 +-
 .../metastore/api/ClearFileMetadataResult.java  |     4 +-
 .../hive/metastore/api/ColumnStatistics.java    |     4 +-
 .../metastore/api/ColumnStatisticsData.java     |     2 +-
 .../metastore/api/ColumnStatisticsDesc.java     |     8 +-
 .../hive/metastore/api/ColumnStatisticsObj.java |     4 +-
 .../hive/metastore/api/CommitTxnRequest.java    |     6 +-
 .../hive/metastore/api/CompactionRequest.java   |     4 +-
 .../hive/metastore/api/CompactionType.java      |     2 +-
 .../api/ConfigValSecurityException.java         |     4 +-
 .../api/CurrentNotificationEventId.java         |     6 +-
 .../hadoop/hive/metastore/api/Database.java     |     4 +-
 .../apache/hadoop/hive/metastore/api/Date.java  |     6 +-
 .../hive/metastore/api/DateColumnStatsData.java |     8 +-
 .../hadoop/hive/metastore/api/Decimal.java      |     6 +-
 .../metastore/api/DecimalColumnStatsData.java   |     8 +-
 .../metastore/api/DoubleColumnStatsData.java    |    12 +-
 .../hive/metastore/api/DropPartitionsExpr.java  |     6 +-
 .../metastore/api/DropPartitionsRequest.java    |    12 +-
 .../metastore/api/DropPartitionsResult.java     |     4 +-
 .../hive/metastore/api/EnvironmentContext.java  |     4 +-
 .../hive/metastore/api/EventRequestType.java    |     2 +-
 .../hadoop/hive/metastore/api/FieldSchema.java  |     4 +-
 .../metastore/api/FileMetadataExprType.java     |     2 +-
 .../hive/metastore/api/FireEventRequest.java    |     6 +-
 .../metastore/api/FireEventRequestData.java     |     2 +-
 .../hive/metastore/api/FireEventResponse.java   |     4 +-
 .../hadoop/hive/metastore/api/Function.java     |     6 +-
 .../hadoop/hive/metastore/api/FunctionType.java |     2 +-
 .../metastore/api/GetAllFunctionsResponse.java  |     4 +-
 .../api/GetFileMetadataByExprRequest.java       |     6 +-
 .../api/GetFileMetadataByExprResult.java        |     6 +-
 .../metastore/api/GetFileMetadataRequest.java   |     4 +-
 .../metastore/api/GetFileMetadataResult.java    |     6 +-
 .../metastore/api/GetOpenTxnsInfoResponse.java  |     6 +-
 .../hive/metastore/api/GetOpenTxnsResponse.java |     6 +-
 .../api/GetPrincipalsInRoleRequest.java         |     4 +-
 .../api/GetPrincipalsInRoleResponse.java        |     4 +-
 .../api/GetRoleGrantsForPrincipalRequest.java   |     4 +-
 .../api/GetRoleGrantsForPrincipalResponse.java  |     4 +-
 .../api/GrantRevokePrivilegeRequest.java        |     6 +-
 .../api/GrantRevokePrivilegeResponse.java       |     6 +-
 .../metastore/api/GrantRevokeRoleRequest.java   |     6 +-
 .../metastore/api/GrantRevokeRoleResponse.java  |     6 +-
 .../hive/metastore/api/GrantRevokeType.java     |     2 +-
 .../hive/metastore/api/HeartbeatRequest.java    |     8 +-
 .../metastore/api/HeartbeatTxnRangeRequest.java |     8 +-
 .../api/HeartbeatTxnRangeResponse.java          |     4 +-
 .../hive/metastore/api/HiveObjectPrivilege.java |     4 +-
 .../hive/metastore/api/HiveObjectRef.java       |     4 +-
 .../hive/metastore/api/HiveObjectType.java      |     2 +-
 .../apache/hadoop/hive/metastore/api/Index.java |    10 +-
 .../api/IndexAlreadyExistsException.java        |     4 +-
 .../metastore/api/InsertEventRequestData.java   |     4 +-
 .../metastore/api/InvalidInputException.java    |     4 +-
 .../metastore/api/InvalidObjectException.java   |     4 +-
 .../api/InvalidOperationException.java          |     4 +-
 .../api/InvalidPartitionException.java          |     4 +-
 .../hive/metastore/api/LockComponent.java       |     4 +-
 .../hadoop/hive/metastore/api/LockLevel.java    |     2 +-
 .../hadoop/hive/metastore/api/LockRequest.java  |     6 +-
 .../hadoop/hive/metastore/api/LockResponse.java |     6 +-
 .../hadoop/hive/metastore/api/LockState.java    |     2 +-
 .../hadoop/hive/metastore/api/LockType.java     |     2 +-
 .../hive/metastore/api/LongColumnStatsData.java |    12 +-
 .../hive/metastore/api/MetaException.java       |     4 +-
 .../hive/metastore/api/MetadataPpdResult.java   |     4 +-
 .../hive/metastore/api/NoSuchLockException.java |     4 +-
 .../metastore/api/NoSuchObjectException.java    |     4 +-
 .../hive/metastore/api/NoSuchTxnException.java  |     4 +-
 .../hive/metastore/api/NotificationEvent.java   |     8 +-
 .../metastore/api/NotificationEventRequest.java |     8 +-
 .../api/NotificationEventResponse.java          |     4 +-
 .../hive/metastore/api/OpenTxnRequest.java      |     6 +-
 .../hive/metastore/api/OpenTxnsResponse.java    |     4 +-
 .../apache/hadoop/hive/metastore/api/Order.java |     6 +-
 .../hadoop/hive/metastore/api/Partition.java    |     8 +-
 .../hive/metastore/api/PartitionEventType.java  |     2 +-
 .../api/PartitionListComposingSpec.java         |     4 +-
 .../hive/metastore/api/PartitionSpec.java       |     4 +-
 .../api/PartitionSpecWithSharedSD.java          |     4 +-
 .../hive/metastore/api/PartitionWithoutSD.java  |     8 +-
 .../metastore/api/PartitionsByExprRequest.java  |     6 +-
 .../metastore/api/PartitionsByExprResult.java   |     6 +-
 .../metastore/api/PartitionsStatsRequest.java   |     4 +-
 .../metastore/api/PartitionsStatsResult.java    |     4 +-
 .../metastore/api/PrincipalPrivilegeSet.java    |     4 +-
 .../hive/metastore/api/PrincipalType.java       |     2 +-
 .../hadoop/hive/metastore/api/PrivilegeBag.java |     4 +-
 .../hive/metastore/api/PrivilegeGrantInfo.java  |     8 +-
 .../metastore/api/PutFileMetadataRequest.java   |     6 +-
 .../metastore/api/PutFileMetadataResult.java    |     4 +-
 .../hive/metastore/api/RequestPartsSpec.java    |     2 +-
 .../hadoop/hive/metastore/api/ResourceType.java |     2 +-
 .../hadoop/hive/metastore/api/ResourceUri.java  |     4 +-
 .../apache/hadoop/hive/metastore/api/Role.java  |     6 +-
 .../hive/metastore/api/RolePrincipalGrant.java  |     8 +-
 .../hadoop/hive/metastore/api/Schema.java       |     4 +-
 .../hadoop/hive/metastore/api/SerDeInfo.java    |     4 +-
 .../api/SetPartitionsStatsRequest.java          |     4 +-
 .../hive/metastore/api/ShowCompactRequest.java  |     4 +-
 .../hive/metastore/api/ShowCompactResponse.java |     4 +-
 .../api/ShowCompactResponseElement.java         |     6 +-
 .../hive/metastore/api/ShowLocksRequest.java    |     4 +-
 .../hive/metastore/api/ShowLocksResponse.java   |     4 +-
 .../metastore/api/ShowLocksResponseElement.java |    12 +-
 .../hadoop/hive/metastore/api/SkewedInfo.java   |     4 +-
 .../hive/metastore/api/StorageDescriptor.java   |    10 +-
 .../metastore/api/StringColumnStatsData.java    |    12 +-
 .../apache/hadoop/hive/metastore/api/Table.java |    12 +-
 .../hive/metastore/api/TableStatsRequest.java   |     4 +-
 .../hive/metastore/api/TableStatsResult.java    |     4 +-
 .../hive/metastore/api/ThriftHiveMetastore.java |    98 +-
 .../hive/metastore/api/TxnAbortedException.java |     4 +-
 .../hadoop/hive/metastore/api/TxnInfo.java      |     6 +-
 .../hive/metastore/api/TxnOpenException.java    |     4 +-
 .../hadoop/hive/metastore/api/TxnState.java     |     2 +-
 .../apache/hadoop/hive/metastore/api/Type.java  |     4 +-
 .../hive/metastore/api/UnknownDBException.java  |     4 +-
 .../api/UnknownPartitionException.java          |     4 +-
 .../metastore/api/UnknownTableException.java    |     4 +-
 .../hive/metastore/api/UnlockRequest.java       |     6 +-
 .../hadoop/hive/metastore/api/Version.java      |     4 +-
 .../metastore/api/hive_metastoreConstants.java  |     2 +-
 .../gen-php/metastore/ThriftHiveMetastore.php   |     2 +-
 .../src/gen/thrift/gen-php/metastore/Types.php  |     2 +-
 .../hive_metastore/ThriftHiveMetastore-remote   |     2 +-
 .../hive_metastore/ThriftHiveMetastore.py       |  2652 +++-
 .../thrift/gen-py/hive_metastore/constants.py   |     2 +-
 .../gen/thrift/gen-py/hive_metastore/ttypes.py  |   578 +-
 .../thrift/gen-rb/hive_metastore_constants.rb   |     2 +-
 .../gen/thrift/gen-rb/hive_metastore_types.rb   |     2 +-
 .../gen/thrift/gen-rb/thrift_hive_metastore.rb  |     2 +-
 pom.xml                                         |     6 +-
 .../gen/thrift/gen-cpp/queryplan_constants.cpp  |     2 +-
 ql/src/gen/thrift/gen-cpp/queryplan_constants.h |     2 +-
 ql/src/gen/thrift/gen-cpp/queryplan_types.cpp   |   162 +-
 ql/src/gen/thrift/gen-cpp/queryplan_types.h     |    79 +-
 .../hadoop/hive/ql/plan/api/Adjacency.java      |     4 +-
 .../hadoop/hive/ql/plan/api/AdjacencyType.java  |     2 +-
 .../apache/hadoop/hive/ql/plan/api/Graph.java   |     4 +-
 .../hadoop/hive/ql/plan/api/NodeType.java       |     2 +-
 .../hadoop/hive/ql/plan/api/Operator.java       |     8 +-
 .../hadoop/hive/ql/plan/api/OperatorType.java   |     2 +-
 .../apache/hadoop/hive/ql/plan/api/Query.java   |     8 +-
 .../hadoop/hive/ql/plan/api/QueryPlan.java      |     8 +-
 .../apache/hadoop/hive/ql/plan/api/Stage.java   |     8 +-
 .../hadoop/hive/ql/plan/api/StageType.java      |     2 +-
 .../apache/hadoop/hive/ql/plan/api/Task.java    |     8 +-
 .../hadoop/hive/ql/plan/api/TaskType.java       |     2 +-
 ql/src/gen/thrift/gen-php/Types.php             |     4 +-
 ql/src/gen/thrift/gen-py/queryplan/constants.py |     2 +-
 ql/src/gen/thrift/gen-py/queryplan/ttypes.py    |    80 +-
 ql/src/gen/thrift/gen-rb/queryplan_constants.rb |     2 +-
 ql/src/gen/thrift/gen-rb/queryplan_types.rb     |     2 +-
 .../gen/thrift/gen-cpp/complex_constants.cpp    |     2 +-
 .../src/gen/thrift/gen-cpp/complex_constants.h  |     2 +-
 serde/src/gen/thrift/gen-cpp/complex_types.cpp  |    94 +-
 serde/src/gen/thrift/gen-cpp/complex_types.h    |    46 +-
 .../gen/thrift/gen-cpp/megastruct_constants.cpp |     2 +-
 .../gen/thrift/gen-cpp/megastruct_constants.h   |     2 +-
 .../src/gen/thrift/gen-cpp/megastruct_types.cpp |    70 +-
 serde/src/gen/thrift/gen-cpp/megastruct_types.h |    24 +-
 .../src/gen/thrift/gen-cpp/serde_constants.cpp  |     2 +-
 serde/src/gen/thrift/gen-cpp/serde_constants.h  |     2 +-
 serde/src/gen/thrift/gen-cpp/serde_types.cpp    |     2 +-
 serde/src/gen/thrift/gen-cpp/serde_types.h      |     2 +-
 .../gen/thrift/gen-cpp/testthrift_constants.cpp |     2 +-
 .../gen/thrift/gen-cpp/testthrift_constants.h   |     2 +-
 .../src/gen/thrift/gen-cpp/testthrift_types.cpp |    34 +-
 serde/src/gen/thrift/gen-cpp/testthrift_types.h |    24 +-
 .../hadoop/hive/serde/serdeConstants.java       |     2 +-
 .../hadoop/hive/serde/test/InnerStruct.java     |     6 +-
 .../hadoop/hive/serde/test/ThriftTestObj.java   |     6 +-
 .../hadoop/hive/serde2/thrift/test/Complex.java |     6 +-
 .../hive/serde2/thrift/test/IntString.java      |     8 +-
 .../hive/serde2/thrift/test/MegaStruct.java     |    16 +-
 .../hive/serde2/thrift/test/MiniStruct.java     |     4 +-
 .../hadoop/hive/serde2/thrift/test/MyEnum.java  |     2 +-
 .../hive/serde2/thrift/test/PropValueUnion.java |     2 +-
 .../hive/serde2/thrift/test/SetIntString.java   |     4 +-
 serde/src/gen/thrift/gen-php/Types.php          |     4 +-
 .../org/apache/hadoop/hive/serde/Types.php      |     2 +-
 .../src/gen/thrift/gen-py/complex/constants.py  |     2 +-
 serde/src/gen/thrift/gen-py/complex/ttypes.py   |    44 +-
 .../gen/thrift/gen-py/megastruct/constants.py   |     2 +-
 .../src/gen/thrift/gen-py/megastruct/ttypes.py  |    50 +-
 .../org_apache_hadoop_hive_serde/constants.py   |     2 +-
 .../org_apache_hadoop_hive_serde/ttypes.py      |     2 +-
 .../gen/thrift/gen-py/testthrift/constants.py   |     2 +-
 .../src/gen/thrift/gen-py/testthrift/ttypes.py  |     8 +-
 .../src/gen/thrift/gen-rb/complex_constants.rb  |     2 +-
 serde/src/gen/thrift/gen-rb/complex_types.rb    |     2 +-
 .../gen/thrift/gen-rb/megastruct_constants.rb   |     2 +-
 serde/src/gen/thrift/gen-rb/megastruct_types.rb |     2 +-
 serde/src/gen/thrift/gen-rb/serde_constants.rb  |     2 +-
 serde/src/gen/thrift/gen-rb/serde_types.rb      |     2 +-
 .../gen/thrift/gen-rb/testthrift_constants.rb   |     2 +-
 serde/src/gen/thrift/gen-rb/testthrift_types.rb |     2 +-
 service/src/gen/thrift/gen-cpp/TCLIService.cpp  |  1770 ++-
 service/src/gen/thrift/gen-cpp/TCLIService.h    |   409 +-
 .../thrift/gen-cpp/TCLIService_constants.cpp    |     2 +-
 .../gen/thrift/gen-cpp/TCLIService_constants.h  |     2 +-
 .../gen/thrift/gen-cpp/TCLIService_types.cpp    |  1226 +-
 .../src/gen/thrift/gen-cpp/TCLIService_types.h  |   816 +-
 service/src/gen/thrift/gen-cpp/ThriftHive.cpp   |   853 +-
 service/src/gen/thrift/gen-cpp/ThriftHive.h     |   199 +-
 .../thrift/gen-cpp/hive_service_constants.cpp   |     2 +-
 .../gen/thrift/gen-cpp/hive_service_constants.h |     2 +-
 .../gen/thrift/gen-cpp/hive_service_types.cpp   |    55 +-
 .../src/gen/thrift/gen-cpp/hive_service_types.h |    26 +-
 .../hadoop/hive/service/HiveClusterStatus.java  |    14 +-
 .../hive/service/HiveServerException.java       |     6 +-
 .../hadoop/hive/service/JobTrackerState.java    |     2 +-
 .../apache/hadoop/hive/service/ThriftHive.java  |     6 +-
 .../service/cli/thrift/TArrayTypeEntry.java     |     6 +-
 .../hive/service/cli/thrift/TBinaryColumn.java  |     6 +-
 .../hive/service/cli/thrift/TBoolColumn.java    |     4 +-
 .../hive/service/cli/thrift/TBoolValue.java     |     6 +-
 .../hive/service/cli/thrift/TByteColumn.java    |     4 +-
 .../hive/service/cli/thrift/TByteValue.java     |     6 +-
 .../hive/service/cli/thrift/TCLIService.java    |     4 +-
 .../cli/thrift/TCLIServiceConstants.java        |     2 +-
 .../cli/thrift/TCancelDelegationTokenReq.java   |     4 +-
 .../cli/thrift/TCancelDelegationTokenResp.java  |     4 +-
 .../service/cli/thrift/TCancelOperationReq.java |     4 +-
 .../cli/thrift/TCancelOperationResp.java        |     4 +-
 .../service/cli/thrift/TCloseOperationReq.java  |     4 +-
 .../service/cli/thrift/TCloseOperationResp.java |     4 +-
 .../service/cli/thrift/TCloseSessionReq.java    |     4 +-
 .../service/cli/thrift/TCloseSessionResp.java   |     4 +-
 .../apache/hive/service/cli/thrift/TColumn.java |     2 +-
 .../hive/service/cli/thrift/TColumnDesc.java    |     6 +-
 .../hive/service/cli/thrift/TColumnValue.java   |     2 +-
 .../hive/service/cli/thrift/TDoubleColumn.java  |     4 +-
 .../hive/service/cli/thrift/TDoubleValue.java   |     6 +-
 .../cli/thrift/TExecuteStatementReq.java        |     6 +-
 .../cli/thrift/TExecuteStatementResp.java       |     4 +-
 .../service/cli/thrift/TFetchOrientation.java   |     2 +-
 .../service/cli/thrift/TFetchResultsReq.java    |     8 +-
 .../service/cli/thrift/TFetchResultsResp.java   |     6 +-
 .../service/cli/thrift/TGetCatalogsReq.java     |     4 +-
 .../service/cli/thrift/TGetCatalogsResp.java    |     4 +-
 .../hive/service/cli/thrift/TGetColumnsReq.java |     4 +-
 .../service/cli/thrift/TGetColumnsResp.java     |     4 +-
 .../cli/thrift/TGetDelegationTokenReq.java      |     4 +-
 .../cli/thrift/TGetDelegationTokenResp.java     |     4 +-
 .../service/cli/thrift/TGetFunctionsReq.java    |     4 +-
 .../service/cli/thrift/TGetFunctionsResp.java   |     4 +-
 .../hive/service/cli/thrift/TGetInfoReq.java    |     4 +-
 .../hive/service/cli/thrift/TGetInfoResp.java   |     4 +-
 .../hive/service/cli/thrift/TGetInfoType.java   |     2 +-
 .../hive/service/cli/thrift/TGetInfoValue.java  |     2 +-
 .../cli/thrift/TGetOperationStatusReq.java      |     4 +-
 .../cli/thrift/TGetOperationStatusResp.java     |     6 +-
 .../cli/thrift/TGetResultSetMetadataReq.java    |     4 +-
 .../cli/thrift/TGetResultSetMetadataResp.java   |     4 +-
 .../hive/service/cli/thrift/TGetSchemasReq.java |     4 +-
 .../service/cli/thrift/TGetSchemasResp.java     |     4 +-
 .../service/cli/thrift/TGetTableTypesReq.java   |     4 +-
 .../service/cli/thrift/TGetTableTypesResp.java  |     4 +-
 .../hive/service/cli/thrift/TGetTablesReq.java  |     4 +-
 .../hive/service/cli/thrift/TGetTablesResp.java |     4 +-
 .../service/cli/thrift/TGetTypeInfoReq.java     |     4 +-
 .../service/cli/thrift/TGetTypeInfoResp.java    |     4 +-
 .../service/cli/thrift/THandleIdentifier.java   |     4 +-
 .../hive/service/cli/thrift/TI16Column.java     |     4 +-
 .../hive/service/cli/thrift/TI16Value.java      |     6 +-
 .../hive/service/cli/thrift/TI32Column.java     |     4 +-
 .../hive/service/cli/thrift/TI32Value.java      |     6 +-
 .../hive/service/cli/thrift/TI64Column.java     |     4 +-
 .../hive/service/cli/thrift/TI64Value.java      |     6 +-
 .../hive/service/cli/thrift/TMapTypeEntry.java  |     8 +-
 .../service/cli/thrift/TOpenSessionReq.java     |     4 +-
 .../service/cli/thrift/TOpenSessionResp.java    |     4 +-
 .../service/cli/thrift/TOperationHandle.java    |     8 +-
 .../service/cli/thrift/TOperationState.java     |     2 +-
 .../hive/service/cli/thrift/TOperationType.java |     2 +-
 .../service/cli/thrift/TPrimitiveTypeEntry.java |     4 +-
 .../service/cli/thrift/TProtocolVersion.java    |     2 +-
 .../cli/thrift/TRenewDelegationTokenReq.java    |     4 +-
 .../cli/thrift/TRenewDelegationTokenResp.java   |     4 +-
 .../apache/hive/service/cli/thrift/TRow.java    |     4 +-
 .../apache/hive/service/cli/thrift/TRowSet.java |     6 +-
 .../hive/service/cli/thrift/TSessionHandle.java |     4 +-
 .../apache/hive/service/cli/thrift/TStatus.java |     6 +-
 .../hive/service/cli/thrift/TStatusCode.java    |     2 +-
 .../hive/service/cli/thrift/TStringColumn.java  |     4 +-
 .../hive/service/cli/thrift/TStringValue.java   |     4 +-
 .../service/cli/thrift/TStructTypeEntry.java    |     4 +-
 .../hive/service/cli/thrift/TTableSchema.java   |     4 +-
 .../hive/service/cli/thrift/TTypeDesc.java      |     4 +-
 .../hive/service/cli/thrift/TTypeEntry.java     |     2 +-
 .../apache/hive/service/cli/thrift/TTypeId.java |     2 +-
 .../service/cli/thrift/TTypeQualifierValue.java |     2 +-
 .../service/cli/thrift/TTypeQualifiers.java     |     4 +-
 .../service/cli/thrift/TUnionTypeEntry.java     |     4 +-
 .../cli/thrift/TUserDefinedTypeEntry.java       |     4 +-
 service/src/gen/thrift/gen-php/TCLIService.php  |     3 +-
 service/src/gen/thrift/gen-php/ThriftHive.php   |     3 +-
 service/src/gen/thrift/gen-php/Types.php        |     4 +-
 .../gen-py/TCLIService/TCLIService-remote       |     2 +-
 .../thrift/gen-py/TCLIService/TCLIService.py    |   269 +-
 .../gen/thrift/gen-py/TCLIService/constants.py  |     2 +-
 .../src/gen/thrift/gen-py/TCLIService/ttypes.py |   190 +-
 .../gen-py/hive_service/ThriftHive-remote       |     2 +-
 .../thrift/gen-py/hive_service/ThriftHive.py    |   135 +-
 .../gen/thrift/gen-py/hive_service/constants.py |     2 +-
 .../gen/thrift/gen-py/hive_service/ttypes.py    |    20 +-
 .../gen/thrift/gen-rb/hive_service_constants.rb |     2 +-
 .../src/gen/thrift/gen-rb/hive_service_types.rb |     2 +-
 .../src/gen/thrift/gen-rb/t_c_l_i_service.rb    |     2 +-
 .../thrift/gen-rb/t_c_l_i_service_constants.rb  |     2 +-
 .../gen/thrift/gen-rb/t_c_l_i_service_types.rb  |     2 +-
 service/src/gen/thrift/gen-rb/thrift_hive.rb    |     2 +-
 330 files changed, 22055 insertions(+), 8288 deletions(-)
----------------------------------------------------------------------



[41/55] [abbrv] hive git commit: HIVE-11378 Remove hadoop-1 support from master branch (gates, reviewed by Ashutosh Chauhan and Sergey Shelukhin)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index a400f78..3b3303c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -123,11 +123,9 @@
     <dropwizard.version>3.1.0</dropwizard.version>
     <guava.version>14.0.1</guava.version>
     <groovy.version>2.4.4</groovy.version>
-    <hadoop-20S.version>1.2.1</hadoop-20S.version>
-    <hadoop-23.version>2.6.0</hadoop-23.version>
+    <hadoop.version>2.6.0</hadoop.version>
     <hadoop.bin.path>${basedir}/${hive.path.to.root}/testutils/hadoop</hadoop.bin.path>
-    <hbase.hadoop1.version>0.98.9-hadoop1</hbase.hadoop1.version>
-    <hbase.hadoop2.version>1.1.1</hbase.hadoop2.version>
+    <hbase.version>1.1.1</hbase.version>
     <!-- httpcomponents are not always in version sync -->
     <httpcomponents.client.version>4.4</httpcomponents.client.version>
     <httpcomponents.core.version>4.4</httpcomponents.core.version>
@@ -236,7 +234,6 @@
     </repository>
   </repositories>
 
-  <!-- Hadoop dependency management is done at the bottom under profiles -->
   <dependencyManagement>
     <dependencies>
       <!-- dependencies are always listed in sorted order by groupId, artifectId -->
@@ -599,6 +596,87 @@
         <artifactId>xercesImpl</artifactId>
         <version>${xerces.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-client</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-common</artifactId>
+        <version>${hadoop.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpcore</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdfs</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-mapreduce-client-core</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-minikdc</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-common</artifactId>
+        <version>${hbase.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-hadoop-compat</artifactId>
+        <version>${hbase.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-hadoop2-compat</artifactId>
+        <version>${hbase.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-server</artifactId>
+        <version>${hbase.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-minicluster</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.scala-lang</groupId>
+        <artifactId>scala-library</artifactId>
+        <version>${scala.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.spark</groupId>
+        <artifactId>spark-core_${scala.binary.version}</artifactId>
+        <version>${spark.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-core</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
     </dependencies>
   </dependencyManagement>
 
@@ -1061,146 +1139,6 @@
         </plugins>
       </reporting>
     </profile>
-
-    <!-- hadoop profiles in the root pom are only used for dependency management -->
-    <profile>
-      <id>hadoop-1</id>
-      <dependencyManagement>
-        <dependencies>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-            <version>${hadoop-20S.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-core</artifactId>
-            <version>${hadoop-20S.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-test</artifactId>
-            <version>${hadoop-20S.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-tools</artifactId>
-            <version>${hadoop-20S.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-common</artifactId>
-            <version>${hbase.hadoop1.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-hadoop-compat</artifactId>
-            <version>${hbase.hadoop1.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-hadoop1-compat</artifactId>
-            <version>${hbase.hadoop1.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-server</artifactId>
-            <version>${hbase.hadoop1.version}</version>
-          </dependency>
-        </dependencies>
-      </dependencyManagement>
-    </profile>
-    <profile>
-      <id>hadoop-2</id>
-      <modules>
-        <module>llap-server</module>
-      </modules>
-      <dependencyManagement>
-        <dependencies>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-            <version>${hadoop-23.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <version>${hadoop-23.version}</version>
-            <exclusions>
-              <exclusion>
-                <groupId>org.apache.httpcomponents</groupId>
-                <artifactId>httpcore</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.apache.httpcomponents</groupId>
-                <artifactId>httpclient</artifactId>
-              </exclusion>
-            </exclusions>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-hdfs</artifactId>
-            <version>${hadoop-23.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-            <version>${hadoop-23.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-core</artifactId>
-            <version>${hadoop-23.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-minikdc</artifactId>
-            <version>${hadoop-23.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-common</artifactId>
-            <version>${hbase.hadoop2.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-hadoop-compat</artifactId>
-            <version>${hbase.hadoop2.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-hadoop2-compat</artifactId>
-            <version>${hbase.hadoop2.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-server</artifactId>
-            <version>${hbase.hadoop2.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-minicluster</artifactId>
-            <version>${hadoop-23.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-            <version>${scala.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-core_${scala.binary.version}</artifactId>
-            <version>${spark.version}</version>
-            <exclusions>
-              <exclusion>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-core</artifactId>
-              </exclusion>
-            </exclusions>
-          </dependency>
-        </dependencies>
-      </dependencyManagement>
-    </profile>
     <profile>
       <!-- Windows-specific settings to allow unit tests to work -->
       <id>windows-test</id>
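
With the hadoop-1 and hadoop-2 profiles removed above, the root pom now manages every Hadoop and HBase artifact through the single ${hadoop.version} and ${hbase.version} properties, so a build no longer needs a -P flag just to resolve dependencies. As a minimal sketch of what that buys (the 2.7.1 value below is purely illustrative and not part of this patch), moving the whole tree to a different Hadoop line becomes a one-property change:

  <properties>
    <!-- One property now drives every org.apache.hadoop artifact declared in
         <dependencyManagement>; 2.7.1 is an example value only. -->
    <hadoop.version>2.7.1</hadoop.version>
  </properties>

Since Maven lets command-line user properties override pom-defined properties, a one-off build should also be able to pass -Dhadoop.version=2.7.1 instead of editing the pom.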

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/ql/pom.xml
----------------------------------------------------------------------
diff --git a/ql/pom.xml b/ql/pom.xml
index 83b9ebf..8ac13a6 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -174,6 +174,74 @@
       <version>${libfb303.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+      </exclusions>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-archives</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+      </exclusions>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-api</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-common</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+      </exclusions>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-client</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+
+    <dependency>
       <groupId>org.apache.ivy</groupId>
       <artifactId>ivy</artifactId>
       <version>${ivy.version}</version>
@@ -494,91 +562,6 @@
 
   <profiles>
     <profile>
-      <id>hadoop-1</id>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-compiler-plugin</artifactId>
-            <version>2.3.2</version>
-            <configuration>
-              <excludes>
-                <exclude>**/ATSHook.java</exclude>
-              </excludes>
-            </configuration>
-          </plugin>
-        </plugins>
-      </build>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-tools</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-archives</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-api</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-       </dependency>
-       <dependency>
-         <groupId>org.apache.hadoop</groupId>
-         <artifactId>hadoop-yarn-common</artifactId>
-         <version>${hadoop-23.version}</version>
-         <optional>true</optional>
-       </dependency>
-       <dependency>
-         <groupId>org.apache.hadoop</groupId>
-         <artifactId>hadoop-yarn-client</artifactId>
-         <version>${hadoop-23.version}</version>
-         <optional>true</optional>
-       </dependency>
-      </dependencies>
-    </profile>
-    <profile>
       <id>protobuf</id>
       <build>
         <plugins>
@@ -722,7 +705,6 @@
                   <include>org.json:json</include>
                   <include>org.apache.avro:avro</include>
                   <include>org.apache.avro:avro-mapred</include>
-                  <include>org.apache.hive.shims:hive-shims-0.20S</include>
                   <include>org.apache.hive.shims:hive-shims-0.23</include>
                   <include>org.apache.hive.shims:hive-shims-0.23</include>
                   <include>org.apache.hive.shims:hive-shims-common</include>
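
Note that the Hadoop artifacts added to ql above are all flagged <optional>true</optional>, just as they were under the old hadoop-2 profile: Maven does not propagate optional dependencies transitively, so downstream projects that depend on hive-exec (the artifact the ql module builds) still declare the Hadoop they actually run against. A minimal consumer-side sketch under that assumption (the my-app coordinates are hypothetical, the hive-exec version is illustrative, and 2.6.0 simply mirrors the ${hadoop.version} set in the root pom):

  <dependencies>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-exec</artifactId>
      <version>2.0.0-SNAPSHOT</version>
    </dependency>
    <!-- hadoop-common is optional inside hive-exec, so it is not inherited
         transitively; the consumer supplies its own Hadoop, typically with
         provided scope because the cluster ships those jars. -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>2.6.0</version>
      <scope>provided</scope>
    </dependency>
  </dependencies>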

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/serde/pom.xml
----------------------------------------------------------------------
diff --git a/serde/pom.xml b/serde/pom.xml
index b6c0d0c..99c89ed 100644
--- a/serde/pom.xml
+++ b/serde/pom.xml
@@ -85,6 +85,18 @@
       <artifactId>parquet-hadoop-bundle</artifactId>
       <version>${parquet.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
 
       <!-- test inter-project -->
     <dependency>
@@ -111,66 +123,28 @@
       <version>${jersey.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-test</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/service/pom.xml
----------------------------------------------------------------------
diff --git a/service/pom.xml b/service/pom.xml
index 07eeb9a..d7ab5bf 100644
--- a/service/pom.xml
+++ b/service/pom.xml
@@ -96,7 +96,19 @@
       <artifactId>curator-recipes</artifactId>
       <version>${curator.version}</version>
     </dependency>
-    <!-- intra-project -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+     <!-- intra-project -->
     <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-exec</artifactId>
@@ -113,37 +125,6 @@
     </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/0.20S/pom.xml
----------------------------------------------------------------------
diff --git a/shims/0.20S/pom.xml b/shims/0.20S/pom.xml
deleted file mode 100644
index 565dd5e..0000000
--- a/shims/0.20S/pom.xml
+++ /dev/null
@@ -1,63 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <groupId>org.apache.hive</groupId>
-    <artifactId>hive</artifactId>
-    <version>2.0.0-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-
-  <groupId>org.apache.hive.shims</groupId>
-  <artifactId>hive-shims-0.20S</artifactId>
-  <packaging>jar</packaging>
-  <name>Hive Shims 0.20S</name>
-
-  <properties>
-    <hive.path.to.root>../..</hive.path.to.root>
-  </properties>
-
-  <dependencies>
-    <!-- dependencies are always listed in sorted order by groupId, artifectId -->
-    <!-- intra-project -->
-    <dependency>
-      <groupId>org.apache.hive.shims</groupId>
-      <artifactId>hive-shims-common</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <!-- inter-project -->
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-core</artifactId>
-      <version>${hadoop-20S.version}</version>
-      <optional>true</optional>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-test</artifactId>
-      <version>${hadoop-20S.version}</version>
-      <optional>true</optional>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-tools</artifactId>
-      <version>${hadoop-20S.version}</version>
-      <scope>provided</scope>
-    </dependency>
- </dependencies>
-</project>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
----------------------------------------------------------------------
diff --git a/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java b/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
deleted file mode 100644
index f60e8f0..0000000
--- a/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
+++ /dev/null
@@ -1,734 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.shims;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.MalformedURLException;
-import java.net.URI;
-import java.net.URL;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.filecache.DistributedCache;
-import org.apache.hadoop.fs.BlockLocation;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FsShell;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.fs.ProxyFileSystem;
-import org.apache.hadoop.fs.Trash;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapred.ClusterStatus;
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobInProgress;
-import org.apache.hadoop.mapred.JobTracker;
-import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapred.RecordReader;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.TaskLogServlet;
-import org.apache.hadoop.mapred.WebHCatJTShim20S;
-import org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.JobID;
-import org.apache.hadoop.mapreduce.JobStatus;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TaskID;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.KerberosName;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.tools.distcp2.DistCp;
-import org.apache.hadoop.tools.distcp2.DistCpOptions;
-import org.apache.hadoop.tools.distcp2.DistCpOptions.FileAttribute;
-
-import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.VersionInfo;
-
-
-/**
- * Implemention of shims against Hadoop 0.20 with Security.
- */
-public class Hadoop20SShims extends HadoopShimsSecure {
-
-  @Override
-  public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
-    return new CombineFileInputFormatShim() {
-      @Override
-      public RecordReader getRecordReader(InputSplit split,
-          JobConf job, Reporter reporter) throws IOException {
-        throw new IOException("CombineFileInputFormat.getRecordReader not needed.");
-      }
-
-      @Override
-      protected FileStatus[] listStatus(JobConf job) throws IOException {
-        FileStatus[] result = super.listStatus(job);
-        boolean foundDir = false;
-        for (FileStatus stat: result) {
-          if (stat.isDir()) {
-            foundDir = true;
-            break;
-          }
-        }
-        if (!foundDir) {
-          return result;
-        }
-        ArrayList<FileStatus> files = new ArrayList<FileStatus>();
-        for (FileStatus stat: result) {
-          if (!stat.isDir()) {
-            files.add(stat);
-          }
-        }
-        return files.toArray(new FileStatus[files.size()]);
-      }
-    };
-  }
-
-  @Override
-  public String getTaskAttemptLogUrl(JobConf conf,
-    String taskTrackerHttpAddress, String taskAttemptId)
-    throws MalformedURLException {
-    URL taskTrackerHttpURL = new URL(taskTrackerHttpAddress);
-    return TaskLogServlet.getTaskLogUrl(
-      taskTrackerHttpURL.getHost(),
-      Integer.toString(taskTrackerHttpURL.getPort()),
-      taskAttemptId);
-  }
-
-  @Override
-  public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
-    switch (clusterStatus.getJobTrackerState()) {
-    case INITIALIZING:
-      return JobTrackerState.INITIALIZING;
-    case RUNNING:
-      return JobTrackerState.RUNNING;
-    default:
-      String errorMsg = "Unrecognized JobTracker state: " + clusterStatus.getJobTrackerState();
-      throw new Exception(errorMsg);
-    }
-  }
-
-  @Override
-  public org.apache.hadoop.mapreduce.TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable) {
-    return new org.apache.hadoop.mapreduce.TaskAttemptContext(conf, new TaskAttemptID()) {
-      @Override
-      public void progress() {
-        progressable.progress();
-      }
-    };
-  }
-
-  @Override
-  public TaskAttemptID newTaskAttemptID(JobID jobId, boolean isMap, int taskId, int id) {
-    return new TaskAttemptID(jobId.getJtIdentifier(), jobId.getId(), isMap, taskId, id);
-  }
-
-  @Override
-  public org.apache.hadoop.mapreduce.JobContext newJobContext(Job job) {
-    return new org.apache.hadoop.mapreduce.JobContext(job.getConfiguration(), job.getJobID());
-  }
-
-  @Override
-  public boolean isLocalMode(Configuration conf) {
-    return "local".equals(getJobLauncherRpcAddress(conf));
-  }
-
-  @Override
-  public String getJobLauncherRpcAddress(Configuration conf) {
-    return conf.get("mapred.job.tracker");
-  }
-
-  @Override
-  public void setJobLauncherRpcAddress(Configuration conf, String val) {
-    conf.set("mapred.job.tracker", val);
-  }
-
-  @Override
-  public String getJobLauncherHttpAddress(Configuration conf) {
-    return conf.get("mapred.job.tracker.http.address");
-  }
-
-  @Override
-  public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
-          throws IOException {
-    // older versions of Hadoop don't have a Trash constructor based on the
-    // Path or FileSystem. So need to achieve this by creating a dummy conf.
-    // this needs to be filtered out based on version
-
-    Configuration dupConf = new Configuration(conf);
-    FileSystem.setDefaultUri(dupConf, fs.getUri());
-    Trash trash = new Trash(dupConf);
-    return trash.moveToTrash(path);
-  }
-  @Override
-  public long getDefaultBlockSize(FileSystem fs, Path path) {
-    return fs.getDefaultBlockSize();
-  }
-
-  @Override
-  public short getDefaultReplication(FileSystem fs, Path path) {
-    return fs.getDefaultReplication();
-  }
-
-  @Override
-  public void refreshDefaultQueue(Configuration conf, String userName) {
-    // MR1 does not expose API required to set MR queue mapping for user
-  }
-
-  @Override
-  public void setTotalOrderPartitionFile(JobConf jobConf, Path partitionFile){
-    TotalOrderPartitioner.setPartitionFile(jobConf, partitionFile);
-  }
-
-  @Override
-  public Comparator<LongWritable> getLongComparator() {
-    return new Comparator<LongWritable>() {
-      @Override
-      public int compare(LongWritable o1, LongWritable o2) {
-        return o1.compareTo(o2);
-      }
-    };
-  }
-
-  /**
-   * Returns a shim to wrap MiniMrCluster
-   */
-  @Override
-  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
-                                     String nameNode, int numDir) throws IOException {
-    return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
-  }
-
-  @Override
-  public MiniMrShim getMiniTezCluster(Configuration conf, int numberOfTaskTrackers,
-      String nameNode, boolean isLlap) throws IOException {
-    throw new IOException("Cannot run tez on current hadoop, Version: " + VersionInfo.getVersion());
-  }
-
-  @Override
-  public MiniMrShim getMiniSparkCluster(Configuration conf, int numberOfTaskTrackers,
-    String nameNode, int numDir) throws IOException {
-    throw new IOException("Cannot run Spark on YARN on current Hadoop, Version: " + VersionInfo.getVersion());
-  }
-
-  /**
-   * Shim for MiniMrCluster
-   */
-  public class MiniMrShim implements HadoopShims.MiniMrShim {
-
-    private final MiniMRCluster mr;
-
-    public MiniMrShim(Configuration conf, int numberOfTaskTrackers,
-        String nameNode, int numDir) throws IOException {
-      this.mr = new MiniMRCluster(numberOfTaskTrackers, nameNode, numDir);
-    }
-
-    @Override
-    public int getJobTrackerPort() throws UnsupportedOperationException {
-      return mr.getJobTrackerPort();
-    }
-
-    @Override
-    public void shutdown() throws IOException {
-      MiniMRCluster.JobTrackerRunner runner = mr.getJobTrackerRunner();
-      JobTracker tracker = runner.getJobTracker();
-      if (tracker != null) {
-        for (JobInProgress running : tracker.getRunningJobs()) {
-          try {
-            running.kill();
-          } catch (Exception e) {
-            // ignore
-          }
-        }
-      }
-      runner.shutdown();
-    }
-
-    @Override
-    public void setupConfiguration(Configuration conf) {
-      setJobLauncherRpcAddress(conf, "localhost:" + mr.getJobTrackerPort());
-    }
-  }
-
-  // Don't move this code to the parent class. There's a binary
-  // incompatibility between hadoop 1 and 2 wrt MiniDFSCluster and we
-  // need to have two different shim classes even though they are
-  // exactly the same.
-  @Override
-  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
-      int numDataNodes,
-      boolean format,
-      String[] racks) throws IOException {
-    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
-  }
-
-  /**
-   * MiniDFSShim.
-   *
-   */
-  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
-    private final MiniDFSCluster cluster;
-
-    public MiniDFSShim(MiniDFSCluster cluster) {
-      this.cluster = cluster;
-    }
-
-    @Override
-    public FileSystem getFileSystem() throws IOException {
-      return cluster.getFileSystem();
-    }
-
-    @Override
-    public void shutdown() {
-      cluster.shutdown();
-    }
-  }
-  private volatile HCatHadoopShims hcatShimInstance;
-  @Override
-  public HCatHadoopShims getHCatShim() {
-    if(hcatShimInstance == null) {
-      hcatShimInstance = new HCatHadoopShims20S();
-    }
-    return hcatShimInstance;
-  }
-  private final class HCatHadoopShims20S implements HCatHadoopShims {
-    @Override
-    public TaskID createTaskID() {
-      return new TaskID();
-    }
-
-    @Override
-    public TaskAttemptID createTaskAttemptID() {
-      return new TaskAttemptID();
-    }
-
-    @Override
-    public TaskAttemptContext createTaskAttemptContext(Configuration conf, TaskAttemptID taskId) {
-      return new TaskAttemptContext(conf, taskId);
-    }
-
-    @Override
-    public org.apache.hadoop.mapred.TaskAttemptContext createTaskAttemptContext(org.apache.hadoop.mapred.JobConf conf,
-                 org.apache.hadoop.mapred.TaskAttemptID taskId, Progressable progressable) {
-      org.apache.hadoop.mapred.TaskAttemptContext newContext = null;
-      try {
-        java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.TaskAttemptContext.class.getDeclaredConstructor(
-                org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapred.TaskAttemptID.class,
-                Progressable.class);
-        construct.setAccessible(true);
-        newContext = (org.apache.hadoop.mapred.TaskAttemptContext)construct.newInstance(conf, taskId, progressable);
-      } catch (Exception e) {
-        throw new RuntimeException(e);
-      }
-      return newContext;
-    }
-
-    @Override
-    public JobContext createJobContext(Configuration conf,
-                                       JobID jobId) {
-      return new JobContext(conf, jobId);
-    }
-
-    @Override
-    public org.apache.hadoop.mapred.JobContext createJobContext(org.apache.hadoop.mapred.JobConf conf,
-                                   org.apache.hadoop.mapreduce.JobID jobId, Progressable progressable) {
-      org.apache.hadoop.mapred.JobContext newContext = null;
-      try {
-        java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.JobContext.class.getDeclaredConstructor(
-                org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapreduce.JobID.class,
-                Progressable.class);
-        construct.setAccessible(true);
-        newContext = (org.apache.hadoop.mapred.JobContext)construct.newInstance(conf, jobId, progressable);
-      } catch (Exception e) {
-        throw new RuntimeException(e);
-      }
-      return newContext;
-    }
-
-    @Override
-    public void commitJob(OutputFormat outputFormat, Job job) throws IOException {
-      if( job.getConfiguration().get("mapred.job.tracker", "").equalsIgnoreCase("local") ) {
-        try {
-          //In local mode, mapreduce will not call OutputCommitter.cleanupJob.
-          //Calling it from here so that the partition publish happens.
-          //This call needs to be removed after MAPREDUCE-1447 is fixed.
-          outputFormat.getOutputCommitter(createTaskAttemptContext(
-                  job.getConfiguration(), createTaskAttemptID())).commitJob(job);
-        } catch (IOException e) {
-          throw new IOException("Failed to cleanup job",e);
-        } catch (InterruptedException e) {
-          throw new IOException("Failed to cleanup job",e);
-        }
-      }
-    }
-
-    @Override
-    public void abortJob(OutputFormat outputFormat, Job job) throws IOException {
-      if (job.getConfiguration().get("mapred.job.tracker", "")
-              .equalsIgnoreCase("local")) {
-        try {
-          // This call needs to be removed after MAPREDUCE-1447 is fixed.
-          outputFormat.getOutputCommitter(createTaskAttemptContext(
-                  job.getConfiguration(), new TaskAttemptID())).abortJob(job, JobStatus.State.FAILED);
-        } catch (IOException e) {
-          throw new IOException("Failed to abort job", e);
-        } catch (InterruptedException e) {
-          throw new IOException("Failed to abort job", e);
-        }
-      }
-    }
-
-    @Override
-    public InetSocketAddress getResourceManagerAddress(Configuration conf)
-    {
-      return JobTracker.getAddress(conf);
-    }
-
-    @Override
-    public String getPropertyName(PropertyName name) {
-      switch (name) {
-        case CACHE_ARCHIVES:
-          return DistributedCache.CACHE_ARCHIVES;
-        case CACHE_FILES:
-          return DistributedCache.CACHE_FILES;
-        case CACHE_SYMLINK:
-          return DistributedCache.CACHE_SYMLINK;
-        case CLASSPATH_ARCHIVES:
-          return "mapred.job.classpath.archives";
-        case CLASSPATH_FILES:
-          return "mapred.job.classpath.files";
-      }
-
-      return "";
-    }
-
-    @Override
-    public boolean isFileInHDFS(FileSystem fs, Path path) throws IOException {
-      // In hadoop 1.x.x the file system URI is sufficient to determine the uri of the file
-      return "hdfs".equals(fs.getUri().getScheme());
-    }
-  }
-  @Override
-  public WebHCatJTShim getWebHCatShim(Configuration conf, UserGroupInformation ugi) throws IOException {
-    return new WebHCatJTShim20S(conf, ugi);//this has state, so can't be cached
-  }
-
-  @Override
-  public List<FileStatus> listLocatedStatus(final FileSystem fs,
-                                            final Path path,
-                                            final PathFilter filter
-                                            ) throws IOException {
-    return Arrays.asList(fs.listStatus(path, filter));
-  }
-
-  @Override
-  public BlockLocation[] getLocations(FileSystem fs,
-                                      FileStatus status) throws IOException {
-    return fs.getFileBlockLocations(status, 0, status.getLen());
-  }
-
-  @Override
-  public TreeMap<Long, BlockLocation> getLocationsWithOffset(FileSystem fs,
-                                                             FileStatus status) throws IOException {
-    TreeMap<Long, BlockLocation> offsetBlockMap = new TreeMap<Long, BlockLocation>();
-    BlockLocation[] locations = getLocations(fs, status);
-    for (BlockLocation location : locations) {
-      offsetBlockMap.put(location.getOffset(), location);
-    }
-    return offsetBlockMap;
-  }
-
-  @Override
-  public void hflush(FSDataOutputStream stream) throws IOException {
-    stream.sync();
-  }
-
-  @Override
-  public HdfsFileStatus getFullFileStatus(Configuration conf, FileSystem fs, Path file)
-      throws IOException {
-    return new Hadoop20SFileStatus(fs.getFileStatus(file));
-  }
-
-  @Override
-  public void setFullFileStatus(Configuration conf, HdfsFileStatus sourceStatus,
-    FileSystem fs, Path target) throws IOException {
-    String group = sourceStatus.getFileStatus().getGroup();
-    String permission = Integer.toString(sourceStatus.getFileStatus().getPermission().toShort(), 8);
-    //use FsShell to change group and permissions recursively
-    try {
-      FsShell fshell = new FsShell();
-      fshell.setConf(conf);
-      run(fshell, new String[]{"-chgrp", "-R", group, target.toString()});
-      run(fshell, new String[]{"-chmod", "-R", permission, target.toString()});
-    } catch (Exception e) {
-      throw new IOException("Unable to set permissions of " + target, e);
-    }
-    try {
-      if (LOG.isDebugEnabled()) {  //some trace logging
-        getFullFileStatus(conf, fs, target).debugLog();
-      }
-    } catch (Exception e) {
-      //ignore.
-    }
-  }
-
-  public class Hadoop20SFileStatus implements HdfsFileStatus {
-    private final FileStatus fileStatus;
-    public Hadoop20SFileStatus(FileStatus fileStatus) {
-      this.fileStatus = fileStatus;
-    }
-    @Override
-    public FileStatus getFileStatus() {
-      return fileStatus;
-    }
-    @Override
-    public void debugLog() {
-      if (fileStatus != null) {
-        LOG.debug(fileStatus.toString());
-      }
-    }
-  }
-
-  @Override
-  public FileSystem createProxyFileSystem(FileSystem fs, URI uri) {
-    return new ProxyFileSystem(fs, uri);
-  }
-  @Override
-  public Map<String, String> getHadoopConfNames() {
-    Map<String, String> ret = new HashMap<String, String>();
-    ret.put("HADOOPFS", "fs.default.name");
-    ret.put("HADOOPMAPFILENAME", "map.input.file");
-    ret.put("HADOOPMAPREDINPUTDIR", "mapred.input.dir");
-    ret.put("HADOOPMAPREDINPUTDIRRECURSIVE", "mapred.input.dir.recursive");
-    ret.put("MAPREDMAXSPLITSIZE", "mapred.max.split.size");
-    ret.put("MAPREDMINSPLITSIZE", "mapred.min.split.size");
-    ret.put("MAPREDMINSPLITSIZEPERNODE", "mapred.min.split.size.per.node");
-    ret.put("MAPREDMINSPLITSIZEPERRACK", "mapred.min.split.size.per.rack");
-    ret.put("HADOOPNUMREDUCERS", "mapred.reduce.tasks");
-    ret.put("HADOOPJOBNAME", "mapred.job.name");
-    ret.put("HADOOPSPECULATIVEEXECREDUCERS", "mapred.reduce.tasks.speculative.execution");
-    ret.put("MAPREDSETUPCLEANUPNEEDED", "mapred.committer.job.setup.cleanup.needed");
-    ret.put("MAPREDTASKCLEANUPNEEDED", "mapreduce.job.committer.task.cleanup.needed");
-    return ret;
-  }
-
-  @Override
-  public ZeroCopyReaderShim getZeroCopyReader(FSDataInputStream in, ByteBufferPoolShim pool) throws IOException {
-    /* not supported */
-    return null;
-  }
-
-  @Override
-  public DirectDecompressorShim getDirectDecompressor(DirectCompressionType codec) {
-    /* not supported */
-    return null;
-  }
-
-  @Override
-  public Configuration getConfiguration(org.apache.hadoop.mapreduce.JobContext context) {
-    return context.getConfiguration();
-  }
-
-  @Override
-  public JobConf getJobConf(org.apache.hadoop.mapred.JobContext context) {
-    return context.getJobConf();
-  }
-
-  @Override
-  public FileSystem getNonCachedFileSystem(URI uri, Configuration conf) throws IOException {
-    boolean origDisableHDFSCache =
-        conf.getBoolean("fs." + uri.getScheme() + ".impl.disable.cache", false);
-    // hadoop-1 compatible flag.
-    conf.setBoolean("fs." + uri.getScheme() + ".impl.disable.cache", true);
-    FileSystem fs = FileSystem.get(uri, conf);
-    conf.setBoolean("fs." + uri.getScheme() + ".impl.disable.cache", origDisableHDFSCache);
-    return fs;
-  }
-
-  @Override
-  public void getMergedCredentials(JobConf jobConf) throws IOException {
-    throw new IOException("Merging of credentials not supported in this version of hadoop");
-  }
-
-  @Override
-  public void mergeCredentials(JobConf dest, JobConf src) throws IOException {
-    throw new IOException("Merging of credentials not supported in this version of hadoop");
-  }
-
-  @Override
-  public String getPassword(Configuration conf, String name) {
-    // No password API, just retrieve value from conf
-    return conf.get(name);
-  }
-
-  @Override
-  public boolean supportStickyBit() {
-    return false;
-  }
-
-  @Override
-  public boolean hasStickyBit(FsPermission permission) {
-    return false;
-  }
-
-  @Override
-  public boolean supportTrashFeature() {
-    return false;
-  }
-
-  @Override
-  public Path getCurrentTrashPath(Configuration conf, FileSystem fs) {
-    return null;
-  }
-
-  @Override
-  public boolean isDirectory(FileStatus fileStatus) {
-    return fileStatus.isDir();
-  }
-
-  /**
-   * Returns a shim to wrap KerberosName
-   */
-  @Override
-  public KerberosNameShim getKerberosNameShim(String name) throws IOException {
-    return new KerberosNameShim(name);
-  }
-
-  /**
-   * Shim for KerberosName
-   */
-  public class KerberosNameShim implements HadoopShimsSecure.KerberosNameShim {
-
-    private final KerberosName kerberosName;
-
-    public KerberosNameShim(String name) {
-      kerberosName = new KerberosName(name);
-    }
-
-    @Override
-    public String getDefaultRealm() {
-      return kerberosName.getDefaultRealm();
-    }
-
-    @Override
-    public String getServiceName() {
-      return kerberosName.getServiceName();
-    }
-
-    @Override
-    public String getHostName() {
-      return kerberosName.getHostName();
-    }
-
-    @Override
-    public String getRealm() {
-      return kerberosName.getRealm();
-    }
-
-    @Override
-    public String getShortName() throws IOException {
-      return kerberosName.getShortName();
-    }
-  }
-
-  @Override
-  public StoragePolicyShim getStoragePolicyShim(FileSystem fs) {
-    return null;
-  }
-
-  @Override
-  public boolean runDistCp(Path src, Path dst, Configuration conf) throws IOException {
-
-    DistCpOptions options = new DistCpOptions(Collections.singletonList(src), dst);
-    options.setSyncFolder(true);
-    options.setSkipCRC(true);
-    options.preserve(FileAttribute.BLOCKSIZE);
-    try {
-      DistCp distcp = new DistCp(conf, options);
-      distcp.execute();
-      return true;
-    } catch (Exception e) {
-      throw new IOException("Cannot execute DistCp process: " + e, e);
-    }
-  }
-
-  @Override
-  public HdfsEncryptionShim createHdfsEncryptionShim(FileSystem fs, Configuration conf) throws IOException {
-    return new HadoopShims.NoopHdfsEncryptionShim();
-  }
-
-  @Override
-  public Path getPathWithoutSchemeAndAuthority(Path path) {
-    return path;
-  }
-
-  @Override
-  public List<HdfsFileStatusWithId> listLocatedHdfsStatus(
-      FileSystem fs, Path path, PathFilter filter) throws IOException {
-    throw new UnsupportedOperationException("Not supported on old version");
-  }
-
-  @Override
-  public int readByteBuffer(FSDataInputStream file, ByteBuffer dest) throws IOException {
-    // Inefficient for direct buffers; only here for compat.
-    int pos = dest.position();
-    if (dest.hasArray()) {
-      int result = file.read(dest.array(), dest.arrayOffset(), dest.remaining());
-      if (result > 0) {
-        dest.position(pos + result);
-      }
-      return result;
-    } else {
-      byte[] arr = new byte[dest.remaining()];
-      int result = file.read(arr, 0, arr.length);
-      if (result > 0) {
-        dest.put(arr, 0, result);
-        dest.position(pos + result);
-      }
-      return result;
-    }
-  }
-
-  @Override
-  public void addDelegationTokens(FileSystem fs, Credentials cred, String uname) throws IOException {
-    Token<?> fsToken = fs.getDelegationToken(uname);
-    cred.addToken(fsToken.getService(), fsToken);
-  }
-
-  @Override
-  public long getFileId(FileSystem fs, String path) throws IOException {
-    throw new UnsupportedOperationException("Not supported on old version");
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Jetty20SShims.java
----------------------------------------------------------------------
diff --git a/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Jetty20SShims.java b/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Jetty20SShims.java
deleted file mode 100644
index 75659ff..0000000
--- a/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Jetty20SShims.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.shims;
-
-
-import org.mortbay.jetty.bio.SocketConnector;
-import org.mortbay.jetty.handler.RequestLogHandler;
-import org.mortbay.jetty.webapp.WebAppContext;
-
-import java.io.IOException;
-
-public class Jetty20SShims implements JettyShims {
-  public Server startServer(String listen, int port) throws IOException {
-    Server s = new Server();
-    s.setupListenerHostPort(listen, port);
-    return s;
-  }
-
-  private static class Server extends org.mortbay.jetty.Server implements JettyShims.Server {
-    public void addWar(String war, String contextPath) {
-      WebAppContext wac = new WebAppContext();
-      wac.setContextPath(contextPath);
-      wac.setWar(war);
-      RequestLogHandler rlh = new RequestLogHandler();
-      rlh.setHandler(wac);
-      this.addHandler(rlh);
-   }
-
-    public void setupListenerHostPort(String listen, int port)
-      throws IOException {
-
-      SocketConnector connector  = new SocketConnector();
-      connector.setPort(port);
-      connector.setHost(listen);
-      this.addConnector(connector);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/0.20S/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim20S.java
----------------------------------------------------------------------
diff --git a/shims/0.20S/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim20S.java b/shims/0.20S/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim20S.java
deleted file mode 100644
index 367ea60..0000000
--- a/shims/0.20S/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim20S.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.mapred;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.filecache.DistributedCache;
-import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.UserGroupInformation;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.URI;
-import java.util.HashSet;
-import java.util.Set;
-
-/**
- * This is in org.apache.hadoop.mapred package because it relies on
- * JobSubmissionProtocol which is package private
- */
-public class WebHCatJTShim20S implements WebHCatJTShim {
-  private JobSubmissionProtocol cnx;
-
-  /**
-   * Create a connection to the Job Tracker.
-   */
-  public WebHCatJTShim20S(Configuration conf, UserGroupInformation ugi)
-          throws IOException {
-    cnx = (JobSubmissionProtocol)
-            RPC.getProxy(JobSubmissionProtocol.class,
-                    JobSubmissionProtocol.versionID,
-                    getAddress(conf),
-                    ugi,
-                    conf,
-                    NetUtils.getSocketFactory(conf,
-                            JobSubmissionProtocol.class));
-  }
-
-  /**
-   * Grab a handle to a job that is already known to the JobTracker.
-   *
-   * @return Profile of the job, or null if not found.
-   */
-  public JobProfile getJobProfile(org.apache.hadoop.mapred.JobID jobid)
-          throws IOException {
-    return cnx.getJobProfile(jobid);
-  }
-
-  /**
-   * Grab a handle to a job that is already known to the JobTracker.
-   *
-   * @return Status of the job, or null if not found.
-   */
-  public org.apache.hadoop.mapred.JobStatus getJobStatus(org.apache.hadoop.mapred.JobID jobid)
-          throws IOException {
-    return cnx.getJobStatus(jobid);
-  }
-
-
-  /**
-   * Kill a job.
-   */
-  public void killJob(org.apache.hadoop.mapred.JobID jobid)
-          throws IOException {
-    cnx.killJob(jobid);
-  }
-
-  /**
-   * Get all the jobs submitted.
-   */
-  public org.apache.hadoop.mapred.JobStatus[] getAllJobs()
-          throws IOException {
-    return cnx.getAllJobs();
-  }
-
-  /**
-   * Close the connection to the Job Tracker.
-   */
-  public void close() {
-    RPC.stopProxy(cnx);
-  }
-  private InetSocketAddress getAddress(Configuration conf) {
-    String jobTrackerStr = conf.get("mapred.job.tracker", "localhost:8012");
-    return NetUtils.createSocketAddr(jobTrackerStr);
-  }
-  @Override
-  public void addCacheFile(URI uri, Job job) {
-    DistributedCache.addCacheFile(uri, job.getConfiguration());
-  }
-  /**
-   * Kill jobs is only supported on hadoop 2.0+.
-   */
-  @Override
-  public void killJobs(String tag, long timestamp) {
-    return;
-  }
-  /**
-   * Get jobs is only supported on hadoop 2.0+.
-   */
-  @Override
-  public Set<String> getJobs(String tag, long timestamp)
-  {
-    return new HashSet<String>();
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/0.23/pom.xml
----------------------------------------------------------------------
diff --git a/shims/0.23/pom.xml b/shims/0.23/pom.xml
index 3b1fb97..eee594e 100644
--- a/shims/0.23/pom.xml
+++ b/shims/0.23/pom.xml
@@ -54,31 +54,32 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <type>test-jar</type>
       <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-core</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <type>test-jar</type>
       <optional>true</optional>
     </dependency>
@@ -103,25 +104,25 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-api</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-common</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-client</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <exclusions>
        <exclusion>
          <groupId>javax.servlet</groupId>
@@ -139,15 +140,16 @@
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-server-tests</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
       <optional>true</optional>
      <type>test-jar</type>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-distcp</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <scope>provided</scope>
    </dependency>
-   </dependencies>
+  </dependencies>
+
 </project>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/aggregator/pom.xml
----------------------------------------------------------------------
diff --git a/shims/aggregator/pom.xml b/shims/aggregator/pom.xml
index 07f6d1b..d8c39a2 100644
--- a/shims/aggregator/pom.xml
+++ b/shims/aggregator/pom.xml
@@ -41,12 +41,6 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive.shims</groupId>
-      <artifactId>hive-shims-0.20S</artifactId>
-      <version>${project.version}</version>
-      <scope>runtime</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive.shims</groupId>
       <artifactId>hive-shims-0.23</artifactId>
       <version>${project.version}</version>
       <scope>runtime</scope>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/common/pom.xml
----------------------------------------------------------------------
diff --git a/shims/common/pom.xml b/shims/common/pom.xml
index dfdec2b..76d8da5 100644
--- a/shims/common/pom.xml
+++ b/shims/common/pom.xml
@@ -62,8 +62,8 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-core</artifactId>
-      <version>${hadoop-20S.version}</version>
+      <artifactId>hadoop-client</artifactId>
+      <version>${hadoop.version}</version>
       <optional>true</optional>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
index c7fa11b..0fe3169 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
@@ -17,19 +17,18 @@
  */
 package org.apache.hadoop.hive.shims;
 
-import java.util.HashMap;
-import java.util.Map;
-
 import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.log4j.AppenderSkeleton;
 
+import java.util.HashMap;
+import java.util.Map;
+
 /**
  * ShimLoader.
  *
  */
 public abstract class ShimLoader {
-  public static String HADOOP20SVERSIONNAME = "0.20S";
   public static String HADOOP23VERSIONNAME = "0.23";
 
   private static HadoopShims hadoopShims;
@@ -45,7 +44,6 @@ public abstract class ShimLoader {
       new HashMap<String, String>();
 
   static {
-    HADOOP_SHIM_CLASSES.put(HADOOP20SVERSIONNAME, "org.apache.hadoop.hive.shims.Hadoop20SShims");
     HADOOP_SHIM_CLASSES.put(HADOOP23VERSIONNAME, "org.apache.hadoop.hive.shims.Hadoop23Shims");
   }
 
@@ -57,7 +55,6 @@ public abstract class ShimLoader {
       new HashMap<String, String>();
 
   static {
-    JETTY_SHIM_CLASSES.put(HADOOP20SVERSIONNAME, "org.apache.hadoop.hive.shims.Jetty20SShims");
     JETTY_SHIM_CLASSES.put(HADOOP23VERSIONNAME, "org.apache.hadoop.hive.shims.Jetty23Shims");
   }
 
@@ -68,21 +65,17 @@ public abstract class ShimLoader {
       new HashMap<String, String>();
 
   static {
-    EVENT_COUNTER_SHIM_CLASSES.put(HADOOP20SVERSIONNAME, "org.apache.hadoop.log.metrics" +
-        ".EventCounter");
     EVENT_COUNTER_SHIM_CLASSES.put(HADOOP23VERSIONNAME, "org.apache.hadoop.log.metrics" +
         ".EventCounter");
   }
 
   /**
-   * The names of the classes for shimming {@link HadoopThriftAuthBridge}
+   * The names of the classes for shimming HadoopThriftAuthBridge
    */
   private static final HashMap<String, String> HADOOP_THRIFT_AUTH_BRIDGE_CLASSES =
       new HashMap<String, String>();
 
   static {
-    HADOOP_THRIFT_AUTH_BRIDGE_CLASSES.put(HADOOP20SVERSIONNAME,
-        "org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge");
     HADOOP_THRIFT_AUTH_BRIDGE_CLASSES.put(HADOOP23VERSIONNAME,
         "org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge23");
   }
@@ -166,8 +159,6 @@ public abstract class ShimLoader {
     }
 
     switch (Integer.parseInt(parts[0])) {
-    case 1:
-      return HADOOP20SVERSIONNAME;
     case 2:
       return HADOOP23VERSIONNAME;
     default:

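With the 0.20S entries removed, ShimLoader keys every shim map on the single remaining "0.23" version name and maps any Hadoop 2.x release to it. The following is a minimal, self-contained sketch (not part of the patch) of that lookup pattern; the class name and the hard-coded version string are illustrative only, and the real ShimLoader obtains the running version from Hadoop's VersionInfo rather than a literal:

  import java.util.HashMap;
  import java.util.Map;

  public class ShimLookupSketch {
    private static final Map<String, String> SHIM_CLASSES = new HashMap<String, String>();
    static {
      // Only the Hadoop 2.x shim remains after HIVE-11378.
      SHIM_CLASSES.put("0.23", "org.apache.hadoop.hive.shims.Hadoop23Shims");
    }

    // Mirrors getMajorVersion(): a "2.x.y" version resolves to the "0.23" shim name,
    // anything else is now rejected outright.
    static String majorVersionName(String hadoopVersion) {
      int major = Integer.parseInt(hadoopVersion.split("\\.")[0]);
      if (major == 2) {
        return "0.23";
      }
      throw new IllegalArgumentException("Unrecognized Hadoop major version in " + hadoopVersion);
    }

    public static void main(String[] args) {
      String shimClass = SHIM_CLASSES.get(majorVersionName("2.7.1"));
      System.out.println(shimClass); // org.apache.hadoop.hive.shims.Hadoop23Shims
    }
  }
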
http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
index 7ed7265..6b0bd10 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
@@ -71,8 +71,11 @@ import org.apache.thrift.transport.TTransportFactory;
 /**
  * Functions that bridge Thrift's SASL transports to Hadoop's
  * SASL callback handlers and authentication classes.
+ * HIVE-11378 This class is not directly used anymore.  It now exists only as a shell to be
+ * extended by HadoopThriftAuthBridge23 in 0.23 shims.  I have made it abstract
+ * to avoid maintenance errors.
  */
-public class HadoopThriftAuthBridge {
+public abstract class HadoopThriftAuthBridge {
   private static final Log LOG = LogFactory.getLog(HadoopThriftAuthBridge.class);
 
   public Client createClient() {
@@ -164,11 +167,7 @@ public class HadoopThriftAuthBridge {
    * @return Hadoop SASL configuration
    */
 
-  public Map<String, String> getHadoopSaslProperties(Configuration conf) {
-    // Initialize the SaslRpcServer to ensure QOP parameters are read from conf
-    SaslRpcServer.init(conf);
-    return SaslRpcServer.SASL_PROPS;
-  }
+  public abstract Map<String, String> getHadoopSaslProperties(Configuration conf);
 
   public static class Client {
     /**

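The comment added above spells out the intent: the common shims module keeps only an abstract HadoopThriftAuthBridge, and the Hadoop 2 implementation in the 0.23 shims supplies getHadoopSaslProperties(). A minimal sketch of that split follows; the class names, the simplified parameter type, and the empty-map body are stand-ins for illustration, not the real shim code:

  import java.util.HashMap;
  import java.util.Map;

  // Stand-in for the abstract bridge kept in the common shims module after this change.
  abstract class AuthBridgeBase {
    public abstract Map<String, String> getHadoopSaslProperties(Map<String, String> conf);
  }

  // Stand-in for the concrete HadoopThriftAuthBridge23 that lives in the 0.23 shims.
  class AuthBridge23 extends AuthBridgeBase {
    @Override
    public Map<String, String> getHadoopSaslProperties(Map<String, String> conf) {
      // The real subclass derives the SASL QOP settings from the Hadoop configuration;
      // returning an empty map keeps this sketch self-contained.
      return new HashMap<String, String>();
    }
  }

  public class AuthBridgeSketch {
    public static void main(String[] args) {
      AuthBridgeBase bridge = new AuthBridge23(); // callers only ever see the abstract type
      System.out.println(bridge.getHadoopSaslProperties(new HashMap<String, String>()));
    }
  }
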
http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/pom.xml
----------------------------------------------------------------------
diff --git a/shims/pom.xml b/shims/pom.xml
index 12113d5..ffacf75 100644
--- a/shims/pom.xml
+++ b/shims/pom.xml
@@ -33,7 +33,6 @@
 
   <modules>
     <module>common</module>
-    <module>0.20S</module>
     <module>0.23</module>
     <module>scheduler</module>
     <module>aggregator</module>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/scheduler/pom.xml
----------------------------------------------------------------------
diff --git a/shims/scheduler/pom.xml b/shims/scheduler/pom.xml
index 407d57d..276b6cb 100644
--- a/shims/scheduler/pom.xml
+++ b/shims/scheduler/pom.xml
@@ -49,43 +49,43 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-core</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-api</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-common</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-client</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-server-tests</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
       <optional>true</optional>
      <type>test-jar</type>
    </dependency>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/storage-api/pom.xml
----------------------------------------------------------------------
diff --git a/storage-api/pom.xml b/storage-api/pom.xml
index 71b79f1..0af0d27 100644
--- a/storage-api/pom.xml
+++ b/storage-api/pom.xml
@@ -34,6 +34,12 @@
   <dependencies>
     <!-- test inter-project -->
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>
@@ -41,31 +47,6 @@
     </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>


[39/55] [abbrv] hive git commit: HIVE-9013 : Hive set command exposes metastore db password (Binglin Chang, reviewed by Thejas Nair, Sushanth Sowmyan)

Posted by xu...@apache.org.
HIVE-9013 : Hive set command exposes metastore db password (Binglin Chang, reviewed by Thejas Nair, Sushanth Sowmyan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a91e1471
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a91e1471
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a91e1471

Branch: refs/heads/spark
Commit: a91e1471cfdd8a8da9bfafc1cb60efd16afbe847
Parents: 8f96446
Author: Sushanth Sowmyan <kh...@gmail.com>
Authored: Mon Oct 26 11:43:02 2015 -0700
Committer: Sushanth Sowmyan <kh...@gmail.com>
Committed: Mon Oct 26 11:43:54 2015 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/conf/HiveConf.java   | 33 ++++++++++++
 .../apache/hadoop/hive/conf/TestHiveConf.java   | 25 +++++++++
 .../apache/hadoop/hive/ql/exec/Utilities.java   | 13 -----
 .../hadoop/hive/ql/exec/mr/ExecDriver.java      |  2 +-
 .../hadoop/hive/ql/exec/tez/DagUtils.java       |  2 +-
 .../hive/ql/exec/tez/TezSessionState.java       |  2 +-
 .../hadoop/hive/ql/processors/SetProcessor.java | 12 ++++-
 .../hive/ql/processors/TestSetProcessor.java    | 54 ++++++++++++++++++++
 8 files changed, 125 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/a91e1471/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index dc79415..643326a 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -45,11 +45,13 @@ import java.io.PrintStream;
 import java.net.URL;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
+import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -75,6 +77,7 @@ public class HiveConf extends Configuration {
   private static final Map<String, ConfVars> vars = new HashMap<String, ConfVars>();
   private static final Map<String, ConfVars> metaConfs = new HashMap<String, ConfVars>();
   private final List<String> restrictList = new ArrayList<String>();
+  private final Set<String> hiddenSet = new HashSet<String>();
 
   private Pattern modWhiteListPattern = null;
   private volatile boolean isSparkConfigUpdated = false;
@@ -2095,6 +2098,9 @@ public class HiveConf extends Configuration {
     HIVE_CONF_RESTRICTED_LIST("hive.conf.restricted.list",
         "hive.security.authenticator.manager,hive.security.authorization.manager,hive.users.in.admin.role",
         "Comma separated list of configuration options which are immutable at runtime"),
+    HIVE_CONF_HIDDEN_LIST("hive.conf.hidden.list",
+        METASTOREPWD.varname + "," + HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname,
+        "Comma separated list of configuration options which should not be read by normal user like passwords"),
 
     // If this is set all move tasks at the end of a multi-insert query will only begin once all
     // outputs are ready
@@ -2630,6 +2636,10 @@ public class HiveConf extends Configuration {
     }
   }
 
+  public boolean isHiddenConfig(String name) {
+    return hiddenSet.contains(name);
+  }
+
   /**
    * check whether spark related property is updated, which includes spark configurations,
    * RSC configurations and yarn configuration in Spark on YARN mode.
@@ -2979,6 +2989,7 @@ public class HiveConf extends Configuration {
 
     // setup list of conf vars that are not allowed to change runtime
     setupRestrictList();
+    setupHiddenSet();
 
   }
 
@@ -3298,6 +3309,28 @@ public class HiveConf extends Configuration {
     }
     restrictList.add(ConfVars.HIVE_IN_TEST.varname);
     restrictList.add(ConfVars.HIVE_CONF_RESTRICTED_LIST.varname);
+    restrictList.add(ConfVars.HIVE_CONF_HIDDEN_LIST.varname);
+  }
+
+  private void setupHiddenSet() {
+    String hiddenListStr = this.getVar(ConfVars.HIVE_CONF_HIDDEN_LIST);
+    hiddenSet.clear();
+    if (hiddenListStr != null) {
+      for (String entry : hiddenListStr.split(",")) {
+        hiddenSet.add(entry.trim());
+      }
+    }
+  }
+
+  /**
+   * Strips hidden config entries from configuration
+   */
+  public void stripHiddenConfigurations(Configuration conf) {
+    for (String name : hiddenSet) {
+      if (conf.get(name) != null) {
+        conf.set(name, "");
+      }
+    }
   }
 
   public static boolean isLoadMetastoreConfig() {

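Taken together, the new HiveConf pieces parse hive.conf.hidden.list into a set and blank out any matching entries before a Configuration is exposed to users. A small stand-alone sketch of that behavior is below; the property names follow the ConfVars referenced in the patch (the metastore password and the HiveServer2 keystore password) but should be treated as illustrative, and a plain Map stands in for Hadoop's Configuration:

  import java.util.HashMap;
  import java.util.HashSet;
  import java.util.Map;
  import java.util.Set;

  public class HiddenConfSketch {
    public static void main(String[] args) {
      String hiddenListStr = "javax.jdo.option.ConnectionPassword,hive.server2.keystore.password";

      // setupHiddenSet(): split the comma-separated list into a lookup set.
      Set<String> hiddenSet = new HashSet<String>();
      for (String entry : hiddenListStr.split(",")) {
        hiddenSet.add(entry.trim());
      }

      // stripHiddenConfigurations(): blank any hidden key that is present.
      Map<String, String> conf = new HashMap<String, String>();
      conf.put("javax.jdo.option.ConnectionPassword", "secret");
      conf.put("hive.exec.parallel", "true");
      for (String name : hiddenSet) {
        if (conf.get(name) != null) {
          conf.put(name, "");
        }
      }
      System.out.println(conf); // the password is now empty, other settings are untouched
    }
  }
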
http://git-wip-us.apache.org/repos/asf/hive/blob/a91e1471/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
index e9bde21..3b7a525 100644
--- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
+++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.conf;
 
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.util.Shell;
 import org.apache.hive.common.util.HiveTestUtils;
@@ -117,4 +118,28 @@ public class TestHiveConf {
     Assert.assertEquals(TimeUnit.NANOSECONDS, HiveConf.unitFor("ns", null));
     Assert.assertEquals(TimeUnit.NANOSECONDS, HiveConf.unitFor("nsecs", null));
   }
+
+  @Test
+  public void testHiddenConfig() throws Exception {
+    HiveConf conf = new HiveConf();
+    // check password configs are hidden
+    Assert.assertTrue(conf.isHiddenConfig(HiveConf.ConfVars.METASTOREPWD.varname));
+    Assert.assertTrue(conf.isHiddenConfig(
+        HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname));
+    // check change hidden list should fail
+    try {
+      final String name = HiveConf.ConfVars.HIVE_CONF_HIDDEN_LIST.varname;
+      conf.verifyAndSet(name, "");
+      Assert.fail("Setting config property " + name + " should fail");
+    } catch (IllegalArgumentException e) {
+      // the verifyAndSet in this case is expected to fail with the IllegalArgumentException
+    }
+    // check stripHiddenConfigurations
+    Configuration conf2 = new Configuration(conf);
+    conf2.set(HiveConf.ConfVars.METASTOREPWD.varname, "password");
+    conf2.set(HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname, "password");
+    conf.stripHiddenConfigurations(conf2);
+    Assert.assertEquals("", conf2.get(HiveConf.ConfVars.METASTOREPWD.varname));
+    Assert.assertEquals("", conf2.get(HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname));
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/a91e1471/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index eb2ab56..0eb5f6d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -3909,19 +3909,6 @@ public final class Utilities {
   }
 
   /**
-   * Strips Hive password details from configuration
-   */
-  public static void stripHivePasswordDetails(Configuration conf) {
-    // Strip out all Hive related password information from the JobConf
-    if (HiveConf.getVar(conf, HiveConf.ConfVars.METASTOREPWD) != null) {
-      HiveConf.setVar(conf, HiveConf.ConfVars.METASTOREPWD, "");
-    }
-    if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD) != null) {
-      HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD, "");
-    }
-  }
-
-  /**
    * Returns the full path to the Jar containing the class. It always return a JAR.
    *
    * @param klass

http://git-wip-us.apache.org/repos/asf/hive/blob/a91e1471/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index b799a17..bed7d63 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -167,7 +167,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
     if (StringUtils.isNotBlank(addedArchives)) {
       HiveConf.setVar(job, ConfVars.HIVEADDEDARCHIVES, addedArchives);
     }
-    Utilities.stripHivePasswordDetails(job);
+    conf.stripHiddenConfigurations(job);
     this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/a91e1471/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
index d250b82..46050e8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
@@ -1041,7 +1041,7 @@ public class DagUtils {
     // Removing job credential entry/ cannot be set on the tasks
     conf.unset("mapreduce.job.credentials.binary");
 
-    Utilities.stripHivePasswordDetails(conf);
+    hiveConf.stripHiddenConfigurations(conf);
     return conf;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/a91e1471/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
index 48231be..58be1dc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
@@ -255,7 +255,7 @@ public class TezSessionState {
 
     // set up the staging directory to use
     tezConfig.set(TezConfiguration.TEZ_AM_STAGING_DIR, tezScratchDir.toUri().toString());
-    Utilities.stripHivePasswordDetails(tezConfig);
+    conf.stripHiddenConfigurations(tezConfig);
 
     ServicePluginsDescriptor servicePluginsDescriptor;
     UserPayload servicePluginPayload = TezUtils.createUserPayloadFromConf(tezConfig);

http://git-wip-us.apache.org/repos/asf/hive/blob/a91e1471/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
index 2226300..9389759 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
@@ -64,6 +64,9 @@ public class SetProcessor implements CommandProcessor {
     for (Object one : p.keySet()) {
       String oneProp = (String) one;
       String oneValue = p.getProperty(oneProp);
+      if (ss.getConf().isHiddenConfig(oneProp)) {
+        continue;
+      }
       sortedMap.put(oneProp, oneValue);
     }
 
@@ -90,7 +93,9 @@ public class SetProcessor implements CommandProcessor {
   private void dumpOption(String s) {
     SessionState ss = SessionState.get();
 
-    if (ss.getConf().get(s) != null) {
+    if (ss.getConf().isHiddenConfig(s)) {
+      ss.out.println(s + " is a hidden config");
+    } else if (ss.getConf().get(s) != null) {
       ss.out.println(s + "=" + ss.getConf().get(s));
     } else if (ss.getHiveVariables().containsKey(s)) {
       ss.out.println(s + "=" + ss.getHiveVariables().get(s));
@@ -241,7 +246,10 @@ public class SetProcessor implements CommandProcessor {
       }
     } else if (varname.indexOf(HIVECONF_PREFIX) == 0) {
       String var = varname.substring(HIVECONF_PREFIX.length());
-      if (ss.getConf().get(var) != null) {
+      if (ss.getConf().isHiddenConfig(var)) {
+        ss.out.println(HIVECONF_PREFIX + var + " is a hidden config");
+        return createProcessorSuccessResponse();
+      } if (ss.getConf().get(var) != null) {
         ss.out.println(HIVECONF_PREFIX + var + "=" + ss.getConf().get(var));
         return createProcessorSuccessResponse();
       } else {

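The SetProcessor changes gate every read path on isHiddenConfig(), so a hidden key is reported with a placeholder instead of its value and is skipped entirely when all options are dumped. In isolation the decision looks like the sketch below; the method and variable names are illustrative, not the actual SessionState API:

  import java.util.HashSet;
  import java.util.Set;

  public class DumpOptionSketch {
    static final Set<String> HIDDEN = new HashSet<String>();
    static { HIDDEN.add("javax.jdo.option.ConnectionPassword"); }

    // Mirrors dumpOption(): hidden keys print a placeholder, everything else key=value.
    static String render(String key, String value) {
      if (HIDDEN.contains(key)) {
        return key + " is a hidden config";
      }
      return key + "=" + value;
    }

    public static void main(String[] args) {
      System.out.println(render("javax.jdo.option.ConnectionPassword", "secret"));
      System.out.println(render("hive.exec.parallel", "true"));
    }
  }
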
http://git-wip-us.apache.org/repos/asf/hive/blob/a91e1471/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java b/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java
new file mode 100644
index 0000000..bff643a
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.processors;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.junit.Test;
+
+public class TestSetProcessor {
+
+  @Test
+  public void testHiddenConfig() throws Exception {
+    HiveConf conf = new HiveConf();
+    SessionState.start(conf);
+    SessionState state = SessionState.get();
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    state.out = new PrintStream(baos);
+    SetProcessor processor = new SetProcessor();
+    processor.run("");
+    state.out.flush();
+    String ret = baos.toString();
+    Assert.assertFalse(ret.contains(HiveConf.ConfVars.METASTOREPWD.varname + "="));
+    Assert.assertFalse(ret.contains(HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname + "="));
+
+    baos = new ByteArrayOutputStream();
+    state.out = new PrintStream(baos);
+    processor.run(HiveConf.ConfVars.METASTOREPWD.varname);
+    state.out.flush();
+    ret = new String(baos.toByteArray());
+    Assert.assertTrue(baos.toString().contains("hidden"));
+  }
+
+}


[53/55] [abbrv] hive git commit: HIVE-11497: Make sure --orcfiledump utility includes OrcRecordUpdate.AcidStats (Prasanth Jayachandran reviewed by Eugene Koifman)

Posted by xu...@apache.org.
HIVE-11497: Make sure --orcfiledump utility includes OrcRecordUpdate.AcidStats (Prasanth Jayachandran reviewed by Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3e21a6d4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3e21a6d4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3e21a6d4

Branch: refs/heads/spark
Commit: 3e21a6d44971feb91ab26ec6dbf8ee207683ada1
Parents: f2ede0e
Author: Prasanth Jayachandran <j....@gmail.com>
Authored: Tue Oct 27 23:44:51 2015 -0500
Committer: Prasanth Jayachandran <j....@gmail.com>
Committed: Tue Oct 27 23:44:51 2015 -0500

----------------------------------------------------------------------
 .../apache/hadoop/hive/ql/io/orc/FileDump.java  |  4 +++
 .../hadoop/hive/ql/io/orc/JsonFileDump.java     |  6 ++++
 .../hadoop/hive/ql/io/orc/OrcRecordUpdater.java | 34 +++++++++++++-------
 ql/src/test/resources/orc-file-dump.json        |  3 +-
 4 files changed, 35 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/3e21a6d4/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
index 9c6538f..0e9667a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
@@ -249,6 +249,10 @@ public final class FileDump {
         System.out.println("\nFile length: " + fileLen + " bytes");
         System.out.println("Padding length: " + paddedBytes + " bytes");
         System.out.println("Padding ratio: " + format.format(percentPadding) + "%");
+        OrcRecordUpdater.AcidStats acidStats = OrcRecordUpdater.parseAcidStats(reader);
+        if (acidStats != null) {
+          System.out.println("ACID stats:" + acidStats);
+        }
         rows.close();
         if (files.size() > 1) {
           System.out.println(Strings.repeat("=", 80) + "\n");

http://git-wip-us.apache.org/repos/asf/hive/blob/3e21a6d4/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java
index 02e01b4..7f673dc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/JsonFileDump.java
@@ -167,6 +167,12 @@ public class JsonFileDump {
         writer.key("fileLength").value(fileLen);
         writer.key("paddingLength").value(paddedBytes);
         writer.key("paddingRatio").value(percentPadding);
+        OrcRecordUpdater.AcidStats acidStats = OrcRecordUpdater.parseAcidStats(reader);
+        if (acidStats != null) {
+          writer.key("numInserts").value(acidStats.inserts);
+          writer.key("numDeletes").value(acidStats.deletes);
+          writer.key("numUpdates").value(acidStats.updates);
+        }
         writer.key("status").value("OK");
         rows.close();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/3e21a6d4/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
index 2220b8e..01374a7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
@@ -45,7 +45,6 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Lists;
 
 /**
  * A RecordUpdater where the files are stored as ORC.
@@ -128,6 +127,15 @@ public class OrcRecordUpdater implements RecordUpdater {
       builder.append(deletes);
       return builder.toString();
     }
+
+    @Override
+    public String toString() {
+      StringBuilder builder = new StringBuilder();
+      builder.append(" inserts: ").append(inserts);
+      builder.append(" updates: ").append(updates);
+      builder.append(" deletes: ").append(deletes);
+      return builder.toString();
+    }
   }
 
   static Path getSideFile(Path main) {
@@ -448,17 +456,21 @@ public class OrcRecordUpdater implements RecordUpdater {
    * {@link KeyIndexBuilder} creates these
    */
   static AcidStats parseAcidStats(Reader reader) {
-    String statsSerialized;
-    try {
-      ByteBuffer val =
-        reader.getMetadataValue(OrcRecordUpdater.ACID_STATS)
-          .duplicate();
-      statsSerialized = utf8Decoder.decode(val).toString();
-    } catch (CharacterCodingException e) {
-      throw new IllegalArgumentException("Bad string encoding for " +
-        OrcRecordUpdater.ACID_STATS, e);
+    if (reader.hasMetadataValue(OrcRecordUpdater.ACID_STATS)) {
+      String statsSerialized;
+      try {
+        ByteBuffer val =
+            reader.getMetadataValue(OrcRecordUpdater.ACID_STATS)
+                .duplicate();
+        statsSerialized = utf8Decoder.decode(val).toString();
+      } catch (CharacterCodingException e) {
+        throw new IllegalArgumentException("Bad string encoding for " +
+            OrcRecordUpdater.ACID_STATS, e);
+      }
+      return new AcidStats(statsSerialized);
+    } else {
+      return null;
     }
-    return new AcidStats(statsSerialized);
   }
 
   static class KeyIndexBuilder implements OrcFile.WriterCallback {

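The guard added to parseAcidStats() lets both dump paths print whatever comes back and silently skip the section when the ACID_STATS metadata key is absent, as it is for non-ACID files. A compact sketch of that null-tolerant flow follows; the AcidStats fields and output shape loosely follow the patch, while the boolean parameter and the hard-coded counts are stand-ins for the real ORC Reader metadata calls:

  public class AcidStatsSketch {
    static class AcidStats {
      long inserts; long updates; long deletes;
      @Override public String toString() {
        return " inserts: " + inserts + " updates: " + updates + " deletes: " + deletes;
      }
    }

    // Stand-in for the hasMetadataValue()/getMetadataValue() pair: returns null when the
    // file carries no ACID_STATS entry instead of throwing as the old code did.
    static AcidStats parseAcidStats(boolean hasAcidStats) {
      if (!hasAcidStats) {
        return null;
      }
      AcidStats stats = new AcidStats();
      stats.inserts = 12; stats.updates = 3; stats.deletes = 1; // decoded values would go here
      return stats;
    }

    public static void main(String[] args) {
      AcidStats acidStats = parseAcidStats(true);
      if (acidStats != null) {
        System.out.println("ACID stats:" + acidStats); // same shape as FileDump's new output
      }
    }
  }

In practice this shows up when the file dump utility (typically invoked as hive --orcfiledump against an ACID base or delta file) is pointed at a file written by OrcRecordUpdater; files without the metadata simply omit the line.
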
http://git-wip-us.apache.org/repos/asf/hive/blob/3e21a6d4/ql/src/test/resources/orc-file-dump.json
----------------------------------------------------------------------
diff --git a/ql/src/test/resources/orc-file-dump.json b/ql/src/test/resources/orc-file-dump.json
index 646dfe5..25fd63b 100644
--- a/ql/src/test/resources/orc-file-dump.json
+++ b/ql/src/test/resources/orc-file-dump.json
@@ -1350,5 +1350,6 @@
   ],
   "fileLength": 273300,
   "paddingLength": 0,
-  "paddingRatio": 0
+  "paddingRatio": 0,
+  "status": "OK"
 }


[43/55] [abbrv] hive git commit: HIVE-11378 Remove hadoop-1 support from master branch (gates, reviewed by Ashutosh Chauhan and Sergey Shelukhin)

Posted by xu...@apache.org.
HIVE-11378 Remove hadoop-1 support from master branch (gates, reviewed by Ashutosh Chauhan and Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f9517efd
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f9517efd
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f9517efd

Branch: refs/heads/spark
Commit: f9517efd2dc70fd88944fa8879feea0ddc11d08b
Parents: d84e393
Author: Alan Gates <ga...@hortonworks.com>
Authored: Mon Oct 26 14:59:01 2015 -0700
Committer: Alan Gates <ga...@hortonworks.com>
Committed: Mon Oct 26 14:59:01 2015 -0700

----------------------------------------------------------------------
 accumulo-handler/pom.xml                        |  61 +-
 beeline/pom.xml                                 |  44 +-
 cli/pom.xml                                     |  63 +-
 common/pom.xml                                  |  41 +-
 contrib/pom.xml                                 |  44 +-
 hbase-handler/pom.xml                           | 246 +++----
 hcatalog/core/pom.xml                           | 194 ++---
 hcatalog/hcatalog-pig-adapter/pom.xml           | 160 ++--
 .../hcatalog/pig/TestHCatLoaderEncryption.java  |  61 +-
 hcatalog/pom.xml                                |  65 +-
 hcatalog/server-extensions/pom.xml              |  29 +-
 hcatalog/streaming/pom.xml                      |  42 +-
 hcatalog/webhcat/java-client/pom.xml            |  39 +-
 hcatalog/webhcat/svr/pom.xml                    |  60 +-
 hplsql/pom.xml                                  |  31 +-
 hwi/pom.xml                                     |  61 +-
 itests/custom-serde/pom.xml                     |  31 +-
 itests/hcatalog-unit/pom.xml                    | 389 ++++------
 itests/hive-jmh/pom.xml                         |  38 +-
 itests/hive-minikdc/pom.xml                     | 181 ++---
 itests/hive-unit-hadoop2/pom.xml                |  12 +-
 .../hive/thrift/TestHadoopAuthBridge23.java     |  27 +-
 itests/hive-unit/pom.xml                        | 358 ++++-----
 itests/pom.xml                                  |   9 +-
 itests/qtest-accumulo/pom.xml                   | 404 ++++------
 itests/qtest-spark/pom.xml                      |  38 +-
 itests/qtest/pom.xml                            | 554 ++++++--------
 itests/test-serde/pom.xml                       |  32 +-
 itests/util/pom.xml                             | 163 ++--
 jdbc/pom.xml                                    |  29 +-
 llap-client/pom.xml                             |  90 +--
 llap-server/pom.xml                             | 146 ++--
 metastore/pom.xml                               |  67 +-
 pom.xml                                         | 228 +++---
 ql/pom.xml                                      | 154 ++--
 serde/pom.xml                                   |  90 +--
 service/pom.xml                                 |  45 +-
 shims/0.20S/pom.xml                             |  63 --
 .../hadoop/hive/shims/Hadoop20SShims.java       | 734 -------------------
 .../apache/hadoop/hive/shims/Jetty20SShims.java |  53 --
 .../apache/hadoop/mapred/WebHCatJTShim20S.java  | 123 ----
 shims/0.23/pom.xml                              |  26 +-
 shims/aggregator/pom.xml                        |   6 -
 shims/common/pom.xml                            |   4 +-
 .../apache/hadoop/hive/shims/ShimLoader.java    |  17 +-
 .../hive/thrift/HadoopThriftAuthBridge.java     |  11 +-
 shims/pom.xml                                   |   1 -
 shims/scheduler/pom.xml                         |  14 +-
 storage-api/pom.xml                             |  31 +-
 49 files changed, 1583 insertions(+), 3826 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/accumulo-handler/pom.xml
----------------------------------------------------------------------
diff --git a/accumulo-handler/pom.xml b/accumulo-handler/pom.xml
index 4e3a087..a330e94 100644
--- a/accumulo-handler/pom.xml
+++ b/accumulo-handler/pom.xml
@@ -91,6 +91,24 @@
       <artifactId>slf4j-api</artifactId>
     </dependency>
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>
@@ -102,49 +120,6 @@
     </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/beeline/pom.xml
----------------------------------------------------------------------
diff --git a/beeline/pom.xml b/beeline/pom.xml
index d46ac3c..391d589 100644
--- a/beeline/pom.xml
+++ b/beeline/pom.xml
@@ -81,6 +81,12 @@
       <version>${jline.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
       <groupId>org.apache.thrift</groupId>
       <artifactId>libthrift</artifactId>
       <version>${libthrift.version}</version>
@@ -106,6 +112,12 @@
     </dependency>
     <!-- test inter-project -->
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>
@@ -119,38 +131,6 @@
     </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <!-- test inter-project -->
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/cli/pom.xml
----------------------------------------------------------------------
diff --git a/cli/pom.xml b/cli/pom.xml
index 88a815c..a2b9551 100644
--- a/cli/pom.xml
+++ b/cli/pom.xml
@@ -96,8 +96,26 @@
       <artifactId>libthrift</artifactId>
       <version>${libthrift.version}</version>
     </dependency>
-    <!-- test inter-project -->
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+   <!-- test inter-project -->
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>
@@ -111,49 +129,6 @@
     </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index dba814d..1ab4c57 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -95,6 +95,18 @@
       <artifactId>ant</artifactId>
       <version>${ant.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
     <!-- test inter-project -->
     <dependency>
       <groupId>com.google.code.tempus-fugit</groupId>
@@ -137,35 +149,6 @@
 
   <profiles>
     <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-
-    <profile>
       <id>dist</id>
       <build>
         <plugins>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/contrib/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/pom.xml b/contrib/pom.xml
index 8f2ffe4..51602d4 100644
--- a/contrib/pom.xml
+++ b/contrib/pom.xml
@@ -60,6 +60,19 @@
       <artifactId>commons-logging</artifactId>
       <version>${commons-logging.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+
     <!-- test inter-project -->
     <dependency>
       <groupId>junit</groupId>
@@ -69,37 +82,6 @@
     </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/hbase-handler/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-handler/pom.xml b/hbase-handler/pom.xml
index a4a9752..a6801eb 100644
--- a/hbase-handler/pom.xml
+++ b/hbase-handler/pom.xml
@@ -50,8 +50,93 @@
       <artifactId>commons-logging</artifactId>
       <version>${commons-logging.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop2-compat</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+
     <!-- test inter-project -->
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop2-compat</artifactId>
+      <version>${hbase.version}</version>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <version>${hbase.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-servlet</artifactId>
+      <version>${jersey.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>
@@ -64,167 +149,6 @@
 	</dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop1-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop1-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-client</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-test</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop2-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop2-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-client</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>com.sun.jersey</groupId>
-          <artifactId>jersey-servlet</artifactId>
-          <version>${jersey.version}</version>
-          <scope>test</scope>
-       </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/hcatalog/core/pom.xml
----------------------------------------------------------------------
diff --git a/hcatalog/core/pom.xml b/hcatalog/core/pom.xml
index 58ddeee..70297bf 100644
--- a/hcatalog/core/pom.xml
+++ b/hcatalog/core/pom.xml
@@ -78,123 +78,87 @@
       <artifactId>jackson-mapper-asl</artifactId>
       <version>${jackson.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-annotations</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-archives</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <!-- test -->
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-servlet</artifactId>
+      <version>${jersey.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-hs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-tests</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.pig</groupId>
+      <artifactId>pig</artifactId>
+      <version>${pig.version}</version>
+      <classifier>h2</classifier>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-tools</artifactId>
-          <version>${hadoop-20S.version}</version>
-        </dependency>
-        <!-- test -->
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-test</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.pig</groupId>
-          <artifactId>pig</artifactId>
-          <version>${pig.version}</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-annotations</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-archives</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <!-- test -->
-        <dependency>
-          <groupId>com.sun.jersey</groupId>
-          <artifactId>jersey-servlet</artifactId>
-          <version>${jersey.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-hs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-server-tests</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.pig</groupId>
-          <artifactId>pig</artifactId>
-          <version>${pig.version}</version>
-          <classifier>h2</classifier>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <plugins>
       <plugin>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/hcatalog/hcatalog-pig-adapter/pom.xml
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/pom.xml b/hcatalog/hcatalog-pig-adapter/pom.xml
index ca9039a..fa02a36 100644
--- a/hcatalog/hcatalog-pig-adapter/pom.xml
+++ b/hcatalog/hcatalog-pig-adapter/pom.xml
@@ -45,6 +45,42 @@
       <artifactId>hive-hcatalog-core</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <!-- inter-project -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.pig</groupId>
+      <artifactId>pig</artifactId>
+      <version>${pig.version}</version>
+      <classifier>h2</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+    <!--this should be automatically brought in by Pig, it's not in Pig 0.12 due to a bug
+        in Pig which requires it This is fixed in Pig's pom file in ASF trunk (pig 13)-->
+      <groupId>joda-time</groupId>
+      <artifactId>joda-time</artifactId>
+      <version>2.2</version>
+    </dependency>
+
     <!-- test intra-project -->
     <dependency>
       <groupId>org.apache.hive.hcatalog</groupId>
@@ -66,103 +102,33 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <!-- test inter-project -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-servlet</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-test</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.pig</groupId>
-          <artifactId>pig</artifactId>
-          <version>${pig.version}</version>
-        </dependency>
-        <dependency>
-          <!--this should be automatically brought in by Pig, it's not in Pig 0.12 due to a bug
-              in Pig which requires it This is fixed in Pig's pom file in ASF trunk (pig 13)-->
-          <groupId>joda-time</groupId>
-          <artifactId>joda-time</artifactId>
-          <version>2.2</version>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.pig</groupId>
-          <artifactId>pig</artifactId>
-          <version>${pig.version}</version>
-          <classifier>h2</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <!--this should be automatically brought in by Pig, it's not in Pig 0.12 due to a bug
-              in Pig which requires it This is fixed in Pig's pom file in ASF trunk (pig 13)-->
-          <groupId>joda-time</groupId>
-          <artifactId>joda-time</artifactId>
-          <version>2.2</version>
-        </dependency>
-        <!-- Test dependencies -->
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>com.sun.jersey</groupId>
-          <artifactId>jersey-servlet</artifactId>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
 </project>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
index df3b72a..676a4ed 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
@@ -18,22 +18,7 @@
  */
 package org.apache.hive.hcatalog.pig;
 
-import java.io.File;
-import java.io.IOException;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-
 import org.apache.commons.io.FileUtils;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -54,7 +39,6 @@ import org.apache.hadoop.hive.ql.processors.HiveCommand;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
-
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
@@ -66,22 +50,35 @@ import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.data.Pair;
-
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.data.Tuple;
-
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.*;
+import java.io.File;
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assume.assumeTrue;
 
 @RunWith(Parameterized.class)
@@ -100,9 +97,6 @@ public class TestHCatLoaderEncryption {
   private HadoopShims.MiniDFSShim dfs = null;
   private HadoopShims.HdfsEncryptionShim hes = null;
   private final String[] testOnlyCommands = new String[]{"crypto"};
-  private final String[] encryptionUnsupportedHadoopVersion = new String[]{ShimLoader
-      .HADOOP20SVERSIONNAME};
-  private boolean isEncryptionTestEnabled = true;
   private Driver driver;
   private Map<Integer, Pair<Integer, String>> basicInputData;
   private static List<HCatRecord> readRecords = new ArrayList<HCatRecord>();
@@ -196,7 +190,6 @@ public class TestHCatLoaderEncryption {
 
     driver = new Driver(hiveConf);
 
-    checkShimLoaderVersion();
     initEncryptionShim(hiveConf);
     String encryptedTablePath =  TEST_WAREHOUSE_DIR + "/encryptedTable";
     SessionState.start(new CliSessionState(hiveConf));
@@ -231,19 +224,7 @@ public class TestHCatLoaderEncryption {
     server.executeBatch();
   }
 
-  void checkShimLoaderVersion() {
-    for (String v : encryptionUnsupportedHadoopVersion) {
-      if (ShimLoader.getMajorVersion().equals(v)) {
-        isEncryptionTestEnabled = false;
-        return;
-      }
-    }
-  }
-
   void initEncryptionShim(HiveConf conf) throws IOException {
-    if (!isEncryptionTestEnabled) {
-      return;
-    }
     FileSystem fs;
     HadoopShims shims = ShimLoader.getHadoopShims();
     conf.set(SECURITY_KEY_PROVIDER_URI_NAME, getKeyProviderURI());
@@ -268,9 +249,6 @@ public class TestHCatLoaderEncryption {
   }
 
   private void associateEncryptionZoneWithPath(String path) throws SQLException, CommandNeedRetryException {
-    if (!isEncryptionTestEnabled) {
-      return;
-    }
     LOG.info(this.storageFormat + ": associateEncryptionZoneWithPath");
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     enableTestOnlyCmd(SessionState.get().getConf());
@@ -289,9 +267,6 @@ public class TestHCatLoaderEncryption {
   }
 
   private void removeEncryptionZone() throws SQLException, CommandNeedRetryException {
-    if (!isEncryptionTestEnabled) {
-      return;
-    }
     LOG.info(this.storageFormat + ": removeEncryptionZone");
     enableTestOnlyCmd(SessionState.get().getConf());
     CommandProcessor crypto = getTestCommand("crypto");
@@ -333,7 +308,6 @@ public class TestHCatLoaderEncryption {
 
   @Test
   public void testReadDataFromEncryptedHiveTableByPig() throws IOException {
-    assumeTrue(isEncryptionTestEnabled);
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     PigServer server = new PigServer(ExecType.LOCAL);
 
@@ -356,7 +330,6 @@ public class TestHCatLoaderEncryption {
 
   @Test
   public void testReadDataFromEncryptedHiveTableByHCatMR() throws Exception {
-    assumeTrue(isEncryptionTestEnabled);
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     readRecords.clear();

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/hcatalog/pom.xml
----------------------------------------------------------------------
diff --git a/hcatalog/pom.xml b/hcatalog/pom.xml
index 2e145b8..7550eeb 100644
--- a/hcatalog/pom.xml
+++ b/hcatalog/pom.xml
@@ -53,51 +53,24 @@
       <version>${mockito-all.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.pig</groupId>
+      <artifactId>pig</artifactId>
+      <version>${pig.version}</version>
+      <classifier>h2</classifier>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
-
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.pig</groupId>
-          <artifactId>pig</artifactId>
-          <version>${pig.version}</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.pig</groupId>
-          <artifactId>pig</artifactId>
-          <version>${pig.version}</version>
-          <classifier>h2</classifier>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
-
 </project>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/hcatalog/server-extensions/pom.xml
----------------------------------------------------------------------
diff --git a/hcatalog/server-extensions/pom.xml b/hcatalog/server-extensions/pom.xml
index 3cd2813..b808b40 100644
--- a/hcatalog/server-extensions/pom.xml
+++ b/hcatalog/server-extensions/pom.xml
@@ -56,6 +56,11 @@
       <artifactId>jackson-mapper-asl</artifactId>
       <version>${jackson.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
     <!-- test intra-project -->
     <dependency>
       <groupId>org.apache.hive.hcatalog</groupId>
@@ -103,28 +108,4 @@
       <scope>test</scope>
     </dependency>
   </dependencies>
-
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
 </project>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/hcatalog/streaming/pom.xml
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/pom.xml b/hcatalog/streaming/pom.xml
index ba9f731..39b3abd 100644
--- a/hcatalog/streaming/pom.xml
+++ b/hcatalog/streaming/pom.xml
@@ -32,34 +32,6 @@
     <hive.path.to.root>../..</hive.path.to.root>
   </properties>
 
-  <profiles>
-  <profile>
-    <id>hadoop-1</id>
-    <dependencies>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-core</artifactId>
-        <optional>true</optional>
-      </dependency>
-    </dependencies>
-  </profile>
-  <profile>
-    <id>hadoop-2</id>
-    <dependencies>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-common</artifactId>
-        <optional>true</optional>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-mapreduce-client-core</artifactId>
-        <optional>true</optional>
-      </dependency>
-    </dependencies>
-  </profile>
-  </profiles>
-
   <dependencies>
     <!-- dependencies are always listed in sorted order by groupId, artifectId -->
     <!-- intra-project -->
@@ -95,6 +67,18 @@
       <optional>true</optional>
       <version>3.3.2</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <optional>true</optional>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <optional>true</optional>
+      <version>${hadoop.version}</version>
+    </dependency>
 
     <!-- test -->
     <dependency>
@@ -108,7 +92,7 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-common</artifactId>
       <scope>test</scope>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
     </dependency>
 
   </dependencies>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/hcatalog/webhcat/java-client/pom.xml
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/java-client/pom.xml b/hcatalog/webhcat/java-client/pom.xml
index b3f3122..4a8596c 100644
--- a/hcatalog/webhcat/java-client/pom.xml
+++ b/hcatalog/webhcat/java-client/pom.xml
@@ -55,6 +55,17 @@
       <artifactId>hive-exec</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <!-- inter-project -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
     <!-- test intra-project -->
     <dependency>
       <groupId>org.apache.hive</groupId>
@@ -72,32 +83,4 @@
     </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
 </project>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/hcatalog/webhcat/svr/pom.xml
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/svr/pom.xml b/hcatalog/webhcat/svr/pom.xml
index e635cc6..67d73ae 100644
--- a/hcatalog/webhcat/svr/pom.xml
+++ b/hcatalog/webhcat/svr/pom.xml
@@ -102,6 +102,26 @@
       <artifactId>jul-to-slf4j</artifactId>
       <version>${slf4j.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-auth</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
     <!-- test inter-project -->
     <dependency>
       <groupId>junit</groupId>
@@ -110,46 +130,6 @@
       <scope>test</scope>
     </dependency>
   </dependencies>
-
-
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-auth</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <resources>
       <resource>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/hplsql/pom.xml
----------------------------------------------------------------------
diff --git a/hplsql/pom.xml b/hplsql/pom.xml
index fc1c527..b855007 100644
--- a/hplsql/pom.xml
+++ b/hplsql/pom.xml
@@ -74,6 +74,12 @@
        <version>4.5</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>
@@ -81,31 +87,6 @@
     </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-  
   <build>
     <plugins>
      <plugin>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/hwi/pom.xml
----------------------------------------------------------------------
diff --git a/hwi/pom.xml b/hwi/pom.xml
index d0533ff..e9686c6 100644
--- a/hwi/pom.xml
+++ b/hwi/pom.xml
@@ -65,6 +65,19 @@
       <artifactId>jetty-all-server</artifactId>
       <version>${jetty.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+      <scope>test</scope>
+    </dependency>
     <!-- test intra-project -->
     <dependency>
       <groupId>org.apache.hive</groupId>
@@ -86,49 +99,15 @@
       <version>${junit.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-    <!-- dependencies are always listed in sorted order by groupId, artifectId -->
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-    <!-- dependencies are always listed in sorted order by groupId, artifectId -->
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/custom-serde/pom.xml
----------------------------------------------------------------------
diff --git a/itests/custom-serde/pom.xml b/itests/custom-serde/pom.xml
index 078549d..1d3f929 100644
--- a/itests/custom-serde/pom.xml
+++ b/itests/custom-serde/pom.xml
@@ -39,31 +39,16 @@
       <version>${project.version}</version>
       <optional>true</optional>
     </dependency>
+    <!-- inter-project -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
   </dependencies>
 
   <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-         <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
+ </profiles>
 
 </project>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/hcatalog-unit/pom.xml
----------------------------------------------------------------------
diff --git a/itests/hcatalog-unit/pom.xml b/itests/hcatalog-unit/pom.xml
index eb330e1..ec48a4d 100644
--- a/itests/hcatalog-unit/pom.xml
+++ b/itests/hcatalog-unit/pom.xml
@@ -114,255 +114,150 @@
       <version>${junit.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-annotations</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-archives</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-tests</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-servlet</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-hs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop2-compat</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop2-compat</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.pig</groupId>
+      <artifactId>pig</artifactId>
+      <version>${pig.version}</version>
+      <classifier>h2</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <!--this should be automatically brought in by Pig, it's not in Pig 0.12 due to a bug
+      in Pig which requires it This is fixed in Pig's pom file in ASF trunk (pig 13)-->
+      <groupId>joda-time</groupId>
+      <artifactId>joda-time</artifactId>
+      <version>2.2</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <!-- test -->
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-tools</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-test</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-client</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop1-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop1-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.pig</groupId>
-          <artifactId>pig</artifactId>
-          <version>${pig.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <!--this should be automatically brought in by Pig, it's not in Pig 0.12 due to a bug
-              in Pig which requires it This is fixed in Pig's pom file in ASF trunk (pig 13)-->
-          <groupId>joda-time</groupId>
-          <artifactId>joda-time</artifactId>
-          <version>2.2</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-   <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <!-- test -->
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-annotations</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-archives</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-server-tests</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>com.sun.jersey</groupId>
-          <artifactId>jersey-servlet</artifactId>
-          <version>${jersey.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-hs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-client</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop2-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop2-compat</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.pig</groupId>
-          <artifactId>pig</artifactId>
-          <version>${pig.version}</version>
-          <classifier>h2</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <!--this should be automatically brought in by Pig, it's not in Pig 0.12 due to a bug
-              in Pig which requires it This is fixed in Pig's pom file in ASF trunk (pig 13)-->
-          <groupId>joda-time</groupId>
-          <artifactId>joda-time</artifactId>
-          <version>2.2</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <plugins>
       <plugin>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/hive-jmh/pom.xml
----------------------------------------------------------------------
diff --git a/itests/hive-jmh/pom.xml b/itests/hive-jmh/pom.xml
index 56f4016..1daef26 100644
--- a/itests/hive-jmh/pom.xml
+++ b/itests/hive-jmh/pom.xml
@@ -58,36 +58,18 @@
       <artifactId>hive-exec</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <plugins>
       <plugin>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/hive-minikdc/pom.xml
----------------------------------------------------------------------
diff --git a/itests/hive-minikdc/pom.xml b/itests/hive-minikdc/pom.xml
index 3098dac..6a20cad 100644
--- a/itests/hive-minikdc/pom.xml
+++ b/itests/hive-minikdc/pom.xml
@@ -130,143 +130,58 @@
       <version>${mockito-all.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minikdc</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-servlet</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <profiles>
     <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-test</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop1-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop1-compat</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-          <classifier>tests</classifier>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop1.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-tools</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>com.sun.jersey</groupId>
-          <artifactId>jersey-servlet</artifactId>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minikdc</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <version>${hbase.hadoop2.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minicluster</artifactId>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>com.sun.jersey</groupId>
-          <artifactId>jersey-servlet</artifactId>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
       <activation>
 	<os>
           <family>Windows</family>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/hive-unit-hadoop2/pom.xml
----------------------------------------------------------------------
diff --git a/itests/hive-unit-hadoop2/pom.xml b/itests/hive-unit-hadoop2/pom.xml
index 83ef97c..006db5a 100644
--- a/itests/hive-unit-hadoop2/pom.xml
+++ b/itests/hive-unit-hadoop2/pom.xml
@@ -1,4 +1,4 @@
-	<?xml version="1.0" encoding="UTF-8"?>
+<?xml version="1.0" encoding="UTF-8"?>
 	<!--
   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
@@ -128,32 +128,32 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <classifier>tests</classifier>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <classifier>tests</classifier>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-core</artifactId>
-      <version>${hadoop-23.version}</version>
+      <version>${hadoop.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
-      <version>${hbase.hadoop2.version}</version>
+      <version>${hbase.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoopAuthBridge23.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoopAuthBridge23.java b/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoopAuthBridge23.java
index 40b161a..ff56f80 100644
--- a/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoopAuthBridge23.java
+++ b/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoopAuthBridge23.java
@@ -18,21 +18,7 @@ package org.apache.hadoop.hive.thrift;
  */
 
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.NetworkInterface;
-import java.net.ServerSocket;
-import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Enumeration;
-import java.util.List;
-import java.util.Map;
-
 import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -58,6 +44,19 @@ import org.apache.thrift.transport.TSaslServerTransport;
 import org.apache.thrift.transport.TTransportException;
 import org.apache.thrift.transport.TTransportFactory;
 
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.ServerSocket;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Enumeration;
+import java.util.List;
+import java.util.Map;
+
 public class TestHadoopAuthBridge23 extends TestCase {
 
   /**


[28/55] [abbrv] hive git commit: HIVE-12239 : Constants in hive.common.metrics.common.MetricsConstant are not final (Aleksei Statkevich via Ashutosh Chauhan)

Posted by xu...@apache.org.
HIVE-12239 : Constants in hive.common.metrics.common.MetricsConstant are not final (Aleksei Statkevich via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan <ha...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a6da5d15
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a6da5d15
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a6da5d15

Branch: refs/heads/spark
Commit: a6da5d15b162aa39ac7cb82f206ad47a59f0dd3e
Parents: 03c62d0
Author: Aleksei Statkevich <me...@gmail.com>
Authored: Thu Oct 22 21:18:00 2015 -0800
Committer: Ashutosh Chauhan <ha...@apache.org>
Committed: Sat Oct 24 14:50:45 2015 -0700

----------------------------------------------------------------------
 .../common/metrics/common/MetricsConstant.java  | 26 ++++++++++----------
 1 file changed, 13 insertions(+), 13 deletions(-)
----------------------------------------------------------------------
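For context, the patch below only adds the final modifier to each constant; the names and values are unchanged. As a hypothetical, self-contained demo (not part of the Hive tree), this is why a non-final public static String constant is fragile compared to a final one:

  // Hypothetical example, not taken from the patch: it only illustrates the
  // difference the final modifier makes for the MetricsConstant fields below.
  public class ConstantsDemo {
    public static String MUTABLE = "open_connections";         // can be reassigned from anywhere
    public static final String IMMUTABLE = "open_connections"; // fixed at class initialization

    public static void main(String[] args) {
      MUTABLE = "oops";        // compiles: callers may silently read the wrong metric name
      // IMMUTABLE = "oops";   // would not compile
      System.out.println(MUTABLE + " / " + IMMUTABLE);
    }
  }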


http://git-wip-us.apache.org/repos/asf/hive/blob/a6da5d15/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java b/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
index 88a3c29..a5aa995 100644
--- a/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
+++ b/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
@@ -22,20 +22,20 @@ package org.apache.hadoop.hive.common.metrics.common;
  */
 public class MetricsConstant {
 
-  public static String JVM_PAUSE_INFO = "jvm.pause.info-threshold";
-  public static String JVM_PAUSE_WARN = "jvm.pause.warn-threshold";
-  public static String JVM_EXTRA_SLEEP = "jvm.pause.extraSleepTime";
+  public static final String JVM_PAUSE_INFO = "jvm.pause.info-threshold";
+  public static final String JVM_PAUSE_WARN = "jvm.pause.warn-threshold";
+  public static final String JVM_EXTRA_SLEEP = "jvm.pause.extraSleepTime";
 
-  public static String OPEN_CONNECTIONS = "open_connections";
-  public static String OPEN_OPERATIONS = "open_operations";
+  public static final String OPEN_CONNECTIONS = "open_connections";
+  public static final String OPEN_OPERATIONS = "open_operations";
 
-  public static String JDO_ACTIVE_TRANSACTIONS = "active_jdo_transactions";
-  public static String JDO_ROLLBACK_TRANSACTIONS = "rollbacked_jdo_transactions";
-  public static String JDO_COMMIT_TRANSACTIONS = "committed_jdo_transactions";
-  public static String JDO_OPEN_TRANSACTIONS = "opened_jdo_transactions";
+  public static final String JDO_ACTIVE_TRANSACTIONS = "active_jdo_transactions";
+  public static final String JDO_ROLLBACK_TRANSACTIONS = "rollbacked_jdo_transactions";
+  public static final String JDO_COMMIT_TRANSACTIONS = "committed_jdo_transactions";
+  public static final String JDO_OPEN_TRANSACTIONS = "opened_jdo_transactions";
 
-  public static String METASTORE_HIVE_LOCKS = "metastore_hive_locks";
-  public static String ZOOKEEPER_HIVE_SHAREDLOCKS = "zookeeper_hive_sharedlocks";
-  public static String ZOOKEEPER_HIVE_EXCLUSIVELOCKS = "zookeeper_hive_exclusivelocks";
-  public static String ZOOKEEPER_HIVE_SEMISHAREDLOCKS = "zookeeper_hive_semisharedlocks";
+  public static final String METASTORE_HIVE_LOCKS = "metastore_hive_locks";
+  public static final String ZOOKEEPER_HIVE_SHAREDLOCKS = "zookeeper_hive_sharedlocks";
+  public static final String ZOOKEEPER_HIVE_EXCLUSIVELOCKS = "zookeeper_hive_exclusivelocks";
+  public static final String ZOOKEEPER_HIVE_SEMISHAREDLOCKS = "zookeeper_hive_semisharedlocks";
 }


[35/55] [abbrv] hive git commit: HIVE-12261 - adding more comments

Posted by xu...@apache.org.
HIVE-12261 - adding more comments


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2a0ea58b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2a0ea58b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2a0ea58b

Branch: refs/heads/spark
Commit: 2a0ea58b18b864b556b37870b0189d13537df5ce
Parents: 1f9556d
Author: Thejas Nair <th...@hortonworks.com>
Authored: Sun Oct 25 23:51:16 2015 -0700
Committer: Thejas Nair <th...@hortonworks.com>
Committed: Sun Oct 25 23:51:16 2015 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java   | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
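The new wording documents the compatibility rule: a db schema version greater than or equal to the hive software version is treated as compatible, so a schema that is temporarily ahead during a rolling upgrade or downgrade does not cause a non-zero exit. A rough, hypothetical sketch of that rule, assuming plain dot-separated numeric versions (the real check in MetaStoreSchemaInfo may handle more formats, e.g. "-SNAPSHOT" suffixes):

  // Hypothetical sketch of the rule described in the comment below; not the actual Hive code.
  public class SchemaVersionCompatDemo {
    static boolean isCompatible(String hiveVersion, String dbVersion) {
      String[] hive = hiveVersion.split("\\.");
      String[] db = dbVersion.split("\\.");
      int n = Math.min(hive.length, db.length);
      for (int i = 0; i < n; i++) {
        int h = Integer.parseInt(hive[i]);
        int d = Integer.parseInt(db[i]);
        if (d > h) return true;   // schema ahead of the software: rolling upgrade/downgrade, still OK
        if (d < h) return false;  // schema behind the software: upgrade required
      }
      return true;                // equal versions are compatible
    }

    public static void main(String[] args) {
      System.out.println(isCompatible("1.2.0", "1.2.0")); // true
      System.out.println(isCompatible("1.2.0", "2.0.0")); // true  (schema ahead)
      System.out.println(isCompatible("2.0.0", "1.2.0")); // false (schema behind)
    }
  }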


http://git-wip-us.apache.org/repos/asf/hive/blob/2a0ea58b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
index 98798e8..7c7f7ce 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
@@ -167,7 +167,10 @@ public class MetaStoreSchemaInfo {
   /**
    * A dbVersion is compatible with hive version if it is greater or equal to
    * the hive version. This is result of the db schema upgrade design principles
-   * followed in hive project.
+   * followed in hive project. The state where db schema version is ahead of 
+   * hive software version is often seen when a 'rolling upgrade' or 
+   * 'rolling downgrade' is happening. This is a state where hive is functional 
+   * and returning non zero status for it is misleading.
    *
    * @param hiveVersion
    *          version of hive software


[49/55] [abbrv] hive git commit: HIVE-12061 : add file type support to file metadata by expr call (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
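This part of the patch adds a FileMetadataExprType enum (currently only ORC_SARG = 1) and an optional type field (id 4) on GetFileMetadataByExprRequest, in the generated C++ and Java code shown below. A minimal usage sketch of the generated Java enum, mapping between constants and Thrift wire values (the demo class name is made up; the enum methods used here appear in the generated file in this diff):

  import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;

  public class FileMetadataExprTypeDemo {
    public static void main(String[] args) {
      // ORC_SARG is defined with wire value 1 in the Thrift IDL.
      FileMetadataExprType t = FileMetadataExprType.ORC_SARG;
      System.out.println(t + " -> " + t.getValue());            // ORC_SARG -> 1
      System.out.println(FileMetadataExprType.findByValue(1));  // ORC_SARG
      System.out.println(FileMetadataExprType.findByValue(2));  // null: unknown wire values are not mapped
    }
  }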
http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
index cb0ee7a..5fd4a90 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
@@ -151,6 +151,14 @@ const char* _kResourceTypeNames[] = {
 };
 const std::map<int, const char*> _ResourceType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(3, _kResourceTypeValues, _kResourceTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
 
+int _kFileMetadataExprTypeValues[] = {
+  FileMetadataExprType::ORC_SARG
+};
+const char* _kFileMetadataExprTypeNames[] = {
+  "ORC_SARG"
+};
+const std::map<int, const char*> _FileMetadataExprType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(1, _kFileMetadataExprTypeValues, _kFileMetadataExprTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+
 
 Version::~Version() throw() {
 }
@@ -14262,6 +14270,11 @@ void GetFileMetadataByExprRequest::__set_doGetFooters(const bool val) {
 __isset.doGetFooters = true;
 }
 
+void GetFileMetadataByExprRequest::__set_type(const FileMetadataExprType::type val) {
+  this->type = val;
+__isset.type = true;
+}
+
 uint32_t GetFileMetadataByExprRequest::read(::apache::thrift::protocol::TProtocol* iprot) {
 
   apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
@@ -14321,6 +14334,16 @@ uint32_t GetFileMetadataByExprRequest::read(::apache::thrift::protocol::TProtoco
           xfer += iprot->skip(ftype);
         }
         break;
+      case 4:
+        if (ftype == ::apache::thrift::protocol::T_I32) {
+          int32_t ecast626;
+          xfer += iprot->readI32(ecast626);
+          this->type = (FileMetadataExprType::type)ecast626;
+          this->__isset.type = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
       default:
         xfer += iprot->skip(ftype);
         break;
@@ -14345,10 +14368,10 @@ uint32_t GetFileMetadataByExprRequest::write(::apache::thrift::protocol::TProtoc
   xfer += oprot->writeFieldBegin("fileIds", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_I64, static_cast<uint32_t>(this->fileIds.size()));
-    std::vector<int64_t> ::const_iterator _iter626;
-    for (_iter626 = this->fileIds.begin(); _iter626 != this->fileIds.end(); ++_iter626)
+    std::vector<int64_t> ::const_iterator _iter627;
+    for (_iter627 = this->fileIds.begin(); _iter627 != this->fileIds.end(); ++_iter627)
     {
-      xfer += oprot->writeI64((*_iter626));
+      xfer += oprot->writeI64((*_iter627));
     }
     xfer += oprot->writeListEnd();
   }
@@ -14363,6 +14386,11 @@ uint32_t GetFileMetadataByExprRequest::write(::apache::thrift::protocol::TProtoc
     xfer += oprot->writeBool(this->doGetFooters);
     xfer += oprot->writeFieldEnd();
   }
+  if (this->__isset.type) {
+    xfer += oprot->writeFieldBegin("type", ::apache::thrift::protocol::T_I32, 4);
+    xfer += oprot->writeI32((int32_t)this->type);
+    xfer += oprot->writeFieldEnd();
+  }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
   return xfer;
@@ -14373,20 +14401,23 @@ void swap(GetFileMetadataByExprRequest &a, GetFileMetadataByExprRequest &b) {
   swap(a.fileIds, b.fileIds);
   swap(a.expr, b.expr);
   swap(a.doGetFooters, b.doGetFooters);
+  swap(a.type, b.type);
   swap(a.__isset, b.__isset);
 }
 
-GetFileMetadataByExprRequest::GetFileMetadataByExprRequest(const GetFileMetadataByExprRequest& other627) {
-  fileIds = other627.fileIds;
-  expr = other627.expr;
-  doGetFooters = other627.doGetFooters;
-  __isset = other627.__isset;
-}
-GetFileMetadataByExprRequest& GetFileMetadataByExprRequest::operator=(const GetFileMetadataByExprRequest& other628) {
+GetFileMetadataByExprRequest::GetFileMetadataByExprRequest(const GetFileMetadataByExprRequest& other628) {
   fileIds = other628.fileIds;
   expr = other628.expr;
   doGetFooters = other628.doGetFooters;
+  type = other628.type;
   __isset = other628.__isset;
+}
+GetFileMetadataByExprRequest& GetFileMetadataByExprRequest::operator=(const GetFileMetadataByExprRequest& other629) {
+  fileIds = other629.fileIds;
+  expr = other629.expr;
+  doGetFooters = other629.doGetFooters;
+  type = other629.type;
+  __isset = other629.__isset;
   return *this;
 }
 void GetFileMetadataByExprRequest::printTo(std::ostream& out) const {
@@ -14395,6 +14426,7 @@ void GetFileMetadataByExprRequest::printTo(std::ostream& out) const {
   out << "fileIds=" << to_string(fileIds);
   out << ", " << "expr=" << to_string(expr);
   out << ", " << "doGetFooters="; (__isset.doGetFooters ? (out << to_string(doGetFooters)) : (out << "<null>"));
+  out << ", " << "type="; (__isset.type ? (out << to_string(type)) : (out << "<null>"));
   out << ")";
 }
 
@@ -14438,17 +14470,17 @@ uint32_t GetFileMetadataResult::read(::apache::thrift::protocol::TProtocol* ipro
         if (ftype == ::apache::thrift::protocol::T_MAP) {
           {
             this->metadata.clear();
-            uint32_t _size629;
-            ::apache::thrift::protocol::TType _ktype630;
-            ::apache::thrift::protocol::TType _vtype631;
-            xfer += iprot->readMapBegin(_ktype630, _vtype631, _size629);
-            uint32_t _i633;
-            for (_i633 = 0; _i633 < _size629; ++_i633)
+            uint32_t _size630;
+            ::apache::thrift::protocol::TType _ktype631;
+            ::apache::thrift::protocol::TType _vtype632;
+            xfer += iprot->readMapBegin(_ktype631, _vtype632, _size630);
+            uint32_t _i634;
+            for (_i634 = 0; _i634 < _size630; ++_i634)
             {
-              int64_t _key634;
-              xfer += iprot->readI64(_key634);
-              std::string& _val635 = this->metadata[_key634];
-              xfer += iprot->readBinary(_val635);
+              int64_t _key635;
+              xfer += iprot->readI64(_key635);
+              std::string& _val636 = this->metadata[_key635];
+              xfer += iprot->readBinary(_val636);
             }
             xfer += iprot->readMapEnd();
           }
@@ -14489,11 +14521,11 @@ uint32_t GetFileMetadataResult::write(::apache::thrift::protocol::TProtocol* opr
   xfer += oprot->writeFieldBegin("metadata", ::apache::thrift::protocol::T_MAP, 1);
   {
     xfer += oprot->writeMapBegin(::apache::thrift::protocol::T_I64, ::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->metadata.size()));
-    std::map<int64_t, std::string> ::const_iterator _iter636;
-    for (_iter636 = this->metadata.begin(); _iter636 != this->metadata.end(); ++_iter636)
+    std::map<int64_t, std::string> ::const_iterator _iter637;
+    for (_iter637 = this->metadata.begin(); _iter637 != this->metadata.end(); ++_iter637)
     {
-      xfer += oprot->writeI64(_iter636->first);
-      xfer += oprot->writeBinary(_iter636->second);
+      xfer += oprot->writeI64(_iter637->first);
+      xfer += oprot->writeBinary(_iter637->second);
     }
     xfer += oprot->writeMapEnd();
   }
@@ -14514,13 +14546,13 @@ void swap(GetFileMetadataResult &a, GetFileMetadataResult &b) {
   swap(a.isSupported, b.isSupported);
 }
 
-GetFileMetadataResult::GetFileMetadataResult(const GetFileMetadataResult& other637) {
-  metadata = other637.metadata;
-  isSupported = other637.isSupported;
-}
-GetFileMetadataResult& GetFileMetadataResult::operator=(const GetFileMetadataResult& other638) {
+GetFileMetadataResult::GetFileMetadataResult(const GetFileMetadataResult& other638) {
   metadata = other638.metadata;
   isSupported = other638.isSupported;
+}
+GetFileMetadataResult& GetFileMetadataResult::operator=(const GetFileMetadataResult& other639) {
+  metadata = other639.metadata;
+  isSupported = other639.isSupported;
   return *this;
 }
 void GetFileMetadataResult::printTo(std::ostream& out) const {
@@ -14566,14 +14598,14 @@ uint32_t GetFileMetadataRequest::read(::apache::thrift::protocol::TProtocol* ipr
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->fileIds.clear();
-            uint32_t _size639;
-            ::apache::thrift::protocol::TType _etype642;
-            xfer += iprot->readListBegin(_etype642, _size639);
-            this->fileIds.resize(_size639);
-            uint32_t _i643;
-            for (_i643 = 0; _i643 < _size639; ++_i643)
+            uint32_t _size640;
+            ::apache::thrift::protocol::TType _etype643;
+            xfer += iprot->readListBegin(_etype643, _size640);
+            this->fileIds.resize(_size640);
+            uint32_t _i644;
+            for (_i644 = 0; _i644 < _size640; ++_i644)
             {
-              xfer += iprot->readI64(this->fileIds[_i643]);
+              xfer += iprot->readI64(this->fileIds[_i644]);
             }
             xfer += iprot->readListEnd();
           }
@@ -14604,10 +14636,10 @@ uint32_t GetFileMetadataRequest::write(::apache::thrift::protocol::TProtocol* op
   xfer += oprot->writeFieldBegin("fileIds", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_I64, static_cast<uint32_t>(this->fileIds.size()));
-    std::vector<int64_t> ::const_iterator _iter644;
-    for (_iter644 = this->fileIds.begin(); _iter644 != this->fileIds.end(); ++_iter644)
+    std::vector<int64_t> ::const_iterator _iter645;
+    for (_iter645 = this->fileIds.begin(); _iter645 != this->fileIds.end(); ++_iter645)
     {
-      xfer += oprot->writeI64((*_iter644));
+      xfer += oprot->writeI64((*_iter645));
     }
     xfer += oprot->writeListEnd();
   }
@@ -14623,11 +14655,11 @@ void swap(GetFileMetadataRequest &a, GetFileMetadataRequest &b) {
   swap(a.fileIds, b.fileIds);
 }
 
-GetFileMetadataRequest::GetFileMetadataRequest(const GetFileMetadataRequest& other645) {
-  fileIds = other645.fileIds;
-}
-GetFileMetadataRequest& GetFileMetadataRequest::operator=(const GetFileMetadataRequest& other646) {
+GetFileMetadataRequest::GetFileMetadataRequest(const GetFileMetadataRequest& other646) {
   fileIds = other646.fileIds;
+}
+GetFileMetadataRequest& GetFileMetadataRequest::operator=(const GetFileMetadataRequest& other647) {
+  fileIds = other647.fileIds;
   return *this;
 }
 void GetFileMetadataRequest::printTo(std::ostream& out) const {
@@ -14686,11 +14718,11 @@ void swap(PutFileMetadataResult &a, PutFileMetadataResult &b) {
   (void) b;
 }
 
-PutFileMetadataResult::PutFileMetadataResult(const PutFileMetadataResult& other647) {
-  (void) other647;
-}
-PutFileMetadataResult& PutFileMetadataResult::operator=(const PutFileMetadataResult& other648) {
+PutFileMetadataResult::PutFileMetadataResult(const PutFileMetadataResult& other648) {
   (void) other648;
+}
+PutFileMetadataResult& PutFileMetadataResult::operator=(const PutFileMetadataResult& other649) {
+  (void) other649;
   return *this;
 }
 void PutFileMetadataResult::printTo(std::ostream& out) const {
@@ -14739,14 +14771,14 @@ uint32_t PutFileMetadataRequest::read(::apache::thrift::protocol::TProtocol* ipr
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->fileIds.clear();
-            uint32_t _size649;
-            ::apache::thrift::protocol::TType _etype652;
-            xfer += iprot->readListBegin(_etype652, _size649);
-            this->fileIds.resize(_size649);
-            uint32_t _i653;
-            for (_i653 = 0; _i653 < _size649; ++_i653)
+            uint32_t _size650;
+            ::apache::thrift::protocol::TType _etype653;
+            xfer += iprot->readListBegin(_etype653, _size650);
+            this->fileIds.resize(_size650);
+            uint32_t _i654;
+            for (_i654 = 0; _i654 < _size650; ++_i654)
             {
-              xfer += iprot->readI64(this->fileIds[_i653]);
+              xfer += iprot->readI64(this->fileIds[_i654]);
             }
             xfer += iprot->readListEnd();
           }
@@ -14759,14 +14791,14 @@ uint32_t PutFileMetadataRequest::read(::apache::thrift::protocol::TProtocol* ipr
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->metadata.clear();
-            uint32_t _size654;
-            ::apache::thrift::protocol::TType _etype657;
-            xfer += iprot->readListBegin(_etype657, _size654);
-            this->metadata.resize(_size654);
-            uint32_t _i658;
-            for (_i658 = 0; _i658 < _size654; ++_i658)
+            uint32_t _size655;
+            ::apache::thrift::protocol::TType _etype658;
+            xfer += iprot->readListBegin(_etype658, _size655);
+            this->metadata.resize(_size655);
+            uint32_t _i659;
+            for (_i659 = 0; _i659 < _size655; ++_i659)
             {
-              xfer += iprot->readBinary(this->metadata[_i658]);
+              xfer += iprot->readBinary(this->metadata[_i659]);
             }
             xfer += iprot->readListEnd();
           }
@@ -14799,10 +14831,10 @@ uint32_t PutFileMetadataRequest::write(::apache::thrift::protocol::TProtocol* op
   xfer += oprot->writeFieldBegin("fileIds", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_I64, static_cast<uint32_t>(this->fileIds.size()));
-    std::vector<int64_t> ::const_iterator _iter659;
-    for (_iter659 = this->fileIds.begin(); _iter659 != this->fileIds.end(); ++_iter659)
+    std::vector<int64_t> ::const_iterator _iter660;
+    for (_iter660 = this->fileIds.begin(); _iter660 != this->fileIds.end(); ++_iter660)
     {
-      xfer += oprot->writeI64((*_iter659));
+      xfer += oprot->writeI64((*_iter660));
     }
     xfer += oprot->writeListEnd();
   }
@@ -14811,10 +14843,10 @@ uint32_t PutFileMetadataRequest::write(::apache::thrift::protocol::TProtocol* op
   xfer += oprot->writeFieldBegin("metadata", ::apache::thrift::protocol::T_LIST, 2);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->metadata.size()));
-    std::vector<std::string> ::const_iterator _iter660;
-    for (_iter660 = this->metadata.begin(); _iter660 != this->metadata.end(); ++_iter660)
+    std::vector<std::string> ::const_iterator _iter661;
+    for (_iter661 = this->metadata.begin(); _iter661 != this->metadata.end(); ++_iter661)
     {
-      xfer += oprot->writeBinary((*_iter660));
+      xfer += oprot->writeBinary((*_iter661));
     }
     xfer += oprot->writeListEnd();
   }
@@ -14831,13 +14863,13 @@ void swap(PutFileMetadataRequest &a, PutFileMetadataRequest &b) {
   swap(a.metadata, b.metadata);
 }
 
-PutFileMetadataRequest::PutFileMetadataRequest(const PutFileMetadataRequest& other661) {
-  fileIds = other661.fileIds;
-  metadata = other661.metadata;
-}
-PutFileMetadataRequest& PutFileMetadataRequest::operator=(const PutFileMetadataRequest& other662) {
+PutFileMetadataRequest::PutFileMetadataRequest(const PutFileMetadataRequest& other662) {
   fileIds = other662.fileIds;
   metadata = other662.metadata;
+}
+PutFileMetadataRequest& PutFileMetadataRequest::operator=(const PutFileMetadataRequest& other663) {
+  fileIds = other663.fileIds;
+  metadata = other663.metadata;
   return *this;
 }
 void PutFileMetadataRequest::printTo(std::ostream& out) const {
@@ -14897,11 +14929,11 @@ void swap(ClearFileMetadataResult &a, ClearFileMetadataResult &b) {
   (void) b;
 }
 
-ClearFileMetadataResult::ClearFileMetadataResult(const ClearFileMetadataResult& other663) {
-  (void) other663;
-}
-ClearFileMetadataResult& ClearFileMetadataResult::operator=(const ClearFileMetadataResult& other664) {
+ClearFileMetadataResult::ClearFileMetadataResult(const ClearFileMetadataResult& other664) {
   (void) other664;
+}
+ClearFileMetadataResult& ClearFileMetadataResult::operator=(const ClearFileMetadataResult& other665) {
+  (void) other665;
   return *this;
 }
 void ClearFileMetadataResult::printTo(std::ostream& out) const {
@@ -14945,14 +14977,14 @@ uint32_t ClearFileMetadataRequest::read(::apache::thrift::protocol::TProtocol* i
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->fileIds.clear();
-            uint32_t _size665;
-            ::apache::thrift::protocol::TType _etype668;
-            xfer += iprot->readListBegin(_etype668, _size665);
-            this->fileIds.resize(_size665);
-            uint32_t _i669;
-            for (_i669 = 0; _i669 < _size665; ++_i669)
+            uint32_t _size666;
+            ::apache::thrift::protocol::TType _etype669;
+            xfer += iprot->readListBegin(_etype669, _size666);
+            this->fileIds.resize(_size666);
+            uint32_t _i670;
+            for (_i670 = 0; _i670 < _size666; ++_i670)
             {
-              xfer += iprot->readI64(this->fileIds[_i669]);
+              xfer += iprot->readI64(this->fileIds[_i670]);
             }
             xfer += iprot->readListEnd();
           }
@@ -14983,10 +15015,10 @@ uint32_t ClearFileMetadataRequest::write(::apache::thrift::protocol::TProtocol*
   xfer += oprot->writeFieldBegin("fileIds", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_I64, static_cast<uint32_t>(this->fileIds.size()));
-    std::vector<int64_t> ::const_iterator _iter670;
-    for (_iter670 = this->fileIds.begin(); _iter670 != this->fileIds.end(); ++_iter670)
+    std::vector<int64_t> ::const_iterator _iter671;
+    for (_iter671 = this->fileIds.begin(); _iter671 != this->fileIds.end(); ++_iter671)
     {
-      xfer += oprot->writeI64((*_iter670));
+      xfer += oprot->writeI64((*_iter671));
     }
     xfer += oprot->writeListEnd();
   }
@@ -15002,11 +15034,11 @@ void swap(ClearFileMetadataRequest &a, ClearFileMetadataRequest &b) {
   swap(a.fileIds, b.fileIds);
 }
 
-ClearFileMetadataRequest::ClearFileMetadataRequest(const ClearFileMetadataRequest& other671) {
-  fileIds = other671.fileIds;
-}
-ClearFileMetadataRequest& ClearFileMetadataRequest::operator=(const ClearFileMetadataRequest& other672) {
+ClearFileMetadataRequest::ClearFileMetadataRequest(const ClearFileMetadataRequest& other672) {
   fileIds = other672.fileIds;
+}
+ClearFileMetadataRequest& ClearFileMetadataRequest::operator=(const ClearFileMetadataRequest& other673) {
+  fileIds = other673.fileIds;
   return *this;
 }
 void ClearFileMetadataRequest::printTo(std::ostream& out) const {
@@ -15051,14 +15083,14 @@ uint32_t GetAllFunctionsResponse::read(::apache::thrift::protocol::TProtocol* ip
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->functions.clear();
-            uint32_t _size673;
-            ::apache::thrift::protocol::TType _etype676;
-            xfer += iprot->readListBegin(_etype676, _size673);
-            this->functions.resize(_size673);
-            uint32_t _i677;
-            for (_i677 = 0; _i677 < _size673; ++_i677)
+            uint32_t _size674;
+            ::apache::thrift::protocol::TType _etype677;
+            xfer += iprot->readListBegin(_etype677, _size674);
+            this->functions.resize(_size674);
+            uint32_t _i678;
+            for (_i678 = 0; _i678 < _size674; ++_i678)
             {
-              xfer += this->functions[_i677].read(iprot);
+              xfer += this->functions[_i678].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -15088,10 +15120,10 @@ uint32_t GetAllFunctionsResponse::write(::apache::thrift::protocol::TProtocol* o
     xfer += oprot->writeFieldBegin("functions", ::apache::thrift::protocol::T_LIST, 1);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->functions.size()));
-      std::vector<Function> ::const_iterator _iter678;
-      for (_iter678 = this->functions.begin(); _iter678 != this->functions.end(); ++_iter678)
+      std::vector<Function> ::const_iterator _iter679;
+      for (_iter679 = this->functions.begin(); _iter679 != this->functions.end(); ++_iter679)
       {
-        xfer += (*_iter678).write(oprot);
+        xfer += (*_iter679).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -15108,13 +15140,13 @@ void swap(GetAllFunctionsResponse &a, GetAllFunctionsResponse &b) {
   swap(a.__isset, b.__isset);
 }
 
-GetAllFunctionsResponse::GetAllFunctionsResponse(const GetAllFunctionsResponse& other679) {
-  functions = other679.functions;
-  __isset = other679.__isset;
-}
-GetAllFunctionsResponse& GetAllFunctionsResponse::operator=(const GetAllFunctionsResponse& other680) {
+GetAllFunctionsResponse::GetAllFunctionsResponse(const GetAllFunctionsResponse& other680) {
   functions = other680.functions;
   __isset = other680.__isset;
+}
+GetAllFunctionsResponse& GetAllFunctionsResponse::operator=(const GetAllFunctionsResponse& other681) {
+  functions = other681.functions;
+  __isset = other681.__isset;
   return *this;
 }
 void GetAllFunctionsResponse::printTo(std::ostream& out) const {
@@ -15194,13 +15226,13 @@ void swap(MetaException &a, MetaException &b) {
   swap(a.__isset, b.__isset);
 }
 
-MetaException::MetaException(const MetaException& other681) : TException() {
-  message = other681.message;
-  __isset = other681.__isset;
-}
-MetaException& MetaException::operator=(const MetaException& other682) {
+MetaException::MetaException(const MetaException& other682) : TException() {
   message = other682.message;
   __isset = other682.__isset;
+}
+MetaException& MetaException::operator=(const MetaException& other683) {
+  message = other683.message;
+  __isset = other683.__isset;
   return *this;
 }
 void MetaException::printTo(std::ostream& out) const {
@@ -15291,13 +15323,13 @@ void swap(UnknownTableException &a, UnknownTableException &b) {
   swap(a.__isset, b.__isset);
 }
 
-UnknownTableException::UnknownTableException(const UnknownTableException& other683) : TException() {
-  message = other683.message;
-  __isset = other683.__isset;
-}
-UnknownTableException& UnknownTableException::operator=(const UnknownTableException& other684) {
+UnknownTableException::UnknownTableException(const UnknownTableException& other684) : TException() {
   message = other684.message;
   __isset = other684.__isset;
+}
+UnknownTableException& UnknownTableException::operator=(const UnknownTableException& other685) {
+  message = other685.message;
+  __isset = other685.__isset;
   return *this;
 }
 void UnknownTableException::printTo(std::ostream& out) const {
@@ -15388,13 +15420,13 @@ void swap(UnknownDBException &a, UnknownDBException &b) {
   swap(a.__isset, b.__isset);
 }
 
-UnknownDBException::UnknownDBException(const UnknownDBException& other685) : TException() {
-  message = other685.message;
-  __isset = other685.__isset;
-}
-UnknownDBException& UnknownDBException::operator=(const UnknownDBException& other686) {
+UnknownDBException::UnknownDBException(const UnknownDBException& other686) : TException() {
   message = other686.message;
   __isset = other686.__isset;
+}
+UnknownDBException& UnknownDBException::operator=(const UnknownDBException& other687) {
+  message = other687.message;
+  __isset = other687.__isset;
   return *this;
 }
 void UnknownDBException::printTo(std::ostream& out) const {
@@ -15485,13 +15517,13 @@ void swap(AlreadyExistsException &a, AlreadyExistsException &b) {
   swap(a.__isset, b.__isset);
 }
 
-AlreadyExistsException::AlreadyExistsException(const AlreadyExistsException& other687) : TException() {
-  message = other687.message;
-  __isset = other687.__isset;
-}
-AlreadyExistsException& AlreadyExistsException::operator=(const AlreadyExistsException& other688) {
+AlreadyExistsException::AlreadyExistsException(const AlreadyExistsException& other688) : TException() {
   message = other688.message;
   __isset = other688.__isset;
+}
+AlreadyExistsException& AlreadyExistsException::operator=(const AlreadyExistsException& other689) {
+  message = other689.message;
+  __isset = other689.__isset;
   return *this;
 }
 void AlreadyExistsException::printTo(std::ostream& out) const {
@@ -15582,13 +15614,13 @@ void swap(InvalidPartitionException &a, InvalidPartitionException &b) {
   swap(a.__isset, b.__isset);
 }
 
-InvalidPartitionException::InvalidPartitionException(const InvalidPartitionException& other689) : TException() {
-  message = other689.message;
-  __isset = other689.__isset;
-}
-InvalidPartitionException& InvalidPartitionException::operator=(const InvalidPartitionException& other690) {
+InvalidPartitionException::InvalidPartitionException(const InvalidPartitionException& other690) : TException() {
   message = other690.message;
   __isset = other690.__isset;
+}
+InvalidPartitionException& InvalidPartitionException::operator=(const InvalidPartitionException& other691) {
+  message = other691.message;
+  __isset = other691.__isset;
   return *this;
 }
 void InvalidPartitionException::printTo(std::ostream& out) const {
@@ -15679,13 +15711,13 @@ void swap(UnknownPartitionException &a, UnknownPartitionException &b) {
   swap(a.__isset, b.__isset);
 }
 
-UnknownPartitionException::UnknownPartitionException(const UnknownPartitionException& other691) : TException() {
-  message = other691.message;
-  __isset = other691.__isset;
-}
-UnknownPartitionException& UnknownPartitionException::operator=(const UnknownPartitionException& other692) {
+UnknownPartitionException::UnknownPartitionException(const UnknownPartitionException& other692) : TException() {
   message = other692.message;
   __isset = other692.__isset;
+}
+UnknownPartitionException& UnknownPartitionException::operator=(const UnknownPartitionException& other693) {
+  message = other693.message;
+  __isset = other693.__isset;
   return *this;
 }
 void UnknownPartitionException::printTo(std::ostream& out) const {
@@ -15776,13 +15808,13 @@ void swap(InvalidObjectException &a, InvalidObjectException &b) {
   swap(a.__isset, b.__isset);
 }
 
-InvalidObjectException::InvalidObjectException(const InvalidObjectException& other693) : TException() {
-  message = other693.message;
-  __isset = other693.__isset;
-}
-InvalidObjectException& InvalidObjectException::operator=(const InvalidObjectException& other694) {
+InvalidObjectException::InvalidObjectException(const InvalidObjectException& other694) : TException() {
   message = other694.message;
   __isset = other694.__isset;
+}
+InvalidObjectException& InvalidObjectException::operator=(const InvalidObjectException& other695) {
+  message = other695.message;
+  __isset = other695.__isset;
   return *this;
 }
 void InvalidObjectException::printTo(std::ostream& out) const {
@@ -15873,13 +15905,13 @@ void swap(NoSuchObjectException &a, NoSuchObjectException &b) {
   swap(a.__isset, b.__isset);
 }
 
-NoSuchObjectException::NoSuchObjectException(const NoSuchObjectException& other695) : TException() {
-  message = other695.message;
-  __isset = other695.__isset;
-}
-NoSuchObjectException& NoSuchObjectException::operator=(const NoSuchObjectException& other696) {
+NoSuchObjectException::NoSuchObjectException(const NoSuchObjectException& other696) : TException() {
   message = other696.message;
   __isset = other696.__isset;
+}
+NoSuchObjectException& NoSuchObjectException::operator=(const NoSuchObjectException& other697) {
+  message = other697.message;
+  __isset = other697.__isset;
   return *this;
 }
 void NoSuchObjectException::printTo(std::ostream& out) const {
@@ -15970,13 +16002,13 @@ void swap(IndexAlreadyExistsException &a, IndexAlreadyExistsException &b) {
   swap(a.__isset, b.__isset);
 }
 
-IndexAlreadyExistsException::IndexAlreadyExistsException(const IndexAlreadyExistsException& other697) : TException() {
-  message = other697.message;
-  __isset = other697.__isset;
-}
-IndexAlreadyExistsException& IndexAlreadyExistsException::operator=(const IndexAlreadyExistsException& other698) {
+IndexAlreadyExistsException::IndexAlreadyExistsException(const IndexAlreadyExistsException& other698) : TException() {
   message = other698.message;
   __isset = other698.__isset;
+}
+IndexAlreadyExistsException& IndexAlreadyExistsException::operator=(const IndexAlreadyExistsException& other699) {
+  message = other699.message;
+  __isset = other699.__isset;
   return *this;
 }
 void IndexAlreadyExistsException::printTo(std::ostream& out) const {
@@ -16067,13 +16099,13 @@ void swap(InvalidOperationException &a, InvalidOperationException &b) {
   swap(a.__isset, b.__isset);
 }
 
-InvalidOperationException::InvalidOperationException(const InvalidOperationException& other699) : TException() {
-  message = other699.message;
-  __isset = other699.__isset;
-}
-InvalidOperationException& InvalidOperationException::operator=(const InvalidOperationException& other700) {
+InvalidOperationException::InvalidOperationException(const InvalidOperationException& other700) : TException() {
   message = other700.message;
   __isset = other700.__isset;
+}
+InvalidOperationException& InvalidOperationException::operator=(const InvalidOperationException& other701) {
+  message = other701.message;
+  __isset = other701.__isset;
   return *this;
 }
 void InvalidOperationException::printTo(std::ostream& out) const {
@@ -16164,13 +16196,13 @@ void swap(ConfigValSecurityException &a, ConfigValSecurityException &b) {
   swap(a.__isset, b.__isset);
 }
 
-ConfigValSecurityException::ConfigValSecurityException(const ConfigValSecurityException& other701) : TException() {
-  message = other701.message;
-  __isset = other701.__isset;
-}
-ConfigValSecurityException& ConfigValSecurityException::operator=(const ConfigValSecurityException& other702) {
+ConfigValSecurityException::ConfigValSecurityException(const ConfigValSecurityException& other702) : TException() {
   message = other702.message;
   __isset = other702.__isset;
+}
+ConfigValSecurityException& ConfigValSecurityException::operator=(const ConfigValSecurityException& other703) {
+  message = other703.message;
+  __isset = other703.__isset;
   return *this;
 }
 void ConfigValSecurityException::printTo(std::ostream& out) const {
@@ -16261,13 +16293,13 @@ void swap(InvalidInputException &a, InvalidInputException &b) {
   swap(a.__isset, b.__isset);
 }
 
-InvalidInputException::InvalidInputException(const InvalidInputException& other703) : TException() {
-  message = other703.message;
-  __isset = other703.__isset;
-}
-InvalidInputException& InvalidInputException::operator=(const InvalidInputException& other704) {
+InvalidInputException::InvalidInputException(const InvalidInputException& other704) : TException() {
   message = other704.message;
   __isset = other704.__isset;
+}
+InvalidInputException& InvalidInputException::operator=(const InvalidInputException& other705) {
+  message = other705.message;
+  __isset = other705.__isset;
   return *this;
 }
 void InvalidInputException::printTo(std::ostream& out) const {
@@ -16358,13 +16390,13 @@ void swap(NoSuchTxnException &a, NoSuchTxnException &b) {
   swap(a.__isset, b.__isset);
 }
 
-NoSuchTxnException::NoSuchTxnException(const NoSuchTxnException& other705) : TException() {
-  message = other705.message;
-  __isset = other705.__isset;
-}
-NoSuchTxnException& NoSuchTxnException::operator=(const NoSuchTxnException& other706) {
+NoSuchTxnException::NoSuchTxnException(const NoSuchTxnException& other706) : TException() {
   message = other706.message;
   __isset = other706.__isset;
+}
+NoSuchTxnException& NoSuchTxnException::operator=(const NoSuchTxnException& other707) {
+  message = other707.message;
+  __isset = other707.__isset;
   return *this;
 }
 void NoSuchTxnException::printTo(std::ostream& out) const {
@@ -16455,13 +16487,13 @@ void swap(TxnAbortedException &a, TxnAbortedException &b) {
   swap(a.__isset, b.__isset);
 }
 
-TxnAbortedException::TxnAbortedException(const TxnAbortedException& other707) : TException() {
-  message = other707.message;
-  __isset = other707.__isset;
-}
-TxnAbortedException& TxnAbortedException::operator=(const TxnAbortedException& other708) {
+TxnAbortedException::TxnAbortedException(const TxnAbortedException& other708) : TException() {
   message = other708.message;
   __isset = other708.__isset;
+}
+TxnAbortedException& TxnAbortedException::operator=(const TxnAbortedException& other709) {
+  message = other709.message;
+  __isset = other709.__isset;
   return *this;
 }
 void TxnAbortedException::printTo(std::ostream& out) const {
@@ -16552,13 +16584,13 @@ void swap(TxnOpenException &a, TxnOpenException &b) {
   swap(a.__isset, b.__isset);
 }
 
-TxnOpenException::TxnOpenException(const TxnOpenException& other709) : TException() {
-  message = other709.message;
-  __isset = other709.__isset;
-}
-TxnOpenException& TxnOpenException::operator=(const TxnOpenException& other710) {
+TxnOpenException::TxnOpenException(const TxnOpenException& other710) : TException() {
   message = other710.message;
   __isset = other710.__isset;
+}
+TxnOpenException& TxnOpenException::operator=(const TxnOpenException& other711) {
+  message = other711.message;
+  __isset = other711.__isset;
   return *this;
 }
 void TxnOpenException::printTo(std::ostream& out) const {
@@ -16649,13 +16681,13 @@ void swap(NoSuchLockException &a, NoSuchLockException &b) {
   swap(a.__isset, b.__isset);
 }
 
-NoSuchLockException::NoSuchLockException(const NoSuchLockException& other711) : TException() {
-  message = other711.message;
-  __isset = other711.__isset;
-}
-NoSuchLockException& NoSuchLockException::operator=(const NoSuchLockException& other712) {
+NoSuchLockException::NoSuchLockException(const NoSuchLockException& other712) : TException() {
   message = other712.message;
   __isset = other712.__isset;
+}
+NoSuchLockException& NoSuchLockException::operator=(const NoSuchLockException& other713) {
+  message = other713.message;
+  __isset = other713.__isset;
   return *this;
 }
 void NoSuchLockException::printTo(std::ostream& out) const {

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
index c20badd..53ab272 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
@@ -137,6 +137,14 @@ struct ResourceType {
 
 extern const std::map<int, const char*> _ResourceType_VALUES_TO_NAMES;
 
+struct FileMetadataExprType {
+  enum type {
+    ORC_SARG = 1
+  };
+};
+
+extern const std::map<int, const char*> _FileMetadataExprType_VALUES_TO_NAMES;
+
 class Version;
 
 class FieldSchema;
@@ -5797,8 +5805,9 @@ inline std::ostream& operator<<(std::ostream& out, const GetFileMetadataByExprRe
 }
 
 typedef struct _GetFileMetadataByExprRequest__isset {
-  _GetFileMetadataByExprRequest__isset() : doGetFooters(false) {}
+  _GetFileMetadataByExprRequest__isset() : doGetFooters(false), type(false) {}
   bool doGetFooters :1;
+  bool type :1;
 } _GetFileMetadataByExprRequest__isset;
 
 class GetFileMetadataByExprRequest {
@@ -5806,13 +5815,14 @@ class GetFileMetadataByExprRequest {
 
   GetFileMetadataByExprRequest(const GetFileMetadataByExprRequest&);
   GetFileMetadataByExprRequest& operator=(const GetFileMetadataByExprRequest&);
-  GetFileMetadataByExprRequest() : expr(), doGetFooters(0) {
+  GetFileMetadataByExprRequest() : expr(), doGetFooters(0), type((FileMetadataExprType::type)0) {
   }
 
   virtual ~GetFileMetadataByExprRequest() throw();
   std::vector<int64_t>  fileIds;
   std::string expr;
   bool doGetFooters;
+  FileMetadataExprType::type type;
 
   _GetFileMetadataByExprRequest__isset __isset;
 
@@ -5822,6 +5832,8 @@ class GetFileMetadataByExprRequest {
 
   void __set_doGetFooters(const bool val);
 
+  void __set_type(const FileMetadataExprType::type val);
+
   bool operator == (const GetFileMetadataByExprRequest & rhs) const
   {
     if (!(fileIds == rhs.fileIds))
@@ -5832,6 +5844,10 @@ class GetFileMetadataByExprRequest {
       return false;
     else if (__isset.doGetFooters && !(doGetFooters == rhs.doGetFooters))
       return false;
+    if (__isset.type != rhs.__isset.type)
+      return false;
+    else if (__isset.type && !(type == rhs.type))
+      return false;
     return true;
   }
   bool operator != (const GetFileMetadataByExprRequest &rhs) const {

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FileMetadataExprType.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FileMetadataExprType.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FileMetadataExprType.java
new file mode 100644
index 0000000..4e393e2
--- /dev/null
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FileMetadataExprType.java
@@ -0,0 +1,42 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.3)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.hadoop.hive.metastore.api;
+
+
+import java.util.Map;
+import java.util.HashMap;
+import org.apache.thrift.TEnum;
+
+public enum FileMetadataExprType implements org.apache.thrift.TEnum {
+  ORC_SARG(1);
+
+  private final int value;
+
+  private FileMetadataExprType(int value) {
+    this.value = value;
+  }
+
+  /**
+   * Get the integer value of this enum value, as defined in the Thrift IDL.
+   */
+  public int getValue() {
+    return value;
+  }
+
+  /**
+   * Find the enum type by its integer value, as defined in the Thrift IDL.
+   * @return null if the value is not found.
+   */
+  public static FileMetadataExprType findByValue(int value) { 
+    switch (value) {
+      case 1:
+        return ORC_SARG;
+      default:
+        return null;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
index b880093..0236b4a 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
@@ -41,6 +41,7 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
   private static final org.apache.thrift.protocol.TField FILE_IDS_FIELD_DESC = new org.apache.thrift.protocol.TField("fileIds", org.apache.thrift.protocol.TType.LIST, (short)1);
   private static final org.apache.thrift.protocol.TField EXPR_FIELD_DESC = new org.apache.thrift.protocol.TField("expr", org.apache.thrift.protocol.TType.STRING, (short)2);
   private static final org.apache.thrift.protocol.TField DO_GET_FOOTERS_FIELD_DESC = new org.apache.thrift.protocol.TField("doGetFooters", org.apache.thrift.protocol.TType.BOOL, (short)3);
+  private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("type", org.apache.thrift.protocol.TType.I32, (short)4);
 
   private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
   static {
@@ -51,12 +52,18 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
   private List<Long> fileIds; // required
   private ByteBuffer expr; // required
   private boolean doGetFooters; // optional
+  private FileMetadataExprType type; // optional
 
   /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
   public enum _Fields implements org.apache.thrift.TFieldIdEnum {
     FILE_IDS((short)1, "fileIds"),
     EXPR((short)2, "expr"),
-    DO_GET_FOOTERS((short)3, "doGetFooters");
+    DO_GET_FOOTERS((short)3, "doGetFooters"),
+    /**
+     * 
+     * @see FileMetadataExprType
+     */
+    TYPE((short)4, "type");
 
     private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 
@@ -77,6 +84,8 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
           return EXPR;
         case 3: // DO_GET_FOOTERS
           return DO_GET_FOOTERS;
+        case 4: // TYPE
+          return TYPE;
         default:
           return null;
       }
@@ -119,7 +128,7 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
   // isset id assignments
   private static final int __DOGETFOOTERS_ISSET_ID = 0;
   private byte __isset_bitfield = 0;
-  private static final _Fields optionals[] = {_Fields.DO_GET_FOOTERS};
+  private static final _Fields optionals[] = {_Fields.DO_GET_FOOTERS,_Fields.TYPE};
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -130,6 +139,8 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING        , true)));
     tmpMap.put(_Fields.DO_GET_FOOTERS, new org.apache.thrift.meta_data.FieldMetaData("doGetFooters", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
+    tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData("type", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, FileMetadataExprType.class)));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
     org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(GetFileMetadataByExprRequest.class, metaDataMap);
   }
@@ -159,6 +170,9 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
       this.expr = org.apache.thrift.TBaseHelper.copyBinary(other.expr);
     }
     this.doGetFooters = other.doGetFooters;
+    if (other.isSetType()) {
+      this.type = other.type;
+    }
   }
 
   public GetFileMetadataByExprRequest deepCopy() {
@@ -171,6 +185,7 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
     this.expr = null;
     setDoGetFootersIsSet(false);
     this.doGetFooters = false;
+    this.type = null;
   }
 
   public int getFileIdsSize() {
@@ -265,6 +280,37 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
     __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __DOGETFOOTERS_ISSET_ID, value);
   }
 
+  /**
+   * 
+   * @see FileMetadataExprType
+   */
+  public FileMetadataExprType getType() {
+    return this.type;
+  }
+
+  /**
+   * 
+   * @see FileMetadataExprType
+   */
+  public void setType(FileMetadataExprType type) {
+    this.type = type;
+  }
+
+  public void unsetType() {
+    this.type = null;
+  }
+
+  /** Returns true if field type is set (has been assigned a value) and false otherwise */
+  public boolean isSetType() {
+    return this.type != null;
+  }
+
+  public void setTypeIsSet(boolean value) {
+    if (!value) {
+      this.type = null;
+    }
+  }
+
   public void setFieldValue(_Fields field, Object value) {
     switch (field) {
     case FILE_IDS:
@@ -291,6 +337,14 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
       }
       break;
 
+    case TYPE:
+      if (value == null) {
+        unsetType();
+      } else {
+        setType((FileMetadataExprType)value);
+      }
+      break;
+
     }
   }
 
@@ -305,6 +359,9 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
     case DO_GET_FOOTERS:
       return isDoGetFooters();
 
+    case TYPE:
+      return getType();
+
     }
     throw new IllegalStateException();
   }
@@ -322,6 +379,8 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
       return isSetExpr();
     case DO_GET_FOOTERS:
       return isSetDoGetFooters();
+    case TYPE:
+      return isSetType();
     }
     throw new IllegalStateException();
   }
@@ -366,6 +425,15 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
         return false;
     }
 
+    boolean this_present_type = true && this.isSetType();
+    boolean that_present_type = true && that.isSetType();
+    if (this_present_type || that_present_type) {
+      if (!(this_present_type && that_present_type))
+        return false;
+      if (!this.type.equals(that.type))
+        return false;
+    }
+
     return true;
   }
 
@@ -388,6 +456,11 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
     if (present_doGetFooters)
       list.add(doGetFooters);
 
+    boolean present_type = true && (isSetType());
+    list.add(present_type);
+    if (present_type)
+      list.add(type.getValue());
+
     return list.hashCode();
   }
 
@@ -429,6 +502,16 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
         return lastComparison;
       }
     }
+    lastComparison = Boolean.valueOf(isSetType()).compareTo(other.isSetType());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetType()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type, other.type);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
     return 0;
   }
 
@@ -470,6 +553,16 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
       sb.append(this.doGetFooters);
       first = false;
     }
+    if (isSetType()) {
+      if (!first) sb.append(", ");
+      sb.append("type:");
+      if (this.type == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.type);
+      }
+      first = false;
+    }
     sb.append(")");
     return sb.toString();
   }
@@ -557,6 +650,14 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
+          case 4: // TYPE
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.type = org.apache.hadoop.hive.metastore.api.FileMetadataExprType.findByValue(iprot.readI32());
+              struct.setTypeIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
           default:
             org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
         }
@@ -592,6 +693,13 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
         oprot.writeBool(struct.doGetFooters);
         oprot.writeFieldEnd();
       }
+      if (struct.type != null) {
+        if (struct.isSetType()) {
+          oprot.writeFieldBegin(TYPE_FIELD_DESC);
+          oprot.writeI32(struct.type.getValue());
+          oprot.writeFieldEnd();
+        }
+      }
       oprot.writeFieldStop();
       oprot.writeStructEnd();
     }
@@ -621,10 +729,16 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
       if (struct.isSetDoGetFooters()) {
         optionals.set(0);
       }
-      oprot.writeBitSet(optionals, 1);
+      if (struct.isSetType()) {
+        optionals.set(1);
+      }
+      oprot.writeBitSet(optionals, 2);
       if (struct.isSetDoGetFooters()) {
         oprot.writeBool(struct.doGetFooters);
       }
+      if (struct.isSetType()) {
+        oprot.writeI32(struct.type.getValue());
+      }
     }
 
     @Override
@@ -643,11 +757,15 @@ public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<Get
       struct.setFileIdsIsSet(true);
       struct.expr = iprot.readBinary();
       struct.setExprIsSet(true);
-      BitSet incoming = iprot.readBitSet(1);
+      BitSet incoming = iprot.readBitSet(2);
       if (incoming.get(0)) {
         struct.doGetFooters = iprot.readBool();
         struct.setDoGetFootersIsSet(true);
       }
+      if (incoming.get(1)) {
+        struct.type = org.apache.hadoop.hive.metastore.api.FileMetadataExprType.findByValue(iprot.readI32());
+        struct.setTypeIsSet(true);
+      }
     }
   }
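
For illustration only (not part of this patch), a client could populate the new optional 'type' field through the generated setters shown above; the file IDs and expression bytes below are hypothetical placeholders:

  // Hypothetical usage sketch; values are placeholders, not taken from the patch.
  import java.nio.ByteBuffer;
  import java.util.Arrays;

  import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
  import org.apache.hadoop.hive.metastore.api.GetFileMetadataByExprRequest;

  public class GetFileMetadataByExprRequestExample {
    public static GetFileMetadataByExprRequest build(byte[] serializedExpr) {
      GetFileMetadataByExprRequest req = new GetFileMetadataByExprRequest();
      req.setFileIds(Arrays.asList(1L, 2L, 3L));    // hypothetical file IDs
      req.setExpr(ByteBuffer.wrap(serializedExpr)); // format-specific serialized expression
      req.setDoGetFooters(true);                    // optional, as before this patch
      req.setType(FileMetadataExprType.ORC_SARG);   // new optional field added here
      return req;
    }
  }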
 

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/gen/thrift/gen-php/metastore/Types.php
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-php/metastore/Types.php b/metastore/src/gen/thrift/gen-php/metastore/Types.php
index 3ec2b1c..e63213d 100644
--- a/metastore/src/gen/thrift/gen-php/metastore/Types.php
+++ b/metastore/src/gen/thrift/gen-php/metastore/Types.php
@@ -143,6 +143,13 @@ final class ResourceType {
   );
 }
 
+final class FileMetadataExprType {
+  const ORC_SARG = 1;
+  static public $__names = array(
+    1 => 'ORC_SARG',
+  );
+}
+
 class Version {
   static $_TSPEC;
 
@@ -14108,6 +14115,10 @@ class GetFileMetadataByExprRequest {
    * @var bool
    */
   public $doGetFooters = null;
+  /**
+   * @var int
+   */
+  public $type = null;
 
   public function __construct($vals=null) {
     if (!isset(self::$_TSPEC)) {
@@ -14128,6 +14139,10 @@ class GetFileMetadataByExprRequest {
           'var' => 'doGetFooters',
           'type' => TType::BOOL,
           ),
+        4 => array(
+          'var' => 'type',
+          'type' => TType::I32,
+          ),
         );
     }
     if (is_array($vals)) {
@@ -14140,6 +14155,9 @@ class GetFileMetadataByExprRequest {
       if (isset($vals['doGetFooters'])) {
         $this->doGetFooters = $vals['doGetFooters'];
       }
+      if (isset($vals['type'])) {
+        $this->type = $vals['type'];
+      }
     }
   }
 
@@ -14193,6 +14211,13 @@ class GetFileMetadataByExprRequest {
             $xfer += $input->skip($ftype);
           }
           break;
+        case 4:
+          if ($ftype == TType::I32) {
+            $xfer += $input->readI32($this->type);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
         default:
           $xfer += $input->skip($ftype);
           break;
@@ -14233,6 +14258,11 @@ class GetFileMetadataByExprRequest {
       $xfer += $output->writeBool($this->doGetFooters);
       $xfer += $output->writeFieldEnd();
     }
+    if ($this->type !== null) {
+      $xfer += $output->writeFieldBegin('type', TType::I32, 4);
+      $xfer += $output->writeI32($this->type);
+      $xfer += $output->writeFieldEnd();
+    }
     $xfer += $output->writeFieldStop();
     $xfer += $output->writeStructEnd();
     return $xfer;

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
index 221d602..8940dff 100644
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
@@ -213,6 +213,17 @@ class ResourceType:
     "ARCHIVE": 3,
   }
 
+class FileMetadataExprType:
+  ORC_SARG = 1
+
+  _VALUES_TO_NAMES = {
+    1: "ORC_SARG",
+  }
+
+  _NAMES_TO_VALUES = {
+    "ORC_SARG": 1,
+  }
+
 
 class Version:
   """
@@ -9927,6 +9938,7 @@ class GetFileMetadataByExprRequest:
    - fileIds
    - expr
    - doGetFooters
+   - type
   """
 
   thrift_spec = (
@@ -9934,12 +9946,14 @@ class GetFileMetadataByExprRequest:
     (1, TType.LIST, 'fileIds', (TType.I64,None), None, ), # 1
     (2, TType.STRING, 'expr', None, None, ), # 2
     (3, TType.BOOL, 'doGetFooters', None, None, ), # 3
+    (4, TType.I32, 'type', None, None, ), # 4
   )
 
-  def __init__(self, fileIds=None, expr=None, doGetFooters=None,):
+  def __init__(self, fileIds=None, expr=None, doGetFooters=None, type=None,):
     self.fileIds = fileIds
     self.expr = expr
     self.doGetFooters = doGetFooters
+    self.type = type
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -9970,6 +9984,11 @@ class GetFileMetadataByExprRequest:
           self.doGetFooters = iprot.readBool()
         else:
           iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.I32:
+          self.type = iprot.readI32()
+        else:
+          iprot.skip(ftype)
       else:
         iprot.skip(ftype)
       iprot.readFieldEnd()
@@ -9995,6 +10014,10 @@ class GetFileMetadataByExprRequest:
       oprot.writeFieldBegin('doGetFooters', TType.BOOL, 3)
       oprot.writeBool(self.doGetFooters)
       oprot.writeFieldEnd()
+    if self.type is not None:
+      oprot.writeFieldBegin('type', TType.I32, 4)
+      oprot.writeI32(self.type)
+      oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
@@ -10011,6 +10034,7 @@ class GetFileMetadataByExprRequest:
     value = (value * 31) ^ hash(self.fileIds)
     value = (value * 31) ^ hash(self.expr)
     value = (value * 31) ^ hash(self.doGetFooters)
+    value = (value * 31) ^ hash(self.type)
     return value
 
   def __repr__(self):

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
index cfabbb8..08b9b06 100644
--- a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
+++ b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
@@ -101,6 +101,12 @@ module ResourceType
   VALID_VALUES = Set.new([JAR, FILE, ARCHIVE]).freeze
 end
 
+module FileMetadataExprType
+  ORC_SARG = 1
+  VALUE_MAP = {1 => "ORC_SARG"}
+  VALID_VALUES = Set.new([ORC_SARG]).freeze
+end
+
 class Version
   include ::Thrift::Struct, ::Thrift::Struct_Union
   VERSION = 1
@@ -2274,11 +2280,13 @@ class GetFileMetadataByExprRequest
   FILEIDS = 1
   EXPR = 2
   DOGETFOOTERS = 3
+  TYPE = 4
 
   FIELDS = {
     FILEIDS => {:type => ::Thrift::Types::LIST, :name => 'fileIds', :element => {:type => ::Thrift::Types::I64}},
     EXPR => {:type => ::Thrift::Types::STRING, :name => 'expr', :binary => true},
-    DOGETFOOTERS => {:type => ::Thrift::Types::BOOL, :name => 'doGetFooters', :optional => true}
+    DOGETFOOTERS => {:type => ::Thrift::Types::BOOL, :name => 'doGetFooters', :optional => true},
+    TYPE => {:type => ::Thrift::Types::I32, :name => 'type', :optional => true, :enum_class => ::FileMetadataExprType}
   }
 
   def struct_fields; FIELDS; end
@@ -2286,6 +2294,9 @@ class GetFileMetadataByExprRequest
   def validate
     raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field fileIds is unset!') unless @fileIds
     raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field expr is unset!') unless @expr
+    unless @type.nil? || ::FileMetadataExprType::VALID_VALUES.include?(@type)
+      raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Invalid value of field type!')
+    end
   end
 
   ::Thrift::Struct.generate_accessors self

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java
new file mode 100644
index 0000000..7c3525a
--- /dev/null
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+public interface FileMetadataHandler {
+
+  void getFileMetadataByExpr(List<Long> fileIds, byte[] expr,
+      ByteBuffer[] metadatas, ByteBuffer[] results, boolean[] eliminated) throws IOException;
+
+}
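
As a rough sketch of this contract (not part of the patch; the real ORC-backed handler, OrcFileMetadataHandler, is added later in this commit), a trivial handler that eliminates nothing could look like the following; the class name is hypothetical:

  // Hypothetical no-op handler illustrating the output-parameter contract.
  package org.apache.hadoop.hive.metastore;

  import java.io.IOException;
  import java.nio.ByteBuffer;
  import java.util.List;

  public class NoopFileMetadataHandler implements FileMetadataHandler {
    @Override
    public void getFileMetadataByExpr(List<Long> fileIds, byte[] expr,
        ByteBuffer[] metadatas, ByteBuffer[] results, boolean[] eliminated) throws IOException {
      // All output arrays are sized to fileIds by the caller; this handler
      // inspects nothing, so no file is eliminated and no PPD result is produced.
      for (int i = 0; i < fileIds.size(); ++i) {
        eliminated[i] = false;
        results[i] = null;
      }
    }
  }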

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 40e6e62..8ed4310 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -5617,13 +5617,16 @@ public class HiveMetaStore extends ThriftHiveMetastore {
         return result;
       }
       result.setIsSupported(true);
+
       List<Long> fileIds = req.getFileIds();
-      byte[] expr = req.getExpr();
-      boolean needMetadata = req.isDoGetFooters();
-      ByteBuffer[] metadatas = new ByteBuffer[fileIds.size()];
-      ByteBuffer[] stripeBitsets = new ByteBuffer[fileIds.size()];
+      boolean needMetadata = !req.isSetDoGetFooters() || req.isDoGetFooters();
+      FileMetadataExprType type = req.isSetType() ? req.getType() : FileMetadataExprType.ORC_SARG;
+
+      ByteBuffer[] metadatas = needMetadata ? new ByteBuffer[fileIds.size()] : null;
+      ByteBuffer[] ppdResults = new ByteBuffer[fileIds.size()];
       boolean[] eliminated = new boolean[fileIds.size()];
-      getMS().getFileMetadataByExpr(fileIds, expr, metadatas, stripeBitsets, eliminated);
+
+      getMS().getFileMetadataByExpr(fileIds, type, req.getExpr(), metadatas, ppdResults, eliminated);
       for (int i = 0; i < metadatas.length; ++i) {
         long fileId = fileIds.get(i);
         ByteBuffer metadata = metadatas[i];
@@ -5631,7 +5634,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
         metadata = (eliminated[i] || !needMetadata) ? null
             : handleReadOnlyBufferForThrift(metadata);
         MetadataPpdResult mpr = new MetadataPpdResult();
-        ByteBuffer bitset = eliminated[i] ? null : handleReadOnlyBufferForThrift(stripeBitsets[i]);
+        ByteBuffer bitset = eliminated[i] ? null : handleReadOnlyBufferForThrift(ppdResults[i]);
         mpr.setMetadata(metadata);
         mpr.setIncludeBitset(bitset);
         result.putToMetadata(fileId, mpr);

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index f0c1893..0f98963 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -79,6 +79,7 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.FunctionType;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
@@ -7666,7 +7667,7 @@ public class ObjectStore implements RawStore, Configurable {
   }
 
   @Override
-  public void getFileMetadataByExpr(List<Long> fileIds, byte[] expr,
+  public void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr,
       ByteBuffer[] metadatas, ByteBuffer[] stripeBitsets, boolean[] eliminated) {
     throw new UnsupportedOperationException();
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
index 45428ed..4aa17a5 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -617,6 +618,7 @@ public interface RawStore extends Configurable {
    * produce additional information based on file metadata and also filter the file list.
    * @param fileIds List of file IDs from the filesystem.
    * @param expr Format-specific serialized expression applicable to the files' metadatas.
+   * @param type Expression type; used to determine the class that handles the metadata.
    * @param metadatas Output parameter; fileIds-sized array to receive the metadatas
    *                  for corresponding files, if any.
    * @param exprResults Output parameter; fileIds-sized array to receive the format-specific
@@ -624,7 +626,7 @@ public interface RawStore extends Configurable {
    * @param eliminated Output parameter; fileIds-sized array to receive the indication of whether
    *                   the corresponding files are entirely eliminated by the expression.
    */
-  void getFileMetadataByExpr(
-      List<Long> fileIds, byte[] expr, ByteBuffer[] metadatas,
-      ByteBuffer[] exprResults, boolean[] eliminated) throws MetaException;
+  void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr,
+      ByteBuffer[] metadatas, ByteBuffer[] exprResults, boolean[] eliminated)
+          throws MetaException;
 }
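
A minimal caller-side sketch of this contract (hypothetical; the actual call site in this patch is in HiveMetaStore.java above), showing how the three output arrays are sized and consumed:

  // Hypothetical caller sketch; mirrors how the HiveMetaStore change above
  // sizes and consumes the three output arrays.
  import java.nio.ByteBuffer;
  import java.util.List;

  import org.apache.hadoop.hive.metastore.RawStore;
  import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
  import org.apache.hadoop.hive.metastore.api.MetaException;

  public class RawStoreCallerSketch {
    static void filterByExpr(RawStore ms, List<Long> fileIds, byte[] expr) throws MetaException {
      // Per the javadoc above, all three output arrays must be sized to fileIds.
      ByteBuffer[] metadatas = new ByteBuffer[fileIds.size()];
      ByteBuffer[] exprResults = new ByteBuffer[fileIds.size()];
      boolean[] eliminated = new boolean[fileIds.size()];

      ms.getFileMetadataByExpr(fileIds, FileMetadataExprType.ORC_SARG, expr,
          metadatas, exprResults, eliminated);

      for (int i = 0; i < fileIds.size(); ++i) {
        if (eliminated[i]) continue;            // expression ruled this file out entirely
        ByteBuffer ppdResult = exprResults[i];  // format-specific PPD result, may be null
        ByteBuffer metadata = metadatas[i];     // serialized file metadata, if available
        // ... hand back to the client, e.g. as MetadataPpdResult in HiveMetaStore.
      }
    }
  }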

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/java/org/apache/hadoop/hive/metastore/filemeta/OrcFileMetadataHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/filemeta/OrcFileMetadataHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/filemeta/OrcFileMetadataHandler.java
new file mode 100644
index 0000000..14189da
--- /dev/null
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/filemeta/OrcFileMetadataHandler.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.filemeta;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.FileMetadataHandler;
+import org.apache.hadoop.hive.metastore.PartitionExpressionProxy;
+import org.apache.hadoop.hive.metastore.hbase.HBaseReadWrite;
+import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
+
+public class OrcFileMetadataHandler implements FileMetadataHandler {
+  private final Configuration conf;
+  private final PartitionExpressionProxy expressionProxy;
+  private final HBaseReadWrite hbase;
+
+  public OrcFileMetadataHandler(Configuration conf,
+      PartitionExpressionProxy expressionProxy, HBaseReadWrite hbase) {
+    this.conf = conf;
+    this.expressionProxy = expressionProxy;
+    this.hbase = hbase;
+  }
+
+  @Override
+  public void getFileMetadataByExpr(List<Long> fileIds, byte[] expr,
+      ByteBuffer[] metadatas, ByteBuffer[] results, boolean[] eliminated) throws IOException {
+    SearchArgument sarg = expressionProxy.createSarg(expr);
+    // For now, don't push anything into HBase, nor store anything special in HBase
+    if (metadatas == null) {
+      // null means don't return metadata; we'd need the array anyway for now.
+      metadatas = new ByteBuffer[results.length];
+    }
+    hbase.getFileMetadata(fileIds, metadatas);
+    for (int i = 0; i < metadatas.length;  ++i) {
+      if (metadatas[i] == null) continue;
+      ByteBuffer result = expressionProxy.applySargToFileMetadata(sarg, metadatas[i]);
+      eliminated[i] = (result == null);
+      if (!eliminated[i]) {
+        results[i] = result;
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
index 781f562..ffd3ee5 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
@@ -1747,7 +1747,7 @@ public class HBaseReadWrite {
    * @param fileIds file ID list.
    * @return Serialized file metadata.
    */
-  void getFileMetadata(List<Long> fileIds, ByteBuffer[] result) throws IOException {
+  public void getFileMetadata(List<Long> fileIds, ByteBuffer[] result) throws IOException {
     byte[][] keys = new byte[fileIds.size()][];
     for (int i = 0; i < fileIds.size(); ++i) {
       keys[i] = HBaseUtils.makeLongKey(fileIds.get(i));

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
index 09e57e5..67a02d9 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
@@ -27,6 +27,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.FileMetadataHandler;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.PartFilterExprUtil;
 import org.apache.hadoop.hive.metastore.PartitionExpressionProxy;
@@ -38,6 +39,7 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
@@ -64,6 +66,7 @@ import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
+import org.apache.hadoop.hive.metastore.filemeta.OrcFileMetadataHandler;
 import org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.PlanResult;
 import org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan;
 import org.apache.hadoop.hive.metastore.parser.ExpressionTree;
@@ -96,6 +99,7 @@ public class HBaseStore implements RawStore {
   private Configuration conf;
   private int txnNestLevel = 0;
   private PartitionExpressionProxy expressionProxy = null;
+  private Map<FileMetadataExprType, FileMetadataHandler> fmHandlers = new HashMap<>();
 
   public HBaseStore() {
   }
@@ -2241,10 +2245,24 @@ public class HBaseStore implements RawStore {
     // initialize expressionProxy. Also re-initialize it if
     // setConf is being called with new configuration object (though that
     // is not expected to happen, doing it just for safety)
-    if(expressionProxy == null || conf != configuration) {
+    // TODO: why not re-initialize HBaseReadWrite?
+    if (expressionProxy == null || conf != configuration) {
       expressionProxy = PartFilterExprUtil.createExpressionProxy(configuration);
     }
     conf = configuration;
+    createFileMetadataHandlers();
+  }
+
+  private void createFileMetadataHandlers() {
+    for (FileMetadataExprType v : FileMetadataExprType.values()) {
+      switch (v) {
+      case ORC_SARG:
+        fmHandlers.put(v, new OrcFileMetadataHandler(conf, expressionProxy, getHBase()));
+        break;
+      default:
+        throw new AssertionError("Unsupported type " + v);
+      }
+    }
   }
 
   @Override
@@ -2380,25 +2398,16 @@ public class HBaseStore implements RawStore {
   }
 
   @Override
-  public void getFileMetadataByExpr(List<Long> fileIds, byte[] expr, ByteBuffer[] metadatas,
-      ByteBuffer[] results, boolean[] eliminated) throws MetaException {
-    SearchArgument sarg = expressionProxy.createSarg(expr);
+  public void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr,
+      ByteBuffer[] metadatas, ByteBuffer[] results, boolean[] eliminated) throws MetaException {
+    FileMetadataHandler fmh = fmHandlers.get(type);
     boolean commit = true;
     try {
-      // For now, don't push anything into HBase, nor store anything special in HBase
-      getHBase().getFileMetadata(fileIds, metadatas);
-      for (int i = 0; i < metadatas.length;  ++i) {
-        if (metadatas[i] == null) continue;
-        ByteBuffer result = expressionProxy.applySargToFileMetadata(sarg, metadatas[i]);
-        eliminated[i] = (result == null);
-        if (!eliminated[i]) {
-          results[i] = result;
-        }
-      }
+      fmh.getFileMetadataByExpr(fileIds, expr, metadatas, results, eliminated);
     } catch (IOException e) {
+      LOG.error("Unable to get file metadata by expr", e);
       commit = false;
-      LOG.error("Unable to get file metadata", e);
-      throw new MetaException("Error reading file metadata " + e.getMessage());
+      throw new MetaException("Error reading file metadata by expr: " + e.getMessage());
     } finally {
       commitOrRoleBack(commit);
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
index d11c0d5..a100e9f 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -777,7 +778,7 @@ public class DummyRawStoreControlledCommit implements RawStore, Configurable {
 
 
   @Override
-  public void getFileMetadataByExpr(List<Long> fileIds, byte[] expr,
+  public void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr,
       ByteBuffer[] metadatas, ByteBuffer[] stripeBitsets, boolean[] eliminated) {
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
index 2de049a..f6100e6 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -793,7 +794,7 @@ public class DummyRawStoreForJdoConnection implements RawStore {
   }
 
   @Override
-  public void getFileMetadataByExpr(List<Long> fileIds, byte[] expr,
+  public void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr,
       ByteBuffer[] metadatas, ByteBuffer[] stripeBitsets, boolean[] eliminated) {
   }
 }


[23/55] [abbrv] hive git commit: HIVE-12253 : revert HIVE-12061 (Sergey Shelukhin, reviewed by Prasanth Jayachandran)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/3e0d87f8/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
index a82c363..44aa22e 100644
--- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
+++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
@@ -1240,14 +1240,14 @@ uint32_t ThriftHiveMetastore_get_databases_result::read(::apache::thrift::protoc
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size714;
-            ::apache::thrift::protocol::TType _etype717;
-            xfer += iprot->readListBegin(_etype717, _size714);
-            this->success.resize(_size714);
-            uint32_t _i718;
-            for (_i718 = 0; _i718 < _size714; ++_i718)
+            uint32_t _size713;
+            ::apache::thrift::protocol::TType _etype716;
+            xfer += iprot->readListBegin(_etype716, _size713);
+            this->success.resize(_size713);
+            uint32_t _i717;
+            for (_i717 = 0; _i717 < _size713; ++_i717)
             {
-              xfer += iprot->readString(this->success[_i718]);
+              xfer += iprot->readString(this->success[_i717]);
             }
             xfer += iprot->readListEnd();
           }
@@ -1286,10 +1286,10 @@ uint32_t ThriftHiveMetastore_get_databases_result::write(::apache::thrift::proto
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter719;
-      for (_iter719 = this->success.begin(); _iter719 != this->success.end(); ++_iter719)
+      std::vector<std::string> ::const_iterator _iter718;
+      for (_iter718 = this->success.begin(); _iter718 != this->success.end(); ++_iter718)
       {
-        xfer += oprot->writeString((*_iter719));
+        xfer += oprot->writeString((*_iter718));
       }
       xfer += oprot->writeListEnd();
     }
@@ -1334,14 +1334,14 @@ uint32_t ThriftHiveMetastore_get_databases_presult::read(::apache::thrift::proto
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size720;
-            ::apache::thrift::protocol::TType _etype723;
-            xfer += iprot->readListBegin(_etype723, _size720);
-            (*(this->success)).resize(_size720);
-            uint32_t _i724;
-            for (_i724 = 0; _i724 < _size720; ++_i724)
+            uint32_t _size719;
+            ::apache::thrift::protocol::TType _etype722;
+            xfer += iprot->readListBegin(_etype722, _size719);
+            (*(this->success)).resize(_size719);
+            uint32_t _i723;
+            for (_i723 = 0; _i723 < _size719; ++_i723)
             {
-              xfer += iprot->readString((*(this->success))[_i724]);
+              xfer += iprot->readString((*(this->success))[_i723]);
             }
             xfer += iprot->readListEnd();
           }
@@ -1458,14 +1458,14 @@ uint32_t ThriftHiveMetastore_get_all_databases_result::read(::apache::thrift::pr
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size725;
-            ::apache::thrift::protocol::TType _etype728;
-            xfer += iprot->readListBegin(_etype728, _size725);
-            this->success.resize(_size725);
-            uint32_t _i729;
-            for (_i729 = 0; _i729 < _size725; ++_i729)
+            uint32_t _size724;
+            ::apache::thrift::protocol::TType _etype727;
+            xfer += iprot->readListBegin(_etype727, _size724);
+            this->success.resize(_size724);
+            uint32_t _i728;
+            for (_i728 = 0; _i728 < _size724; ++_i728)
             {
-              xfer += iprot->readString(this->success[_i729]);
+              xfer += iprot->readString(this->success[_i728]);
             }
             xfer += iprot->readListEnd();
           }
@@ -1504,10 +1504,10 @@ uint32_t ThriftHiveMetastore_get_all_databases_result::write(::apache::thrift::p
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter730;
-      for (_iter730 = this->success.begin(); _iter730 != this->success.end(); ++_iter730)
+      std::vector<std::string> ::const_iterator _iter729;
+      for (_iter729 = this->success.begin(); _iter729 != this->success.end(); ++_iter729)
       {
-        xfer += oprot->writeString((*_iter730));
+        xfer += oprot->writeString((*_iter729));
       }
       xfer += oprot->writeListEnd();
     }
@@ -1552,14 +1552,14 @@ uint32_t ThriftHiveMetastore_get_all_databases_presult::read(::apache::thrift::p
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size731;
-            ::apache::thrift::protocol::TType _etype734;
-            xfer += iprot->readListBegin(_etype734, _size731);
-            (*(this->success)).resize(_size731);
-            uint32_t _i735;
-            for (_i735 = 0; _i735 < _size731; ++_i735)
+            uint32_t _size730;
+            ::apache::thrift::protocol::TType _etype733;
+            xfer += iprot->readListBegin(_etype733, _size730);
+            (*(this->success)).resize(_size730);
+            uint32_t _i734;
+            for (_i734 = 0; _i734 < _size730; ++_i734)
             {
-              xfer += iprot->readString((*(this->success))[_i735]);
+              xfer += iprot->readString((*(this->success))[_i734]);
             }
             xfer += iprot->readListEnd();
           }
@@ -2621,17 +2621,17 @@ uint32_t ThriftHiveMetastore_get_type_all_result::read(::apache::thrift::protoco
         if (ftype == ::apache::thrift::protocol::T_MAP) {
           {
             this->success.clear();
-            uint32_t _size736;
-            ::apache::thrift::protocol::TType _ktype737;
-            ::apache::thrift::protocol::TType _vtype738;
-            xfer += iprot->readMapBegin(_ktype737, _vtype738, _size736);
-            uint32_t _i740;
-            for (_i740 = 0; _i740 < _size736; ++_i740)
+            uint32_t _size735;
+            ::apache::thrift::protocol::TType _ktype736;
+            ::apache::thrift::protocol::TType _vtype737;
+            xfer += iprot->readMapBegin(_ktype736, _vtype737, _size735);
+            uint32_t _i739;
+            for (_i739 = 0; _i739 < _size735; ++_i739)
             {
-              std::string _key741;
-              xfer += iprot->readString(_key741);
-              Type& _val742 = this->success[_key741];
-              xfer += _val742.read(iprot);
+              std::string _key740;
+              xfer += iprot->readString(_key740);
+              Type& _val741 = this->success[_key740];
+              xfer += _val741.read(iprot);
             }
             xfer += iprot->readMapEnd();
           }
@@ -2670,11 +2670,11 @@ uint32_t ThriftHiveMetastore_get_type_all_result::write(::apache::thrift::protoc
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_MAP, 0);
     {
       xfer += oprot->writeMapBegin(::apache::thrift::protocol::T_STRING, ::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::map<std::string, Type> ::const_iterator _iter743;
-      for (_iter743 = this->success.begin(); _iter743 != this->success.end(); ++_iter743)
+      std::map<std::string, Type> ::const_iterator _iter742;
+      for (_iter742 = this->success.begin(); _iter742 != this->success.end(); ++_iter742)
       {
-        xfer += oprot->writeString(_iter743->first);
-        xfer += _iter743->second.write(oprot);
+        xfer += oprot->writeString(_iter742->first);
+        xfer += _iter742->second.write(oprot);
       }
       xfer += oprot->writeMapEnd();
     }
@@ -2719,17 +2719,17 @@ uint32_t ThriftHiveMetastore_get_type_all_presult::read(::apache::thrift::protoc
         if (ftype == ::apache::thrift::protocol::T_MAP) {
           {
             (*(this->success)).clear();
-            uint32_t _size744;
-            ::apache::thrift::protocol::TType _ktype745;
-            ::apache::thrift::protocol::TType _vtype746;
-            xfer += iprot->readMapBegin(_ktype745, _vtype746, _size744);
-            uint32_t _i748;
-            for (_i748 = 0; _i748 < _size744; ++_i748)
+            uint32_t _size743;
+            ::apache::thrift::protocol::TType _ktype744;
+            ::apache::thrift::protocol::TType _vtype745;
+            xfer += iprot->readMapBegin(_ktype744, _vtype745, _size743);
+            uint32_t _i747;
+            for (_i747 = 0; _i747 < _size743; ++_i747)
             {
-              std::string _key749;
-              xfer += iprot->readString(_key749);
-              Type& _val750 = (*(this->success))[_key749];
-              xfer += _val750.read(iprot);
+              std::string _key748;
+              xfer += iprot->readString(_key748);
+              Type& _val749 = (*(this->success))[_key748];
+              xfer += _val749.read(iprot);
             }
             xfer += iprot->readMapEnd();
           }
@@ -2883,14 +2883,14 @@ uint32_t ThriftHiveMetastore_get_fields_result::read(::apache::thrift::protocol:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size751;
-            ::apache::thrift::protocol::TType _etype754;
-            xfer += iprot->readListBegin(_etype754, _size751);
-            this->success.resize(_size751);
-            uint32_t _i755;
-            for (_i755 = 0; _i755 < _size751; ++_i755)
+            uint32_t _size750;
+            ::apache::thrift::protocol::TType _etype753;
+            xfer += iprot->readListBegin(_etype753, _size750);
+            this->success.resize(_size750);
+            uint32_t _i754;
+            for (_i754 = 0; _i754 < _size750; ++_i754)
             {
-              xfer += this->success[_i755].read(iprot);
+              xfer += this->success[_i754].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2945,10 +2945,10 @@ uint32_t ThriftHiveMetastore_get_fields_result::write(::apache::thrift::protocol
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<FieldSchema> ::const_iterator _iter756;
-      for (_iter756 = this->success.begin(); _iter756 != this->success.end(); ++_iter756)
+      std::vector<FieldSchema> ::const_iterator _iter755;
+      for (_iter755 = this->success.begin(); _iter755 != this->success.end(); ++_iter755)
       {
-        xfer += (*_iter756).write(oprot);
+        xfer += (*_iter755).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -3001,14 +3001,14 @@ uint32_t ThriftHiveMetastore_get_fields_presult::read(::apache::thrift::protocol
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size757;
-            ::apache::thrift::protocol::TType _etype760;
-            xfer += iprot->readListBegin(_etype760, _size757);
-            (*(this->success)).resize(_size757);
-            uint32_t _i761;
-            for (_i761 = 0; _i761 < _size757; ++_i761)
+            uint32_t _size756;
+            ::apache::thrift::protocol::TType _etype759;
+            xfer += iprot->readListBegin(_etype759, _size756);
+            (*(this->success)).resize(_size756);
+            uint32_t _i760;
+            for (_i760 = 0; _i760 < _size756; ++_i760)
             {
-              xfer += (*(this->success))[_i761].read(iprot);
+              xfer += (*(this->success))[_i760].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3194,14 +3194,14 @@ uint32_t ThriftHiveMetastore_get_fields_with_environment_context_result::read(::
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size762;
-            ::apache::thrift::protocol::TType _etype765;
-            xfer += iprot->readListBegin(_etype765, _size762);
-            this->success.resize(_size762);
-            uint32_t _i766;
-            for (_i766 = 0; _i766 < _size762; ++_i766)
+            uint32_t _size761;
+            ::apache::thrift::protocol::TType _etype764;
+            xfer += iprot->readListBegin(_etype764, _size761);
+            this->success.resize(_size761);
+            uint32_t _i765;
+            for (_i765 = 0; _i765 < _size761; ++_i765)
             {
-              xfer += this->success[_i766].read(iprot);
+              xfer += this->success[_i765].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3256,10 +3256,10 @@ uint32_t ThriftHiveMetastore_get_fields_with_environment_context_result::write(:
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<FieldSchema> ::const_iterator _iter767;
-      for (_iter767 = this->success.begin(); _iter767 != this->success.end(); ++_iter767)
+      std::vector<FieldSchema> ::const_iterator _iter766;
+      for (_iter766 = this->success.begin(); _iter766 != this->success.end(); ++_iter766)
       {
-        xfer += (*_iter767).write(oprot);
+        xfer += (*_iter766).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -3312,14 +3312,14 @@ uint32_t ThriftHiveMetastore_get_fields_with_environment_context_presult::read(:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size768;
-            ::apache::thrift::protocol::TType _etype771;
-            xfer += iprot->readListBegin(_etype771, _size768);
-            (*(this->success)).resize(_size768);
-            uint32_t _i772;
-            for (_i772 = 0; _i772 < _size768; ++_i772)
+            uint32_t _size767;
+            ::apache::thrift::protocol::TType _etype770;
+            xfer += iprot->readListBegin(_etype770, _size767);
+            (*(this->success)).resize(_size767);
+            uint32_t _i771;
+            for (_i771 = 0; _i771 < _size767; ++_i771)
             {
-              xfer += (*(this->success))[_i772].read(iprot);
+              xfer += (*(this->success))[_i771].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3489,14 +3489,14 @@ uint32_t ThriftHiveMetastore_get_schema_result::read(::apache::thrift::protocol:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size773;
-            ::apache::thrift::protocol::TType _etype776;
-            xfer += iprot->readListBegin(_etype776, _size773);
-            this->success.resize(_size773);
-            uint32_t _i777;
-            for (_i777 = 0; _i777 < _size773; ++_i777)
+            uint32_t _size772;
+            ::apache::thrift::protocol::TType _etype775;
+            xfer += iprot->readListBegin(_etype775, _size772);
+            this->success.resize(_size772);
+            uint32_t _i776;
+            for (_i776 = 0; _i776 < _size772; ++_i776)
             {
-              xfer += this->success[_i777].read(iprot);
+              xfer += this->success[_i776].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3551,10 +3551,10 @@ uint32_t ThriftHiveMetastore_get_schema_result::write(::apache::thrift::protocol
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<FieldSchema> ::const_iterator _iter778;
-      for (_iter778 = this->success.begin(); _iter778 != this->success.end(); ++_iter778)
+      std::vector<FieldSchema> ::const_iterator _iter777;
+      for (_iter777 = this->success.begin(); _iter777 != this->success.end(); ++_iter777)
       {
-        xfer += (*_iter778).write(oprot);
+        xfer += (*_iter777).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -3607,14 +3607,14 @@ uint32_t ThriftHiveMetastore_get_schema_presult::read(::apache::thrift::protocol
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size779;
-            ::apache::thrift::protocol::TType _etype782;
-            xfer += iprot->readListBegin(_etype782, _size779);
-            (*(this->success)).resize(_size779);
-            uint32_t _i783;
-            for (_i783 = 0; _i783 < _size779; ++_i783)
+            uint32_t _size778;
+            ::apache::thrift::protocol::TType _etype781;
+            xfer += iprot->readListBegin(_etype781, _size778);
+            (*(this->success)).resize(_size778);
+            uint32_t _i782;
+            for (_i782 = 0; _i782 < _size778; ++_i782)
             {
-              xfer += (*(this->success))[_i783].read(iprot);
+              xfer += (*(this->success))[_i782].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3800,14 +3800,14 @@ uint32_t ThriftHiveMetastore_get_schema_with_environment_context_result::read(::
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size784;
-            ::apache::thrift::protocol::TType _etype787;
-            xfer += iprot->readListBegin(_etype787, _size784);
-            this->success.resize(_size784);
-            uint32_t _i788;
-            for (_i788 = 0; _i788 < _size784; ++_i788)
+            uint32_t _size783;
+            ::apache::thrift::protocol::TType _etype786;
+            xfer += iprot->readListBegin(_etype786, _size783);
+            this->success.resize(_size783);
+            uint32_t _i787;
+            for (_i787 = 0; _i787 < _size783; ++_i787)
             {
-              xfer += this->success[_i788].read(iprot);
+              xfer += this->success[_i787].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3862,10 +3862,10 @@ uint32_t ThriftHiveMetastore_get_schema_with_environment_context_result::write(:
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<FieldSchema> ::const_iterator _iter789;
-      for (_iter789 = this->success.begin(); _iter789 != this->success.end(); ++_iter789)
+      std::vector<FieldSchema> ::const_iterator _iter788;
+      for (_iter788 = this->success.begin(); _iter788 != this->success.end(); ++_iter788)
       {
-        xfer += (*_iter789).write(oprot);
+        xfer += (*_iter788).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -3918,14 +3918,14 @@ uint32_t ThriftHiveMetastore_get_schema_with_environment_context_presult::read(:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size790;
-            ::apache::thrift::protocol::TType _etype793;
-            xfer += iprot->readListBegin(_etype793, _size790);
-            (*(this->success)).resize(_size790);
-            uint32_t _i794;
-            for (_i794 = 0; _i794 < _size790; ++_i794)
+            uint32_t _size789;
+            ::apache::thrift::protocol::TType _etype792;
+            xfer += iprot->readListBegin(_etype792, _size789);
+            (*(this->success)).resize(_size789);
+            uint32_t _i793;
+            for (_i793 = 0; _i793 < _size789; ++_i793)
             {
-              xfer += (*(this->success))[_i794].read(iprot);
+              xfer += (*(this->success))[_i793].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -5099,14 +5099,14 @@ uint32_t ThriftHiveMetastore_get_tables_result::read(::apache::thrift::protocol:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size795;
-            ::apache::thrift::protocol::TType _etype798;
-            xfer += iprot->readListBegin(_etype798, _size795);
-            this->success.resize(_size795);
-            uint32_t _i799;
-            for (_i799 = 0; _i799 < _size795; ++_i799)
+            uint32_t _size794;
+            ::apache::thrift::protocol::TType _etype797;
+            xfer += iprot->readListBegin(_etype797, _size794);
+            this->success.resize(_size794);
+            uint32_t _i798;
+            for (_i798 = 0; _i798 < _size794; ++_i798)
             {
-              xfer += iprot->readString(this->success[_i799]);
+              xfer += iprot->readString(this->success[_i798]);
             }
             xfer += iprot->readListEnd();
           }
@@ -5145,10 +5145,10 @@ uint32_t ThriftHiveMetastore_get_tables_result::write(::apache::thrift::protocol
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter800;
-      for (_iter800 = this->success.begin(); _iter800 != this->success.end(); ++_iter800)
+      std::vector<std::string> ::const_iterator _iter799;
+      for (_iter799 = this->success.begin(); _iter799 != this->success.end(); ++_iter799)
       {
-        xfer += oprot->writeString((*_iter800));
+        xfer += oprot->writeString((*_iter799));
       }
       xfer += oprot->writeListEnd();
     }
@@ -5193,14 +5193,14 @@ uint32_t ThriftHiveMetastore_get_tables_presult::read(::apache::thrift::protocol
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size801;
-            ::apache::thrift::protocol::TType _etype804;
-            xfer += iprot->readListBegin(_etype804, _size801);
-            (*(this->success)).resize(_size801);
-            uint32_t _i805;
-            for (_i805 = 0; _i805 < _size801; ++_i805)
+            uint32_t _size800;
+            ::apache::thrift::protocol::TType _etype803;
+            xfer += iprot->readListBegin(_etype803, _size800);
+            (*(this->success)).resize(_size800);
+            uint32_t _i804;
+            for (_i804 = 0; _i804 < _size800; ++_i804)
             {
-              xfer += iprot->readString((*(this->success))[_i805]);
+              xfer += iprot->readString((*(this->success))[_i804]);
             }
             xfer += iprot->readListEnd();
           }
@@ -5338,14 +5338,14 @@ uint32_t ThriftHiveMetastore_get_all_tables_result::read(::apache::thrift::proto
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size806;
-            ::apache::thrift::protocol::TType _etype809;
-            xfer += iprot->readListBegin(_etype809, _size806);
-            this->success.resize(_size806);
-            uint32_t _i810;
-            for (_i810 = 0; _i810 < _size806; ++_i810)
+            uint32_t _size805;
+            ::apache::thrift::protocol::TType _etype808;
+            xfer += iprot->readListBegin(_etype808, _size805);
+            this->success.resize(_size805);
+            uint32_t _i809;
+            for (_i809 = 0; _i809 < _size805; ++_i809)
             {
-              xfer += iprot->readString(this->success[_i810]);
+              xfer += iprot->readString(this->success[_i809]);
             }
             xfer += iprot->readListEnd();
           }
@@ -5384,10 +5384,10 @@ uint32_t ThriftHiveMetastore_get_all_tables_result::write(::apache::thrift::prot
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter811;
-      for (_iter811 = this->success.begin(); _iter811 != this->success.end(); ++_iter811)
+      std::vector<std::string> ::const_iterator _iter810;
+      for (_iter810 = this->success.begin(); _iter810 != this->success.end(); ++_iter810)
       {
-        xfer += oprot->writeString((*_iter811));
+        xfer += oprot->writeString((*_iter810));
       }
       xfer += oprot->writeListEnd();
     }
@@ -5432,14 +5432,14 @@ uint32_t ThriftHiveMetastore_get_all_tables_presult::read(::apache::thrift::prot
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size812;
-            ::apache::thrift::protocol::TType _etype815;
-            xfer += iprot->readListBegin(_etype815, _size812);
-            (*(this->success)).resize(_size812);
-            uint32_t _i816;
-            for (_i816 = 0; _i816 < _size812; ++_i816)
+            uint32_t _size811;
+            ::apache::thrift::protocol::TType _etype814;
+            xfer += iprot->readListBegin(_etype814, _size811);
+            (*(this->success)).resize(_size811);
+            uint32_t _i815;
+            for (_i815 = 0; _i815 < _size811; ++_i815)
             {
-              xfer += iprot->readString((*(this->success))[_i816]);
+              xfer += iprot->readString((*(this->success))[_i815]);
             }
             xfer += iprot->readListEnd();
           }
@@ -5749,14 +5749,14 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_args::read(::apache::thri
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->tbl_names.clear();
-            uint32_t _size817;
-            ::apache::thrift::protocol::TType _etype820;
-            xfer += iprot->readListBegin(_etype820, _size817);
-            this->tbl_names.resize(_size817);
-            uint32_t _i821;
-            for (_i821 = 0; _i821 < _size817; ++_i821)
+            uint32_t _size816;
+            ::apache::thrift::protocol::TType _etype819;
+            xfer += iprot->readListBegin(_etype819, _size816);
+            this->tbl_names.resize(_size816);
+            uint32_t _i820;
+            for (_i820 = 0; _i820 < _size816; ++_i820)
             {
-              xfer += iprot->readString(this->tbl_names[_i821]);
+              xfer += iprot->readString(this->tbl_names[_i820]);
             }
             xfer += iprot->readListEnd();
           }
@@ -5789,10 +5789,10 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_args::write(::apache::thr
   xfer += oprot->writeFieldBegin("tbl_names", ::apache::thrift::protocol::T_LIST, 2);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->tbl_names.size()));
-    std::vector<std::string> ::const_iterator _iter822;
-    for (_iter822 = this->tbl_names.begin(); _iter822 != this->tbl_names.end(); ++_iter822)
+    std::vector<std::string> ::const_iterator _iter821;
+    for (_iter821 = this->tbl_names.begin(); _iter821 != this->tbl_names.end(); ++_iter821)
     {
-      xfer += oprot->writeString((*_iter822));
+      xfer += oprot->writeString((*_iter821));
     }
     xfer += oprot->writeListEnd();
   }
@@ -5820,10 +5820,10 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_pargs::write(::apache::th
   xfer += oprot->writeFieldBegin("tbl_names", ::apache::thrift::protocol::T_LIST, 2);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->tbl_names)).size()));
-    std::vector<std::string> ::const_iterator _iter823;
-    for (_iter823 = (*(this->tbl_names)).begin(); _iter823 != (*(this->tbl_names)).end(); ++_iter823)
+    std::vector<std::string> ::const_iterator _iter822;
+    for (_iter822 = (*(this->tbl_names)).begin(); _iter822 != (*(this->tbl_names)).end(); ++_iter822)
     {
-      xfer += oprot->writeString((*_iter823));
+      xfer += oprot->writeString((*_iter822));
     }
     xfer += oprot->writeListEnd();
   }
@@ -5864,14 +5864,14 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_result::read(::apache::th
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size824;
-            ::apache::thrift::protocol::TType _etype827;
-            xfer += iprot->readListBegin(_etype827, _size824);
-            this->success.resize(_size824);
-            uint32_t _i828;
-            for (_i828 = 0; _i828 < _size824; ++_i828)
+            uint32_t _size823;
+            ::apache::thrift::protocol::TType _etype826;
+            xfer += iprot->readListBegin(_etype826, _size823);
+            this->success.resize(_size823);
+            uint32_t _i827;
+            for (_i827 = 0; _i827 < _size823; ++_i827)
             {
-              xfer += this->success[_i828].read(iprot);
+              xfer += this->success[_i827].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -5926,10 +5926,10 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_result::write(::apache::t
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<Table> ::const_iterator _iter829;
-      for (_iter829 = this->success.begin(); _iter829 != this->success.end(); ++_iter829)
+      std::vector<Table> ::const_iterator _iter828;
+      for (_iter828 = this->success.begin(); _iter828 != this->success.end(); ++_iter828)
       {
-        xfer += (*_iter829).write(oprot);
+        xfer += (*_iter828).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -5982,14 +5982,14 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_presult::read(::apache::t
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size830;
-            ::apache::thrift::protocol::TType _etype833;
-            xfer += iprot->readListBegin(_etype833, _size830);
-            (*(this->success)).resize(_size830);
-            uint32_t _i834;
-            for (_i834 = 0; _i834 < _size830; ++_i834)
+            uint32_t _size829;
+            ::apache::thrift::protocol::TType _etype832;
+            xfer += iprot->readListBegin(_etype832, _size829);
+            (*(this->success)).resize(_size829);
+            uint32_t _i833;
+            for (_i833 = 0; _i833 < _size829; ++_i833)
             {
-              xfer += (*(this->success))[_i834].read(iprot);
+              xfer += (*(this->success))[_i833].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -6175,14 +6175,14 @@ uint32_t ThriftHiveMetastore_get_table_names_by_filter_result::read(::apache::th
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size835;
-            ::apache::thrift::protocol::TType _etype838;
-            xfer += iprot->readListBegin(_etype838, _size835);
-            this->success.resize(_size835);
-            uint32_t _i839;
-            for (_i839 = 0; _i839 < _size835; ++_i839)
+            uint32_t _size834;
+            ::apache::thrift::protocol::TType _etype837;
+            xfer += iprot->readListBegin(_etype837, _size834);
+            this->success.resize(_size834);
+            uint32_t _i838;
+            for (_i838 = 0; _i838 < _size834; ++_i838)
             {
-              xfer += iprot->readString(this->success[_i839]);
+              xfer += iprot->readString(this->success[_i838]);
             }
             xfer += iprot->readListEnd();
           }
@@ -6237,10 +6237,10 @@ uint32_t ThriftHiveMetastore_get_table_names_by_filter_result::write(::apache::t
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter840;
-      for (_iter840 = this->success.begin(); _iter840 != this->success.end(); ++_iter840)
+      std::vector<std::string> ::const_iterator _iter839;
+      for (_iter839 = this->success.begin(); _iter839 != this->success.end(); ++_iter839)
       {
-        xfer += oprot->writeString((*_iter840));
+        xfer += oprot->writeString((*_iter839));
       }
       xfer += oprot->writeListEnd();
     }
@@ -6293,14 +6293,14 @@ uint32_t ThriftHiveMetastore_get_table_names_by_filter_presult::read(::apache::t
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size841;
-            ::apache::thrift::protocol::TType _etype844;
-            xfer += iprot->readListBegin(_etype844, _size841);
-            (*(this->success)).resize(_size841);
-            uint32_t _i845;
-            for (_i845 = 0; _i845 < _size841; ++_i845)
+            uint32_t _size840;
+            ::apache::thrift::protocol::TType _etype843;
+            xfer += iprot->readListBegin(_etype843, _size840);
+            (*(this->success)).resize(_size840);
+            uint32_t _i844;
+            for (_i844 = 0; _i844 < _size840; ++_i844)
             {
-              xfer += iprot->readString((*(this->success))[_i845]);
+              xfer += iprot->readString((*(this->success))[_i844]);
             }
             xfer += iprot->readListEnd();
           }
@@ -7634,14 +7634,14 @@ uint32_t ThriftHiveMetastore_add_partitions_args::read(::apache::thrift::protoco
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->new_parts.clear();
-            uint32_t _size846;
-            ::apache::thrift::protocol::TType _etype849;
-            xfer += iprot->readListBegin(_etype849, _size846);
-            this->new_parts.resize(_size846);
-            uint32_t _i850;
-            for (_i850 = 0; _i850 < _size846; ++_i850)
+            uint32_t _size845;
+            ::apache::thrift::protocol::TType _etype848;
+            xfer += iprot->readListBegin(_etype848, _size845);
+            this->new_parts.resize(_size845);
+            uint32_t _i849;
+            for (_i849 = 0; _i849 < _size845; ++_i849)
             {
-              xfer += this->new_parts[_i850].read(iprot);
+              xfer += this->new_parts[_i849].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -7670,10 +7670,10 @@ uint32_t ThriftHiveMetastore_add_partitions_args::write(::apache::thrift::protoc
   xfer += oprot->writeFieldBegin("new_parts", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->new_parts.size()));
-    std::vector<Partition> ::const_iterator _iter851;
-    for (_iter851 = this->new_parts.begin(); _iter851 != this->new_parts.end(); ++_iter851)
+    std::vector<Partition> ::const_iterator _iter850;
+    for (_iter850 = this->new_parts.begin(); _iter850 != this->new_parts.end(); ++_iter850)
     {
-      xfer += (*_iter851).write(oprot);
+      xfer += (*_iter850).write(oprot);
     }
     xfer += oprot->writeListEnd();
   }
@@ -7697,10 +7697,10 @@ uint32_t ThriftHiveMetastore_add_partitions_pargs::write(::apache::thrift::proto
   xfer += oprot->writeFieldBegin("new_parts", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>((*(this->new_parts)).size()));
-    std::vector<Partition> ::const_iterator _iter852;
-    for (_iter852 = (*(this->new_parts)).begin(); _iter852 != (*(this->new_parts)).end(); ++_iter852)
+    std::vector<Partition> ::const_iterator _iter851;
+    for (_iter851 = (*(this->new_parts)).begin(); _iter851 != (*(this->new_parts)).end(); ++_iter851)
     {
-      xfer += (*_iter852).write(oprot);
+      xfer += (*_iter851).write(oprot);
     }
     xfer += oprot->writeListEnd();
   }
@@ -7909,14 +7909,14 @@ uint32_t ThriftHiveMetastore_add_partitions_pspec_args::read(::apache::thrift::p
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->new_parts.clear();
-            uint32_t _size853;
-            ::apache::thrift::protocol::TType _etype856;
-            xfer += iprot->readListBegin(_etype856, _size853);
-            this->new_parts.resize(_size853);
-            uint32_t _i857;
-            for (_i857 = 0; _i857 < _size853; ++_i857)
+            uint32_t _size852;
+            ::apache::thrift::protocol::TType _etype855;
+            xfer += iprot->readListBegin(_etype855, _size852);
+            this->new_parts.resize(_size852);
+            uint32_t _i856;
+            for (_i856 = 0; _i856 < _size852; ++_i856)
             {
-              xfer += this->new_parts[_i857].read(iprot);
+              xfer += this->new_parts[_i856].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -7945,10 +7945,10 @@ uint32_t ThriftHiveMetastore_add_partitions_pspec_args::write(::apache::thrift::
   xfer += oprot->writeFieldBegin("new_parts", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->new_parts.size()));
-    std::vector<PartitionSpec> ::const_iterator _iter858;
-    for (_iter858 = this->new_parts.begin(); _iter858 != this->new_parts.end(); ++_iter858)
+    std::vector<PartitionSpec> ::const_iterator _iter857;
+    for (_iter857 = this->new_parts.begin(); _iter857 != this->new_parts.end(); ++_iter857)
     {
-      xfer += (*_iter858).write(oprot);
+      xfer += (*_iter857).write(oprot);
     }
     xfer += oprot->writeListEnd();
   }
@@ -7972,10 +7972,10 @@ uint32_t ThriftHiveMetastore_add_partitions_pspec_pargs::write(::apache::thrift:
   xfer += oprot->writeFieldBegin("new_parts", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>((*(this->new_parts)).size()));
-    std::vector<PartitionSpec> ::const_iterator _iter859;
-    for (_iter859 = (*(this->new_parts)).begin(); _iter859 != (*(this->new_parts)).end(); ++_iter859)
+    std::vector<PartitionSpec> ::const_iterator _iter858;
+    for (_iter858 = (*(this->new_parts)).begin(); _iter858 != (*(this->new_parts)).end(); ++_iter858)
     {
-      xfer += (*_iter859).write(oprot);
+      xfer += (*_iter858).write(oprot);
     }
     xfer += oprot->writeListEnd();
   }
@@ -8200,14 +8200,14 @@ uint32_t ThriftHiveMetastore_append_partition_args::read(::apache::thrift::proto
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size860;
-            ::apache::thrift::protocol::TType _etype863;
-            xfer += iprot->readListBegin(_etype863, _size860);
-            this->part_vals.resize(_size860);
-            uint32_t _i864;
-            for (_i864 = 0; _i864 < _size860; ++_i864)
+            uint32_t _size859;
+            ::apache::thrift::protocol::TType _etype862;
+            xfer += iprot->readListBegin(_etype862, _size859);
+            this->part_vals.resize(_size859);
+            uint32_t _i863;
+            for (_i863 = 0; _i863 < _size859; ++_i863)
             {
-              xfer += iprot->readString(this->part_vals[_i864]);
+              xfer += iprot->readString(this->part_vals[_i863]);
             }
             xfer += iprot->readListEnd();
           }
@@ -8244,10 +8244,10 @@ uint32_t ThriftHiveMetastore_append_partition_args::write(::apache::thrift::prot
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter865;
-    for (_iter865 = this->part_vals.begin(); _iter865 != this->part_vals.end(); ++_iter865)
+    std::vector<std::string> ::const_iterator _iter864;
+    for (_iter864 = this->part_vals.begin(); _iter864 != this->part_vals.end(); ++_iter864)
     {
-      xfer += oprot->writeString((*_iter865));
+      xfer += oprot->writeString((*_iter864));
     }
     xfer += oprot->writeListEnd();
   }
@@ -8279,10 +8279,10 @@ uint32_t ThriftHiveMetastore_append_partition_pargs::write(::apache::thrift::pro
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter866;
-    for (_iter866 = (*(this->part_vals)).begin(); _iter866 != (*(this->part_vals)).end(); ++_iter866)
+    std::vector<std::string> ::const_iterator _iter865;
+    for (_iter865 = (*(this->part_vals)).begin(); _iter865 != (*(this->part_vals)).end(); ++_iter865)
     {
-      xfer += oprot->writeString((*_iter866));
+      xfer += oprot->writeString((*_iter865));
     }
     xfer += oprot->writeListEnd();
   }
@@ -8754,14 +8754,14 @@ uint32_t ThriftHiveMetastore_append_partition_with_environment_context_args::rea
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size867;
-            ::apache::thrift::protocol::TType _etype870;
-            xfer += iprot->readListBegin(_etype870, _size867);
-            this->part_vals.resize(_size867);
-            uint32_t _i871;
-            for (_i871 = 0; _i871 < _size867; ++_i871)
+            uint32_t _size866;
+            ::apache::thrift::protocol::TType _etype869;
+            xfer += iprot->readListBegin(_etype869, _size866);
+            this->part_vals.resize(_size866);
+            uint32_t _i870;
+            for (_i870 = 0; _i870 < _size866; ++_i870)
             {
-              xfer += iprot->readString(this->part_vals[_i871]);
+              xfer += iprot->readString(this->part_vals[_i870]);
             }
             xfer += iprot->readListEnd();
           }
@@ -8806,10 +8806,10 @@ uint32_t ThriftHiveMetastore_append_partition_with_environment_context_args::wri
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter872;
-    for (_iter872 = this->part_vals.begin(); _iter872 != this->part_vals.end(); ++_iter872)
+    std::vector<std::string> ::const_iterator _iter871;
+    for (_iter871 = this->part_vals.begin(); _iter871 != this->part_vals.end(); ++_iter871)
     {
-      xfer += oprot->writeString((*_iter872));
+      xfer += oprot->writeString((*_iter871));
     }
     xfer += oprot->writeListEnd();
   }
@@ -8845,10 +8845,10 @@ uint32_t ThriftHiveMetastore_append_partition_with_environment_context_pargs::wr
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter873;
-    for (_iter873 = (*(this->part_vals)).begin(); _iter873 != (*(this->part_vals)).end(); ++_iter873)
+    std::vector<std::string> ::const_iterator _iter872;
+    for (_iter872 = (*(this->part_vals)).begin(); _iter872 != (*(this->part_vals)).end(); ++_iter872)
     {
-      xfer += oprot->writeString((*_iter873));
+      xfer += oprot->writeString((*_iter872));
     }
     xfer += oprot->writeListEnd();
   }
@@ -9651,14 +9651,14 @@ uint32_t ThriftHiveMetastore_drop_partition_args::read(::apache::thrift::protoco
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size874;
-            ::apache::thrift::protocol::TType _etype877;
-            xfer += iprot->readListBegin(_etype877, _size874);
-            this->part_vals.resize(_size874);
-            uint32_t _i878;
-            for (_i878 = 0; _i878 < _size874; ++_i878)
+            uint32_t _size873;
+            ::apache::thrift::protocol::TType _etype876;
+            xfer += iprot->readListBegin(_etype876, _size873);
+            this->part_vals.resize(_size873);
+            uint32_t _i877;
+            for (_i877 = 0; _i877 < _size873; ++_i877)
             {
-              xfer += iprot->readString(this->part_vals[_i878]);
+              xfer += iprot->readString(this->part_vals[_i877]);
             }
             xfer += iprot->readListEnd();
           }
@@ -9703,10 +9703,10 @@ uint32_t ThriftHiveMetastore_drop_partition_args::write(::apache::thrift::protoc
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter879;
-    for (_iter879 = this->part_vals.begin(); _iter879 != this->part_vals.end(); ++_iter879)
+    std::vector<std::string> ::const_iterator _iter878;
+    for (_iter878 = this->part_vals.begin(); _iter878 != this->part_vals.end(); ++_iter878)
     {
-      xfer += oprot->writeString((*_iter879));
+      xfer += oprot->writeString((*_iter878));
     }
     xfer += oprot->writeListEnd();
   }
@@ -9742,10 +9742,10 @@ uint32_t ThriftHiveMetastore_drop_partition_pargs::write(::apache::thrift::proto
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter880;
-    for (_iter880 = (*(this->part_vals)).begin(); _iter880 != (*(this->part_vals)).end(); ++_iter880)
+    std::vector<std::string> ::const_iterator _iter879;
+    for (_iter879 = (*(this->part_vals)).begin(); _iter879 != (*(this->part_vals)).end(); ++_iter879)
     {
-      xfer += oprot->writeString((*_iter880));
+      xfer += oprot->writeString((*_iter879));
     }
     xfer += oprot->writeListEnd();
   }
@@ -9954,14 +9954,14 @@ uint32_t ThriftHiveMetastore_drop_partition_with_environment_context_args::read(
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size881;
-            ::apache::thrift::protocol::TType _etype884;
-            xfer += iprot->readListBegin(_etype884, _size881);
-            this->part_vals.resize(_size881);
-            uint32_t _i885;
-            for (_i885 = 0; _i885 < _size881; ++_i885)
+            uint32_t _size880;
+            ::apache::thrift::protocol::TType _etype883;
+            xfer += iprot->readListBegin(_etype883, _size880);
+            this->part_vals.resize(_size880);
+            uint32_t _i884;
+            for (_i884 = 0; _i884 < _size880; ++_i884)
             {
-              xfer += iprot->readString(this->part_vals[_i885]);
+              xfer += iprot->readString(this->part_vals[_i884]);
             }
             xfer += iprot->readListEnd();
           }
@@ -10014,10 +10014,10 @@ uint32_t ThriftHiveMetastore_drop_partition_with_environment_context_args::write
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter886;
-    for (_iter886 = this->part_vals.begin(); _iter886 != this->part_vals.end(); ++_iter886)
+    std::vector<std::string> ::const_iterator _iter885;
+    for (_iter885 = this->part_vals.begin(); _iter885 != this->part_vals.end(); ++_iter885)
     {
-      xfer += oprot->writeString((*_iter886));
+      xfer += oprot->writeString((*_iter885));
     }
     xfer += oprot->writeListEnd();
   }
@@ -10057,10 +10057,10 @@ uint32_t ThriftHiveMetastore_drop_partition_with_environment_context_pargs::writ
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter887;
-    for (_iter887 = (*(this->part_vals)).begin(); _iter887 != (*(this->part_vals)).end(); ++_iter887)
+    std::vector<std::string> ::const_iterator _iter886;
+    for (_iter886 = (*(this->part_vals)).begin(); _iter886 != (*(this->part_vals)).end(); ++_iter886)
     {
-      xfer += oprot->writeString((*_iter887));
+      xfer += oprot->writeString((*_iter886));
     }
     xfer += oprot->writeListEnd();
   }
@@ -11066,14 +11066,14 @@ uint32_t ThriftHiveMetastore_get_partition_args::read(::apache::thrift::protocol
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size888;
-            ::apache::thrift::protocol::TType _etype891;
-            xfer += iprot->readListBegin(_etype891, _size888);
-            this->part_vals.resize(_size888);
-            uint32_t _i892;
-            for (_i892 = 0; _i892 < _size888; ++_i892)
+            uint32_t _size887;
+            ::apache::thrift::protocol::TType _etype890;
+            xfer += iprot->readListBegin(_etype890, _size887);
+            this->part_vals.resize(_size887);
+            uint32_t _i891;
+            for (_i891 = 0; _i891 < _size887; ++_i891)
             {
-              xfer += iprot->readString(this->part_vals[_i892]);
+              xfer += iprot->readString(this->part_vals[_i891]);
             }
             xfer += iprot->readListEnd();
           }
@@ -11110,10 +11110,10 @@ uint32_t ThriftHiveMetastore_get_partition_args::write(::apache::thrift::protoco
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter893;
-    for (_iter893 = this->part_vals.begin(); _iter893 != this->part_vals.end(); ++_iter893)
+    std::vector<std::string> ::const_iterator _iter892;
+    for (_iter892 = this->part_vals.begin(); _iter892 != this->part_vals.end(); ++_iter892)
     {
-      xfer += oprot->writeString((*_iter893));
+      xfer += oprot->writeString((*_iter892));
     }
     xfer += oprot->writeListEnd();
   }
@@ -11145,10 +11145,10 @@ uint32_t ThriftHiveMetastore_get_partition_pargs::write(::apache::thrift::protoc
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter894;
-    for (_iter894 = (*(this->part_vals)).begin(); _iter894 != (*(this->part_vals)).end(); ++_iter894)
+    std::vector<std::string> ::const_iterator _iter893;
+    for (_iter893 = (*(this->part_vals)).begin(); _iter893 != (*(this->part_vals)).end(); ++_iter893)
     {
-      xfer += oprot->writeString((*_iter894));
+      xfer += oprot->writeString((*_iter893));
     }
     xfer += oprot->writeListEnd();
   }
@@ -11337,17 +11337,17 @@ uint32_t ThriftHiveMetastore_exchange_partition_args::read(::apache::thrift::pro
         if (ftype == ::apache::thrift::protocol::T_MAP) {
           {
             this->partitionSpecs.clear();
-            uint32_t _size895;
-            ::apache::thrift::protocol::TType _ktype896;
-            ::apache::thrift::protocol::TType _vtype897;
-            xfer += iprot->readMapBegin(_ktype896, _vtype897, _size895);
-            uint32_t _i899;
-            for (_i899 = 0; _i899 < _size895; ++_i899)
+            uint32_t _size894;
+            ::apache::thrift::protocol::TType _ktype895;
+            ::apache::thrift::protocol::TType _vtype896;
+            xfer += iprot->readMapBegin(_ktype895, _vtype896, _size894);
+            uint32_t _i898;
+            for (_i898 = 0; _i898 < _size894; ++_i898)
             {
-              std::string _key900;
-              xfer += iprot->readString(_key900);
-              std::string& _val901 = this->partitionSpecs[_key900];
-              xfer += iprot->readString(_val901);
+              std::string _key899;
+              xfer += iprot->readString(_key899);
+              std::string& _val900 = this->partitionSpecs[_key899];
+              xfer += iprot->readString(_val900);
             }
             xfer += iprot->readMapEnd();
           }
@@ -11408,11 +11408,11 @@ uint32_t ThriftHiveMetastore_exchange_partition_args::write(::apache::thrift::pr
   xfer += oprot->writeFieldBegin("partitionSpecs", ::apache::thrift::protocol::T_MAP, 1);
   {
     xfer += oprot->writeMapBegin(::apache::thrift::protocol::T_STRING, ::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->partitionSpecs.size()));
-    std::map<std::string, std::string> ::const_iterator _iter902;
-    for (_iter902 = this->partitionSpecs.begin(); _iter902 != this->partitionSpecs.end(); ++_iter902)
+    std::map<std::string, std::string> ::const_iterator _iter901;
+    for (_iter901 = this->partitionSpecs.begin(); _iter901 != this->partitionSpecs.end(); ++_iter901)
     {
-      xfer += oprot->writeString(_iter902->first);
-      xfer += oprot->writeString(_iter902->second);
+      xfer += oprot->writeString(_iter901->first);
+      xfer += oprot->writeString(_iter901->second);
     }
     xfer += oprot->writeMapEnd();
   }
@@ -11452,11 +11452,11 @@ uint32_t ThriftHiveMetastore_exchange_partition_pargs::write(::apache::thrift::p
   xfer += oprot->writeFieldBegin("partitionSpecs", ::apache::thrift::protocol::T_MAP, 1);
   {
     xfer += oprot->writeMapBegin(::apache::thrift::protocol::T_STRING, ::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->partitionSpecs)).size()));
-    std::map<std::string, std::string> ::const_iterator _iter903;
-    for (_iter903 = (*(this->partitionSpecs)).begin(); _iter903 != (*(this->partitionSpecs)).end(); ++_iter903)
+    std::map<std::string, std::string> ::const_iterator _iter902;
+    for (_iter902 = (*(this->partitionSpecs)).begin(); _iter902 != (*(this->partitionSpecs)).end(); ++_iter902)
     {
-      xfer += oprot->writeString(_iter903->first);
-      xfer += oprot->writeString(_iter903->second);
+      xfer += oprot->writeString(_iter902->first);
+      xfer += oprot->writeString(_iter902->second);
     }
     xfer += oprot->writeMapEnd();
   }
@@ -11717,14 +11717,14 @@ uint32_t ThriftHiveMetastore_get_partition_with_auth_args::read(::apache::thrift
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size904;
-            ::apache::thrift::protocol::TType _etype907;
-            xfer += iprot->readListBegin(_etype907, _size904);
-            this->part_vals.resize(_size904);
-            uint32_t _i908;
-            for (_i908 = 0; _i908 < _size904; ++_i908)
+            uint32_t _size903;
+            ::apache::thrift::protocol::TType _etype906;
+            xfer += iprot->readListBegin(_etype906, _size903);
+            this->part_vals.resize(_size903);
+            uint32_t _i907;
+            for (_i907 = 0; _i907 < _size903; ++_i907)
             {
-              xfer += iprot->readString(this->part_vals[_i908]);
+              xfer += iprot->readString(this->part_vals[_i907]);
             }
             xfer += iprot->readListEnd();
           }
@@ -11745,14 +11745,14 @@ uint32_t ThriftHiveMetastore_get_partition_with_auth_args::read(::apache::thrift
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->group_names.clear();
-            uint32_t _size909;
-            ::apache::thrift::protocol::TType _etype912;
-            xfer += iprot->readListBegin(_etype912, _size909);
-            this->group_names.resize(_size909);
-            uint32_t _i913;
-            for (_i913 = 0; _i913 < _size909; ++_i913)
+            uint32_t _size908;
+            ::apache::thrift::protocol::TType _etype911;
+            xfer += iprot->readListBegin(_etype911, _size908);
+            this->group_names.resize(_size908);
+            uint32_t _i912;
+            for (_i912 = 0; _i912 < _size908; ++_i912)
             {
-              xfer += iprot->readString(this->group_names[_i913]);
+              xfer += iprot->readString(this->group_names[_i912]);
             }
             xfer += iprot->readListEnd();
           }
@@ -11789,10 +11789,10 @@ uint32_t ThriftHiveMetastore_get_partition_with_auth_args::write(::apache::thrif
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter914;
-    for (_iter914 = this->part_vals.begin(); _iter914 != this->part_vals.end(); ++_iter914)
+    std::vector<std::string> ::const_iterator _iter913;
+    for (_iter913 = this->part_vals.begin(); _iter913 != this->part_vals.end(); ++_iter913)
     {
-      xfer += oprot->writeString((*_iter914));
+      xfer += oprot->writeString((*_iter913));
     }
     xfer += oprot->writeListEnd();
   }
@@ -11805,10 +11805,10 @@ uint32_t ThriftHiveMetastore_get_partition_with_auth_args::write(::apache::thrif
   xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 5);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->group_names.size()));
-    std::vector<std::string> ::const_iterator _iter915;
-    for (_iter915 = this->group_names.begin(); _iter915 != this->group_names.end(); ++_iter915)
+    std::vector<std::string> ::const_iterator _iter914;
+    for (_iter914 = this->group_names.begin(); _iter914 != this->group_names.end(); ++_iter914)
     {
-      xfer += oprot->writeString((*_iter915));
+      xfer += oprot->writeString((*_iter914));
     }
     xfer += oprot->writeListEnd();
   }
@@ -11840,10 +11840,10 @@ uint32_t ThriftHiveMetastore_get_partition_with_auth_pargs::write(::apache::thri
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter916;
-    for (_iter916 = (*(this->part_vals)).begin(); _iter916 != (*(this->part_vals)).end(); ++_iter916)
+    std::vector<std::string> ::const_iterator _iter915;
+    for (_iter915 = (*(this->part_vals)).begin(); _iter915 != (*(this->part_vals)).end(); ++_iter915)
     {
-      xfer += oprot->writeString((*_iter916));
+      xfer += oprot->writeString((*_iter915));
     }
     xfer += oprot->writeListEnd();
   }
@@ -11856,10 +11856,10 @@ uint32_t ThriftHiveMetastore_get_partition_with_auth_pargs::write(::apache::thri
   xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 5);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->group_names)).size()));
-    std::vector<std::string> ::const_iterator _iter917;
-    for (_iter917 = (*(this->group_names)).begin(); _iter917 != (*(this->group_names)).end(); ++_iter917)
+    std::vector<std::string> ::const_iterator _iter916;
+    for (_iter916 = (*(this->group_names)).begin(); _iter916 != (*(this->group_names)).end(); ++_iter916)
     {
-      xfer += oprot->writeString((*_iter917));
+      xfer += oprot->writeString((*_iter916));
     }
     xfer += oprot->writeListEnd();
   }
@@ -12418,14 +12418,14 @@ uint32_t ThriftHiveMetastore_get_partitions_result::read(::apache::thrift::proto
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size918;
-            ::apache::thrift::protocol::TType _etype921;
-            xfer += iprot->readListBegin(_etype921, _size918);
-            this->success.resize(_size918);
-            uint32_t _i922;
-            for (_i922 = 0; _i922 < _size918; ++_i922)
+            uint32_t _size917;
+            ::apache::thrift::protocol::TType _etype920;
+            xfer += iprot->readListBegin(_etype920, _size917);
+            this->success.resize(_size917);
+            uint32_t _i921;
+            for (_i921 = 0; _i921 < _size917; ++_i921)
             {
-              xfer += this->success[_i922].read(iprot);
+              xfer += this->success[_i921].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -12472,10 +12472,10 @@ uint32_t ThriftHiveMetastore_get_partitions_result::write(::apache::thrift::prot
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<Partition> ::const_iterator _iter923;
-      for (_iter923 = this->success.begin(); _iter923 != this->success.end(); ++_iter923)
+      std::vector<Partition> ::const_iterator _iter922;
+      for (_iter922 = this->success.begin(); _iter922 != this->success.end(); ++_iter922)
       {
-        xfer += (*_iter923).write(oprot);
+        xfer += (*_iter922).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -12524,14 +12524,14 @@ uint32_t ThriftHiveMetastore_get_partitions_presult::read(::apache::thrift::prot
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size924;
-            ::apache::thrift::protocol::TType _etype927;
-            xfer += iprot->readListBegin(_etype927, _size924);
-            (*(this->success)).resize(_size924);
-            uint32_t _i928;
-            for (_i928 = 0; _i928 < _size924; ++_i928)
+            uint32_t _size923;
+            ::apache::thrift::protocol::TType _etype926;
+            xfer += iprot->readListBegin(_etype926, _size923);
+            (*(this->success)).resize(_size923);
+            uint32_t _i927;
+            for (_i927 = 0; _i927 < _size923; ++_i927)
             {
-              xfer += (*(this->success))[_i928].read(iprot);
+              xfer += (*(this->success))[_i927].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -12630,14 +12630,14 @@ uint32_t ThriftHiveMetastore_get_partitions_with_auth_args::read(::apache::thrif
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->group_names.clear();
-            uint32_t _size929;
-            ::apache::thrift::protocol::TType _etype932;
-            xfer += iprot->readListBegin(_etype932, _size929);
-            this->group_names.resize(_size929);
-            uint32_t _i933;
-            for (_i933 = 0; _i933 < _size929; ++_i933)
+            uint32_t _size928;
+            ::apache::thrift::protocol::TType _etype931;
+            xfer += iprot->readListBegin(_etype931, _size928);
+            this->group_names.resize(_size928);
+            uint32_t _i932;
+            for (_i932 = 0; _i932 < _size928; ++_i932)
             {
-              xfer += iprot->readString(this->group_names[_i933]);
+              xfer += iprot->readString(this->group_names[_i932]);
             }
             xfer += iprot->readListEnd();
           }
@@ -12682,10 +12682,10 @@ uint32_t ThriftHiveMetastore_get_partitions_with_auth_args::write(::apache::thri
   xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 5);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->group_names.size()));
-    std::vector<std::string> ::const_iterator _iter934;
-    for (_iter934 = this->group_names.begin(); _iter934 != this->group_names.end(); ++_iter934)
+    std::vector<std::string> ::const_iterator _iter933;
+    for (_iter933 = this->group_names.begin(); _iter933 != this->group_names.end(); ++_iter933)
     {
-      xfer += oprot->writeString((*_iter934));
+      xfer += oprot->writeString((*_iter933));
     }
     xfer += oprot->writeListEnd();
   }
@@ -12725,10 +12725,10 @@ uint32_t ThriftHiveMetastore_get_partitions_with_auth_pargs::write(::apache::thr
   xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 5);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->group_names)).size()));
-    std::vector<std::string> ::const_iterator _iter935;
-    for (_iter935 = (*(this->group_names)).begin(); _iter935 != (*(this->group_names)).end(); ++_iter935)
+    std::vector<std::string> ::const_iterator _iter934;
+    for (_iter934 = (*(this->group_names)).begin(); _iter934 != (*(this->group_names)).end(); ++_iter934)
     {
-      xfer += oprot->writeString((*_iter935));
+      xfer += oprot->writeString((*_iter934));
     }
     xfer += oprot->writeListEnd();
   }
@@ -12769,14 +12769,14 @@ uint32_t ThriftHiveMetastore_get_partitions_with_auth_result::read(::apache::thr
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size936;
-            ::apache::thrift::protocol::TType _etype939;
-            xfer += iprot->readListBegin(_etype939, _size936);
-            this->success.resize(_size936);
-            uint32_t _i940;
-            for (_i940 = 0; _i940 < _size936; ++_i940)
+            uint32_t _size935;
+            ::apache::thrift::protocol::TType _etype938;
+            xfer += iprot->readListBegin(_etype938, _size935);
+            this->success.resize(_size935);
+            uint32_t _i939;
+            for (_i939 = 0; _i939 < _size935; ++_i939)
             {
-              xfer += this->success[_i940].read(iprot);
+              xfer += this->success[_i939].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -12823,10 +12823,10 @@ uint32_t ThriftHiveMetastore_get_partitions_with_auth_result::write(::apache::th
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<Partition> ::const_iterator _iter941;
-      for (_iter941 = this->success.begin(); _iter941 != this->success.end(); ++_iter941)
+      std::vector<Partition> ::const_iterator _iter940;
+      for (_iter940 = this->success.begin(); _iter940 != this->success.end(); ++_iter940)
       {
-        xfer += (*_iter941).write(oprot);
+        xfer += (*_iter940).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -12875,14 +12875,14 @@ uint32_t ThriftHiveMetastore_get_partitions_with_auth_presult::read(::apache::th
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size942;
-            ::apache::thrift::protocol::TType _etype945;
-            xfer += iprot->readListBegin(_etype945, _size942);
-            (*(this->success)).resize(_size942);
-            uint32_t _i946;
-            for (_i946 = 0; _i946 < _size942; ++_i946)
+            uint32_t _size941;
+            ::apache::thrift::protocol::TType _etype944;
+            xfer += iprot->readListBegin(_etype944, _size941);
+            (*(this->success)).resize(_size941);
+            uint32_t _i945;
+            for (_i945 = 0; _i945 < _size941; ++_i945)
             {
-              xfer += (*(this->success))[_i946].read(iprot);
+              xfer += (*(this->success))[_i945].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -13060,14 +13060,14 @@ uint32_t ThriftHiveMetastore_get_partitions_pspec_result::read(::apache::thrift:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size947;
-            ::apache::thrift::protocol::TType _etype950;
-            xfer += iprot->readListBegin(_etype950, _size947);
-            this->success.resize(_size947);
-            uint32_t _i951;
-            for (_i951 = 0; _i951 < _size947; ++_i951)
+            uint32_t _size946;
+            ::apache::thrift::protocol::TType _etype949;
+            xfer += iprot->readListBegin(_etype949, _size946);
+            this->success.resize(_size946);
+            uint32_t _i950;
+            for (_i950 = 0; _i950 < _size946; ++_i950)
             {
-              xfer += this->success[_i951].read(iprot);
+              xfer += this->success[_i950].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -13114,10 +13114,10 @@ uint32_t ThriftHiveMetastore_get_partitions_pspec_result::write(::apache::thrift
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<PartitionSpec> ::const_iterator _iter952;
-      for (_iter952 = this->success.begin(); _iter952 != this->success.end(); ++_iter952)
+      std::vector<PartitionSpec> ::const_iterator _iter951;
+      for (_iter951 = this->success.begin(); _iter951 != this->success.end(); ++_iter951)
       {
-        xfer += (*_iter952).write(oprot);
+        xfer += (*_iter951).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -13166,14 +13166,14 @@ uint32_t ThriftHiveMetastore_get_partitions_pspec_presult::read(::apache::thrift
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size953;
-            ::apache::thrift::protocol::TType _etype956;
-            xfer += iprot->readListBegin(_etype956, _size953);
-            (*(this->success)).resize(_size953);
-            uint32_t _i957;
-            for (_i957 = 0; _i957 < _size953; ++_i957)
+            uint32_t _size952;
+            ::apache::thrift::protocol::TType _etype955;
+            xfer += iprot->readListBegin(_etype955, _size952);
+            (*(this->success)).resize(_size952);
+            uint32_t _i956;
+            for (_i956 = 0; _i956 < _size952; ++_i956)
             {
-              xfer += (*(this->success))[_i957].read(iprot);
+              xfer += (*(this->success))[_i956].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -13351,14 +13351,14 @@ uint32_t ThriftHiveMetastore_get_partition_names_result::read(::apache::thrift::
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size958;
-            ::apache::thrift::protocol::TType _etype961;
-            xfer += iprot->readListBegin(_etype961, _size958);
-            this->success.resize(_size958);
-            uint32_t _i962;
-            for (_i962 = 0; _i962 < _size958; ++_i962)
+            uint32_t _size957;
+            ::apache::thrift::protocol::TType _etype960;
+            xfer += iprot->readListBegin(_etype960, _size957);
+            this->success.resize(_size957);
+            uint32_t _i961;
+            for (_i961 = 0; _i961 < _size957; ++_i961)
             {
-              xfer += iprot->readString(this->success[_i962]);
+              xfer += iprot->readString(this->success[_i961]);
             }
             xfer += iprot->readListEnd();
           }
@@ -13397,10 +13397,10 @@ uint32_t ThriftHiveMetastore_get_partition_names_result::write(::apache::thrift:
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter963;
-      for (_iter963 = this->success.begin(); _iter963 != this->success.end(); ++_iter963)
+      std::vector<std::string> ::const_iterator _iter962;
+      for (_iter962 = this->success.begin(); _iter962 != this->success.end(); ++_iter962)
       {
-        xfer += oprot->writeString((*_iter963));
+        xfer += oprot->writeString((*_iter962));
       }
       xfer += oprot->writeListEnd();
     }
@@ -13445,14 +13445,14 @@ uint32_t ThriftHiveMetastore_get_partition_names_presult::read(::apache::thrift:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size964;
-            ::apache::thrift::protocol::TType _etype967;
-            xfer += iprot->readListBegin(_etype967, _size964);
-            (*(this->success)).resize(_size964);
-            uint32_t _i968;
-            for (_i968 = 0; _i968 < _size964; ++_i968)
+            uint32_t _size963;
+            ::apache::thrift::protocol::TType _etype966;
+            xfer += iprot->readListBegin(_etype966, _size963);
+            (*(this->success)).resize(_size963);
+            uint32_t _i967;
+            for (_i967 = 0; _i967 < _size963; ++_i967)
             {
-              xfer += iprot->readString((*(this->success))[_i968]);
+              xfer += iprot->readString((*(this->success))[_i967]);
             }
             xfer += iprot->readListEnd();
           }
@@ -13527,14 +13527,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_args::read(::apache::thrift::prot
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size969;
-            ::apache::thrift::protocol::TType _etype972;
-            xfer += iprot->readListBegin(_etype972, _size969);
-            this->part_vals.resize(_size969);
-            uint32_t _i973;
-            for (_i973 = 0; _i973 < _size969; ++_i973)
+            uint32_t _size968;
+            ::apache::thrift::protocol::TType _etype971;
+            xfer += iprot->readListBegin(_etype971, _size968);
+            this->part_vals.resize(_size968);
+            uint32_t _i972;
+            for (_i972 = 0; _i972 < _size968; ++_i972)
             {
-              xfer += iprot->readString(this->part_vals[_i973]);
+              xfer += iprot->readString(this->part_vals[_i972]);
             }
             xfer += iprot->readListEnd();
           }
@@ -13579,10 +13579,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_args::write(::apache::thrift::pro
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter974;
-    for (_iter974 = this->part_vals.begin(); _iter974 != this->part_vals.end(); ++_iter974)
+    std::vector<std::string> ::const_iterator _iter973;
+    for (_iter973 = this->part_vals.begin(); _iter973 != this->part_vals.end(); ++_iter973)
     {
-      xfer += oprot->writeString((*_iter974));
+      xfer += oprot->writeString((*_iter973));
     }
     xfer += oprot->writeListEnd();
   }
@@ -13618,10 +13618,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_pargs::write(::apache::thrift::pr
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter975;
-    for (_iter975 = (*(this->part_vals)).begin(); _iter975 != (*(this->part_vals)).end(); ++_iter975)
+    std::vector<std::string> ::const_iterator _iter974;
+    for (_iter974 = (*(this->part_vals)).begin(); _iter974 != (*(this->part_vals)).end(); ++_iter974)
     {
-      xfer += oprot->writeString((*_iter975));
+      xfer += oprot->writeString((*_iter974));
     }
     xfer += oprot->writeListEnd();
   }
@@ -13666,14 +13666,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_result::read(::apache::thrift::pr
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size976;
-            ::apache::thrift::protocol::TType _etype979;
-            xfer += iprot->readListBegin(_etype979, _size976);
-            this->success.resize(_size976);
-            uint32_t _i980;
-            for (_i980 = 0; _i980 < _size976; ++_i980)
+            uint32_t _size975;
+            ::apache::thrift::protocol::TType _etype978;
+            xfer += iprot->readListBegin(_etype978, _size975);
+            this->success.resize(_size975);
+            uint32_t _i979;
+            for (_i979 = 0; _i979 < _size975; ++_i979)
             {
-              xfer += this->success[_i980].read(iprot);
+              xfer += this->success[_i979].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -13720,10 +13720,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_result::write(::apache::thrift::p
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<Partition> ::const_iterator _iter981;
-      for (_iter981 = this->success.begin(); _iter981 != this->success.end(); ++_iter981)
+      std::vector<Partition> ::const_iterator _iter980;
+      for (_iter980 = this->success.begin(); _iter980 != this->success.end(); ++_iter980)
       {
-        xfer += (*_iter981).write(oprot);
+        xfer += (*_iter980).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -13772,14 +13772,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_presult::read(::apache::thrift::p
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size982;
-            ::apache::thrift::protocol::TType _etype985;
-            xfer += iprot->readListBegin(_etype985, _size982);
-            (*(this->success)).resize(_size982);
-            uint32_t _i986;
-            for (_i986 = 0; _i986 < _size982; ++_i986)
+            uint32_t _size981;
+            ::apache::thrift::protocol::TType _etype984;
+            xfer += iprot->readListBegin(_etype984, _size981);
+            (*(this->success)).resize(_size981);
+            uint32_t _i985;
+            for (_i985 = 0; _i985 < _size981; ++_i985)
             {
-              xfer += (*(this->success))[_i986].read(iprot);
+              xfer += (*(this->success))[_i985].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -13862,14 +13862,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_args::read(::apache::th
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size987;
-            ::apache::thrift::protocol::TType _etype990;
-            xfer += iprot->readListBegin(_etype990, _size987);
-            this->part_vals.resize(_size987);
-            uint32_t _i991;
-            for (_i991 = 0; _i991 < _size987; ++_i991)
+            uint32_t _size986;
+            ::apache::thrift::protocol::TType _etype989;
+            xfer += iprot->readListBegin(_etype989, _size986);
+            this->part_vals.resize(_size986);
+            uint32_t _i990;
+            for (_i990 = 0; _i990 < _size986; ++_i990)
             {
-              xfer += iprot->readString(this->part_vals[_i991]);
+              xfer += iprot->readString(this->part_vals[_i990]);
             }
             xfer += iprot->readListEnd();
           }
@@ -13898,14 +13898,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_args::read(::apache::th
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->group_names.clear();
-            uint32_t _size992;
-            ::apache::thrift::protocol::TType _etype995;
-            xfer += iprot->readListBegin(_etype995, _size992);
-            this->group_names.resize(_size992);
-            uint32_t _i996;
-            for (_i996 = 0; _i996 < _size992; ++_i996)
+            uint32_t _size991;
+            ::apache::thrift::protocol::TType _etype994;
+            xfer += iprot->readListBegin(_etype994, _size991);
+            this->group_names.resize(_size991);
+            uint32_t _i995;
+            for (_i995 = 0; _i995 < _size991; ++_i995)
             {
-              xfer += iprot->readString(this->group_names[_i996]);
+              xfer += iprot->readString(this->group_names[_i995]);
             }
             xfer += iprot->readListEnd();
           }
@@ -13942,10 +13942,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_args::write(::apache::t
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter997;
-    for (_iter997 = this->part_vals.begin(); _iter997 != this->part_vals.end(); ++_iter997)
+    std::vector<std::string> ::const_iterator _iter996;
+    for (_iter996 = this->part_vals.begin(); _iter996 != this->part_vals.end(); ++_iter996)
     {
-      xfer += oprot->writeString((*_iter997));
+      xfer += oprot->writeString((*_iter996));
     }
     xfer += oprot->writeListEnd();
   }
@@ -13962,10 +13962,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_args::write(::apache::t
   xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 6);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->group_names.size()));
-    std::vector<std::string> ::const_iterator _iter998;
-    for (_iter998 = this->group_names.begin(); _iter998 != this->group_names.end(); ++_iter998)
+    std::vector<std::string> ::const_iterator _iter997;
+    for (_iter997 = this->group_names.begin(); _iter997 != this->group_names.end(); ++_iter997)
     {
-      xfer += oprot->writeString((*_iter998));
+      xfer += oprot->writeString((*_iter997));
     }
     xfer += oprot->writeListEnd();
   }
@@ -13997,10 +13997,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_pargs::write(::apache::
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
-    std::vector<std::string> ::const_iterator _iter999;
-    for (_iter999 = (*(this->part_vals)).begin(); _iter999 != (*(this->part_vals)).end(); ++_iter999)
+    std::vector<std::string> ::const_iterator _iter998;
+    for (_iter998 = (*(this->part_vals)).begin(); _iter998 != (*(this->part_vals)).end(); ++_iter998)
     {
-      xfer += oprot->writeString((*_iter999));
+      xfer += oprot->writeString((*_iter998));
     }
     xfer += oprot->writeListEnd();
   }
@@ -14017,10 +14017,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_pargs::write(::apache::
   xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 6);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->group_names)).size()));
-    std::vector<std::string> ::const_iterator _iter1000;
-    for (_iter1000 = (*(this->group_names)).begin(); _iter1000 != (*(this->group_names)).end(); ++_iter1000)
+    std::vector<std::string> ::const_iterator _iter999;
+    for (_iter999 = (*(this->group_names)).begin(); _iter999 != (*(this->group_names)).end(); ++_iter999)
     {
-      xfer += oprot->writeString((*_iter1000));
+      xfer += oprot->writeString((*_iter999));
     }
     xfer += oprot->writeListEnd();
   }
@@ -14061,14 +14061,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_result::read(::apache::
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->success.clear();
-            uint32_t _size1001;
-            ::apache::thrift::protocol::TType _etype1004;
-            xfer += iprot->readListBegin(_etype1004, _size1001);
-            this->success.resize(_size1001);
-            uint32_t _i1005;
-            for (_i1005 = 0; _i1005 < _size1001; ++_i1005)
+            uint32_t _size1000;
+            ::apache::thrift::protocol::TType _etype1003;
+            xfer += iprot->readListBegin(_etype1003, _size1000);
+            this->success.resize(_size1000);
+            uint32_t _i1004;
+            for (_i1004 = 0; _i1004 < _size1000; ++_i1004)
             {
-              xfer += this->success[_i1005].read(iprot);
+              xfer += this->success[_i1004].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -14115,10 +14115,10 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_result::write(::apache:
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector<Partition> ::const_iterator _iter1006;
-      for (_iter1006 = this->success.begin(); _iter1006 != this->success.end(); ++_iter1006)
+      std::vector<Partition> ::const_iterator _iter1005;
+      for (_iter1005 = this->success.begin(); _iter1005 != this->success.end(); ++_iter1005)
       {
-        xfer += (*_iter1006).write(oprot);
+        xfer += (*_iter1005).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -14167,14 +14167,14 @@ uint32_t ThriftHiveMetastore_get_partitions_ps_with_auth_presult::read(::apache:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             (*(this->success)).clear();
-            uint32_t _size1007;
-            ::apache::thrift::protocol::TType _etype1010;
-            xfer += iprot->readListBegin(_etype1010, _size1007);
-            (*(this->success)).resize(_size1007);
-            uint32_t _i1011;
-            for (_i1011 = 0; _i1011 < _size1007; ++_i1011)
+            uint32_t _size1006;
+            ::apache::thrift::protocol::TType _etype1009;
+            xfer += iprot->readListBegin(_etype1009, _size1006);
+            (*(this->success)).resize(_size1006);
+            uint32_t _i1010;
+            for (_i1010 = 0; _i1010 < _size1006; ++_i1010)
             {
-              xfer += (*(this->success))[_i1011].read(iprot);
+              xfer += (*(this->success))[_i1010].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -14257,14 +14257,14 @@ uint32_t ThriftHiveMetastore_get_partition_names_ps_args::read(::apache::thrift:
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->part_vals.clear();
-            uint32_t _size1012;
-            ::apache::thrift::protocol::TType _etype1015;
-            xfer += iprot->readListBegin(_etype1015, _size1012);
-            this->part_vals.resize(_size1012);
-            uint32_t _i1016;
-            for (_i1016 = 0; _i1016 < _size1012; ++_i1016)
+            uint32_t _size1011;
+            ::apache::thrift::protocol::TType _etype1014;
+            xfer += iprot->readListBegin(_etype1014, _size1011);
+            this->part_vals.resize(_size1011);
+            uint32_t _i1015;
+            for (_i1015 = 0; _i1015 < _size1011; ++_i1015)
             {
-              xfer += iprot->readString(this->part_vals[_i1016]);
+              xfer += iprot->readString(this->part_vals[_i1015]);
             }
             xfer += iprot->readListEnd();
           }
@@ -14309,10 +14309,10 @@ uint32_t ThriftHiveMetastore_get_partition_names_ps_args::write(::apache::thrift
   xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
-    std::vector<std::string> ::const_iterator _iter1017;
-    for (_iter1017 = this->part_vals.begin(); _iter1017 != this->part_vals.end(); ++_iter1017)
+    std::vector<std::string> ::const_iterator _iter1016;
+    for (_iter1016 = this->part_vals.begin(); _iter1016 != this->part_vals.end(); ++_iter1016)
     {
-      xfer += oprot->writeString((*_iter1017));
+      xfer

<TRUNCATED>
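
Editor's note: the truncated hunks above are purely mechanical. Regenerating with Thrift 0.9.3 renumbers the temporaries (_sizeNNN, _etypeNNN, _iNNN, _iterNNN) used by the generated list (de)serialization loops; the logic is unchanged. For reference, every one of those hunks is an instance of the same pattern, sketched here with hypothetical function names against the Thrift C++ protocol API:

#include <string>
#include <vector>
#include <thrift/protocol/TProtocol.h>

using apache::thrift::protocol::TProtocol;
using apache::thrift::protocol::TType;
using apache::thrift::protocol::T_STRING;

// Read side: size/element-type temporaries, resize, per-element read loop.
uint32_t readStringList(TProtocol* iprot, std::vector<std::string>& out) {
  uint32_t xfer = 0;
  out.clear();
  uint32_t size;   // corresponds to _sizeNNN in the generated code
  TType etype;     // corresponds to _etypeNNN
  xfer += iprot->readListBegin(etype, size);
  out.resize(size);
  for (uint32_t i = 0; i < size; ++i) {   // corresponds to _iNNN
    xfer += iprot->readString(out[i]);
  }
  xfer += iprot->readListEnd();
  return xfer;
}

// Write side: const_iterator temporary over the vector.
uint32_t writeStringList(TProtocol* oprot, const std::vector<std::string>& in) {
  uint32_t xfer = 0;
  xfer += oprot->writeListBegin(T_STRING, static_cast<uint32_t>(in.size()));
  for (std::vector<std::string>::const_iterator it = in.begin();  // _iterNNN
       it != in.end(); ++it) {
    xfer += oprot->writeString(*it);
  }
  xfer += oprot->writeListEnd();
  return xfer;
}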

[51/55] [abbrv] hive git commit: HIVE-12061 : add file type support to file metadata by expr call (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
HIVE-12061 : add file type support to file metadata by expr call (Sergey Shelukhin, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e5b53032
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e5b53032
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e5b53032

Branch: refs/heads/spark
Commit: e5b53032beaafb95c798a152e724acf0e38dc094
Parents: 6df9090
Author: Sergey Shelukhin <se...@apache.org>
Authored: Tue Oct 27 14:32:13 2015 -0700
Committer: Sergey Shelukhin <se...@apache.org>
Committed: Tue Oct 27 14:32:13 2015 -0700

----------------------------------------------------------------------
 metastore/if/hive_metastore.thrift              |    8 +-
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp  | 1744 +++++++++---------
 .../gen/thrift/gen-cpp/hive_metastore_types.cpp |  402 ++--
 .../gen/thrift/gen-cpp/hive_metastore_types.h   |   20 +-
 .../metastore/api/FileMetadataExprType.java     |   42 +
 .../api/GetFileMetadataByExprRequest.java       |  126 +-
 .../src/gen/thrift/gen-php/metastore/Types.php  |   30 +
 .../gen/thrift/gen-py/hive_metastore/ttypes.py  |   26 +-
 .../gen/thrift/gen-rb/hive_metastore_types.rb   |   13 +-
 .../hive/metastore/FileMetadataHandler.java     |   30 +
 .../hadoop/hive/metastore/HiveMetaStore.java    |   15 +-
 .../hadoop/hive/metastore/ObjectStore.java      |    3 +-
 .../apache/hadoop/hive/metastore/RawStore.java  |    8 +-
 .../filemeta/OrcFileMetadataHandler.java        |   63 +
 .../hive/metastore/hbase/HBaseReadWrite.java    |    2 +-
 .../hadoop/hive/metastore/hbase/HBaseStore.java |   41 +-
 .../DummyRawStoreControlledCommit.java          |    3 +-
 .../DummyRawStoreForJdoConnection.java          |    3 +-
 18 files changed, 1484 insertions(+), 1095 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/e5b53032/metastore/if/hive_metastore.thrift
----------------------------------------------------------------------
diff --git a/metastore/if/hive_metastore.thrift b/metastore/if/hive_metastore.thrift
index 751cebe..3e30f56 100755
--- a/metastore/if/hive_metastore.thrift
+++ b/metastore/if/hive_metastore.thrift
@@ -725,11 +725,17 @@ struct GetFileMetadataByExprResult {
   2: required bool isSupported
 }
 
+enum FileMetadataExprType {
+  ORC_SARG = 1
+}
+
+
 // Request type for get_file_metadata_by_expr
 struct GetFileMetadataByExprRequest {
   1: required list<i64> fileIds,
   2: required binary expr,
-  3: optional bool doGetFooters
+  3: optional bool doGetFooters,
+  4: optional FileMetadataExprType type
 }
 
 // Return type for get_file_metadata
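
Editor's note: the new optional type field above tells the metastore how to interpret the binary expr payload (currently only ORC_SARG). A minimal sketch of how a caller might populate the request through the regenerated C++ bindings follows; it assumes the usual Thrift-generated types in hive_metastore_types.h, their __set_* helpers, and the IDL's cpp namespace, rather than quoting code from this patch:

#include <string>
#include <vector>
#include "hive_metastore_types.h"  // regenerated types from the gen-cpp tree above

// Namespace assumed from the IDL's cpp namespace declaration.
using namespace Apache::Hadoop::Hive;

GetFileMetadataByExprRequest buildOrcSargRequest(const std::vector<int64_t>& fileIds,
                                                 const std::string& serializedSarg) {
  GetFileMetadataByExprRequest req;
  req.__set_fileIds(fileIds);      // required: which file footers to probe
  req.__set_expr(serializedSarg);  // required: binary expression payload
  req.__set_doGetFooters(false);   // optional: only ask for the match result
  req.__set_type(FileMetadataExprType::ORC_SARG);  // new optional field in this patch
  return req;
}

// The request is then passed to the generated client, e.g. (hypothetical wiring):
//   GetFileMetadataByExprResult result;
//   client.get_file_metadata_by_expr(result, buildOrcSargRequest(ids, sarg));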


[45/55] [abbrv] hive git commit: HIVE-12259: Command containing semicolon is broken in Beeline (Chaoyu Tang, reviewed by Ferdinand Xu)

Posted by xu...@apache.org.
HIVE-12259: Command containing semicolon is broken in Beeline (Chaoyu Tang, reviewed by Ferdinand Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ccdd1740
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ccdd1740
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ccdd1740

Branch: refs/heads/spark
Commit: ccdd1740a582ed49bcfba0d940e60438e1c7cb08
Parents: 86346fb
Author: ctang <ct...@gmail.com>
Authored: Tue Oct 27 08:16:24 2015 -0400
Committer: ctang <ct...@gmail.com>
Committed: Tue Oct 27 08:16:24 2015 -0400

----------------------------------------------------------------------
 beeline/src/java/org/apache/hive/beeline/BeeLine.java        | 4 ++--
 .../java/org/apache/hive/beeline/TestBeeLineWithArgs.java    | 8 ++++++++
 2 files changed, 10 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/ccdd1740/beeline/src/java/org/apache/hive/beeline/BeeLine.java
----------------------------------------------------------------------
diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
index 69e9418..4e04997 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -1108,8 +1108,8 @@ public class BeeLine implements Closeable {
     }
 
     if (isBeeLine) {
-      if (line.startsWith(COMMAND_PREFIX) && !line.contains(";")) {
-        // handle the case "!cmd" for beeline
+      if (line.startsWith(COMMAND_PREFIX)) {
+        // handle SQLLine command in beeline which starts with ! and does not end with ;
         return execCommandWithPrefix(line);
       } else {
         return commands.sql(line, getOpts().getEntireLineAsCommand());

http://git-wip-us.apache.org/repos/asf/hive/blob/ccdd1740/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
index 0465ef3..7cc0acf 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
@@ -752,4 +752,12 @@ public class TestBeeLineWithArgs {
     final String EXPECTED_PATTERN = "Stage-1 map =";
     testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
   }
+
+  @Test
+  public void testConnectionUrlWithSemiColon() throws Throwable{
+    List<String> argList = getBaseArgs(miniHS2.getJdbcURL("default", "sess_var_list?var1=value1"));
+    final String SCRIPT_TEXT = "set var1";
+    final String EXPECTED_PATTERN = "var1=value1";
+    testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+  }
 }


[20/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
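
Editor's note: the diff below is the bulk of the 0.9.3 regeneration. Every generated read() gains a TInputRecursionTracker, and every write() swaps the manual incrementRecursionDepth()/decrementRecursionDepth() pair for a TOutputRecursionTracker. The practical effect is that the recursion-depth counter is now restored on every exit path by a destructor rather than by a trailing call. A stripped-down sketch of that RAII idiom, using hypothetical guard and protocol names rather than the actual Thrift classes:

#include <stdexcept>

// Hypothetical stand-in for a protocol object with a recursion-depth counter.
struct FakeProtocol {
  int depth = 0;
  void incrementRecursionDepth() { if (++depth > 64) throw std::runtime_error("too deep"); }
  void decrementRecursionDepth() { --depth; }
};

// RAII guard: the constructor bumps the depth, the destructor always restores it,
// even if the serialization code throws or returns early.
struct RecursionGuard {
  explicit RecursionGuard(FakeProtocol& p) : proto(p) { proto.incrementRecursionDepth(); }
  ~RecursionGuard() { proto.decrementRecursionDepth(); }
  FakeProtocol& proto;
};

int writeSomething(FakeProtocol& proto, bool fail) {
  RecursionGuard tracker(proto);  // plays the role of TOutputRecursionTracker
  if (fail) throw std::runtime_error("mid-write failure");
  return 0;
}                                 // depth restored here on both paths

int main() {
  FakeProtocol proto;
  try { writeSomething(proto, true); } catch (const std::exception&) {}
  return proto.depth;             // 0: the guard unwound the counter despite the throw
}
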
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
index bc84e20..a82c363 100644
--- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
+++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -15,6 +15,7 @@ ThriftHiveMetastore_getMetaConf_args::~ThriftHiveMetastore_getMetaConf_args() th
 
 uint32_t ThriftHiveMetastore_getMetaConf_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -55,7 +56,7 @@ uint32_t ThriftHiveMetastore_getMetaConf_args::read(::apache::thrift::protocol::
 
 uint32_t ThriftHiveMetastore_getMetaConf_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_getMetaConf_args");
 
   xfer += oprot->writeFieldBegin("key", ::apache::thrift::protocol::T_STRING, 1);
@@ -64,7 +65,6 @@ uint32_t ThriftHiveMetastore_getMetaConf_args::write(::apache::thrift::protocol:
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -75,7 +75,7 @@ ThriftHiveMetastore_getMetaConf_pargs::~ThriftHiveMetastore_getMetaConf_pargs()
 
 uint32_t ThriftHiveMetastore_getMetaConf_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_getMetaConf_pargs");
 
   xfer += oprot->writeFieldBegin("key", ::apache::thrift::protocol::T_STRING, 1);
@@ -84,7 +84,6 @@ uint32_t ThriftHiveMetastore_getMetaConf_pargs::write(::apache::thrift::protocol
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -95,6 +94,7 @@ ThriftHiveMetastore_getMetaConf_result::~ThriftHiveMetastore_getMetaConf_result(
 
 uint32_t ThriftHiveMetastore_getMetaConf_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -168,6 +168,7 @@ ThriftHiveMetastore_getMetaConf_presult::~ThriftHiveMetastore_getMetaConf_presul
 
 uint32_t ThriftHiveMetastore_getMetaConf_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -221,6 +222,7 @@ ThriftHiveMetastore_setMetaConf_args::~ThriftHiveMetastore_setMetaConf_args() th
 
 uint32_t ThriftHiveMetastore_setMetaConf_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -269,7 +271,7 @@ uint32_t ThriftHiveMetastore_setMetaConf_args::read(::apache::thrift::protocol::
 
 uint32_t ThriftHiveMetastore_setMetaConf_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_setMetaConf_args");
 
   xfer += oprot->writeFieldBegin("key", ::apache::thrift::protocol::T_STRING, 1);
@@ -282,7 +284,6 @@ uint32_t ThriftHiveMetastore_setMetaConf_args::write(::apache::thrift::protocol:
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -293,7 +294,7 @@ ThriftHiveMetastore_setMetaConf_pargs::~ThriftHiveMetastore_setMetaConf_pargs()
 
 uint32_t ThriftHiveMetastore_setMetaConf_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_setMetaConf_pargs");
 
   xfer += oprot->writeFieldBegin("key", ::apache::thrift::protocol::T_STRING, 1);
@@ -306,7 +307,6 @@ uint32_t ThriftHiveMetastore_setMetaConf_pargs::write(::apache::thrift::protocol
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -317,6 +317,7 @@ ThriftHiveMetastore_setMetaConf_result::~ThriftHiveMetastore_setMetaConf_result(
 
 uint32_t ThriftHiveMetastore_setMetaConf_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -378,6 +379,7 @@ ThriftHiveMetastore_setMetaConf_presult::~ThriftHiveMetastore_setMetaConf_presul
 
 uint32_t ThriftHiveMetastore_setMetaConf_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -423,6 +425,7 @@ ThriftHiveMetastore_create_database_args::~ThriftHiveMetastore_create_database_a
 
 uint32_t ThriftHiveMetastore_create_database_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -463,7 +466,7 @@ uint32_t ThriftHiveMetastore_create_database_args::read(::apache::thrift::protoc
 
 uint32_t ThriftHiveMetastore_create_database_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_create_database_args");
 
   xfer += oprot->writeFieldBegin("database", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -472,7 +475,6 @@ uint32_t ThriftHiveMetastore_create_database_args::write(::apache::thrift::proto
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -483,7 +485,7 @@ ThriftHiveMetastore_create_database_pargs::~ThriftHiveMetastore_create_database_
 
 uint32_t ThriftHiveMetastore_create_database_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_create_database_pargs");
 
   xfer += oprot->writeFieldBegin("database", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -492,7 +494,6 @@ uint32_t ThriftHiveMetastore_create_database_pargs::write(::apache::thrift::prot
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -503,6 +504,7 @@ ThriftHiveMetastore_create_database_result::~ThriftHiveMetastore_create_database
 
 uint32_t ThriftHiveMetastore_create_database_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -588,6 +590,7 @@ ThriftHiveMetastore_create_database_presult::~ThriftHiveMetastore_create_databas
 
 uint32_t ThriftHiveMetastore_create_database_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -649,6 +652,7 @@ ThriftHiveMetastore_get_database_args::~ThriftHiveMetastore_get_database_args()
 
 uint32_t ThriftHiveMetastore_get_database_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -689,7 +693,7 @@ uint32_t ThriftHiveMetastore_get_database_args::read(::apache::thrift::protocol:
 
 uint32_t ThriftHiveMetastore_get_database_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_database_args");
 
   xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 1);
@@ -698,7 +702,6 @@ uint32_t ThriftHiveMetastore_get_database_args::write(::apache::thrift::protocol
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -709,7 +712,7 @@ ThriftHiveMetastore_get_database_pargs::~ThriftHiveMetastore_get_database_pargs(
 
 uint32_t ThriftHiveMetastore_get_database_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_database_pargs");
 
   xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 1);
@@ -718,7 +721,6 @@ uint32_t ThriftHiveMetastore_get_database_pargs::write(::apache::thrift::protoco
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -729,6 +731,7 @@ ThriftHiveMetastore_get_database_result::~ThriftHiveMetastore_get_database_resul
 
 uint32_t ThriftHiveMetastore_get_database_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -814,6 +817,7 @@ ThriftHiveMetastore_get_database_presult::~ThriftHiveMetastore_get_database_pres
 
 uint32_t ThriftHiveMetastore_get_database_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -875,6 +879,7 @@ ThriftHiveMetastore_drop_database_args::~ThriftHiveMetastore_drop_database_args(
 
 uint32_t ThriftHiveMetastore_drop_database_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -931,7 +936,7 @@ uint32_t ThriftHiveMetastore_drop_database_args::read(::apache::thrift::protocol
 
 uint32_t ThriftHiveMetastore_drop_database_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_database_args");
 
   xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 1);
@@ -948,7 +953,6 @@ uint32_t ThriftHiveMetastore_drop_database_args::write(::apache::thrift::protoco
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -959,7 +963,7 @@ ThriftHiveMetastore_drop_database_pargs::~ThriftHiveMetastore_drop_database_parg
 
 uint32_t ThriftHiveMetastore_drop_database_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_database_pargs");
 
   xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 1);
@@ -976,7 +980,6 @@ uint32_t ThriftHiveMetastore_drop_database_pargs::write(::apache::thrift::protoc
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -987,6 +990,7 @@ ThriftHiveMetastore_drop_database_result::~ThriftHiveMetastore_drop_database_res
 
 uint32_t ThriftHiveMetastore_drop_database_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1072,6 +1076,7 @@ ThriftHiveMetastore_drop_database_presult::~ThriftHiveMetastore_drop_database_pr
 
 uint32_t ThriftHiveMetastore_drop_database_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1133,6 +1138,7 @@ ThriftHiveMetastore_get_databases_args::~ThriftHiveMetastore_get_databases_args(
 
 uint32_t ThriftHiveMetastore_get_databases_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1173,7 +1179,7 @@ uint32_t ThriftHiveMetastore_get_databases_args::read(::apache::thrift::protocol
 
 uint32_t ThriftHiveMetastore_get_databases_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_databases_args");
 
   xfer += oprot->writeFieldBegin("pattern", ::apache::thrift::protocol::T_STRING, 1);
@@ -1182,7 +1188,6 @@ uint32_t ThriftHiveMetastore_get_databases_args::write(::apache::thrift::protoco
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1193,7 +1198,7 @@ ThriftHiveMetastore_get_databases_pargs::~ThriftHiveMetastore_get_databases_parg
 
 uint32_t ThriftHiveMetastore_get_databases_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_databases_pargs");
 
   xfer += oprot->writeFieldBegin("pattern", ::apache::thrift::protocol::T_STRING, 1);
@@ -1202,7 +1207,6 @@ uint32_t ThriftHiveMetastore_get_databases_pargs::write(::apache::thrift::protoc
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1213,6 +1217,7 @@ ThriftHiveMetastore_get_databases_result::~ThriftHiveMetastore_get_databases_res
 
 uint32_t ThriftHiveMetastore_get_databases_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1306,6 +1311,7 @@ ThriftHiveMetastore_get_databases_presult::~ThriftHiveMetastore_get_databases_pr
 
 uint32_t ThriftHiveMetastore_get_databases_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1371,6 +1377,7 @@ ThriftHiveMetastore_get_all_databases_args::~ThriftHiveMetastore_get_all_databas
 
 uint32_t ThriftHiveMetastore_get_all_databases_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1398,12 +1405,11 @@ uint32_t ThriftHiveMetastore_get_all_databases_args::read(::apache::thrift::prot
 
 uint32_t ThriftHiveMetastore_get_all_databases_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_all_databases_args");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1414,12 +1420,11 @@ ThriftHiveMetastore_get_all_databases_pargs::~ThriftHiveMetastore_get_all_databa
 
 uint32_t ThriftHiveMetastore_get_all_databases_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_all_databases_pargs");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1430,6 +1435,7 @@ ThriftHiveMetastore_get_all_databases_result::~ThriftHiveMetastore_get_all_datab
 
 uint32_t ThriftHiveMetastore_get_all_databases_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1523,6 +1529,7 @@ ThriftHiveMetastore_get_all_databases_presult::~ThriftHiveMetastore_get_all_data
 
 uint32_t ThriftHiveMetastore_get_all_databases_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1588,6 +1595,7 @@ ThriftHiveMetastore_alter_database_args::~ThriftHiveMetastore_alter_database_arg
 
 uint32_t ThriftHiveMetastore_alter_database_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1636,7 +1644,7 @@ uint32_t ThriftHiveMetastore_alter_database_args::read(::apache::thrift::protoco
 
 uint32_t ThriftHiveMetastore_alter_database_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_alter_database_args");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -1649,7 +1657,6 @@ uint32_t ThriftHiveMetastore_alter_database_args::write(::apache::thrift::protoc
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1660,7 +1667,7 @@ ThriftHiveMetastore_alter_database_pargs::~ThriftHiveMetastore_alter_database_pa
 
 uint32_t ThriftHiveMetastore_alter_database_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_alter_database_pargs");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -1673,7 +1680,6 @@ uint32_t ThriftHiveMetastore_alter_database_pargs::write(::apache::thrift::proto
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1684,6 +1690,7 @@ ThriftHiveMetastore_alter_database_result::~ThriftHiveMetastore_alter_database_r
 
 uint32_t ThriftHiveMetastore_alter_database_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1757,6 +1764,7 @@ ThriftHiveMetastore_alter_database_presult::~ThriftHiveMetastore_alter_database_
 
 uint32_t ThriftHiveMetastore_alter_database_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1810,6 +1818,7 @@ ThriftHiveMetastore_get_type_args::~ThriftHiveMetastore_get_type_args() throw()
 
 uint32_t ThriftHiveMetastore_get_type_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1850,7 +1859,7 @@ uint32_t ThriftHiveMetastore_get_type_args::read(::apache::thrift::protocol::TPr
 
 uint32_t ThriftHiveMetastore_get_type_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_type_args");
 
   xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 1);
@@ -1859,7 +1868,6 @@ uint32_t ThriftHiveMetastore_get_type_args::write(::apache::thrift::protocol::TP
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1870,7 +1878,7 @@ ThriftHiveMetastore_get_type_pargs::~ThriftHiveMetastore_get_type_pargs() throw(
 
 uint32_t ThriftHiveMetastore_get_type_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_type_pargs");
 
   xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 1);
@@ -1879,7 +1887,6 @@ uint32_t ThriftHiveMetastore_get_type_pargs::write(::apache::thrift::protocol::T
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1890,6 +1897,7 @@ ThriftHiveMetastore_get_type_result::~ThriftHiveMetastore_get_type_result() thro
 
 uint32_t ThriftHiveMetastore_get_type_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1975,6 +1983,7 @@ ThriftHiveMetastore_get_type_presult::~ThriftHiveMetastore_get_type_presult() th
 
 uint32_t ThriftHiveMetastore_get_type_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2036,6 +2045,7 @@ ThriftHiveMetastore_create_type_args::~ThriftHiveMetastore_create_type_args() th
 
 uint32_t ThriftHiveMetastore_create_type_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2076,7 +2086,7 @@ uint32_t ThriftHiveMetastore_create_type_args::read(::apache::thrift::protocol::
 
 uint32_t ThriftHiveMetastore_create_type_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_create_type_args");
 
   xfer += oprot->writeFieldBegin("type", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -2085,7 +2095,6 @@ uint32_t ThriftHiveMetastore_create_type_args::write(::apache::thrift::protocol:
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2096,7 +2105,7 @@ ThriftHiveMetastore_create_type_pargs::~ThriftHiveMetastore_create_type_pargs()
 
 uint32_t ThriftHiveMetastore_create_type_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_create_type_pargs");
 
   xfer += oprot->writeFieldBegin("type", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -2105,7 +2114,6 @@ uint32_t ThriftHiveMetastore_create_type_pargs::write(::apache::thrift::protocol
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2116,6 +2124,7 @@ ThriftHiveMetastore_create_type_result::~ThriftHiveMetastore_create_type_result(
 
 uint32_t ThriftHiveMetastore_create_type_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2213,6 +2222,7 @@ ThriftHiveMetastore_create_type_presult::~ThriftHiveMetastore_create_type_presul
 
 uint32_t ThriftHiveMetastore_create_type_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2282,6 +2292,7 @@ ThriftHiveMetastore_drop_type_args::~ThriftHiveMetastore_drop_type_args() throw(
 
 uint32_t ThriftHiveMetastore_drop_type_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2322,7 +2333,7 @@ uint32_t ThriftHiveMetastore_drop_type_args::read(::apache::thrift::protocol::TP
 
 uint32_t ThriftHiveMetastore_drop_type_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_type_args");
 
   xfer += oprot->writeFieldBegin("type", ::apache::thrift::protocol::T_STRING, 1);
@@ -2331,7 +2342,6 @@ uint32_t ThriftHiveMetastore_drop_type_args::write(::apache::thrift::protocol::T
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2342,7 +2352,7 @@ ThriftHiveMetastore_drop_type_pargs::~ThriftHiveMetastore_drop_type_pargs() thro
 
 uint32_t ThriftHiveMetastore_drop_type_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_type_pargs");
 
   xfer += oprot->writeFieldBegin("type", ::apache::thrift::protocol::T_STRING, 1);
@@ -2351,7 +2361,6 @@ uint32_t ThriftHiveMetastore_drop_type_pargs::write(::apache::thrift::protocol::
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2362,6 +2371,7 @@ ThriftHiveMetastore_drop_type_result::~ThriftHiveMetastore_drop_type_result() th
 
 uint32_t ThriftHiveMetastore_drop_type_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2447,6 +2457,7 @@ ThriftHiveMetastore_drop_type_presult::~ThriftHiveMetastore_drop_type_presult()
 
 uint32_t ThriftHiveMetastore_drop_type_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2508,6 +2519,7 @@ ThriftHiveMetastore_get_type_all_args::~ThriftHiveMetastore_get_type_all_args()
 
 uint32_t ThriftHiveMetastore_get_type_all_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2548,7 +2560,7 @@ uint32_t ThriftHiveMetastore_get_type_all_args::read(::apache::thrift::protocol:
 
 uint32_t ThriftHiveMetastore_get_type_all_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_type_all_args");
 
   xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 1);
@@ -2557,7 +2569,6 @@ uint32_t ThriftHiveMetastore_get_type_all_args::write(::apache::thrift::protocol
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2568,7 +2579,7 @@ ThriftHiveMetastore_get_type_all_pargs::~ThriftHiveMetastore_get_type_all_pargs(
 
 uint32_t ThriftHiveMetastore_get_type_all_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_type_all_pargs");
 
   xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 1);
@@ -2577,7 +2588,6 @@ uint32_t ThriftHiveMetastore_get_type_all_pargs::write(::apache::thrift::protoco
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2588,6 +2598,7 @@ ThriftHiveMetastore_get_type_all_result::~ThriftHiveMetastore_get_type_all_resul
 
 uint32_t ThriftHiveMetastore_get_type_all_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2685,6 +2696,7 @@ ThriftHiveMetastore_get_type_all_presult::~ThriftHiveMetastore_get_type_all_pres
 
 uint32_t ThriftHiveMetastore_get_type_all_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2753,6 +2765,7 @@ ThriftHiveMetastore_get_fields_args::~ThriftHiveMetastore_get_fields_args() thro
 
 uint32_t ThriftHiveMetastore_get_fields_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2801,7 +2814,7 @@ uint32_t ThriftHiveMetastore_get_fields_args::read(::apache::thrift::protocol::T
 
 uint32_t ThriftHiveMetastore_get_fields_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_fields_args");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -2814,7 +2827,6 @@ uint32_t ThriftHiveMetastore_get_fields_args::write(::apache::thrift::protocol::
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2825,7 +2837,7 @@ ThriftHiveMetastore_get_fields_pargs::~ThriftHiveMetastore_get_fields_pargs() th
 
 uint32_t ThriftHiveMetastore_get_fields_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_fields_pargs");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -2838,7 +2850,6 @@ uint32_t ThriftHiveMetastore_get_fields_pargs::write(::apache::thrift::protocol:
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -2849,6 +2860,7 @@ ThriftHiveMetastore_get_fields_result::~ThriftHiveMetastore_get_fields_result()
 
 uint32_t ThriftHiveMetastore_get_fields_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2966,6 +2978,7 @@ ThriftHiveMetastore_get_fields_presult::~ThriftHiveMetastore_get_fields_presult(
 
 uint32_t ThriftHiveMetastore_get_fields_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3047,6 +3060,7 @@ ThriftHiveMetastore_get_fields_with_environment_context_args::~ThriftHiveMetasto
 
 uint32_t ThriftHiveMetastore_get_fields_with_environment_context_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3103,7 +3117,7 @@ uint32_t ThriftHiveMetastore_get_fields_with_environment_context_args::read(::ap
 
 uint32_t ThriftHiveMetastore_get_fields_with_environment_context_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_fields_with_environment_context_args");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -3120,7 +3134,6 @@ uint32_t ThriftHiveMetastore_get_fields_with_environment_context_args::write(::a
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3131,7 +3144,7 @@ ThriftHiveMetastore_get_fields_with_environment_context_pargs::~ThriftHiveMetast
 
 uint32_t ThriftHiveMetastore_get_fields_with_environment_context_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_fields_with_environment_context_pargs");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -3148,7 +3161,6 @@ uint32_t ThriftHiveMetastore_get_fields_with_environment_context_pargs::write(::
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3159,6 +3171,7 @@ ThriftHiveMetastore_get_fields_with_environment_context_result::~ThriftHiveMetas
 
 uint32_t ThriftHiveMetastore_get_fields_with_environment_context_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3276,6 +3289,7 @@ ThriftHiveMetastore_get_fields_with_environment_context_presult::~ThriftHiveMeta
 
 uint32_t ThriftHiveMetastore_get_fields_with_environment_context_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3357,6 +3371,7 @@ ThriftHiveMetastore_get_schema_args::~ThriftHiveMetastore_get_schema_args() thro
 
 uint32_t ThriftHiveMetastore_get_schema_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3405,7 +3420,7 @@ uint32_t ThriftHiveMetastore_get_schema_args::read(::apache::thrift::protocol::T
 
 uint32_t ThriftHiveMetastore_get_schema_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_schema_args");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -3418,7 +3433,6 @@ uint32_t ThriftHiveMetastore_get_schema_args::write(::apache::thrift::protocol::
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3429,7 +3443,7 @@ ThriftHiveMetastore_get_schema_pargs::~ThriftHiveMetastore_get_schema_pargs() th
 
 uint32_t ThriftHiveMetastore_get_schema_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_schema_pargs");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -3442,7 +3456,6 @@ uint32_t ThriftHiveMetastore_get_schema_pargs::write(::apache::thrift::protocol:
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3453,6 +3466,7 @@ ThriftHiveMetastore_get_schema_result::~ThriftHiveMetastore_get_schema_result()
 
 uint32_t ThriftHiveMetastore_get_schema_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3570,6 +3584,7 @@ ThriftHiveMetastore_get_schema_presult::~ThriftHiveMetastore_get_schema_presult(
 
 uint32_t ThriftHiveMetastore_get_schema_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3651,6 +3666,7 @@ ThriftHiveMetastore_get_schema_with_environment_context_args::~ThriftHiveMetasto
 
 uint32_t ThriftHiveMetastore_get_schema_with_environment_context_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3707,7 +3723,7 @@ uint32_t ThriftHiveMetastore_get_schema_with_environment_context_args::read(::ap
 
 uint32_t ThriftHiveMetastore_get_schema_with_environment_context_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_schema_with_environment_context_args");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -3724,7 +3740,6 @@ uint32_t ThriftHiveMetastore_get_schema_with_environment_context_args::write(::a
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3735,7 +3750,7 @@ ThriftHiveMetastore_get_schema_with_environment_context_pargs::~ThriftHiveMetast
 
 uint32_t ThriftHiveMetastore_get_schema_with_environment_context_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_schema_with_environment_context_pargs");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -3752,7 +3767,6 @@ uint32_t ThriftHiveMetastore_get_schema_with_environment_context_pargs::write(::
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -3763,6 +3777,7 @@ ThriftHiveMetastore_get_schema_with_environment_context_result::~ThriftHiveMetas
 
 uint32_t ThriftHiveMetastore_get_schema_with_environment_context_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3880,6 +3895,7 @@ ThriftHiveMetastore_get_schema_with_environment_context_presult::~ThriftHiveMeta
 
 uint32_t ThriftHiveMetastore_get_schema_with_environment_context_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -3961,6 +3977,7 @@ ThriftHiveMetastore_create_table_args::~ThriftHiveMetastore_create_table_args()
 
 uint32_t ThriftHiveMetastore_create_table_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4001,7 +4018,7 @@ uint32_t ThriftHiveMetastore_create_table_args::read(::apache::thrift::protocol:
 
 uint32_t ThriftHiveMetastore_create_table_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_create_table_args");
 
   xfer += oprot->writeFieldBegin("tbl", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -4010,7 +4027,6 @@ uint32_t ThriftHiveMetastore_create_table_args::write(::apache::thrift::protocol
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4021,7 +4037,7 @@ ThriftHiveMetastore_create_table_pargs::~ThriftHiveMetastore_create_table_pargs(
 
 uint32_t ThriftHiveMetastore_create_table_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_create_table_pargs");
 
   xfer += oprot->writeFieldBegin("tbl", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -4030,7 +4046,6 @@ uint32_t ThriftHiveMetastore_create_table_pargs::write(::apache::thrift::protoco
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4041,6 +4056,7 @@ ThriftHiveMetastore_create_table_result::~ThriftHiveMetastore_create_table_resul
 
 uint32_t ThriftHiveMetastore_create_table_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4138,6 +4154,7 @@ ThriftHiveMetastore_create_table_presult::~ThriftHiveMetastore_create_table_pres
 
 uint32_t ThriftHiveMetastore_create_table_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4207,6 +4224,7 @@ ThriftHiveMetastore_create_table_with_environment_context_args::~ThriftHiveMetas
 
 uint32_t ThriftHiveMetastore_create_table_with_environment_context_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4255,7 +4273,7 @@ uint32_t ThriftHiveMetastore_create_table_with_environment_context_args::read(::
 
 uint32_t ThriftHiveMetastore_create_table_with_environment_context_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_create_table_with_environment_context_args");
 
   xfer += oprot->writeFieldBegin("tbl", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -4268,7 +4286,6 @@ uint32_t ThriftHiveMetastore_create_table_with_environment_context_args::write(:
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4279,7 +4296,7 @@ ThriftHiveMetastore_create_table_with_environment_context_pargs::~ThriftHiveMeta
 
 uint32_t ThriftHiveMetastore_create_table_with_environment_context_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_create_table_with_environment_context_pargs");
 
   xfer += oprot->writeFieldBegin("tbl", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -4292,7 +4309,6 @@ uint32_t ThriftHiveMetastore_create_table_with_environment_context_pargs::write(
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4303,6 +4319,7 @@ ThriftHiveMetastore_create_table_with_environment_context_result::~ThriftHiveMet
 
 uint32_t ThriftHiveMetastore_create_table_with_environment_context_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4400,6 +4417,7 @@ ThriftHiveMetastore_create_table_with_environment_context_presult::~ThriftHiveMe
 
 uint32_t ThriftHiveMetastore_create_table_with_environment_context_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4469,6 +4487,7 @@ ThriftHiveMetastore_drop_table_args::~ThriftHiveMetastore_drop_table_args() thro
 
 uint32_t ThriftHiveMetastore_drop_table_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4525,7 +4544,7 @@ uint32_t ThriftHiveMetastore_drop_table_args::read(::apache::thrift::protocol::T
 
 uint32_t ThriftHiveMetastore_drop_table_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_table_args");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -4542,7 +4561,6 @@ uint32_t ThriftHiveMetastore_drop_table_args::write(::apache::thrift::protocol::
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4553,7 +4571,7 @@ ThriftHiveMetastore_drop_table_pargs::~ThriftHiveMetastore_drop_table_pargs() th
 
 uint32_t ThriftHiveMetastore_drop_table_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_table_pargs");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -4570,7 +4588,6 @@ uint32_t ThriftHiveMetastore_drop_table_pargs::write(::apache::thrift::protocol:
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4581,6 +4598,7 @@ ThriftHiveMetastore_drop_table_result::~ThriftHiveMetastore_drop_table_result()
 
 uint32_t ThriftHiveMetastore_drop_table_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4654,6 +4672,7 @@ ThriftHiveMetastore_drop_table_presult::~ThriftHiveMetastore_drop_table_presult(
 
 uint32_t ThriftHiveMetastore_drop_table_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4707,6 +4726,7 @@ ThriftHiveMetastore_drop_table_with_environment_context_args::~ThriftHiveMetasto
 
 uint32_t ThriftHiveMetastore_drop_table_with_environment_context_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4771,7 +4791,7 @@ uint32_t ThriftHiveMetastore_drop_table_with_environment_context_args::read(::ap
 
 uint32_t ThriftHiveMetastore_drop_table_with_environment_context_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_table_with_environment_context_args");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -4792,7 +4812,6 @@ uint32_t ThriftHiveMetastore_drop_table_with_environment_context_args::write(::a
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4803,7 +4822,7 @@ ThriftHiveMetastore_drop_table_with_environment_context_pargs::~ThriftHiveMetast
 
 uint32_t ThriftHiveMetastore_drop_table_with_environment_context_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_table_with_environment_context_pargs");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -4824,7 +4843,6 @@ uint32_t ThriftHiveMetastore_drop_table_with_environment_context_pargs::write(::
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -4835,6 +4853,7 @@ ThriftHiveMetastore_drop_table_with_environment_context_result::~ThriftHiveMetas
 
 uint32_t ThriftHiveMetastore_drop_table_with_environment_context_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4908,6 +4927,7 @@ ThriftHiveMetastore_drop_table_with_environment_context_presult::~ThriftHiveMeta
 
 uint32_t ThriftHiveMetastore_drop_table_with_environment_context_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -4961,6 +4981,7 @@ ThriftHiveMetastore_get_tables_args::~ThriftHiveMetastore_get_tables_args() thro
 
 uint32_t ThriftHiveMetastore_get_tables_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5009,7 +5030,7 @@ uint32_t ThriftHiveMetastore_get_tables_args::read(::apache::thrift::protocol::T
 
 uint32_t ThriftHiveMetastore_get_tables_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_tables_args");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -5022,7 +5043,6 @@ uint32_t ThriftHiveMetastore_get_tables_args::write(::apache::thrift::protocol::
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5033,7 +5053,7 @@ ThriftHiveMetastore_get_tables_pargs::~ThriftHiveMetastore_get_tables_pargs() th
 
 uint32_t ThriftHiveMetastore_get_tables_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_tables_pargs");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -5046,7 +5066,6 @@ uint32_t ThriftHiveMetastore_get_tables_pargs::write(::apache::thrift::protocol:
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5057,6 +5076,7 @@ ThriftHiveMetastore_get_tables_result::~ThriftHiveMetastore_get_tables_result()
 
 uint32_t ThriftHiveMetastore_get_tables_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5150,6 +5170,7 @@ ThriftHiveMetastore_get_tables_presult::~ThriftHiveMetastore_get_tables_presult(
 
 uint32_t ThriftHiveMetastore_get_tables_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5215,6 +5236,7 @@ ThriftHiveMetastore_get_all_tables_args::~ThriftHiveMetastore_get_all_tables_arg
 
 uint32_t ThriftHiveMetastore_get_all_tables_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5255,7 +5277,7 @@ uint32_t ThriftHiveMetastore_get_all_tables_args::read(::apache::thrift::protoco
 
 uint32_t ThriftHiveMetastore_get_all_tables_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_all_tables_args");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -5264,7 +5286,6 @@ uint32_t ThriftHiveMetastore_get_all_tables_args::write(::apache::thrift::protoc
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5275,7 +5296,7 @@ ThriftHiveMetastore_get_all_tables_pargs::~ThriftHiveMetastore_get_all_tables_pa
 
 uint32_t ThriftHiveMetastore_get_all_tables_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_all_tables_pargs");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -5284,7 +5305,6 @@ uint32_t ThriftHiveMetastore_get_all_tables_pargs::write(::apache::thrift::proto
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5295,6 +5315,7 @@ ThriftHiveMetastore_get_all_tables_result::~ThriftHiveMetastore_get_all_tables_r
 
 uint32_t ThriftHiveMetastore_get_all_tables_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5388,6 +5409,7 @@ ThriftHiveMetastore_get_all_tables_presult::~ThriftHiveMetastore_get_all_tables_
 
 uint32_t ThriftHiveMetastore_get_all_tables_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5453,6 +5475,7 @@ ThriftHiveMetastore_get_table_args::~ThriftHiveMetastore_get_table_args() throw(
 
 uint32_t ThriftHiveMetastore_get_table_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5501,7 +5524,7 @@ uint32_t ThriftHiveMetastore_get_table_args::read(::apache::thrift::protocol::TP
 
 uint32_t ThriftHiveMetastore_get_table_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_table_args");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -5514,7 +5537,6 @@ uint32_t ThriftHiveMetastore_get_table_args::write(::apache::thrift::protocol::T
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5525,7 +5547,7 @@ ThriftHiveMetastore_get_table_pargs::~ThriftHiveMetastore_get_table_pargs() thro
 
 uint32_t ThriftHiveMetastore_get_table_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_table_pargs");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -5538,7 +5560,6 @@ uint32_t ThriftHiveMetastore_get_table_pargs::write(::apache::thrift::protocol::
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5549,6 +5570,7 @@ ThriftHiveMetastore_get_table_result::~ThriftHiveMetastore_get_table_result() th
 
 uint32_t ThriftHiveMetastore_get_table_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5634,6 +5656,7 @@ ThriftHiveMetastore_get_table_presult::~ThriftHiveMetastore_get_table_presult()
 
 uint32_t ThriftHiveMetastore_get_table_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5695,6 +5718,7 @@ ThriftHiveMetastore_get_table_objects_by_name_args::~ThriftHiveMetastore_get_tab
 
 uint32_t ThriftHiveMetastore_get_table_objects_by_name_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5755,7 +5779,7 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_args::read(::apache::thri
 
 uint32_t ThriftHiveMetastore_get_table_objects_by_name_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_table_objects_by_name_args");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -5776,7 +5800,6 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_args::write(::apache::thr
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5787,7 +5810,7 @@ ThriftHiveMetastore_get_table_objects_by_name_pargs::~ThriftHiveMetastore_get_ta
 
 uint32_t ThriftHiveMetastore_get_table_objects_by_name_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_table_objects_by_name_pargs");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -5808,7 +5831,6 @@ uint32_t ThriftHiveMetastore_get_table_objects_by_name_pargs::write(::apache::th
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -5819,6 +5841,7 @@ ThriftHiveMetastore_get_table_objects_by_name_result::~ThriftHiveMetastore_get_t
 
 uint32_t ThriftHiveMetastore_get_table_objects_by_name_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -5936,6 +5959,7 @@ ThriftHiveMetastore_get_table_objects_by_name_presult::~ThriftHiveMetastore_get_
 
 uint32_t ThriftHiveMetastore_get_table_objects_by_name_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6017,6 +6041,7 @@ ThriftHiveMetastore_get_table_names_by_filter_args::~ThriftHiveMetastore_get_tab
 
 uint32_t ThriftHiveMetastore_get_table_names_by_filter_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6073,7 +6098,7 @@ uint32_t ThriftHiveMetastore_get_table_names_by_filter_args::read(::apache::thri
 
 uint32_t ThriftHiveMetastore_get_table_names_by_filter_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_table_names_by_filter_args");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -6090,7 +6115,6 @@ uint32_t ThriftHiveMetastore_get_table_names_by_filter_args::write(::apache::thr
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6101,7 +6125,7 @@ ThriftHiveMetastore_get_table_names_by_filter_pargs::~ThriftHiveMetastore_get_ta
 
 uint32_t ThriftHiveMetastore_get_table_names_by_filter_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_table_names_by_filter_pargs");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -6118,7 +6142,6 @@ uint32_t ThriftHiveMetastore_get_table_names_by_filter_pargs::write(::apache::th
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6129,6 +6152,7 @@ ThriftHiveMetastore_get_table_names_by_filter_result::~ThriftHiveMetastore_get_t
 
 uint32_t ThriftHiveMetastore_get_table_names_by_filter_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6246,6 +6270,7 @@ ThriftHiveMetastore_get_table_names_by_filter_presult::~ThriftHiveMetastore_get_
 
 uint32_t ThriftHiveMetastore_get_table_names_by_filter_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6327,6 +6352,7 @@ ThriftHiveMetastore_alter_table_args::~ThriftHiveMetastore_alter_table_args() th
 
 uint32_t ThriftHiveMetastore_alter_table_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6383,7 +6409,7 @@ uint32_t ThriftHiveMetastore_alter_table_args::read(::apache::thrift::protocol::
 
 uint32_t ThriftHiveMetastore_alter_table_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_alter_table_args");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -6400,7 +6426,6 @@ uint32_t ThriftHiveMetastore_alter_table_args::write(::apache::thrift::protocol:
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6411,7 +6436,7 @@ ThriftHiveMetastore_alter_table_pargs::~ThriftHiveMetastore_alter_table_pargs()
 
 uint32_t ThriftHiveMetastore_alter_table_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_alter_table_pargs");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -6428,7 +6453,6 @@ uint32_t ThriftHiveMetastore_alter_table_pargs::write(::apache::thrift::protocol
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6439,6 +6463,7 @@ ThriftHiveMetastore_alter_table_result::~ThriftHiveMetastore_alter_table_result(
 
 uint32_t ThriftHiveMetastore_alter_table_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6512,6 +6537,7 @@ ThriftHiveMetastore_alter_table_presult::~ThriftHiveMetastore_alter_table_presul
 
 uint32_t ThriftHiveMetastore_alter_table_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6565,6 +6591,7 @@ ThriftHiveMetastore_alter_table_with_environment_context_args::~ThriftHiveMetast
 
 uint32_t ThriftHiveMetastore_alter_table_with_environment_context_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6629,7 +6656,7 @@ uint32_t ThriftHiveMetastore_alter_table_with_environment_context_args::read(::a
 
 uint32_t ThriftHiveMetastore_alter_table_with_environment_context_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_alter_table_with_environment_context_args");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -6650,7 +6677,6 @@ uint32_t ThriftHiveMetastore_alter_table_with_environment_context_args::write(::
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6661,7 +6687,7 @@ ThriftHiveMetastore_alter_table_with_environment_context_pargs::~ThriftHiveMetas
 
 uint32_t ThriftHiveMetastore_alter_table_with_environment_context_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_alter_table_with_environment_context_pargs");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -6682,7 +6708,6 @@ uint32_t ThriftHiveMetastore_alter_table_with_environment_context_pargs::write(:
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6693,6 +6718,7 @@ ThriftHiveMetastore_alter_table_with_environment_context_result::~ThriftHiveMeta
 
 uint32_t ThriftHiveMetastore_alter_table_with_environment_context_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6766,6 +6792,7 @@ ThriftHiveMetastore_alter_table_with_environment_context_presult::~ThriftHiveMet
 
 uint32_t ThriftHiveMetastore_alter_table_with_environment_context_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6819,6 +6846,7 @@ ThriftHiveMetastore_alter_table_with_cascade_args::~ThriftHiveMetastore_alter_ta
 
 uint32_t ThriftHiveMetastore_alter_table_with_cascade_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -6883,7 +6911,7 @@ uint32_t ThriftHiveMetastore_alter_table_with_cascade_args::read(::apache::thrif
 
 uint32_t ThriftHiveMetastore_alter_table_with_cascade_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_alter_table_with_cascade_args");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -6904,7 +6932,6 @@ uint32_t ThriftHiveMetastore_alter_table_with_cascade_args::write(::apache::thri
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6915,7 +6942,7 @@ ThriftHiveMetastore_alter_table_with_cascade_pargs::~ThriftHiveMetastore_alter_t
 
 uint32_t ThriftHiveMetastore_alter_table_with_cascade_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_alter_table_with_cascade_pargs");
 
   xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
@@ -6936,7 +6963,6 @@ uint32_t ThriftHiveMetastore_alter_table_with_cascade_pargs::write(::apache::thr
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -6947,6 +6973,7 @@ ThriftHiveMetastore_alter_table_with_cascade_result::~ThriftHiveMetastore_alter_
 
 uint32_t ThriftHiveMetastore_alter_table_with_cascade_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -7020,6 +7047,7 @@ ThriftHiveMetastore_alter_table_with_cascade_presult::~ThriftHiveMetastore_alter
 
 uint32_t ThriftHiveMetastore_alter_table_with_cascade_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -7073,6 +7101,7 @@ ThriftHiveMetastore_add_partition_args::~ThriftHiveMetastore_add_partition_args(
 
 uint32_t ThriftHiveMetastore_add_partition_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -7113,7 +7142,7 @@ uint32_t ThriftHiveMetastore_add_partition_args::read(::apache::thrift::protocol
 
 uint32_t ThriftHiveMetastore_add_partition_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_add_partition_args");
 
   xfer += oprot->writeFieldBegin("new_part", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -7122,7 +7151,6 @@ uint32_t ThriftHiveMetastore_add_partition_args::write(::apache::thrift::protoco
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -7133,7 +7161,7 @@ ThriftHiveMetastore_add_partition_pargs::~ThriftHiveMetastore_add_partition_parg
 
 uint32_t ThriftHiveMetastore_add_partition_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_add_partition_pargs");
 
   xfer += oprot->writeFieldBegin("new_part", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -7142,7 +7170,6 @@ uint32_t ThriftHiveMetastore_add_partition_pargs::write(::apache::thrift::protoc
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -7153,6 +7180,7 @@ ThriftHiveMetastore_add_partition_result::~ThriftHiveMetastore_add_partition_res
 
 uint32_t ThriftHiveMetastore_add_partition_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -7250,6 +7278,7 @@ ThriftHiveMetastore_add_partition_presult::~ThriftHiveMetastore_add_partition_pr
 
 uint32_t ThriftHiveMetastore_add_partition_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -7319,6 +7348,7 @@ ThriftHiveMetastore_add_partition_with_environment_context_args::~ThriftHiveMeta
 
 uint32_t ThriftHiveMetastore_add_partition_with_environment_context_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -7367,7 +7397,7 @@ uint32_t ThriftHiveMetastore_add_partition_with_environment_context_args::read(:
 
 uint32_t ThriftHiveMetastore_add_partition_with_environment_context_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_add_partition_with_environment_context_args");
 
   xfer += oprot->writeFieldBegin("new_part", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -7380,7 +7410,6 @@ uint32_t ThriftHiveMetastore_add_partition_with_environment_context_args::write(
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -7391,7 +7420,7 @@ ThriftHiveMetastore_add_partition_with_environment_context_pargs::~ThriftHiveMet
 
 uint32_t ThriftHiveMetastore_add_partition_with_environment_context_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_add_partition_with_environment_context_pargs");
 
   xfer += oprot->writeFieldBegin("new_part", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -7404,7 +7433,6 @@ uint32_t ThriftHiveMetastore_add_partition_with_environment_context_pargs::write
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -7415,6 +7443,7 @@ ThriftHiveMetastore_add_partition_with_environment_context_result::~ThriftHiveMe
 
 uint32_t ThriftHiveMetastore_add_partition_with_environment_context_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -7512,6 +7541,7 @@ ThriftHiveMetastore_add_partition_with_environment_context_presult::~ThriftHiveM
 
 uint32_t ThriftHiveMetastore_add_partition_with_environment_context_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -7581,6 +7611,7 @@ ThriftHiveMetastore_add_partitions_args::~ThriftHiveMetastore_add_partitions_arg
 
 uint32_t ThriftHiveMetastore_add_partitions_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -7633,7 +7664,7 @@ uint32_t ThriftHiveMetastore_add_partitions_args::read(::apache::thrift::protoco
 
 uint32_t ThriftHiveMetastore_add_partitions_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_add_partitions_args");
 
   xfer += oprot->writeFieldBegin("new_parts", ::apache::thrift::protocol::T_LIST, 1);
@@ -7650,7 +7681,6 @@ uint32_t ThriftHiveMetastore_add_partitions_args::write(::apache::thrift::protoc
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -7661,7 +7691,7 @@ ThriftHiveMetastore_add_partitions_pargs::~ThriftHiveMetastore_add_partitions_pa
 
 uint32_t ThriftHiveMetastore_add_partitions_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_add_partitions_pargs");
 
   xfer += oprot->writeFieldBegin("new_parts", ::apache::thrift::protocol::T_LIST, 1);
@@ -7678,7 +7708,6 @@ uint32_t ThriftHiveMetastore_add_partitions_pargs::write(::apache::thrift::proto
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -7689,6 +7718,7 @@ ThriftHiveMetastore_add_partitions_result::~ThriftHiveMetastore_add_partitions_r
 
 uint32_t ThriftHiveMetastore_add_partitions_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -7786,6 +7816,7 @@ ThriftHiveMetastore_add_partitions_presult::~ThriftHiveMetastore_add_partitions_
 
 uint32_t ThriftHiveMetastore_add_partitions_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -7855,6 +7886,7 @@ ThriftHiveMetastore_add_partitions_pspec_args::~ThriftHiveMetastore_add_partitio
 
 uint32_t ThriftHiveMetastore_add_partitions_pspec_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -7907,7 +7939,7 @@ uint32_t ThriftHiveMetastore_add_partitions_pspec_args::read(::apache::thrift::p
 
 uint32_t ThriftHiveMetastore_add_partitions_pspec_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_add_partitions_pspec_args");
 
   xfer += oprot->writeFieldBegin("new_parts", ::apache::thrift::protocol::T_LIST, 1);
@@ -7924,7 +7956,6 @@ uint32_t ThriftHiveMetastore_add_partitions_pspec_args::write(::apache::thrift::
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -7935,7 +7966,7 @@ ThriftHiveMetastore_add_partitions_pspec_pargs::~ThriftHiveMetastore_add_partiti
 
 uint32_t ThriftHiveMetastore_add_partitions_pspec_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_add_partitions_pspec_pargs");
 
   xfer += oprot->writeFieldBegin("new_parts", ::apache::thrift::protocol::T_LIST, 1);
@@ -7952,7 +7983,6 @@ uint32_t ThriftHiveMetastore_add_partitions_pspec_pargs::write(::apache::thrift:
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -7963,6 +7993,7 @@ ThriftHiveMetastore_add_partitions_pspec_result::~ThriftHiveMetastore_add_partit
 
 uint32_t ThriftHiveMetastore_add_partitions_pspec_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -8060,6 +8091,7 @@ ThriftHiveMetastore_add_partitions_pspec_presult::~ThriftHiveMetastore_add_parti
 
 uint32_t ThriftHiveMetastore_add_partitions_pspec_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -8129,6 +8161,7 @@ ThriftHiveMetastore_append_partition_args::~ThriftHiveMetastore_append_partition
 
 uint32_t ThriftHiveMetastore_append_partition_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -8197,7 +8230,7 @@ uint32_t ThriftHiveMetastore_append_partition_args::read(::apache::thrift::proto
 
 uint32_t ThriftHiveMetastore_append_partition_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_args");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -8222,7 +8255,6 @@ uint32_t ThriftHiveMetastore_append_partition_args::write(::apache::thrift::prot
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -8233,7 +8265,7 @@ ThriftHiveMetastore_append_partition_pargs::~ThriftHiveMetastore_append_partitio
 
 uint32_t ThriftHiveMetastore_append_partition_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_pargs");
 
   xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
@@ -8258,7 +8290,6 @@ uint32_t ThriftHiveMetastore_append_partition_pargs::write(::apache::thrift::pro
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -8269,6 +8300,7 @@ ThriftHiveMetastore_append_partition_result::~ThriftHiveMetastore_append_partiti
 
 uint32_t ThriftHiveMetastore_append_partition_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -8366,6 +8398,7 @@ ThriftHiveMetastore_append_partition_presult::~ThriftHiveMetastore_append_partit
 
 uint32_t ThriftHiveMetastore_append_partition_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -8435,6 +8468,7 @@ ThriftHiveMetastore_add_partitions_req_args::~ThriftHiveMetastore_add_partitions
 
 uint32_t ThriftHiveMetastore_add_partitions_req_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -8475,7 +8509,7 @@ uint32_t ThriftHiveMetastore_add_partitions_req_args::read(::apache::thrift::pro
 
 uint32_t ThriftHiveMetastore_add_partitions_req_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_add_partitions_req_args");
 
   xfer += oprot->writeFieldBegin("request", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -8484,7 +8518,6 @@ uint32_t ThriftHiveMetastore_add_partitions_req_args::write(::apache::thrift::pr
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -8495,7 +8528,7 @@ ThriftHiveMetastore_add_partitions_req_pargs::~ThriftHiveMetastore_add_partition
 
 uint32_t ThriftHiveMetastore_add_partitions_req_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_add_partitions_req_pargs");
 
   xfer += oprot->writeFieldBegin("request", ::apache::thrift::protocol::T_STRUCT, 1);
@@ -8504,7 +8537,6 @@ uint32_t ThriftHiveMetastore_add_partitions_req_pargs::write(::apache::thrift::p
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -8515,6 +8547,7 @@ ThriftHiveMetastore_add_partitions_req_result::~ThriftHiveMetastore_add_partitio
 
 uint32_t ThriftHiveMetastore_add_partitions_req_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -8612,6 +8645,7 @@ ThriftHiveMetastore_add_partitions_req_presult::~ThriftHiveMetastore_add_partiti
 
 uint32_t ThriftHiveMetastore_add_partitions_req_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+

<TRUNCATED>

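(A note on the pattern visible throughout the regenerated serializers above: the 0.9.3 compiler replaces the manual incrementRecursionDepth()/decrementRecursionDepth() pairs with scope-bound TInputRecursionTracker/TOutputRecursionTracker objects. The sketch below is only a minimal illustration of that RAII idea under assumed names -- the RecursionGuard class, the Protocol stub, and the kMaxDepth limit are hypothetical and are not the Thrift classes themselves -- but it shows why a destructor-based guard cannot leak depth on an early return or a thrown exception, which the paired calls could.)

    // Hypothetical RAII sketch of a recursion-depth guard (not Thrift's implementation).
    #include <cstdint>
    #include <stdexcept>

    struct Protocol {
      int recursionDepth = 0;                  // assumed counter, for illustration only
      static constexpr int kMaxDepth = 64;     // assumed limit
    };

    class RecursionGuard {
     public:
      explicit RecursionGuard(Protocol& p) : p_(p) {
        // Constructor bumps the depth and enforces the limit.
        if (++p_.recursionDepth > Protocol::kMaxDepth) {
          --p_.recursionDepth;
          throw std::runtime_error("max recursion depth exceeded");
        }
      }
      // Destructor restores the depth on every exit path: normal return,
      // early return, or exception unwinding.
      ~RecursionGuard() { --p_.recursionDepth; }
      RecursionGuard(const RecursionGuard&) = delete;
      RecursionGuard& operator=(const RecursionGuard&) = delete;
     private:
      Protocol& p_;
    };

    uint32_t readStruct(Protocol& prot) {
      RecursionGuard guard(prot);   // replaces a manual increment/decrement pair
      uint32_t xfer = 0;
      // ... field-by-field deserialization would go here ...
      return xfer;                  // depth restored automatically when guard goes out of scope
    }
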
[04/55] [abbrv] hive git commit: HIVE-11591 : upgrade thrift to 0.9.3 and change generation to use undated annotations (Sergey Shelukhin, reviewed by Alan Gates)

Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/ThriftHive.cpp
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-cpp/ThriftHive.cpp b/service/src/gen/thrift/gen-cpp/ThriftHive.cpp
index 8935c04..a5448f0 100644
--- a/service/src/gen/thrift/gen-cpp/ThriftHive.cpp
+++ b/service/src/gen/thrift/gen-cpp/ThriftHive.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -15,6 +15,7 @@ ThriftHive_execute_args::~ThriftHive_execute_args() throw() {
 
 uint32_t ThriftHive_execute_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -55,7 +56,7 @@ uint32_t ThriftHive_execute_args::read(::apache::thrift::protocol::TProtocol* ip
 
 uint32_t ThriftHive_execute_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_execute_args");
 
   xfer += oprot->writeFieldBegin("query", ::apache::thrift::protocol::T_STRING, 1);
@@ -64,7 +65,6 @@ uint32_t ThriftHive_execute_args::write(::apache::thrift::protocol::TProtocol* o
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -75,7 +75,7 @@ ThriftHive_execute_pargs::~ThriftHive_execute_pargs() throw() {
 
 uint32_t ThriftHive_execute_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_execute_pargs");
 
   xfer += oprot->writeFieldBegin("query", ::apache::thrift::protocol::T_STRING, 1);
@@ -84,7 +84,6 @@ uint32_t ThriftHive_execute_pargs::write(::apache::thrift::protocol::TProtocol*
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -95,6 +94,7 @@ ThriftHive_execute_result::~ThriftHive_execute_result() throw() {
 
 uint32_t ThriftHive_execute_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -156,6 +156,7 @@ ThriftHive_execute_presult::~ThriftHive_execute_presult() throw() {
 
 uint32_t ThriftHive_execute_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -201,6 +202,7 @@ ThriftHive_fetchOne_args::~ThriftHive_fetchOne_args() throw() {
 
 uint32_t ThriftHive_fetchOne_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -228,12 +230,11 @@ uint32_t ThriftHive_fetchOne_args::read(::apache::thrift::protocol::TProtocol* i
 
 uint32_t ThriftHive_fetchOne_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_fetchOne_args");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -244,12 +245,11 @@ ThriftHive_fetchOne_pargs::~ThriftHive_fetchOne_pargs() throw() {
 
 uint32_t ThriftHive_fetchOne_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_fetchOne_pargs");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -260,6 +260,7 @@ ThriftHive_fetchOne_result::~ThriftHive_fetchOne_result() throw() {
 
 uint32_t ThriftHive_fetchOne_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -333,6 +334,7 @@ ThriftHive_fetchOne_presult::~ThriftHive_fetchOne_presult() throw() {
 
 uint32_t ThriftHive_fetchOne_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -386,6 +388,7 @@ ThriftHive_fetchN_args::~ThriftHive_fetchN_args() throw() {
 
 uint32_t ThriftHive_fetchN_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -426,7 +429,7 @@ uint32_t ThriftHive_fetchN_args::read(::apache::thrift::protocol::TProtocol* ipr
 
 uint32_t ThriftHive_fetchN_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_fetchN_args");
 
   xfer += oprot->writeFieldBegin("numRows", ::apache::thrift::protocol::T_I32, 1);
@@ -435,7 +438,6 @@ uint32_t ThriftHive_fetchN_args::write(::apache::thrift::protocol::TProtocol* op
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -446,7 +448,7 @@ ThriftHive_fetchN_pargs::~ThriftHive_fetchN_pargs() throw() {
 
 uint32_t ThriftHive_fetchN_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_fetchN_pargs");
 
   xfer += oprot->writeFieldBegin("numRows", ::apache::thrift::protocol::T_I32, 1);
@@ -455,7 +457,6 @@ uint32_t ThriftHive_fetchN_pargs::write(::apache::thrift::protocol::TProtocol* o
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -466,6 +467,7 @@ ThriftHive_fetchN_result::~ThriftHive_fetchN_result() throw() {
 
 uint32_t ThriftHive_fetchN_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -559,6 +561,7 @@ ThriftHive_fetchN_presult::~ThriftHive_fetchN_presult() throw() {
 
 uint32_t ThriftHive_fetchN_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -624,6 +627,7 @@ ThriftHive_fetchAll_args::~ThriftHive_fetchAll_args() throw() {
 
 uint32_t ThriftHive_fetchAll_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -651,12 +655,11 @@ uint32_t ThriftHive_fetchAll_args::read(::apache::thrift::protocol::TProtocol* i
 
 uint32_t ThriftHive_fetchAll_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_fetchAll_args");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -667,12 +670,11 @@ ThriftHive_fetchAll_pargs::~ThriftHive_fetchAll_pargs() throw() {
 
 uint32_t ThriftHive_fetchAll_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_fetchAll_pargs");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -683,6 +685,7 @@ ThriftHive_fetchAll_result::~ThriftHive_fetchAll_result() throw() {
 
 uint32_t ThriftHive_fetchAll_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -776,6 +779,7 @@ ThriftHive_fetchAll_presult::~ThriftHive_fetchAll_presult() throw() {
 
 uint32_t ThriftHive_fetchAll_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -841,6 +845,7 @@ ThriftHive_getSchema_args::~ThriftHive_getSchema_args() throw() {
 
 uint32_t ThriftHive_getSchema_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -868,12 +873,11 @@ uint32_t ThriftHive_getSchema_args::read(::apache::thrift::protocol::TProtocol*
 
 uint32_t ThriftHive_getSchema_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_getSchema_args");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -884,12 +888,11 @@ ThriftHive_getSchema_pargs::~ThriftHive_getSchema_pargs() throw() {
 
 uint32_t ThriftHive_getSchema_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_getSchema_pargs");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -900,6 +903,7 @@ ThriftHive_getSchema_result::~ThriftHive_getSchema_result() throw() {
 
 uint32_t ThriftHive_getSchema_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -973,6 +977,7 @@ ThriftHive_getSchema_presult::~ThriftHive_getSchema_presult() throw() {
 
 uint32_t ThriftHive_getSchema_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1026,6 +1031,7 @@ ThriftHive_getThriftSchema_args::~ThriftHive_getThriftSchema_args() throw() {
 
 uint32_t ThriftHive_getThriftSchema_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1053,12 +1059,11 @@ uint32_t ThriftHive_getThriftSchema_args::read(::apache::thrift::protocol::TProt
 
 uint32_t ThriftHive_getThriftSchema_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_getThriftSchema_args");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1069,12 +1074,11 @@ ThriftHive_getThriftSchema_pargs::~ThriftHive_getThriftSchema_pargs() throw() {
 
 uint32_t ThriftHive_getThriftSchema_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_getThriftSchema_pargs");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1085,6 +1089,7 @@ ThriftHive_getThriftSchema_result::~ThriftHive_getThriftSchema_result() throw()
 
 uint32_t ThriftHive_getThriftSchema_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1158,6 +1163,7 @@ ThriftHive_getThriftSchema_presult::~ThriftHive_getThriftSchema_presult() throw(
 
 uint32_t ThriftHive_getThriftSchema_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1211,6 +1217,7 @@ ThriftHive_getClusterStatus_args::~ThriftHive_getClusterStatus_args() throw() {
 
 uint32_t ThriftHive_getClusterStatus_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1238,12 +1245,11 @@ uint32_t ThriftHive_getClusterStatus_args::read(::apache::thrift::protocol::TPro
 
 uint32_t ThriftHive_getClusterStatus_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_getClusterStatus_args");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1254,12 +1260,11 @@ ThriftHive_getClusterStatus_pargs::~ThriftHive_getClusterStatus_pargs() throw()
 
 uint32_t ThriftHive_getClusterStatus_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_getClusterStatus_pargs");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1270,6 +1275,7 @@ ThriftHive_getClusterStatus_result::~ThriftHive_getClusterStatus_result() throw(
 
 uint32_t ThriftHive_getClusterStatus_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1343,6 +1349,7 @@ ThriftHive_getClusterStatus_presult::~ThriftHive_getClusterStatus_presult() thro
 
 uint32_t ThriftHive_getClusterStatus_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1396,6 +1403,7 @@ ThriftHive_getQueryPlan_args::~ThriftHive_getQueryPlan_args() throw() {
 
 uint32_t ThriftHive_getQueryPlan_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1423,12 +1431,11 @@ uint32_t ThriftHive_getQueryPlan_args::read(::apache::thrift::protocol::TProtoco
 
 uint32_t ThriftHive_getQueryPlan_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_getQueryPlan_args");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1439,12 +1446,11 @@ ThriftHive_getQueryPlan_pargs::~ThriftHive_getQueryPlan_pargs() throw() {
 
 uint32_t ThriftHive_getQueryPlan_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_getQueryPlan_pargs");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1455,6 +1461,7 @@ ThriftHive_getQueryPlan_result::~ThriftHive_getQueryPlan_result() throw() {
 
 uint32_t ThriftHive_getQueryPlan_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1528,6 +1535,7 @@ ThriftHive_getQueryPlan_presult::~ThriftHive_getQueryPlan_presult() throw() {
 
 uint32_t ThriftHive_getQueryPlan_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1581,6 +1589,7 @@ ThriftHive_clean_args::~ThriftHive_clean_args() throw() {
 
 uint32_t ThriftHive_clean_args::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1608,12 +1617,11 @@ uint32_t ThriftHive_clean_args::read(::apache::thrift::protocol::TProtocol* ipro
 
 uint32_t ThriftHive_clean_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_clean_args");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1624,12 +1632,11 @@ ThriftHive_clean_pargs::~ThriftHive_clean_pargs() throw() {
 
 uint32_t ThriftHive_clean_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("ThriftHive_clean_pargs");
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -1640,6 +1647,7 @@ ThriftHive_clean_result::~ThriftHive_clean_result() throw() {
 
 uint32_t ThriftHive_clean_result::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -1683,6 +1691,7 @@ ThriftHive_clean_presult::~ThriftHive_clean_presult() throw() {
 
 uint32_t ThriftHive_clean_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -2761,5 +2770,775 @@ void ThriftHiveProcessor::process_clean(int32_t seqid, ::apache::thrift::protoco
   ::boost::shared_ptr< ::apache::thrift::TProcessor > processor(new ThriftHiveProcessor(handler));
   return processor;
 }
+
+void ThriftHiveConcurrentClient::execute(const std::string& query)
+{
+  int32_t seqid = send_execute(query);
+  recv_execute(seqid);
+}
+
+int32_t ThriftHiveConcurrentClient::send_execute(const std::string& query)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("execute", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  ThriftHive_execute_pargs args;
+  args.query = &query;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void ThriftHiveConcurrentClient::recv_execute(const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("execute") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      ThriftHive_execute_presult result;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.ex) {
+        sentry.commit();
+        throw result.ex;
+      }
+      sentry.commit();
+      return;
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void ThriftHiveConcurrentClient::fetchOne(std::string& _return)
+{
+  int32_t seqid = send_fetchOne();
+  recv_fetchOne(_return, seqid);
+}
+
+int32_t ThriftHiveConcurrentClient::send_fetchOne()
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("fetchOne", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  ThriftHive_fetchOne_pargs args;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void ThriftHiveConcurrentClient::recv_fetchOne(std::string& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("fetchOne") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      ThriftHive_fetchOne_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      if (result.__isset.ex) {
+        sentry.commit();
+        throw result.ex;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "fetchOne failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void ThriftHiveConcurrentClient::fetchN(std::vector<std::string> & _return, const int32_t numRows)
+{
+  int32_t seqid = send_fetchN(numRows);
+  recv_fetchN(_return, seqid);
+}
+
+int32_t ThriftHiveConcurrentClient::send_fetchN(const int32_t numRows)
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("fetchN", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  ThriftHive_fetchN_pargs args;
+  args.numRows = &numRows;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void ThriftHiveConcurrentClient::recv_fetchN(std::vector<std::string> & _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("fetchN") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      ThriftHive_fetchN_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      if (result.__isset.ex) {
+        sentry.commit();
+        throw result.ex;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "fetchN failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void ThriftHiveConcurrentClient::fetchAll(std::vector<std::string> & _return)
+{
+  int32_t seqid = send_fetchAll();
+  recv_fetchAll(_return, seqid);
+}
+
+int32_t ThriftHiveConcurrentClient::send_fetchAll()
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("fetchAll", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  ThriftHive_fetchAll_pargs args;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void ThriftHiveConcurrentClient::recv_fetchAll(std::vector<std::string> & _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("fetchAll") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      ThriftHive_fetchAll_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      if (result.__isset.ex) {
+        sentry.commit();
+        throw result.ex;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "fetchAll failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void ThriftHiveConcurrentClient::getSchema( ::Apache::Hadoop::Hive::Schema& _return)
+{
+  int32_t seqid = send_getSchema();
+  recv_getSchema(_return, seqid);
+}
+
+int32_t ThriftHiveConcurrentClient::send_getSchema()
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("getSchema", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  ThriftHive_getSchema_pargs args;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void ThriftHiveConcurrentClient::recv_getSchema( ::Apache::Hadoop::Hive::Schema& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("getSchema") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      ThriftHive_getSchema_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      if (result.__isset.ex) {
+        sentry.commit();
+        throw result.ex;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "getSchema failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void ThriftHiveConcurrentClient::getThriftSchema( ::Apache::Hadoop::Hive::Schema& _return)
+{
+  int32_t seqid = send_getThriftSchema();
+  recv_getThriftSchema(_return, seqid);
+}
+
+int32_t ThriftHiveConcurrentClient::send_getThriftSchema()
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("getThriftSchema", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  ThriftHive_getThriftSchema_pargs args;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void ThriftHiveConcurrentClient::recv_getThriftSchema( ::Apache::Hadoop::Hive::Schema& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("getThriftSchema") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      ThriftHive_getThriftSchema_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      if (result.__isset.ex) {
+        sentry.commit();
+        throw result.ex;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "getThriftSchema failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void ThriftHiveConcurrentClient::getClusterStatus(HiveClusterStatus& _return)
+{
+  int32_t seqid = send_getClusterStatus();
+  recv_getClusterStatus(_return, seqid);
+}
+
+int32_t ThriftHiveConcurrentClient::send_getClusterStatus()
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("getClusterStatus", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  ThriftHive_getClusterStatus_pargs args;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void ThriftHiveConcurrentClient::recv_getClusterStatus(HiveClusterStatus& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("getClusterStatus") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      ThriftHive_getClusterStatus_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      if (result.__isset.ex) {
+        sentry.commit();
+        throw result.ex;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "getClusterStatus failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void ThriftHiveConcurrentClient::getQueryPlan( ::Apache::Hadoop::Hive::QueryPlan& _return)
+{
+  int32_t seqid = send_getQueryPlan();
+  recv_getQueryPlan(_return, seqid);
+}
+
+int32_t ThriftHiveConcurrentClient::send_getQueryPlan()
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("getQueryPlan", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  ThriftHive_getQueryPlan_pargs args;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void ThriftHiveConcurrentClient::recv_getQueryPlan( ::Apache::Hadoop::Hive::QueryPlan& _return, const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("getQueryPlan") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      ThriftHive_getQueryPlan_presult result;
+      result.success = &_return;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      if (result.__isset.success) {
+        // _return pointer has now been filled
+        sentry.commit();
+        return;
+      }
+      if (result.__isset.ex) {
+        sentry.commit();
+        throw result.ex;
+      }
+      // in a bad state, don't commit
+      throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "getQueryPlan failed: unknown result");
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
+void ThriftHiveConcurrentClient::clean()
+{
+  int32_t seqid = send_clean();
+  recv_clean(seqid);
+}
+
+int32_t ThriftHiveConcurrentClient::send_clean()
+{
+  int32_t cseqid = this->sync_.generateSeqId();
+  ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_);
+  oprot_->writeMessageBegin("clean", ::apache::thrift::protocol::T_CALL, cseqid);
+
+  ThriftHive_clean_pargs args;
+  args.write(oprot_);
+
+  oprot_->writeMessageEnd();
+  oprot_->getTransport()->writeEnd();
+  oprot_->getTransport()->flush();
+
+  sentry.commit();
+  return cseqid;
+}
+
+void ThriftHiveConcurrentClient::recv_clean(const int32_t seqid)
+{
+
+  int32_t rseqid = 0;
+  std::string fname;
+  ::apache::thrift::protocol::TMessageType mtype;
+
+  // the read mutex gets dropped and reacquired as part of waitForWork()
+  // The destructor of this sentry wakes up other clients
+  ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid);
+
+  while(true) {
+    if(!this->sync_.getPending(fname, mtype, rseqid)) {
+      iprot_->readMessageBegin(fname, mtype, rseqid);
+    }
+    if(seqid == rseqid) {
+      if (mtype == ::apache::thrift::protocol::T_EXCEPTION) {
+        ::apache::thrift::TApplicationException x;
+        x.read(iprot_);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+        sentry.commit();
+        throw x;
+      }
+      if (mtype != ::apache::thrift::protocol::T_REPLY) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+      }
+      if (fname.compare("clean") != 0) {
+        iprot_->skip(::apache::thrift::protocol::T_STRUCT);
+        iprot_->readMessageEnd();
+        iprot_->getTransport()->readEnd();
+
+        // in a bad state, don't commit
+        using ::apache::thrift::protocol::TProtocolException;
+        throw TProtocolException(TProtocolException::INVALID_DATA);
+      }
+      ThriftHive_clean_presult result;
+      result.read(iprot_);
+      iprot_->readMessageEnd();
+      iprot_->getTransport()->readEnd();
+
+      sentry.commit();
+      return;
+    }
+    // seqid != rseqid
+    this->sync_.updatePending(fname, mtype, rseqid);
+
+    // this will temporarily unlock the readMutex, and let other clients get work done
+    this->sync_.waitForWork(seqid);
+  } // end while(true)
+}
+
 }}} // namespace
 

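For readers skimming the generated .cpp diff above: the 0.9.3 compiler replaces the explicit incrementRecursionDepth()/decrementRecursionDepth() pairs in every read()/write() with RAII tracker objects (TInputRecursionTracker/TOutputRecursionTracker), so the depth counter is released on every exit path, including exceptions. Below is a minimal standalone sketch of that pattern; the Protocol and RecursionTracker names and the depth limit are simplified stand-ins for illustration, not the real Thrift API.

    // Standalone sketch (not generated code) of the RAII recursion-tracker idea.
    #include <cstdint>
    #include <iostream>
    #include <stdexcept>

    class Protocol {
     public:
      void incrementRecursionDepth() {
        if (++depth_ > kLimit) {
          throw std::runtime_error("recursion depth limit exceeded");
        }
      }
      void decrementRecursionDepth() { --depth_; }
      uint32_t depth() const { return depth_; }
     private:
      static const uint32_t kLimit = 64;  // illustrative limit only
      uint32_t depth_ = 0;
    };

    // Bumps the depth in its constructor and is guaranteed to drop it in its
    // destructor, even if serialization throws or returns early -- which is why
    // the 0.9.3 output no longer needs a decrementRecursionDepth() before return.
    class RecursionTracker {
     public:
      explicit RecursionTracker(Protocol& p) : p_(p) { p_.incrementRecursionDepth(); }
      ~RecursionTracker() { p_.decrementRecursionDepth(); }
     private:
      Protocol& p_;
    };

    uint32_t writeSomeStruct(Protocol& prot) {
      RecursionTracker tracker(prot);  // depth goes up here
      // ... serialize fields; any early return or exception still unwinds ...
      return 0;
    }                                  // depth goes back down here

    int main() {
      Protocol prot;
      writeSomeStruct(prot);
      std::cout << "depth after write: " << prot.depth() << std::endl;  // prints 0
      return 0;
    }

The new ThriftHiveConcurrentClient added in the same diff follows a related discipline: each send_* method stamps the request with a generated sequence id under a TConcurrentSendSentry, and the matching recv_* method loops until a reply with that seqid arrives, committing the TConcurrentRecvSentry only on clean exits; as its inline comments note, the read mutex is dropped and reacquired inside waitForWork() so other clients can make progress while one waits.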
http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/ThriftHive.h
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-cpp/ThriftHive.h b/service/src/gen/thrift/gen-cpp/ThriftHive.h
index e610e50..902bd4b 100644
--- a/service/src/gen/thrift/gen-cpp/ThriftHive.h
+++ b/service/src/gen/thrift/gen-cpp/ThriftHive.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -8,11 +8,17 @@
 #define ThriftHive_H
 
 #include <thrift/TDispatchProcessor.h>
+#include <thrift/async/TConcurrentClientSyncInfo.h>
 #include "hive_service_types.h"
 #include "ThriftHiveMetastore.h"
 
 namespace Apache { namespace Hadoop { namespace Hive {
 
+#ifdef _WIN32
+  #pragma warning( push )
+  #pragma warning (disable : 4250 ) //inheriting methods via dominance 
+#endif
+
 class ThriftHiveIf : virtual public  ::Apache::Hadoop::Hive::ThriftHiveMetastoreIf {
  public:
   virtual ~ThriftHiveIf() {}
@@ -91,9 +97,6 @@ typedef struct _ThriftHive_execute_args__isset {
 class ThriftHive_execute_args {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
   ThriftHive_execute_args(const ThriftHive_execute_args&);
   ThriftHive_execute_args& operator=(const ThriftHive_execute_args&);
   ThriftHive_execute_args() : query() {
@@ -121,23 +124,18 @@ class ThriftHive_execute_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_execute_args& obj);
 };
 
 
 class ThriftHive_execute_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "EFB929595D312AC8F305D5A794CFEDA1";
-  static const uint8_t binary_fingerprint[16]; // = {0xEF,0xB9,0x29,0x59,0x5D,0x31,0x2A,0xC8,0xF3,0x05,0xD5,0xA7,0x94,0xCF,0xED,0xA1};
-
 
   virtual ~ThriftHive_execute_pargs() throw();
   const std::string* query;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_execute_pargs& obj);
 };
 
 typedef struct _ThriftHive_execute_result__isset {
@@ -148,9 +146,6 @@ typedef struct _ThriftHive_execute_result__isset {
 class ThriftHive_execute_result {
  public:
 
-  static const char* ascii_fingerprint; // = "6AC6FD61CA5E3524E0174B0B96D6F9C0";
-  static const uint8_t binary_fingerprint[16]; // = {0x6A,0xC6,0xFD,0x61,0xCA,0x5E,0x35,0x24,0xE0,0x17,0x4B,0x0B,0x96,0xD6,0xF9,0xC0};
-
   ThriftHive_execute_result(const ThriftHive_execute_result&);
   ThriftHive_execute_result& operator=(const ThriftHive_execute_result&);
   ThriftHive_execute_result() {
@@ -178,7 +173,6 @@ class ThriftHive_execute_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_execute_result& obj);
 };
 
 typedef struct _ThriftHive_execute_presult__isset {
@@ -189,9 +183,6 @@ typedef struct _ThriftHive_execute_presult__isset {
 class ThriftHive_execute_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "6AC6FD61CA5E3524E0174B0B96D6F9C0";
-  static const uint8_t binary_fingerprint[16]; // = {0x6A,0xC6,0xFD,0x61,0xCA,0x5E,0x35,0x24,0xE0,0x17,0x4B,0x0B,0x96,0xD6,0xF9,0xC0};
-
 
   virtual ~ThriftHive_execute_presult() throw();
   HiveServerException ex;
@@ -200,16 +191,12 @@ class ThriftHive_execute_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_execute_presult& obj);
 };
 
 
 class ThriftHive_fetchOne_args {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
   ThriftHive_fetchOne_args(const ThriftHive_fetchOne_args&);
   ThriftHive_fetchOne_args& operator=(const ThriftHive_fetchOne_args&);
   ThriftHive_fetchOne_args() {
@@ -230,22 +217,17 @@ class ThriftHive_fetchOne_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_fetchOne_args& obj);
 };
 
 
 class ThriftHive_fetchOne_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
 
   virtual ~ThriftHive_fetchOne_pargs() throw();
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_fetchOne_pargs& obj);
 };
 
 typedef struct _ThriftHive_fetchOne_result__isset {
@@ -257,9 +239,6 @@ typedef struct _ThriftHive_fetchOne_result__isset {
 class ThriftHive_fetchOne_result {
  public:
 
-  static const char* ascii_fingerprint; // = "BD5C3537D3AE5C0248BD3B45C053AE32";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0x5C,0x35,0x37,0xD3,0xAE,0x5C,0x02,0x48,0xBD,0x3B,0x45,0xC0,0x53,0xAE,0x32};
-
   ThriftHive_fetchOne_result(const ThriftHive_fetchOne_result&);
   ThriftHive_fetchOne_result& operator=(const ThriftHive_fetchOne_result&);
   ThriftHive_fetchOne_result() : success() {
@@ -292,7 +271,6 @@ class ThriftHive_fetchOne_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_fetchOne_result& obj);
 };
 
 typedef struct _ThriftHive_fetchOne_presult__isset {
@@ -304,9 +282,6 @@ typedef struct _ThriftHive_fetchOne_presult__isset {
 class ThriftHive_fetchOne_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "BD5C3537D3AE5C0248BD3B45C053AE32";
-  static const uint8_t binary_fingerprint[16]; // = {0xBD,0x5C,0x35,0x37,0xD3,0xAE,0x5C,0x02,0x48,0xBD,0x3B,0x45,0xC0,0x53,0xAE,0x32};
-
 
   virtual ~ThriftHive_fetchOne_presult() throw();
   std::string* success;
@@ -316,7 +291,6 @@ class ThriftHive_fetchOne_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_fetchOne_presult& obj);
 };
 
 typedef struct _ThriftHive_fetchN_args__isset {
@@ -327,9 +301,6 @@ typedef struct _ThriftHive_fetchN_args__isset {
 class ThriftHive_fetchN_args {
  public:
 
-  static const char* ascii_fingerprint; // = "E86CACEB22240450EDCBEFC3A83970E4";
-  static const uint8_t binary_fingerprint[16]; // = {0xE8,0x6C,0xAC,0xEB,0x22,0x24,0x04,0x50,0xED,0xCB,0xEF,0xC3,0xA8,0x39,0x70,0xE4};
-
   ThriftHive_fetchN_args(const ThriftHive_fetchN_args&);
   ThriftHive_fetchN_args& operator=(const ThriftHive_fetchN_args&);
   ThriftHive_fetchN_args() : numRows(0) {
@@ -357,23 +328,18 @@ class ThriftHive_fetchN_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_fetchN_args& obj);
 };
 
 
 class ThriftHive_fetchN_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "E86CACEB22240450EDCBEFC3A83970E4";
-  static const uint8_t binary_fingerprint[16]; // = {0xE8,0x6C,0xAC,0xEB,0x22,0x24,0x04,0x50,0xED,0xCB,0xEF,0xC3,0xA8,0x39,0x70,0xE4};
-
 
   virtual ~ThriftHive_fetchN_pargs() throw();
   const int32_t* numRows;
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_fetchN_pargs& obj);
 };
 
 typedef struct _ThriftHive_fetchN_result__isset {
@@ -385,9 +351,6 @@ typedef struct _ThriftHive_fetchN_result__isset {
 class ThriftHive_fetchN_result {
  public:
 
-  static const char* ascii_fingerprint; // = "EB142A6BE66D8EE6065D07106EABD55D";
-  static const uint8_t binary_fingerprint[16]; // = {0xEB,0x14,0x2A,0x6B,0xE6,0x6D,0x8E,0xE6,0x06,0x5D,0x07,0x10,0x6E,0xAB,0xD5,0x5D};
-
   ThriftHive_fetchN_result(const ThriftHive_fetchN_result&);
   ThriftHive_fetchN_result& operator=(const ThriftHive_fetchN_result&);
   ThriftHive_fetchN_result() {
@@ -420,7 +383,6 @@ class ThriftHive_fetchN_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_fetchN_result& obj);
 };
 
 typedef struct _ThriftHive_fetchN_presult__isset {
@@ -432,9 +394,6 @@ typedef struct _ThriftHive_fetchN_presult__isset {
 class ThriftHive_fetchN_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "EB142A6BE66D8EE6065D07106EABD55D";
-  static const uint8_t binary_fingerprint[16]; // = {0xEB,0x14,0x2A,0x6B,0xE6,0x6D,0x8E,0xE6,0x06,0x5D,0x07,0x10,0x6E,0xAB,0xD5,0x5D};
-
 
   virtual ~ThriftHive_fetchN_presult() throw();
   std::vector<std::string> * success;
@@ -444,16 +403,12 @@ class ThriftHive_fetchN_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_fetchN_presult& obj);
 };
 
 
 class ThriftHive_fetchAll_args {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
   ThriftHive_fetchAll_args(const ThriftHive_fetchAll_args&);
   ThriftHive_fetchAll_args& operator=(const ThriftHive_fetchAll_args&);
   ThriftHive_fetchAll_args() {
@@ -474,22 +429,17 @@ class ThriftHive_fetchAll_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_fetchAll_args& obj);
 };
 
 
 class ThriftHive_fetchAll_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
 
   virtual ~ThriftHive_fetchAll_pargs() throw();
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_fetchAll_pargs& obj);
 };
 
 typedef struct _ThriftHive_fetchAll_result__isset {
@@ -501,9 +451,6 @@ typedef struct _ThriftHive_fetchAll_result__isset {
 class ThriftHive_fetchAll_result {
  public:
 
-  static const char* ascii_fingerprint; // = "EB142A6BE66D8EE6065D07106EABD55D";
-  static const uint8_t binary_fingerprint[16]; // = {0xEB,0x14,0x2A,0x6B,0xE6,0x6D,0x8E,0xE6,0x06,0x5D,0x07,0x10,0x6E,0xAB,0xD5,0x5D};
-
   ThriftHive_fetchAll_result(const ThriftHive_fetchAll_result&);
   ThriftHive_fetchAll_result& operator=(const ThriftHive_fetchAll_result&);
   ThriftHive_fetchAll_result() {
@@ -536,7 +483,6 @@ class ThriftHive_fetchAll_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_fetchAll_result& obj);
 };
 
 typedef struct _ThriftHive_fetchAll_presult__isset {
@@ -548,9 +494,6 @@ typedef struct _ThriftHive_fetchAll_presult__isset {
 class ThriftHive_fetchAll_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "EB142A6BE66D8EE6065D07106EABD55D";
-  static const uint8_t binary_fingerprint[16]; // = {0xEB,0x14,0x2A,0x6B,0xE6,0x6D,0x8E,0xE6,0x06,0x5D,0x07,0x10,0x6E,0xAB,0xD5,0x5D};
-
 
   virtual ~ThriftHive_fetchAll_presult() throw();
   std::vector<std::string> * success;
@@ -560,16 +503,12 @@ class ThriftHive_fetchAll_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_fetchAll_presult& obj);
 };
 
 
 class ThriftHive_getSchema_args {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
   ThriftHive_getSchema_args(const ThriftHive_getSchema_args&);
   ThriftHive_getSchema_args& operator=(const ThriftHive_getSchema_args&);
   ThriftHive_getSchema_args() {
@@ -590,22 +529,17 @@ class ThriftHive_getSchema_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getSchema_args& obj);
 };
 
 
 class ThriftHive_getSchema_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
 
   virtual ~ThriftHive_getSchema_pargs() throw();
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getSchema_pargs& obj);
 };
 
 typedef struct _ThriftHive_getSchema_result__isset {
@@ -617,9 +551,6 @@ typedef struct _ThriftHive_getSchema_result__isset {
 class ThriftHive_getSchema_result {
  public:
 
-  static const char* ascii_fingerprint; // = "FCFAE75CC7093F1A3926C2AD58A6FFD1";
-  static const uint8_t binary_fingerprint[16]; // = {0xFC,0xFA,0xE7,0x5C,0xC7,0x09,0x3F,0x1A,0x39,0x26,0xC2,0xAD,0x58,0xA6,0xFF,0xD1};
-
   ThriftHive_getSchema_result(const ThriftHive_getSchema_result&);
   ThriftHive_getSchema_result& operator=(const ThriftHive_getSchema_result&);
   ThriftHive_getSchema_result() {
@@ -652,7 +583,6 @@ class ThriftHive_getSchema_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getSchema_result& obj);
 };
 
 typedef struct _ThriftHive_getSchema_presult__isset {
@@ -664,9 +594,6 @@ typedef struct _ThriftHive_getSchema_presult__isset {
 class ThriftHive_getSchema_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "FCFAE75CC7093F1A3926C2AD58A6FFD1";
-  static const uint8_t binary_fingerprint[16]; // = {0xFC,0xFA,0xE7,0x5C,0xC7,0x09,0x3F,0x1A,0x39,0x26,0xC2,0xAD,0x58,0xA6,0xFF,0xD1};
-
 
   virtual ~ThriftHive_getSchema_presult() throw();
    ::Apache::Hadoop::Hive::Schema* success;
@@ -676,16 +603,12 @@ class ThriftHive_getSchema_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getSchema_presult& obj);
 };
 
 
 class ThriftHive_getThriftSchema_args {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
   ThriftHive_getThriftSchema_args(const ThriftHive_getThriftSchema_args&);
   ThriftHive_getThriftSchema_args& operator=(const ThriftHive_getThriftSchema_args&);
   ThriftHive_getThriftSchema_args() {
@@ -706,22 +629,17 @@ class ThriftHive_getThriftSchema_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getThriftSchema_args& obj);
 };
 
 
 class ThriftHive_getThriftSchema_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
 
   virtual ~ThriftHive_getThriftSchema_pargs() throw();
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getThriftSchema_pargs& obj);
 };
 
 typedef struct _ThriftHive_getThriftSchema_result__isset {
@@ -733,9 +651,6 @@ typedef struct _ThriftHive_getThriftSchema_result__isset {
 class ThriftHive_getThriftSchema_result {
  public:
 
-  static const char* ascii_fingerprint; // = "FCFAE75CC7093F1A3926C2AD58A6FFD1";
-  static const uint8_t binary_fingerprint[16]; // = {0xFC,0xFA,0xE7,0x5C,0xC7,0x09,0x3F,0x1A,0x39,0x26,0xC2,0xAD,0x58,0xA6,0xFF,0xD1};
-
   ThriftHive_getThriftSchema_result(const ThriftHive_getThriftSchema_result&);
   ThriftHive_getThriftSchema_result& operator=(const ThriftHive_getThriftSchema_result&);
   ThriftHive_getThriftSchema_result() {
@@ -768,7 +683,6 @@ class ThriftHive_getThriftSchema_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getThriftSchema_result& obj);
 };
 
 typedef struct _ThriftHive_getThriftSchema_presult__isset {
@@ -780,9 +694,6 @@ typedef struct _ThriftHive_getThriftSchema_presult__isset {
 class ThriftHive_getThriftSchema_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "FCFAE75CC7093F1A3926C2AD58A6FFD1";
-  static const uint8_t binary_fingerprint[16]; // = {0xFC,0xFA,0xE7,0x5C,0xC7,0x09,0x3F,0x1A,0x39,0x26,0xC2,0xAD,0x58,0xA6,0xFF,0xD1};
-
 
   virtual ~ThriftHive_getThriftSchema_presult() throw();
    ::Apache::Hadoop::Hive::Schema* success;
@@ -792,16 +703,12 @@ class ThriftHive_getThriftSchema_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getThriftSchema_presult& obj);
 };
 
 
 class ThriftHive_getClusterStatus_args {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
   ThriftHive_getClusterStatus_args(const ThriftHive_getClusterStatus_args&);
   ThriftHive_getClusterStatus_args& operator=(const ThriftHive_getClusterStatus_args&);
   ThriftHive_getClusterStatus_args() {
@@ -822,22 +729,17 @@ class ThriftHive_getClusterStatus_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getClusterStatus_args& obj);
 };
 
 
 class ThriftHive_getClusterStatus_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
 
   virtual ~ThriftHive_getClusterStatus_pargs() throw();
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getClusterStatus_pargs& obj);
 };
 
 typedef struct _ThriftHive_getClusterStatus_result__isset {
@@ -849,9 +751,6 @@ typedef struct _ThriftHive_getClusterStatus_result__isset {
 class ThriftHive_getClusterStatus_result {
  public:
 
-  static const char* ascii_fingerprint; // = "F486E00F8F0F2B6A17A0371997BB7B87";
-  static const uint8_t binary_fingerprint[16]; // = {0xF4,0x86,0xE0,0x0F,0x8F,0x0F,0x2B,0x6A,0x17,0xA0,0x37,0x19,0x97,0xBB,0x7B,0x87};
-
   ThriftHive_getClusterStatus_result(const ThriftHive_getClusterStatus_result&);
   ThriftHive_getClusterStatus_result& operator=(const ThriftHive_getClusterStatus_result&);
   ThriftHive_getClusterStatus_result() {
@@ -884,7 +783,6 @@ class ThriftHive_getClusterStatus_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getClusterStatus_result& obj);
 };
 
 typedef struct _ThriftHive_getClusterStatus_presult__isset {
@@ -896,9 +794,6 @@ typedef struct _ThriftHive_getClusterStatus_presult__isset {
 class ThriftHive_getClusterStatus_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "F486E00F8F0F2B6A17A0371997BB7B87";
-  static const uint8_t binary_fingerprint[16]; // = {0xF4,0x86,0xE0,0x0F,0x8F,0x0F,0x2B,0x6A,0x17,0xA0,0x37,0x19,0x97,0xBB,0x7B,0x87};
-
 
   virtual ~ThriftHive_getClusterStatus_presult() throw();
   HiveClusterStatus* success;
@@ -908,16 +803,12 @@ class ThriftHive_getClusterStatus_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getClusterStatus_presult& obj);
 };
 
 
 class ThriftHive_getQueryPlan_args {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
   ThriftHive_getQueryPlan_args(const ThriftHive_getQueryPlan_args&);
   ThriftHive_getQueryPlan_args& operator=(const ThriftHive_getQueryPlan_args&);
   ThriftHive_getQueryPlan_args() {
@@ -938,22 +829,17 @@ class ThriftHive_getQueryPlan_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getQueryPlan_args& obj);
 };
 
 
 class ThriftHive_getQueryPlan_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
 
   virtual ~ThriftHive_getQueryPlan_pargs() throw();
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getQueryPlan_pargs& obj);
 };
 
 typedef struct _ThriftHive_getQueryPlan_result__isset {
@@ -965,9 +851,6 @@ typedef struct _ThriftHive_getQueryPlan_result__isset {
 class ThriftHive_getQueryPlan_result {
  public:
 
-  static const char* ascii_fingerprint; // = "0263544CFF7194CEE7DC6128DD5941ED";
-  static const uint8_t binary_fingerprint[16]; // = {0x02,0x63,0x54,0x4C,0xFF,0x71,0x94,0xCE,0xE7,0xDC,0x61,0x28,0xDD,0x59,0x41,0xED};
-
   ThriftHive_getQueryPlan_result(const ThriftHive_getQueryPlan_result&);
   ThriftHive_getQueryPlan_result& operator=(const ThriftHive_getQueryPlan_result&);
   ThriftHive_getQueryPlan_result() {
@@ -1000,7 +883,6 @@ class ThriftHive_getQueryPlan_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getQueryPlan_result& obj);
 };
 
 typedef struct _ThriftHive_getQueryPlan_presult__isset {
@@ -1012,9 +894,6 @@ typedef struct _ThriftHive_getQueryPlan_presult__isset {
 class ThriftHive_getQueryPlan_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "0263544CFF7194CEE7DC6128DD5941ED";
-  static const uint8_t binary_fingerprint[16]; // = {0x02,0x63,0x54,0x4C,0xFF,0x71,0x94,0xCE,0xE7,0xDC,0x61,0x28,0xDD,0x59,0x41,0xED};
-
 
   virtual ~ThriftHive_getQueryPlan_presult() throw();
    ::Apache::Hadoop::Hive::QueryPlan* success;
@@ -1024,16 +903,12 @@ class ThriftHive_getQueryPlan_presult {
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_getQueryPlan_presult& obj);
 };
 
 
 class ThriftHive_clean_args {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
   ThriftHive_clean_args(const ThriftHive_clean_args&);
   ThriftHive_clean_args& operator=(const ThriftHive_clean_args&);
   ThriftHive_clean_args() {
@@ -1054,31 +929,23 @@ class ThriftHive_clean_args {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_clean_args& obj);
 };
 
 
 class ThriftHive_clean_pargs {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
 
   virtual ~ThriftHive_clean_pargs() throw();
 
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_clean_pargs& obj);
 };
 
 
 class ThriftHive_clean_result {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
   ThriftHive_clean_result(const ThriftHive_clean_result&);
   ThriftHive_clean_result& operator=(const ThriftHive_clean_result&);
   ThriftHive_clean_result() {
@@ -1099,22 +966,17 @@ class ThriftHive_clean_result {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_clean_result& obj);
 };
 
 
 class ThriftHive_clean_presult {
  public:
 
-  static const char* ascii_fingerprint; // = "99914B932BD37A50B983C5E7C90AE93B";
-  static const uint8_t binary_fingerprint[16]; // = {0x99,0x91,0x4B,0x93,0x2B,0xD3,0x7A,0x50,0xB9,0x83,0xC5,0xE7,0xC9,0x0A,0xE9,0x3B};
-
 
   virtual ~ThriftHive_clean_presult() throw();
 
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
 
-  friend std::ostream& operator<<(std::ostream& out, const ThriftHive_clean_presult& obj);
 };
 
 class ThriftHiveClient : virtual public ThriftHiveIf, public  ::Apache::Hadoop::Hive::ThriftHiveMetastoreClient {
@@ -1310,6 +1172,53 @@ class ThriftHiveMultiface : virtual public ThriftHiveIf, public  ::Apache::Hadoo
 
 };
 
+// The 'concurrent' client is a thread safe client that correctly handles
+// out of order responses.  It is slower than the regular client, so should
+// only be used when you need to share a connection among multiple threads
+class ThriftHiveConcurrentClient : virtual public ThriftHiveIf, public  ::Apache::Hadoop::Hive::ThriftHiveMetastoreConcurrentClient {
+ public:
+  ThriftHiveConcurrentClient(boost::shared_ptr< ::apache::thrift::protocol::TProtocol> prot) :
+     ::Apache::Hadoop::Hive::ThriftHiveMetastoreConcurrentClient(prot, prot) {}
+  ThriftHiveConcurrentClient(boost::shared_ptr< ::apache::thrift::protocol::TProtocol> iprot, boost::shared_ptr< ::apache::thrift::protocol::TProtocol> oprot) :     ::Apache::Hadoop::Hive::ThriftHiveMetastoreConcurrentClient(iprot, oprot) {}
+  boost::shared_ptr< ::apache::thrift::protocol::TProtocol> getInputProtocol() {
+    return piprot_;
+  }
+  boost::shared_ptr< ::apache::thrift::protocol::TProtocol> getOutputProtocol() {
+    return poprot_;
+  }
+  void execute(const std::string& query);
+  int32_t send_execute(const std::string& query);
+  void recv_execute(const int32_t seqid);
+  void fetchOne(std::string& _return);
+  int32_t send_fetchOne();
+  void recv_fetchOne(std::string& _return, const int32_t seqid);
+  void fetchN(std::vector<std::string> & _return, const int32_t numRows);
+  int32_t send_fetchN(const int32_t numRows);
+  void recv_fetchN(std::vector<std::string> & _return, const int32_t seqid);
+  void fetchAll(std::vector<std::string> & _return);
+  int32_t send_fetchAll();
+  void recv_fetchAll(std::vector<std::string> & _return, const int32_t seqid);
+  void getSchema( ::Apache::Hadoop::Hive::Schema& _return);
+  int32_t send_getSchema();
+  void recv_getSchema( ::Apache::Hadoop::Hive::Schema& _return, const int32_t seqid);
+  void getThriftSchema( ::Apache::Hadoop::Hive::Schema& _return);
+  int32_t send_getThriftSchema();
+  void recv_getThriftSchema( ::Apache::Hadoop::Hive::Schema& _return, const int32_t seqid);
+  void getClusterStatus(HiveClusterStatus& _return);
+  int32_t send_getClusterStatus();
+  void recv_getClusterStatus(HiveClusterStatus& _return, const int32_t seqid);
+  void getQueryPlan( ::Apache::Hadoop::Hive::QueryPlan& _return);
+  int32_t send_getQueryPlan();
+  void recv_getQueryPlan( ::Apache::Hadoop::Hive::QueryPlan& _return, const int32_t seqid);
+  void clean();
+  int32_t send_clean();
+  void recv_clean(const int32_t seqid);
+};
+
+#ifdef _WIN32
+  #pragma warning( pop )
+#endif
+
 }}} // namespace
 
 #endif
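The newly generated ThriftHiveConcurrentClient above is the part of this header most callers will actually touch after the 0.9.3 upgrade. The following is a minimal usage sketch, not code from this commit: the host, port and query string are illustrative placeholders, the thread-sharing that motivates the concurrent client is elided, and only standard Thrift 0.9.3 transport/protocol classes plus the methods declared in the generated header are used.

#include <iostream>
#include <vector>
#include <string>
#include <boost/shared_ptr.hpp>
#include <thrift/transport/TSocket.h>
#include <thrift/transport/TBufferTransports.h>
#include <thrift/protocol/TBinaryProtocol.h>
#include "ThriftHive.h"

int main() {
  using namespace apache::thrift::transport;
  using namespace apache::thrift::protocol;

  // Placeholder endpoint; any HiveServer1 Thrift address would do.
  boost::shared_ptr<TTransport> socket(new TSocket("localhost", 10000));
  boost::shared_ptr<TTransport> transport(new TBufferedTransport(socket));
  boost::shared_ptr<TProtocol> protocol(new TBinaryProtocol(transport));

  // Single-argument constructor from the generated header: the same
  // protocol object is used for input and output.
  Apache::Hadoop::Hive::ThriftHiveConcurrentClient client(protocol);

  transport->open();
  client.execute("SHOW TABLES");        // illustrative query only
  std::vector<std::string> rows;
  client.fetchAll(rows);
  for (size_t i = 0; i < rows.size(); ++i) {
    std::cout << rows[i] << std::endl;
  }
  transport->close();
  return 0;
}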

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/hive_service_constants.cpp
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-cpp/hive_service_constants.cpp b/service/src/gen/thrift/gen-cpp/hive_service_constants.cpp
index c3c8482..e2bbe71 100644
--- a/service/src/gen/thrift/gen-cpp/hive_service_constants.cpp
+++ b/service/src/gen/thrift/gen-cpp/hive_service_constants.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/hive_service_constants.h
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-cpp/hive_service_constants.h b/service/src/gen/thrift/gen-cpp/hive_service_constants.h
index 5878dbe..e0887f4 100644
--- a/service/src/gen/thrift/gen-cpp/hive_service_constants.h
+++ b/service/src/gen/thrift/gen-cpp/hive_service_constants.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/hive_service_types.cpp
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-cpp/hive_service_types.cpp b/service/src/gen/thrift/gen-cpp/hive_service_types.cpp
index 4d6cf01..9ddf7c1 100644
--- a/service/src/gen/thrift/gen-cpp/hive_service_types.cpp
+++ b/service/src/gen/thrift/gen-cpp/hive_service_types.cpp
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -52,11 +52,9 @@ void HiveClusterStatus::__set_state(const JobTrackerState::type val) {
   this->state = val;
 }
 
-const char* HiveClusterStatus::ascii_fingerprint = "D514EDCFFC86F64A2E924DCD16D4FAD8";
-const uint8_t HiveClusterStatus::binary_fingerprint[16] = {0xD5,0x14,0xED,0xCF,0xFC,0x86,0xF6,0x4A,0x2E,0x92,0x4D,0xCD,0x16,0xD4,0xFA,0xD8};
-
 uint32_t HiveClusterStatus::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -139,7 +137,7 @@ uint32_t HiveClusterStatus::read(::apache::thrift::protocol::TProtocol* iprot) {
 
 uint32_t HiveClusterStatus::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("HiveClusterStatus");
 
   xfer += oprot->writeFieldBegin("taskTrackers", ::apache::thrift::protocol::T_I32, 1);
@@ -168,7 +166,6 @@ uint32_t HiveClusterStatus::write(::apache::thrift::protocol::TProtocol* oprot)
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -202,17 +199,16 @@ HiveClusterStatus& HiveClusterStatus::operator=(const HiveClusterStatus& other2)
   __isset = other2.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const HiveClusterStatus& obj) {
-  using apache::thrift::to_string;
+void HiveClusterStatus::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "HiveClusterStatus(";
-  out << "taskTrackers=" << to_string(obj.taskTrackers);
-  out << ", " << "mapTasks=" << to_string(obj.mapTasks);
-  out << ", " << "reduceTasks=" << to_string(obj.reduceTasks);
-  out << ", " << "maxMapTasks=" << to_string(obj.maxMapTasks);
-  out << ", " << "maxReduceTasks=" << to_string(obj.maxReduceTasks);
-  out << ", " << "state=" << to_string(obj.state);
+  out << "taskTrackers=" << to_string(taskTrackers);
+  out << ", " << "mapTasks=" << to_string(mapTasks);
+  out << ", " << "reduceTasks=" << to_string(reduceTasks);
+  out << ", " << "maxMapTasks=" << to_string(maxMapTasks);
+  out << ", " << "maxReduceTasks=" << to_string(maxReduceTasks);
+  out << ", " << "state=" << to_string(state);
   out << ")";
-  return out;
 }
 
 
@@ -232,11 +228,9 @@ void HiveServerException::__set_SQLState(const std::string& val) {
   this->SQLState = val;
 }
 
-const char* HiveServerException::ascii_fingerprint = "70563A0628F75DF9555F4D24690B1E26";
-const uint8_t HiveServerException::binary_fingerprint[16] = {0x70,0x56,0x3A,0x06,0x28,0xF7,0x5D,0xF9,0x55,0x5F,0x4D,0x24,0x69,0x0B,0x1E,0x26};
-
 uint32_t HiveServerException::read(::apache::thrift::protocol::TProtocol* iprot) {
 
+  apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
   uint32_t xfer = 0;
   std::string fname;
   ::apache::thrift::protocol::TType ftype;
@@ -293,7 +287,7 @@ uint32_t HiveServerException::read(::apache::thrift::protocol::TProtocol* iprot)
 
 uint32_t HiveServerException::write(::apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
-  oprot->incrementRecursionDepth();
+  apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
   xfer += oprot->writeStructBegin("HiveServerException");
 
   xfer += oprot->writeFieldBegin("message", ::apache::thrift::protocol::T_STRING, 1);
@@ -310,7 +304,6 @@ uint32_t HiveServerException::write(::apache::thrift::protocol::TProtocol* oprot
 
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
-  oprot->decrementRecursionDepth();
   return xfer;
 }
 
@@ -335,14 +328,24 @@ HiveServerException& HiveServerException::operator=(const HiveServerException& o
   __isset = other4.__isset;
   return *this;
 }
-std::ostream& operator<<(std::ostream& out, const HiveServerException& obj) {
-  using apache::thrift::to_string;
+void HiveServerException::printTo(std::ostream& out) const {
+  using ::apache::thrift::to_string;
   out << "HiveServerException(";
-  out << "message=" << to_string(obj.message);
-  out << ", " << "errorCode=" << to_string(obj.errorCode);
-  out << ", " << "SQLState=" << to_string(obj.SQLState);
+  out << "message=" << to_string(message);
+  out << ", " << "errorCode=" << to_string(errorCode);
+  out << ", " << "SQLState=" << to_string(SQLState);
   out << ")";
-  return out;
+}
+
+const char* HiveServerException::what() const throw() {
+  try {
+    std::stringstream ss;
+    ss << "TException - service has thrown: " << *this;
+    this->thriftTExceptionMessageHolder_ = ss.str();
+    return this->thriftTExceptionMessageHolder_.c_str();
+  } catch (const std::exception&) {
+    return "TException - service has thrown: HiveServerException";
+  }
 }
 
 }}} // namespace
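The hunks above also swap the paired incrementRecursionDepth()/decrementRecursionDepth() calls for TInputRecursionTracker/TOutputRecursionTracker objects. The point of that change is exception safety: the depth is released by a destructor instead of a trailing call that a throw could skip. Below is a purely illustrative RAII guard (a hypothetical stand-in, not Thrift's actual tracker class) showing the pattern.

#include <stdexcept>

// Hypothetical stand-in for Thrift's recursion trackers; illustrative only.
class RecursionGuard {
 public:
  explicit RecursionGuard(int& depth) : depth_(depth) { ++depth_; }
  ~RecursionGuard() { --depth_; }     // runs on normal return and on exceptions
 private:
  int& depth_;
};

int writeStruct(int& depth) {
  RecursionGuard guard(depth);        // replaces incrementRecursionDepth()
  if (depth > 64) {
    throw std::runtime_error("recursion limit exceeded");
  }
  // ... serialize fields here; this may also throw ...
  return 0;                           // no explicit decrement needed on any path
}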

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-cpp/hive_service_types.h
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-cpp/hive_service_types.h b/service/src/gen/thrift/gen-cpp/hive_service_types.h
index 7fea88c..266f8ea 100644
--- a/service/src/gen/thrift/gen-cpp/hive_service_types.h
+++ b/service/src/gen/thrift/gen-cpp/hive_service_types.h
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -48,9 +48,6 @@ typedef struct _HiveClusterStatus__isset {
 class HiveClusterStatus {
  public:
 
-  static const char* ascii_fingerprint; // = "D514EDCFFC86F64A2E924DCD16D4FAD8";
-  static const uint8_t binary_fingerprint[16]; // = {0xD5,0x14,0xED,0xCF,0xFC,0x86,0xF6,0x4A,0x2E,0x92,0x4D,0xCD,0x16,0xD4,0xFA,0xD8};
-
   HiveClusterStatus(const HiveClusterStatus&);
   HiveClusterStatus& operator=(const HiveClusterStatus&);
   HiveClusterStatus() : taskTrackers(0), mapTasks(0), reduceTasks(0), maxMapTasks(0), maxReduceTasks(0), state((JobTrackerState::type)0) {
@@ -103,11 +100,17 @@ class HiveClusterStatus {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const HiveClusterStatus& obj);
+  virtual void printTo(std::ostream& out) const;
 };
 
 void swap(HiveClusterStatus &a, HiveClusterStatus &b);
 
+inline std::ostream& operator<<(std::ostream& out, const HiveClusterStatus& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _HiveServerException__isset {
   _HiveServerException__isset() : message(false), errorCode(false), SQLState(false) {}
   bool message :1;
@@ -118,9 +121,6 @@ typedef struct _HiveServerException__isset {
 class HiveServerException : public ::apache::thrift::TException {
  public:
 
-  static const char* ascii_fingerprint; // = "70563A0628F75DF9555F4D24690B1E26";
-  static const uint8_t binary_fingerprint[16]; // = {0x70,0x56,0x3A,0x06,0x28,0xF7,0x5D,0xF9,0x55,0x5F,0x4D,0x24,0x69,0x0B,0x1E,0x26};
-
   HiveServerException(const HiveServerException&);
   HiveServerException& operator=(const HiveServerException&);
   HiveServerException() : message(), errorCode(0), SQLState() {
@@ -158,11 +158,19 @@ class HiveServerException : public ::apache::thrift::TException {
   uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
   uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
 
-  friend std::ostream& operator<<(std::ostream& out, const HiveServerException& obj);
+  virtual void printTo(std::ostream& out) const;
+  mutable std::string thriftTExceptionMessageHolder_;
+  const char* what() const throw();
 };
 
 void swap(HiveServerException &a, HiveServerException &b);
 
+inline std::ostream& operator<<(std::ostream& out, const HiveServerException& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 }}} // namespace
 
 #endif
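With the friend operator<< declarations removed, streaming a generated struct now goes through the virtual printTo() method and the inline operator<< defined just above, and HiveServerException additionally gains a what() built from the same output. A small sketch of calling code, assuming only the generated setters and the declarations visible in this header (the field values are illustrative):

#include <iostream>
#include <sstream>
#include "hive_service_types.h"

int main() {
  Apache::Hadoop::Hive::HiveServerException ex;
  ex.__set_message("query failed");   // illustrative values
  ex.__set_errorCode(1);
  ex.__set_SQLState("42000");

  std::ostringstream ss;
  ss << ex;                           // inline operator<< forwards to ex.printTo(ss)
  std::cout << ss.str() << std::endl;

  // what() builds its message the same way, via operator<< on *this.
  std::cout << ex.what() << std::endl;
  return 0;
}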

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveClusterStatus.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveClusterStatus.java b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveClusterStatus.java
index 028829d..ad89867 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveClusterStatus.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveClusterStatus.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class HiveClusterStatus implements org.apache.thrift.TBase<HiveClusterStatus, HiveClusterStatus._Fields>, java.io.Serializable, Cloneable, Comparable<HiveClusterStatus> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HiveClusterStatus");
 
@@ -417,19 +417,19 @@ public class HiveClusterStatus implements org.apache.thrift.TBase<HiveClusterSta
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case TASK_TRACKERS:
-      return Integer.valueOf(getTaskTrackers());
+      return getTaskTrackers();
 
     case MAP_TASKS:
-      return Integer.valueOf(getMapTasks());
+      return getMapTasks();
 
     case REDUCE_TASKS:
-      return Integer.valueOf(getReduceTasks());
+      return getReduceTasks();
 
     case MAX_MAP_TASKS:
-      return Integer.valueOf(getMaxMapTasks());
+      return getMaxMapTasks();
 
     case MAX_REDUCE_TASKS:
-      return Integer.valueOf(getMaxReduceTasks());
+      return getMaxReduceTasks();
 
     case STATE:
       return getState();

http://git-wip-us.apache.org/repos/asf/hive/blob/26535378/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java
index 45f21f9..97b1219 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java
@@ -1,5 +1,5 @@
 /**
- * Autogenerated by Thrift Compiler (0.9.2)
+ * Autogenerated by Thrift Compiler (0.9.3)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
  *  @generated
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-21")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 public class HiveServerException extends TException implements org.apache.thrift.TBase<HiveServerException, HiveServerException._Fields>, java.io.Serializable, Cloneable, Comparable<HiveServerException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HiveServerException");
 
@@ -276,7 +276,7 @@ public class HiveServerException extends TException implements org.apache.thrift
       return getMessage();
 
     case ERROR_CODE:
-      return Integer.valueOf(getErrorCode());
+      return getErrorCode();
 
     case SQLSTATE:
       return getSQLState();


[38/55] [abbrv] hive git commit: HIVE-12213 Investigating the test failure TestHCatClient.testTableSchemaPropagation (Aleksei Statkevich via Aihua Xu)

Posted by xu...@apache.org.
HIVE-12213 Investigating the test failure TestHCatClient.testTableSchemaPropagation (Aleksei Statkevich via Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8f964465
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8f964465
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8f964465

Branch: refs/heads/spark
Commit: 8f9644658fd141fd5ea7395d3e97a093f98870bb
Parents: 0808741
Author: Aihua Xu <ax...@cloudera.com>
Authored: Mon Oct 26 12:49:11 2015 -0400
Committer: aihuaxu <ai...@apache.org>
Committed: Mon Oct 26 13:03:48 2015 -0400

----------------------------------------------------------------------
 .../hive/hcatalog/api/TestHCatClient.java       | 39 +++++++++-----------
 .../hadoop/hive/metastore/MetaStoreUtils.java   | 10 +++--
 2 files changed, 24 insertions(+), 25 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/8f964465/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
index 8992d552c..891322a 100644
--- a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
+++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
@@ -90,7 +90,6 @@ public class TestHCatClient {
   private static HiveConf replicationTargetHCatConf;
   private static SecurityManager securityManager;
   private static boolean useExternalMS = false;
-  private static boolean useExternalMSForReplication = false;
 
   public static class RunMS implements Runnable {
 
@@ -1056,14 +1055,14 @@ public class TestHCatClient {
       HCatTable targetTable = targetMetaStore.deserializeTable(tableStringRep);
 
       assertEquals("Table after deserialization should have been identical to sourceTable.",
-          sourceTable.diff(targetTable), HCatTable.NO_DIFF);
+          HCatTable.NO_DIFF, sourceTable.diff(targetTable));
 
       // Create table on Target.
       targetMetaStore.createTable(HCatCreateTableDesc.create(targetTable).build());
       // Verify that the created table is identical to sourceTable.
       targetTable = targetMetaStore.getTable(dbName, tableName);
       assertEquals("Table after deserialization should have been identical to sourceTable.",
-          sourceTable.diff(targetTable), HCatTable.NO_DIFF);
+          HCatTable.NO_DIFF, sourceTable.diff(targetTable));
 
       // Modify sourceTable.
       List<HCatFieldSchema> newColumnSchema = new ArrayList<HCatFieldSchema>(columnSchema);
@@ -1098,7 +1097,7 @@ public class TestHCatClient {
       targetTable = targetMetaStore.getTable(dbName, tableName);
 
       assertEquals("After propagating schema changes, source and target tables should have been equivalent.",
-          targetTable.diff(sourceTable), HCatTable.NO_DIFF);
+          HCatTable.NO_DIFF, targetTable.diff(sourceTable));
 
     }
     catch (Exception unexpected) {
@@ -1157,14 +1156,14 @@ public class TestHCatClient {
 
       sourceMetaStore.addPartition(HCatAddPartitionDesc.create(sourcePartition_1).build());
       assertEquals("Unexpected number of partitions. ",
-                   sourceMetaStore.getPartitions(dbName, tableName).size(), 1);
+                   1, sourceMetaStore.getPartitions(dbName, tableName).size());
       // Verify that partition_1 was added correctly, and properties were inherited from the HCatTable.
       HCatPartition addedPartition_1 = sourceMetaStore.getPartition(dbName, tableName, partitionSpec_1);
-      assertEquals("Column schema doesn't match.", addedPartition_1.getColumns(), sourceTable.getCols());
-      assertEquals("InputFormat doesn't match.", addedPartition_1.getInputFormat(), sourceTable.getInputFileFormat());
-      assertEquals("OutputFormat doesn't match.", addedPartition_1.getOutputFormat(), sourceTable.getOutputFileFormat());
-      assertEquals("SerDe doesn't match.", addedPartition_1.getSerDe(), sourceTable.getSerdeLib());
-      assertEquals("SerDe params don't match.", addedPartition_1.getSerdeParams(), sourceTable.getSerdeParams());
+      assertEquals("Column schema doesn't match.", sourceTable.getCols(), addedPartition_1.getColumns());
+      assertEquals("InputFormat doesn't match.", sourceTable.getInputFileFormat(), addedPartition_1.getInputFormat());
+      assertEquals("OutputFormat doesn't match.", sourceTable.getOutputFileFormat(), addedPartition_1.getOutputFormat());
+      assertEquals("SerDe doesn't match.", sourceTable.getSerdeLib(), addedPartition_1.getSerDe());
+      assertEquals("SerDe params don't match.", sourceTable.getSerdeParams(), addedPartition_1.getSerdeParams());
 
       // Replicate table definition.
 
@@ -1177,8 +1176,7 @@ public class TestHCatClient {
       targetMetaStore.createTable(HCatCreateTableDesc.create(targetTable).build());
       targetTable = targetMetaStore.getTable(dbName, tableName);
 
-      assertEquals("Created table doesn't match the source.",
-                  targetTable.diff(sourceTable), HCatTable.NO_DIFF);
+      assertEquals("Created table doesn't match the source.", HCatTable.NO_DIFF, targetTable.diff(sourceTable));
 
       // Modify Table schema at the source.
       List<HCatFieldSchema> newColumnSchema = new ArrayList<HCatFieldSchema>(columnSchema);
@@ -1215,7 +1213,7 @@ public class TestHCatClient {
 
       List<HCatPartition> targetPartitions = targetMetaStore.getPartitions(dbName, tableName);
 
-      assertEquals("Expected the same number of partitions. ", targetPartitions.size(), sourcePartitions.size());
+      assertEquals("Expected the same number of partitions. ", sourcePartitions.size(), targetPartitions.size());
 
       for (int i=0; i<targetPartitions.size(); ++i) {
         HCatPartition sourcePartition = sourcePartitions.get(i),
@@ -1286,14 +1284,14 @@ public class TestHCatClient {
 
       sourceMetaStore.addPartition(HCatAddPartitionDesc.create(sourcePartition_1).build());
       assertEquals("Unexpected number of partitions. ",
-          sourceMetaStore.getPartitions(dbName, tableName).size(), 1);
+          1, sourceMetaStore.getPartitions(dbName, tableName).size());
       // Verify that partition_1 was added correctly, and properties were inherited from the HCatTable.
       HCatPartition addedPartition_1 = sourceMetaStore.getPartition(dbName, tableName, partitionSpec_1);
-      assertEquals("Column schema doesn't match.", addedPartition_1.getColumns(), sourceTable.getCols());
-      assertEquals("InputFormat doesn't match.", addedPartition_1.getInputFormat(), sourceTable.getInputFileFormat());
-      assertEquals("OutputFormat doesn't match.", addedPartition_1.getOutputFormat(), sourceTable.getOutputFileFormat());
-      assertEquals("SerDe doesn't match.", addedPartition_1.getSerDe(), sourceTable.getSerdeLib());
-      assertEquals("SerDe params don't match.", addedPartition_1.getSerdeParams(), sourceTable.getSerdeParams());
+      assertEquals("Column schema doesn't match.", sourceTable.getCols(), addedPartition_1.getColumns());
+      assertEquals("InputFormat doesn't match.", sourceTable.getInputFileFormat(), addedPartition_1.getInputFormat());
+      assertEquals("OutputFormat doesn't match.", sourceTable.getOutputFileFormat(), addedPartition_1.getOutputFormat());
+      assertEquals("SerDe doesn't match.", sourceTable.getSerdeLib(), addedPartition_1.getSerDe());
+      assertEquals("SerDe params don't match.", sourceTable.getSerdeParams(), addedPartition_1.getSerdeParams());
 
       // Replicate table definition.
 
@@ -1306,8 +1304,7 @@ public class TestHCatClient {
       targetMetaStore.createTable(HCatCreateTableDesc.create(targetTable).build());
       targetTable = targetMetaStore.getTable(dbName, tableName);
 
-      assertEquals("Created table doesn't match the source.",
-          targetTable.diff(sourceTable), HCatTable.NO_DIFF);
+      assertEquals("Created table doesn't match the source.", HCatTable.NO_DIFF, targetTable.diff(sourceTable));
 
       // Modify Table schema at the source.
       List<HCatFieldSchema> newColumnSchema = new ArrayList<HCatFieldSchema>(columnSchema);

http://git-wip-us.apache.org/repos/asf/hive/blob/8f964465/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 12f3f16..3fde18e 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -185,11 +185,13 @@ public class MetaStoreUtils {
 
   public static boolean updateTableStatsFast(Database db, Table tbl, Warehouse wh,
       boolean madeDir, boolean forceRecompute) throws MetaException {
-    FileStatus[] fileStatuses = {};
-    if (tbl.getPartitionKeysSize() == 0) { // Update stats only when unpartitioned
-      fileStatuses = wh.getFileStatusesForUnpartitionedTable(db, tbl);
+    if (tbl.getPartitionKeysSize() == 0) {
+      // Update stats only when unpartitioned
+      FileStatus[] fileStatuses = wh.getFileStatusesForUnpartitionedTable(db, tbl);
+      return updateTableStatsFast(tbl, fileStatuses, madeDir, forceRecompute);
+    } else {
+      return false;
     }
-    return updateTableStatsFast(tbl, fileStatuses, madeDir, forceRecompute);
   }
 
   /**


[33/55] [abbrv] hive git commit: HIVE-12189: The list in pushdownPreds of ppd.ExprWalkerInfo should not be allowed to grow very large (Yongzhi Chen, reviewed by Chao Sun)

Posted by xu...@apache.org.
HIVE-12189: The list in pushdownPreds of ppd.ExprWalkerInfo should not be allowed to grow very large (Yongzhi Chen, reviewed by Chao Sun)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f415ce95
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f415ce95
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f415ce95

Branch: refs/heads/spark
Commit: f415ce9567661dcdc4b68b49ccb6ee4962a330e4
Parents: 9ea51d1
Author: Yongzhi Chen <yo...@hotmail.com>
Authored: Sun Oct 25 23:24:31 2015 -0700
Committer: Chao Sun <su...@apache.org>
Committed: Sun Oct 25 23:24:31 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/ppd/ExprWalkerInfo.java      | 23 ++++++++++++++++++--
 1 file changed, 21 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/f415ce95/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
index e4b768e..fca671c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
@@ -149,7 +149,13 @@ public class ExprWalkerInfo implements NodeProcessorCtx {
   }
 
   public void addFinalCandidate(String alias, ExprNodeDesc expr) {
-    getPushdownPreds(alias).add(expr);
+    List<ExprNodeDesc> predicates = getPushdownPreds(alias);
+    for (ExprNodeDesc curPred: predicates) {
+      if (curPred.isSame(expr)) {
+        return;
+      }
+    }
+    predicates.add(expr);
   }
 
   /**
@@ -159,7 +165,20 @@ public class ExprWalkerInfo implements NodeProcessorCtx {
    * @param pushDowns
    */
   public void addPushDowns(String alias, List<ExprNodeDesc> pushDowns) {
-    getPushdownPreds(alias).addAll(pushDowns);
+    List<ExprNodeDesc> predicates = getPushdownPreds(alias);
+    boolean isNew;
+    for (ExprNodeDesc newPred: pushDowns) {
+      isNew = true;
+      for (ExprNodeDesc curPred: predicates) {
+        if (curPred.isSame(newPred)) {
+          isNew = false;
+          break;
+        }
+      }
+      if (isNew) {
+        predicates.add(newPred);
+      }
+    }
   }
 
   /**