Posted to commits@hive.apache.org by gu...@apache.org on 2014/01/30 01:24:29 UTC

svn commit: r1562653 [8/10] - in /hive/trunk: itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/ metastore/if/ metastore/src/gen/thrift/gen-cpp/ metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ metastore/src/gen...

Modified: hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py?rev=1562653&r1=1562652&r2=1562653&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py Thu Jan 30 00:24:28 2014
@@ -3412,6 +3412,376 @@ class PartitionsByExprRequest:
   def __ne__(self, other):
     return not (self == other)
 
+class TableStatsResult:
+  """
+  Attributes:
+   - tableStats
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.LIST, 'tableStats', (TType.STRUCT,(ColumnStatisticsObj, ColumnStatisticsObj.thrift_spec)), None, ), # 1
+  )
+
+  def __init__(self, tableStats=None,):
+    self.tableStats = tableStats
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.LIST:
+          self.tableStats = []
+          (_etype237, _size234) = iprot.readListBegin()
+          for _i238 in xrange(_size234):
+            _elem239 = ColumnStatisticsObj()
+            _elem239.read(iprot)
+            self.tableStats.append(_elem239)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('TableStatsResult')
+    if self.tableStats is not None:
+      oprot.writeFieldBegin('tableStats', TType.LIST, 1)
+      oprot.writeListBegin(TType.STRUCT, len(self.tableStats))
+      for iter240 in self.tableStats:
+        iter240.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.tableStats is None:
+      raise TProtocol.TProtocolException(message='Required field tableStats is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class PartitionsStatsResult:
+  """
+  Attributes:
+   - partStats
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.MAP, 'partStats', (TType.STRING,None,TType.LIST,(TType.STRUCT,(ColumnStatisticsObj, ColumnStatisticsObj.thrift_spec))), None, ), # 1
+  )
+
+  def __init__(self, partStats=None,):
+    self.partStats = partStats
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.MAP:
+          self.partStats = {}
+          (_ktype242, _vtype243, _size241 ) = iprot.readMapBegin() 
+          for _i245 in xrange(_size241):
+            _key246 = iprot.readString();
+            _val247 = []
+            (_etype251, _size248) = iprot.readListBegin()
+            for _i252 in xrange(_size248):
+              _elem253 = ColumnStatisticsObj()
+              _elem253.read(iprot)
+              _val247.append(_elem253)
+            iprot.readListEnd()
+            self.partStats[_key246] = _val247
+          iprot.readMapEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('PartitionsStatsResult')
+    if self.partStats is not None:
+      oprot.writeFieldBegin('partStats', TType.MAP, 1)
+      oprot.writeMapBegin(TType.STRING, TType.LIST, len(self.partStats))
+      for kiter254,viter255 in self.partStats.items():
+        oprot.writeString(kiter254)
+        oprot.writeListBegin(TType.STRUCT, len(viter255))
+        for iter256 in viter255:
+          iter256.write(oprot)
+        oprot.writeListEnd()
+      oprot.writeMapEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.partStats is None:
+      raise TProtocol.TProtocolException(message='Required field partStats is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class TableStatsRequest:
+  """
+  Attributes:
+   - dbName
+   - tblName
+   - colNames
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'dbName', None, None, ), # 1
+    (2, TType.STRING, 'tblName', None, None, ), # 2
+    (3, TType.LIST, 'colNames', (TType.STRING,None), None, ), # 3
+  )
+
+  def __init__(self, dbName=None, tblName=None, colNames=None,):
+    self.dbName = dbName
+    self.tblName = tblName
+    self.colNames = colNames
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.dbName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.tblName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.LIST:
+          self.colNames = []
+          (_etype260, _size257) = iprot.readListBegin()
+          for _i261 in xrange(_size257):
+            _elem262 = iprot.readString();
+            self.colNames.append(_elem262)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('TableStatsRequest')
+    if self.dbName is not None:
+      oprot.writeFieldBegin('dbName', TType.STRING, 1)
+      oprot.writeString(self.dbName)
+      oprot.writeFieldEnd()
+    if self.tblName is not None:
+      oprot.writeFieldBegin('tblName', TType.STRING, 2)
+      oprot.writeString(self.tblName)
+      oprot.writeFieldEnd()
+    if self.colNames is not None:
+      oprot.writeFieldBegin('colNames', TType.LIST, 3)
+      oprot.writeListBegin(TType.STRING, len(self.colNames))
+      for iter263 in self.colNames:
+        oprot.writeString(iter263)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.dbName is None:
+      raise TProtocol.TProtocolException(message='Required field dbName is unset!')
+    if self.tblName is None:
+      raise TProtocol.TProtocolException(message='Required field tblName is unset!')
+    if self.colNames is None:
+      raise TProtocol.TProtocolException(message='Required field colNames is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class PartitionsStatsRequest:
+  """
+  Attributes:
+   - dbName
+   - tblName
+   - colNames
+   - partNames
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'dbName', None, None, ), # 1
+    (2, TType.STRING, 'tblName', None, None, ), # 2
+    (3, TType.LIST, 'colNames', (TType.STRING,None), None, ), # 3
+    (4, TType.LIST, 'partNames', (TType.STRING,None), None, ), # 4
+  )
+
+  def __init__(self, dbName=None, tblName=None, colNames=None, partNames=None,):
+    self.dbName = dbName
+    self.tblName = tblName
+    self.colNames = colNames
+    self.partNames = partNames
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.dbName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.tblName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.LIST:
+          self.colNames = []
+          (_etype267, _size264) = iprot.readListBegin()
+          for _i268 in xrange(_size264):
+            _elem269 = iprot.readString();
+            self.colNames.append(_elem269)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.LIST:
+          self.partNames = []
+          (_etype273, _size270) = iprot.readListBegin()
+          for _i274 in xrange(_size270):
+            _elem275 = iprot.readString();
+            self.partNames.append(_elem275)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('PartitionsStatsRequest')
+    if self.dbName is not None:
+      oprot.writeFieldBegin('dbName', TType.STRING, 1)
+      oprot.writeString(self.dbName)
+      oprot.writeFieldEnd()
+    if self.tblName is not None:
+      oprot.writeFieldBegin('tblName', TType.STRING, 2)
+      oprot.writeString(self.tblName)
+      oprot.writeFieldEnd()
+    if self.colNames is not None:
+      oprot.writeFieldBegin('colNames', TType.LIST, 3)
+      oprot.writeListBegin(TType.STRING, len(self.colNames))
+      for iter276 in self.colNames:
+        oprot.writeString(iter276)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.partNames is not None:
+      oprot.writeFieldBegin('partNames', TType.LIST, 4)
+      oprot.writeListBegin(TType.STRING, len(self.partNames))
+      for iter277 in self.partNames:
+        oprot.writeString(iter277)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.dbName is None:
+      raise TProtocol.TProtocolException(message='Required field dbName is unset!')
+    if self.tblName is None:
+      raise TProtocol.TProtocolException(message='Required field tblName is unset!')
+    if self.colNames is None:
+      raise TProtocol.TProtocolException(message='Required field colNames is unset!')
+    if self.partNames is None:
+      raise TProtocol.TProtocolException(message='Required field partNames is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
 class AddPartitionsResult:
   """
   Attributes:
@@ -3438,11 +3808,11 @@ class AddPartitionsResult:
       if fid == 1:
         if ftype == TType.LIST:
           self.partitions = []
-          (_etype237, _size234) = iprot.readListBegin()
-          for _i238 in xrange(_size234):
-            _elem239 = Partition()
-            _elem239.read(iprot)
-            self.partitions.append(_elem239)
+          (_etype281, _size278) = iprot.readListBegin()
+          for _i282 in xrange(_size278):
+            _elem283 = Partition()
+            _elem283.read(iprot)
+            self.partitions.append(_elem283)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -3459,8 +3829,8 @@ class AddPartitionsResult:
     if self.partitions is not None:
       oprot.writeFieldBegin('partitions', TType.LIST, 1)
       oprot.writeListBegin(TType.STRUCT, len(self.partitions))
-      for iter240 in self.partitions:
-        iter240.write(oprot)
+      for iter284 in self.partitions:
+        iter284.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -3529,11 +3899,11 @@ class AddPartitionsRequest:
       elif fid == 3:
         if ftype == TType.LIST:
           self.parts = []
-          (_etype244, _size241) = iprot.readListBegin()
-          for _i245 in xrange(_size241):
-            _elem246 = Partition()
-            _elem246.read(iprot)
-            self.parts.append(_elem246)
+          (_etype288, _size285) = iprot.readListBegin()
+          for _i289 in xrange(_size285):
+            _elem290 = Partition()
+            _elem290.read(iprot)
+            self.parts.append(_elem290)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -3568,8 +3938,8 @@ class AddPartitionsRequest:
     if self.parts is not None:
       oprot.writeFieldBegin('parts', TType.LIST, 3)
       oprot.writeListBegin(TType.STRUCT, len(self.parts))
-      for iter247 in self.parts:
-        iter247.write(oprot)
+      for iter291 in self.parts:
+        iter291.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     if self.ifNotExists is not None:

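The four structs above batch column-statistics retrieval: the table-level request carries a list of column names and its result a list of ColumnStatisticsObj, while the partition-level pair adds a list of partition names and returns a map keyed by partition name. A minimal sketch of populating the matching gen-javabean classes from this commit (the database, table, column, and partition names are made up for illustration):

    import java.util.Arrays;

    import org.apache.hadoop.hive.metastore.api.PartitionsStatsRequest;
    import org.apache.hadoop.hive.metastore.api.TableStatsRequest;

    public class StatsRequestSketch {
      public static void main(String[] args) {
        // One round trip for several columns of one table.
        TableStatsRequest tableReq = new TableStatsRequest(
            "default", "web_logs", Arrays.asList("ip", "bytes_sent"));
        // One round trip for several columns across several partitions.
        PartitionsStatsRequest partReq = new PartitionsStatsRequest(
            "default", "web_logs",
            Arrays.asList("ip", "bytes_sent"),                  // colNames
            Arrays.asList("ds=2014-01-01", "ds=2014-01-02"));   // partNames
        System.out.println(tableReq);
        System.out.println(partReq);
      }
    }
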
Modified: hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb?rev=1562653&r1=1562652&r2=1562653&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb Thu Jan 30 00:24:28 2014
@@ -773,6 +773,89 @@ class PartitionsByExprRequest
   ::Thrift::Struct.generate_accessors self
 end
 
+class TableStatsResult
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  TABLESTATS = 1
+
+  FIELDS = {
+    TABLESTATS => {:type => ::Thrift::Types::LIST, :name => 'tableStats', :element => {:type => ::Thrift::Types::STRUCT, :class => ::ColumnStatisticsObj}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field tableStats is unset!') unless @tableStats
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class PartitionsStatsResult
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  PARTSTATS = 1
+
+  FIELDS = {
+    PARTSTATS => {:type => ::Thrift::Types::MAP, :name => 'partStats', :key => {:type => ::Thrift::Types::STRING}, :value => {:type => ::Thrift::Types::LIST, :element => {:type => ::Thrift::Types::STRUCT, :class => ::ColumnStatisticsObj}}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field partStats is unset!') unless @partStats
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class TableStatsRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  DBNAME = 1
+  TBLNAME = 2
+  COLNAMES = 3
+
+  FIELDS = {
+    DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
+    TBLNAME => {:type => ::Thrift::Types::STRING, :name => 'tblName'},
+    COLNAMES => {:type => ::Thrift::Types::LIST, :name => 'colNames', :element => {:type => ::Thrift::Types::STRING}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field dbName is unset!') unless @dbName
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field tblName is unset!') unless @tblName
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field colNames is unset!') unless @colNames
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class PartitionsStatsRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  DBNAME = 1
+  TBLNAME = 2
+  COLNAMES = 3
+  PARTNAMES = 4
+
+  FIELDS = {
+    DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
+    TBLNAME => {:type => ::Thrift::Types::STRING, :name => 'tblName'},
+    COLNAMES => {:type => ::Thrift::Types::LIST, :name => 'colNames', :element => {:type => ::Thrift::Types::STRING}},
+    PARTNAMES => {:type => ::Thrift::Types::LIST, :name => 'partNames', :element => {:type => ::Thrift::Types::STRING}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field dbName is unset!') unless @dbName
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field tblName is unset!') unless @tblName
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field colNames is unset!') unless @colNames
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field partNames is unset!') unless @partNames
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
 class AddPartitionsResult
   include ::Thrift::Struct, ::Thrift::Struct_Union
   PARTITIONS = 1

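The Ruby validate methods above mirror the IDL: every field of all four structs is required, so a caller has to populate each field before serialization. A minimal sketch of the corresponding check on the Java side, assuming the standard Thrift-generated behavior that validate() throws when a required field is unset (the exception type and message come from the generator, not from this commit):

    TableStatsRequest req = new TableStatsRequest();
    req.setDbName("default");
    req.setTblName("web_logs");
    // colNames deliberately left unset
    try {
      req.validate();
    } catch (org.apache.thrift.TException e) {
      System.err.println("rejected: " + e.getMessage());
    }
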
Modified: hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb?rev=1562653&r1=1562652&r2=1562653&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb Thu Jan 30 00:24:28 2014
@@ -1180,6 +1180,40 @@ module ThriftHiveMetastore
       raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_partition_column_statistics failed: unknown result')
     end
 
+    def get_table_statistics_req(request)
+      send_get_table_statistics_req(request)
+      return recv_get_table_statistics_req()
+    end
+
+    def send_get_table_statistics_req(request)
+      send_message('get_table_statistics_req', Get_table_statistics_req_args, :request => request)
+    end
+
+    def recv_get_table_statistics_req()
+      result = receive_message(Get_table_statistics_req_result)
+      return result.success unless result.success.nil?
+      raise result.o1 unless result.o1.nil?
+      raise result.o2 unless result.o2.nil?
+      raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_table_statistics_req failed: unknown result')
+    end
+
+    def get_partitions_statistics_req(request)
+      send_get_partitions_statistics_req(request)
+      return recv_get_partitions_statistics_req()
+    end
+
+    def send_get_partitions_statistics_req(request)
+      send_message('get_partitions_statistics_req', Get_partitions_statistics_req_args, :request => request)
+    end
+
+    def recv_get_partitions_statistics_req()
+      result = receive_message(Get_partitions_statistics_req_result)
+      return result.success unless result.success.nil?
+      raise result.o1 unless result.o1.nil?
+      raise result.o2 unless result.o2.nil?
+      raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_partitions_statistics_req failed: unknown result')
+    end
+
     def delete_partition_column_statistics(db_name, tbl_name, part_name, col_name)
       send_delete_partition_column_statistics(db_name, tbl_name, part_name, col_name)
       return recv_delete_partition_column_statistics()
@@ -2384,6 +2418,32 @@ module ThriftHiveMetastore
       write_result(result, oprot, 'get_partition_column_statistics', seqid)
     end
 
+    def process_get_table_statistics_req(seqid, iprot, oprot)
+      args = read_args(iprot, Get_table_statistics_req_args)
+      result = Get_table_statistics_req_result.new()
+      begin
+        result.success = @handler.get_table_statistics_req(args.request)
+      rescue ::NoSuchObjectException => o1
+        result.o1 = o1
+      rescue ::MetaException => o2
+        result.o2 = o2
+      end
+      write_result(result, oprot, 'get_table_statistics_req', seqid)
+    end
+
+    def process_get_partitions_statistics_req(seqid, iprot, oprot)
+      args = read_args(iprot, Get_partitions_statistics_req_args)
+      result = Get_partitions_statistics_req_result.new()
+      begin
+        result.success = @handler.get_partitions_statistics_req(args.request)
+      rescue ::NoSuchObjectException => o1
+        result.o1 = o1
+      rescue ::MetaException => o2
+        result.o2 = o2
+      end
+      write_result(result, oprot, 'get_partitions_statistics_req', seqid)
+    end
+
     def process_delete_partition_column_statistics(seqid, iprot, oprot)
       args = read_args(iprot, Delete_partition_column_statistics_args)
       result = Delete_partition_column_statistics_result.new()
@@ -5289,6 +5349,78 @@ module ThriftHiveMetastore
     ::Thrift::Struct.generate_accessors self
   end
 
+  class Get_table_statistics_req_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    REQUEST = 1
+
+    FIELDS = {
+      REQUEST => {:type => ::Thrift::Types::STRUCT, :name => 'request', :class => ::TableStatsRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Get_table_statistics_req_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    SUCCESS = 0
+    O1 = 1
+    O2 = 2
+
+    FIELDS = {
+      SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::TableStatsResult},
+      O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
+      O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::MetaException}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Get_partitions_statistics_req_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    REQUEST = 1
+
+    FIELDS = {
+      REQUEST => {:type => ::Thrift::Types::STRUCT, :name => 'request', :class => ::PartitionsStatsRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Get_partitions_statistics_req_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    SUCCESS = 0
+    O1 = 1
+    O2 = 2
+
+    FIELDS = {
+      SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::PartitionsStatsResult},
+      O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
+      O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::MetaException}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
   class Delete_partition_column_statistics_args
     include ::Thrift::Struct, ::Thrift::Struct_Union
     DB_NAME = 1

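On the service side, the processor maps a handler's NoSuchObjectException to field o1 and MetaException to o2; the client stub re-raises whichever is set and falls back to an ApplicationException when no result came back at all. A Java caller of the equivalent generated client sees the same contract; a minimal sketch, assuming a connected client stub and surrounding variables:

    try {
      TableStatsResult res = client.get_table_statistics_req(
          new TableStatsRequest(dbName, tblName, colNames));
      for (ColumnStatisticsObj cso : res.getTableStats()) {
        // consume per-column statistics
      }
    } catch (NoSuchObjectException e) {
      // o1: the table (or database) does not exist
    } catch (MetaException e) {
      // o2: metastore-side failure
    } catch (TException e) {
      // transport error, or "failed: unknown result"
    }
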
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1562653&r1=1562652&r2=1562653&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Thu Jan 30 00:24:28 2014
@@ -83,6 +83,8 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.PartitionEventType;
 import org.apache.hadoop.hive.metastore.api.PartitionsByExprRequest;
 import org.apache.hadoop.hive.metastore.api.PartitionsByExprResult;
+import org.apache.hadoop.hive.metastore.api.PartitionsStatsRequest;
+import org.apache.hadoop.hive.metastore.api.PartitionsStatsResult;
 import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
@@ -90,6 +92,8 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.TableStatsRequest;
+import org.apache.hadoop.hive.metastore.api.TableStatsResult;
 import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
 import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
@@ -146,6 +150,7 @@ import org.apache.thrift.transport.TTran
 
 import com.facebook.fb303.FacebookBase;
 import com.facebook.fb303.fb_status;
+import com.google.common.collect.Lists;
 
 /**
  * TODO:pc remove application logic to a separate interface.
@@ -3193,17 +3198,34 @@ public class HiveMetaStore extends Thrif
                     " column=" + colName);
       ColumnStatistics statsObj = null;
       try {
-        statsObj = getMS().getTableColumnStatistics(dbName, tableName, colName);
+        statsObj = getMS().getTableColumnStatistics(
+            dbName, tableName, Lists.newArrayList(colName));
+        assert statsObj.getStatsObjSize() <= 1;
+        return statsObj;
       } finally {
         endFunction("get_column_statistics_by_table: ", statsObj != null, null, tableName);
       }
-      return statsObj;
+    }
+
+    public TableStatsResult get_table_statistics_req(TableStatsRequest request)
+        throws MetaException, NoSuchObjectException, TException {
+      String dbName = request.getDbName(), tblName = request.getTblName();
+      startFunction("get_table_statistics_req: db=" + dbName + " table=" + tblName);
+      TableStatsResult result = null;
+      try {
+        ColumnStatistics cs = getMS().getTableColumnStatistics(
+            dbName, tblName, request.getColNames());
+        result = new TableStatsResult(
+            cs == null ? Lists.<ColumnStatisticsObj>newArrayList() : cs.getStatsObj());
+      } finally {
+        endFunction("get_table_statistics_req: ", result == null, null, tblName);
+      }
+      return result;
     }
 
     public ColumnStatistics get_partition_column_statistics(String dbName, String tableName,
       String partName, String colName) throws NoSuchObjectException, MetaException,
-      InvalidInputException, TException,InvalidObjectException
-    {
+      InvalidInputException, TException, InvalidObjectException {
       dbName = dbName.toLowerCase();
       tableName = tableName.toLowerCase();
       colName = colName.toLowerCase();
@@ -3213,14 +3235,39 @@ public class HiveMetaStore extends Thrif
       ColumnStatistics statsObj = null;
 
       try {
-        List<String> partVals = getPartValsFromName(getMS(), dbName, tableName, partName);
-        statsObj = getMS().getPartitionColumnStatistics(dbName, tableName, convertedPartName,
-                                                            partVals, colName);
+        List<ColumnStatistics> list = getMS().getPartitionColumnStatistics(dbName, tableName,
+            Lists.newArrayList(convertedPartName), Lists.newArrayList(colName));
+        if (list.isEmpty()) return null;
+        if (list.size() != 1) {
+          throw new MetaException(list.size() + " statistics for single column and partition");
+        }
+        statsObj = list.get(0);
       } finally {
         endFunction("get_column_statistics_by_partition: ", statsObj != null, null, tableName);
       }
       return statsObj;
-   }
+    }
+
+    public PartitionsStatsResult get_partitions_statistics_req(PartitionsStatsRequest request)
+        throws MetaException, NoSuchObjectException, TException {
+      String dbName = request.getDbName(), tblName = request.getTblName();
+      startFunction("get_partitions_statistics_req: db=" + dbName + " table=" + tblName);
+
+      PartitionsStatsResult result = null;
+      try {
+        List<ColumnStatistics> stats = getMS().getPartitionColumnStatistics(
+            dbName, tblName, request.getPartNames(), request.getColNames());
+        Map<String, List<ColumnStatisticsObj>> map =
+            new HashMap<String, List<ColumnStatisticsObj>>();
+        for (ColumnStatistics stat : stats) {
+          map.put(stat.getStatsDesc().getPartName(), stat.getStatsObj());
+        }
+        result = new PartitionsStatsResult(map);
+      } finally {
+        endFunction("get_partitions_statistics_req: ", result == null, null, tblName);
+      }
+      return result;
+    }
 
     public boolean update_table_column_statistics(ColumnStatistics colStats)
       throws NoSuchObjectException,InvalidObjectException,MetaException,TException,

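get_partitions_statistics_req above flattens the per-partition ColumnStatistics structs into a single map keyed by partition name, so a partition with no stored statistics simply has no entry. A minimal sketch of consuming the result (variable names illustrative, with a PartitionsStatsResult already in hand):

    Map<String, List<ColumnStatisticsObj>> byPart = result.getPartStats();
    for (Map.Entry<String, List<ColumnStatisticsObj>> e : byPart.entrySet()) {
      // key is a partition name such as "ds=2014-01-01"
      for (ColumnStatisticsObj cso : e.getValue()) {
        System.out.println(e.getKey() + " " + cso.getColName()
            + " (" + cso.getColType() + ")");
      }
    }
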
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1562653&r1=1562652&r2=1562653&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Thu Jan 30 00:24:28 2014
@@ -49,6 +49,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.AddPartitionsResult;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
@@ -66,11 +67,13 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.PartitionEventType;
 import org.apache.hadoop.hive.metastore.api.PartitionsByExprRequest;
 import org.apache.hadoop.hive.metastore.api.PartitionsByExprResult;
+import org.apache.hadoop.hive.metastore.api.PartitionsStatsRequest;
 import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.TableStatsRequest;
 import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
 import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
@@ -1110,17 +1113,19 @@ public class HiveMetaStoreClient impleme
   }
 
   /** {@inheritDoc} */
-  public ColumnStatistics getTableColumnStatistics(String dbName, String tableName,String colName)
-    throws NoSuchObjectException, MetaException, TException, InvalidInputException,
-    InvalidObjectException {
-    return client.get_table_column_statistics(dbName, tableName, colName);
+  public List<ColumnStatisticsObj> getTableColumnStatistics(String dbName, String tableName,
+      List<String> colNames) throws NoSuchObjectException, MetaException, TException,
+      InvalidInputException, InvalidObjectException {
+    return client.get_table_statistics_req(
+        new TableStatsRequest(dbName, tableName, colNames)).getTableStats();
   }
 
   /** {@inheritDoc} */
-  public ColumnStatistics getPartitionColumnStatistics(String dbName, String tableName,
-    String partName, String colName) throws NoSuchObjectException, MetaException, TException,
-    InvalidInputException, InvalidObjectException {
-    return client.get_partition_column_statistics(dbName, tableName, partName, colName);
+  public Map<String, List<ColumnStatisticsObj>> getPartitionColumnStatistics(
+      String dbName, String tableName, List<String> partNames, List<String> colNames)
+          throws NoSuchObjectException, MetaException, TException {
+    return client.get_partitions_statistics_req(
+        new PartitionsStatsRequest(dbName, tableName, colNames, partNames)).getPartStats();
   }
 
   /** {@inheritDoc} */

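The client API changes shape accordingly: one column per call becomes a list of columns, and partition statistics come back as a map rather than a single struct. A sketch of migrating a call site (old signature taken from the removed lines above; db, tbl, and partNames assumed in scope):

    // Before: one column and, for partitions, one partition per round trip.
    //   ColumnStatistics cs = client.getTableColumnStatistics(db, tbl, "ip");
    // After: all columns (and partitions) of interest in one request.
    List<ColumnStatisticsObj> tableStats = client.getTableColumnStatistics(
        db, tbl, Arrays.asList("ip", "bytes_sent"));
    Map<String, List<ColumnStatisticsObj>> partStats =
        client.getPartitionColumnStatistics(
            db, tbl, partNames, Arrays.asList("ip", "bytes_sent"));
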
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java?rev=1562653&r1=1562652&r2=1562653&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java Thu Jan 30 00:24:28 2014
@@ -24,6 +24,7 @@ import java.util.Set;
 
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -811,40 +812,20 @@ public interface IMetaStoreClient {
    throws NoSuchObjectException, InvalidObjectException, MetaException, TException,
    InvalidInputException;
 
- /**
-  * Get table level column statistics given dbName, tableName and colName
-  * @param dbName
-  * @param tableName
-  * @param colName
-  * @return ColumnStatistics struct for a given db, table and col
-  * @throws NoSuchObjectException
-  * @throws MetaException
-  * @throws TException
-  * @throws InvalidInputException
-  * @throws InvalidObjectException
-  */
-
-  public ColumnStatistics getTableColumnStatistics(String dbName, String tableName, String colName)
-      throws NoSuchObjectException, MetaException, TException,
-            InvalidInputException, InvalidObjectException;
-
   /**
-   * Get partition level column statistics given dbName, tableName, partitionName and colName
-   * @param dbName
-   * @param tableName
-   * @param partitionName
-   * @param colName
-   * @return ColumnStatistics struct for a given db, table, partition and col
-   * @throws NoSuchObjectException
-   * @throws MetaException
-   * @throws TException
-   * @throws InvalidInputException
-   * @throws InvalidObjectException
+   * Get table-level column statistics for the given dbName, tableName and column names.
+   * @return a list of ColumnStatisticsObj for the requested columns
    */
+  public List<ColumnStatisticsObj> getTableColumnStatistics(String dbName, String tableName,
+      List<String> colNames) throws NoSuchObjectException, MetaException, TException;
 
-  public ColumnStatistics getPartitionColumnStatistics(String dbName, String tableName,
-    String partitionName, String colName) throws NoSuchObjectException, MetaException, TException,
-            InvalidInputException, InvalidObjectException;
+  /**
+   * Get partition-level column statistics for the given dbName, tableName, partition names and column names.
+   * @return a map from partition name to the ColumnStatisticsObj list for its columns
+   */
+  public Map<String, List<ColumnStatisticsObj>> getPartitionColumnStatistics(String dbName,
+      String tableName,  List<String> partNames, List<String> colNames)
+          throws NoSuchObjectException, MetaException, TException;
 
   /**
    * Delete partition level column statistics given dbName, tableName, partName and colName

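Besides batching, the new interface signatures drop InvalidInputException and InvalidObjectException from the throws clauses (both extend TException, which is why HiveMetaStoreClient may still declare them). An implementation that previously surfaced those conditions now has to fold them into MetaException; a minimal sketch, with the delegate entirely hypothetical:

    @Override
    public List<ColumnStatisticsObj> getTableColumnStatistics(String dbName,
        String tableName, List<String> colNames)
        throws NoSuchObjectException, MetaException, TException {
      try {
        return delegate.getTableStats(dbName, tableName, colNames); // hypothetical delegate
      } catch (IllegalArgumentException e) {
        // conditions formerly reported as InvalidInputException
        throw new MetaException(e.getMessage());
      }
    }
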
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java?rev=1562653&r1=1562652&r2=1562653&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java Thu Jan 30 00:24:28 2014
@@ -41,6 +41,10 @@ import javax.jdo.datastore.JDOConnection
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Order;
@@ -50,6 +54,8 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.model.MDatabase;
+import org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics;
+import org.apache.hadoop.hive.metastore.model.MTableColumnStatistics;
 import org.apache.hadoop.hive.metastore.parser.ExpressionTree;
 import org.apache.hadoop.hive.metastore.parser.ExpressionTree.FilterBuilder;
 import org.apache.hadoop.hive.metastore.parser.ExpressionTree.LeafNode;
@@ -59,6 +65,9 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.parser.ExpressionTree.TreeVisitor;
 import org.apache.hadoop.hive.metastore.parser.FilterLexer;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.datanucleus.store.schema.SchemaTool;
+
+import com.google.common.collect.Lists;
 
 /**
  * This class contains the optimizations for MetaStore that rely on direct SQL access to
@@ -107,8 +116,11 @@ class MetaStoreDirectSql {
       tx = pm.currentTransaction();
       tx.begin();
     }
-    // Force the underlying db to initialize.
+    // Force the underlying db to initialize. This is for tests where tables might not
+    // exist otherwise. It would be nice if there were a "create db" command.
     pm.newQuery(MDatabase.class, "name == ''").execute();
+    pm.newQuery(MTableColumnStatistics.class, "dbName == ''").execute();
+    pm.newQuery(MPartitionColumnStatistics.class, "dbName == ''").execute();
     // Self-test query. If it doesn't work, we will self-disable. What a PITA...
     boolean isCompatibleDatastore = false;
     String selfTestQuery = "select \"DB_ID\" from \"DBS\"";
@@ -171,9 +183,8 @@ class MetaStoreDirectSql {
     if (partNames.isEmpty()) {
       return new ArrayList<Partition>();
     }
-    String list = repeat(",?", partNames.size()).substring(1);
     return getPartitionsViaSqlFilterInternal(dbName, tblName, null,
-        "\"PARTITIONS\".\"PART_NAME\" in (" + list + ")",
+        "\"PARTITIONS\".\"PART_NAME\" in (" + makeParams(partNames.size()) + ")",
         partNames, new ArrayList<String>(), max);
   }
 
@@ -630,11 +641,7 @@ class MetaStoreDirectSql {
       query.closeAll();
       return 0;
     }
-    if (!(result instanceof List<?>)) {
-      throw new MetaException("Wrong result type " + result.getClass());
-    }
-    @SuppressWarnings("unchecked")
-    List<Object[]> list = (List<Object[]>)result;
+    List<Object[]> list = ensureList(result);
     Iterator<Object[]> iter = list.iterator();
     Object[] fields = null;
     for (Map.Entry<Long, T> entry : tree.entrySet()) {
@@ -838,4 +845,135 @@ class MetaStoreDirectSql {
           : "(" + tableValue + " " + node.operator.getSqlOp() + " ?)");
     }
   }
+
+  public ColumnStatistics getTableStats(
+      String dbName, String tableName, List<String> colNames) throws MetaException {
+    if (colNames.isEmpty()) {
+      return null;
+    }
+    boolean doTrace = LOG.isDebugEnabled();
+    long start = doTrace ? System.nanoTime() : 0;
+    String queryText = "select " + STATS_COLLIST + " from \"TAB_COL_STATS\" "
+      + " where \"DB_NAME\" = ? and \"TABLE_NAME\" = ? and \"COLUMN_NAME\" in ("
+      + makeParams(colNames.size()) + ")";
+    Query query = pm.newQuery("javax.jdo.query.SQL", queryText);
+    Object[] params = new Object[colNames.size() + 2];
+    params[0] = dbName;
+    params[1] = tableName;
+    for (int i = 0; i < colNames.size(); ++i) {
+      params[i + 2] = colNames.get(i);
+    }
+    Object qResult = query.executeWithArray(params);
+    long queryTime = doTrace ? System.nanoTime() : 0;
+    if (qResult == null) {
+      query.closeAll();
+      return null;
+    }
+    List<Object[]> list = ensureList(qResult);
+    if (list.isEmpty()) return null;
+    ColumnStatisticsDesc csd = new ColumnStatisticsDesc(true, dbName, tableName);
+    ColumnStatistics result = makeColumnStats(list, csd, 0);
+    timingTrace(doTrace, queryText, start, queryTime);
+    query.closeAll();
+    return result;
+  }
+
+  public List<ColumnStatistics> getPartitionStats(String dbName, String tableName,
+      List<String> partNames, List<String> colNames) throws MetaException {
+    if (colNames.isEmpty() || partNames.isEmpty()) {
+      return Lists.newArrayList();
+    }
+    boolean doTrace = LOG.isDebugEnabled();
+    long start = doTrace ? System.nanoTime() : 0;
+    String queryText = "select \"PARTITION_NAME\", " + STATS_COLLIST + " from \"PART_COL_STATS\""
+      + " where \"DB_NAME\" = ? and \"TABLE_NAME\" = ? and \"COLUMN_NAME\" in ("
+      + makeParams(colNames.size()) + ") AND \"PARTITION_NAME\" in ("
+      + makeParams(partNames.size()) + ") order by \"PARTITION_NAME\"";
+
+    Query query = pm.newQuery("javax.jdo.query.SQL", queryText);
+    Object[] params = new Object[colNames.size() + partNames.size() + 2];
+    int paramI = 0;
+    params[paramI++] = dbName;
+    params[paramI++] = tableName;
+    for (String colName : colNames) {
+      params[paramI++] = colName;
+    }
+    for (String partName : partNames) {
+      params[paramI++] = partName;
+    }
+    Object qResult = query.executeWithArray(params);
+    long queryTime = doTrace ? System.nanoTime() : 0;
+    if (qResult == null) {
+      query.closeAll();
+      return Lists.newArrayList();
+    }
+    List<Object[]> list = ensureList(qResult);
+    List<ColumnStatistics> result = new ArrayList<ColumnStatistics>(
+        Math.min(list.size(), partNames.size()));
+    String lastPartName = null;
+    int from = 0;
+    for (int i = 0; i <= list.size(); ++i) {
+      boolean isLast = i == list.size();
+      String partName = isLast ? null : (String)list.get(i)[0];
+      if (!isLast && partName.equals(lastPartName)) {
+        continue;
+      } else if (from != i) {
+        ColumnStatisticsDesc csd = new ColumnStatisticsDesc(false, dbName, tableName);
+        csd.setPartName(lastPartName);
+        result.add(makeColumnStats(list.subList(from, i), csd, 1));
+      }
+      lastPartName = partName;
+      from = i;
+    }
+
+    timingTrace(doTrace, queryText, start, queryTime);
+    query.closeAll();
+    return result;
+  }
+
+  /** The common query part for table and partition stats */
+  private static final String STATS_COLLIST =
+      "\"COLUMN_NAME\", \"COLUMN_TYPE\", \"LONG_LOW_VALUE\", \"LONG_HIGH_VALUE\", "
+    + "\"DOUBLE_LOW_VALUE\", \"DOUBLE_HIGH_VALUE\", \"NUM_NULLS\", \"NUM_DISTINCTS\", "
+    + "\"AVG_COL_LEN\", \"MAX_COL_LEN\", \"NUM_TRUES\", \"NUM_FALSES\", \"LAST_ANALYZED\"";
+
+  private ColumnStatistics makeColumnStats(
+      List<Object[]> list, ColumnStatisticsDesc csd, int offset) {
+    ColumnStatistics result = new ColumnStatistics();
+    result.setStatsDesc(csd);
+    List<ColumnStatisticsObj> csos = new ArrayList<ColumnStatisticsObj>(list.size());
+    for (Object[] row : list) {
+      // LAST_ANALYZED is stored per column, but the Thrift struct keeps one
+      // value per group of columns; take the lowest for now, as nobody uses it.
+      Object laObj = row[offset + 12];
+      if (laObj != null && (!csd.isSetLastAnalyzed() || csd.getLastAnalyzed() > (Long)laObj)) {
+        csd.setLastAnalyzed((Long)laObj);
+      }
+      ColumnStatisticsData data = new ColumnStatisticsData();
+      // see STATS_COLLIST
+      int i = offset;
+      ColumnStatisticsObj cso = new ColumnStatisticsObj((String)row[i++], (String)row[i++], data);
+      Object llow = row[i++], lhigh = row[i++], dlow = row[i++], dhigh = row[i++],
+        nulls = row[i++], dist = row[i++], avglen = row[i++], maxlen = row[i++],
+        trues = row[i++], falses = row[i++];
+      StatObjectConverter.fillColumnStatisticsData(cso.getColType(), data,
+          llow, lhigh, dlow, dhigh, nulls, dist, avglen, maxlen, trues, falses);
+      csos.add(cso);
+    }
+    result.setStatsObj(csos);
+    return result;
+  }
+
+  @SuppressWarnings("unchecked")
+  private List<Object[]> ensureList(Object result) throws MetaException {
+    if (!(result instanceof List<?>)) {
+      throw new MetaException("Wrong result type " + result.getClass());
+    }
+    return (List<Object[]>)result;
+  }
+
+  private String makeParams(int size) {
+    // With size 0 the query will fail, but at least we'd get to see it in debug output.
+    return (size == 0) ? "" : repeat(",?", size).substring(1);
+  }
 }