Posted to commits@hive.apache.org by da...@apache.org on 2015/09/22 07:03:58 UTC

[27/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)
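This part of the series carries the regenerated Python and Ruby Thrift bindings plus the core Java changes: four new metastore service calls with their request/result structs, new RawStore methods for reading and writing serialized file metadata by file id, and an HBase-backed implementation. A compact Java view of the additions (sketch only; the method signatures are copied from the HiveMetaStore.java hunk further down, and the interface name is just a placeholder):

    // Compact view of what this patch adds (sketch; signatures copied from the
    // HiveMetaStore.java hunk below; "FileMetadataCalls" is a placeholder name).
    interface FileMetadataCalls {
      // the expression-based call is stubbed with UnsupportedOperationException for now
      GetFileMetadataByExprResult get_file_metadata_by_expr(GetFileMetadataByExprRequest req) throws TException;
      GetFileMetadataResult get_file_metadata(GetFileMetadataRequest req) throws TException;
      PutFileMetadataResult put_file_metadata(PutFileMetadataRequest req) throws TException;
      ClearFileMetadataResult clear_file_metadata(ClearFileMetadataRequest req) throws TException;
    }
    // New structs, with fields as they appear in the generated Python/Ruby code below:
    //   MetadataPpdResult            { binary metadata; binary includeBitset }
    //   GetFileMetadataByExprRequest { list<i64> fileIds; binary expr }
    //   GetFileMetadataByExprResult  { map<i64, MetadataPpdResult> metadata; bool isSupported; list<i64> unknownFileIds }
    //   GetFileMetadataRequest       { list<i64> fileIds }
    //   GetFileMetadataResult        { map<i64, binary> metadata; bool isSupported }
    //   PutFileMetadataRequest       { list<i64> fileIds; list<binary> metadata }
    //   ClearFileMetadataRequest     { list<i64> fileIds }
    //   PutFileMetadataResult and ClearFileMetadataResult are empty acknowledgements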

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
index 0b80390..7fcdd7e 100644
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
@@ -9750,6 +9750,726 @@ class FireEventResponse:
   def __ne__(self, other):
     return not (self == other)
 
+class MetadataPpdResult:
+  """
+  Attributes:
+   - metadata
+   - includeBitset
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'metadata', None, None, ), # 1
+    (2, TType.STRING, 'includeBitset', None, None, ), # 2
+  )
+
+  def __init__(self, metadata=None, includeBitset=None,):
+    self.metadata = metadata
+    self.includeBitset = includeBitset
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.metadata = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.includeBitset = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('MetadataPpdResult')
+    if self.metadata is not None:
+      oprot.writeFieldBegin('metadata', TType.STRING, 1)
+      oprot.writeString(self.metadata)
+      oprot.writeFieldEnd()
+    if self.includeBitset is not None:
+      oprot.writeFieldBegin('includeBitset', TType.STRING, 2)
+      oprot.writeString(self.includeBitset)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.metadata is None:
+      raise TProtocol.TProtocolException(message='Required field metadata is unset!')
+    if self.includeBitset is None:
+      raise TProtocol.TProtocolException(message='Required field includeBitset is unset!')
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.metadata)
+    value = (value * 31) ^ hash(self.includeBitset)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class GetFileMetadataByExprResult:
+  """
+  Attributes:
+   - metadata
+   - isSupported
+   - unknownFileIds
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.MAP, 'metadata', (TType.I64,None,TType.STRUCT,(MetadataPpdResult, MetadataPpdResult.thrift_spec)), None, ), # 1
+    (2, TType.BOOL, 'isSupported', None, None, ), # 2
+    (3, TType.LIST, 'unknownFileIds', (TType.I64,None), None, ), # 3
+  )
+
+  def __init__(self, metadata=None, isSupported=None, unknownFileIds=None,):
+    self.metadata = metadata
+    self.isSupported = isSupported
+    self.unknownFileIds = unknownFileIds
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.MAP:
+          self.metadata = {}
+          (_ktype463, _vtype464, _size462 ) = iprot.readMapBegin()
+          for _i466 in xrange(_size462):
+            _key467 = iprot.readI64();
+            _val468 = MetadataPpdResult()
+            _val468.read(iprot)
+            self.metadata[_key467] = _val468
+          iprot.readMapEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.BOOL:
+          self.isSupported = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.LIST:
+          self.unknownFileIds = []
+          (_etype472, _size469) = iprot.readListBegin()
+          for _i473 in xrange(_size469):
+            _elem474 = iprot.readI64();
+            self.unknownFileIds.append(_elem474)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('GetFileMetadataByExprResult')
+    if self.metadata is not None:
+      oprot.writeFieldBegin('metadata', TType.MAP, 1)
+      oprot.writeMapBegin(TType.I64, TType.STRUCT, len(self.metadata))
+      for kiter475,viter476 in self.metadata.items():
+        oprot.writeI64(kiter475)
+        viter476.write(oprot)
+      oprot.writeMapEnd()
+      oprot.writeFieldEnd()
+    if self.isSupported is not None:
+      oprot.writeFieldBegin('isSupported', TType.BOOL, 2)
+      oprot.writeBool(self.isSupported)
+      oprot.writeFieldEnd()
+    if self.unknownFileIds is not None:
+      oprot.writeFieldBegin('unknownFileIds', TType.LIST, 3)
+      oprot.writeListBegin(TType.I64, len(self.unknownFileIds))
+      for iter477 in self.unknownFileIds:
+        oprot.writeI64(iter477)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.metadata is None:
+      raise TProtocol.TProtocolException(message='Required field metadata is unset!')
+    if self.isSupported is None:
+      raise TProtocol.TProtocolException(message='Required field isSupported is unset!')
+    if self.unknownFileIds is None:
+      raise TProtocol.TProtocolException(message='Required field unknownFileIds is unset!')
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.metadata)
+    value = (value * 31) ^ hash(self.isSupported)
+    value = (value * 31) ^ hash(self.unknownFileIds)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class GetFileMetadataByExprRequest:
+  """
+  Attributes:
+   - fileIds
+   - expr
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.LIST, 'fileIds', (TType.I64,None), None, ), # 1
+    (2, TType.STRING, 'expr', None, None, ), # 2
+  )
+
+  def __init__(self, fileIds=None, expr=None,):
+    self.fileIds = fileIds
+    self.expr = expr
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.LIST:
+          self.fileIds = []
+          (_etype481, _size478) = iprot.readListBegin()
+          for _i482 in xrange(_size478):
+            _elem483 = iprot.readI64();
+            self.fileIds.append(_elem483)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.expr = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('GetFileMetadataByExprRequest')
+    if self.fileIds is not None:
+      oprot.writeFieldBegin('fileIds', TType.LIST, 1)
+      oprot.writeListBegin(TType.I64, len(self.fileIds))
+      for iter484 in self.fileIds:
+        oprot.writeI64(iter484)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.expr is not None:
+      oprot.writeFieldBegin('expr', TType.STRING, 2)
+      oprot.writeString(self.expr)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.fileIds is None:
+      raise TProtocol.TProtocolException(message='Required field fileIds is unset!')
+    if self.expr is None:
+      raise TProtocol.TProtocolException(message='Required field expr is unset!')
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.fileIds)
+    value = (value * 31) ^ hash(self.expr)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class GetFileMetadataResult:
+  """
+  Attributes:
+   - metadata
+   - isSupported
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.MAP, 'metadata', (TType.I64,None,TType.STRING,None), None, ), # 1
+    (2, TType.BOOL, 'isSupported', None, None, ), # 2
+  )
+
+  def __init__(self, metadata=None, isSupported=None,):
+    self.metadata = metadata
+    self.isSupported = isSupported
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.MAP:
+          self.metadata = {}
+          (_ktype486, _vtype487, _size485 ) = iprot.readMapBegin()
+          for _i489 in xrange(_size485):
+            _key490 = iprot.readI64();
+            _val491 = iprot.readString();
+            self.metadata[_key490] = _val491
+          iprot.readMapEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.BOOL:
+          self.isSupported = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('GetFileMetadataResult')
+    if self.metadata is not None:
+      oprot.writeFieldBegin('metadata', TType.MAP, 1)
+      oprot.writeMapBegin(TType.I64, TType.STRING, len(self.metadata))
+      for kiter492,viter493 in self.metadata.items():
+        oprot.writeI64(kiter492)
+        oprot.writeString(viter493)
+      oprot.writeMapEnd()
+      oprot.writeFieldEnd()
+    if self.isSupported is not None:
+      oprot.writeFieldBegin('isSupported', TType.BOOL, 2)
+      oprot.writeBool(self.isSupported)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.metadata is None:
+      raise TProtocol.TProtocolException(message='Required field metadata is unset!')
+    if self.isSupported is None:
+      raise TProtocol.TProtocolException(message='Required field isSupported is unset!')
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.metadata)
+    value = (value * 31) ^ hash(self.isSupported)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class GetFileMetadataRequest:
+  """
+  Attributes:
+   - fileIds
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.LIST, 'fileIds', (TType.I64,None), None, ), # 1
+  )
+
+  def __init__(self, fileIds=None,):
+    self.fileIds = fileIds
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.LIST:
+          self.fileIds = []
+          (_etype497, _size494) = iprot.readListBegin()
+          for _i498 in xrange(_size494):
+            _elem499 = iprot.readI64();
+            self.fileIds.append(_elem499)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('GetFileMetadataRequest')
+    if self.fileIds is not None:
+      oprot.writeFieldBegin('fileIds', TType.LIST, 1)
+      oprot.writeListBegin(TType.I64, len(self.fileIds))
+      for iter500 in self.fileIds:
+        oprot.writeI64(iter500)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.fileIds is None:
+      raise TProtocol.TProtocolException(message='Required field fileIds is unset!')
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.fileIds)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class PutFileMetadataResult:
+
+  thrift_spec = (
+  )
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('PutFileMetadataResult')
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __hash__(self):
+    value = 17
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class PutFileMetadataRequest:
+  """
+  Attributes:
+   - fileIds
+   - metadata
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.LIST, 'fileIds', (TType.I64,None), None, ), # 1
+    (2, TType.LIST, 'metadata', (TType.STRING,None), None, ), # 2
+  )
+
+  def __init__(self, fileIds=None, metadata=None,):
+    self.fileIds = fileIds
+    self.metadata = metadata
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.LIST:
+          self.fileIds = []
+          (_etype504, _size501) = iprot.readListBegin()
+          for _i505 in xrange(_size501):
+            _elem506 = iprot.readI64();
+            self.fileIds.append(_elem506)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.LIST:
+          self.metadata = []
+          (_etype510, _size507) = iprot.readListBegin()
+          for _i511 in xrange(_size507):
+            _elem512 = iprot.readString();
+            self.metadata.append(_elem512)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('PutFileMetadataRequest')
+    if self.fileIds is not None:
+      oprot.writeFieldBegin('fileIds', TType.LIST, 1)
+      oprot.writeListBegin(TType.I64, len(self.fileIds))
+      for iter513 in self.fileIds:
+        oprot.writeI64(iter513)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.metadata is not None:
+      oprot.writeFieldBegin('metadata', TType.LIST, 2)
+      oprot.writeListBegin(TType.STRING, len(self.metadata))
+      for iter514 in self.metadata:
+        oprot.writeString(iter514)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.fileIds is None:
+      raise TProtocol.TProtocolException(message='Required field fileIds is unset!')
+    if self.metadata is None:
+      raise TProtocol.TProtocolException(message='Required field metadata is unset!')
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.fileIds)
+    value = (value * 31) ^ hash(self.metadata)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class ClearFileMetadataResult:
+
+  thrift_spec = (
+  )
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ClearFileMetadataResult')
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __hash__(self):
+    value = 17
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class ClearFileMetadataRequest:
+  """
+  Attributes:
+   - fileIds
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.LIST, 'fileIds', (TType.I64,None), None, ), # 1
+  )
+
+  def __init__(self, fileIds=None,):
+    self.fileIds = fileIds
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.LIST:
+          self.fileIds = []
+          (_etype518, _size515) = iprot.readListBegin()
+          for _i519 in xrange(_size515):
+            _elem520 = iprot.readI64();
+            self.fileIds.append(_elem520)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ClearFileMetadataRequest')
+    if self.fileIds is not None:
+      oprot.writeFieldBegin('fileIds', TType.LIST, 1)
+      oprot.writeListBegin(TType.I64, len(self.fileIds))
+      for iter521 in self.fileIds:
+        oprot.writeI64(iter521)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.fileIds is None:
+      raise TProtocol.TProtocolException(message='Required field fileIds is unset!')
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.fileIds)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
 class GetAllFunctionsResponse:
   """
   Attributes:
@@ -9776,11 +10496,11 @@ class GetAllFunctionsResponse:
       if fid == 1:
         if ftype == TType.LIST:
           self.functions = []
-          (_etype465, _size462) = iprot.readListBegin()
-          for _i466 in xrange(_size462):
-            _elem467 = Function()
-            _elem467.read(iprot)
-            self.functions.append(_elem467)
+          (_etype525, _size522) = iprot.readListBegin()
+          for _i526 in xrange(_size522):
+            _elem527 = Function()
+            _elem527.read(iprot)
+            self.functions.append(_elem527)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -9797,8 +10517,8 @@ class GetAllFunctionsResponse:
     if self.functions is not None:
       oprot.writeFieldBegin('functions', TType.LIST, 1)
       oprot.writeListBegin(TType.STRUCT, len(self.functions))
-      for iter468 in self.functions:
-        iter468.write(oprot)
+      for iter528 in self.functions:
+        iter528.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
index 4bd4302..771de51 100644
--- a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
+++ b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
@@ -2231,6 +2231,173 @@ class FireEventResponse
   ::Thrift::Struct.generate_accessors self
 end
 
+class MetadataPpdResult
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  METADATA = 1
+  INCLUDEBITSET = 2
+
+  FIELDS = {
+    METADATA => {:type => ::Thrift::Types::STRING, :name => 'metadata', :binary => true},
+    INCLUDEBITSET => {:type => ::Thrift::Types::STRING, :name => 'includeBitset', :binary => true}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field metadata is unset!') unless @metadata
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field includeBitset is unset!') unless @includeBitset
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class GetFileMetadataByExprResult
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  METADATA = 1
+  ISSUPPORTED = 2
+  UNKNOWNFILEIDS = 3
+
+  FIELDS = {
+    METADATA => {:type => ::Thrift::Types::MAP, :name => 'metadata', :key => {:type => ::Thrift::Types::I64}, :value => {:type => ::Thrift::Types::STRUCT, :class => ::MetadataPpdResult}},
+    ISSUPPORTED => {:type => ::Thrift::Types::BOOL, :name => 'isSupported'},
+    UNKNOWNFILEIDS => {:type => ::Thrift::Types::LIST, :name => 'unknownFileIds', :element => {:type => ::Thrift::Types::I64}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field metadata is unset!') unless @metadata
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field isSupported is unset!') if @isSupported.nil?
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field unknownFileIds is unset!') unless @unknownFileIds
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class GetFileMetadataByExprRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  FILEIDS = 1
+  EXPR = 2
+
+  FIELDS = {
+    FILEIDS => {:type => ::Thrift::Types::LIST, :name => 'fileIds', :element => {:type => ::Thrift::Types::I64}},
+    EXPR => {:type => ::Thrift::Types::STRING, :name => 'expr', :binary => true}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field fileIds is unset!') unless @fileIds
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field expr is unset!') unless @expr
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class GetFileMetadataResult
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  METADATA = 1
+  ISSUPPORTED = 2
+
+  FIELDS = {
+    METADATA => {:type => ::Thrift::Types::MAP, :name => 'metadata', :key => {:type => ::Thrift::Types::I64}, :value => {:type => ::Thrift::Types::STRING, :binary => true}},
+    ISSUPPORTED => {:type => ::Thrift::Types::BOOL, :name => 'isSupported'}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field metadata is unset!') unless @metadata
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field isSupported is unset!') if @isSupported.nil?
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class GetFileMetadataRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  FILEIDS = 1
+
+  FIELDS = {
+    FILEIDS => {:type => ::Thrift::Types::LIST, :name => 'fileIds', :element => {:type => ::Thrift::Types::I64}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field fileIds is unset!') unless @fileIds
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class PutFileMetadataResult
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+
+  FIELDS = {
+
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class PutFileMetadataRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  FILEIDS = 1
+  METADATA = 2
+
+  FIELDS = {
+    FILEIDS => {:type => ::Thrift::Types::LIST, :name => 'fileIds', :element => {:type => ::Thrift::Types::I64}},
+    METADATA => {:type => ::Thrift::Types::LIST, :name => 'metadata', :element => {:type => ::Thrift::Types::STRING, :binary => true}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field fileIds is unset!') unless @fileIds
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field metadata is unset!') unless @metadata
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class ClearFileMetadataResult
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+
+  FIELDS = {
+
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class ClearFileMetadataRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  FILEIDS = 1
+
+  FIELDS = {
+    FILEIDS => {:type => ::Thrift::Types::LIST, :name => 'fileIds', :element => {:type => ::Thrift::Types::I64}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field fileIds is unset!') unless @fileIds
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
 class GetAllFunctionsResponse
   include ::Thrift::Struct, ::Thrift::Struct_Union
   FUNCTIONS = 1

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb b/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
index 90eb902..8625c7b 100644
--- a/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
+++ b/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
@@ -2104,6 +2104,66 @@ module ThriftHiveMetastore
       return
     end
 
+    def get_file_metadata_by_expr(req)
+      send_get_file_metadata_by_expr(req)
+      return recv_get_file_metadata_by_expr()
+    end
+
+    def send_get_file_metadata_by_expr(req)
+      send_message('get_file_metadata_by_expr', Get_file_metadata_by_expr_args, :req => req)
+    end
+
+    def recv_get_file_metadata_by_expr()
+      result = receive_message(Get_file_metadata_by_expr_result)
+      return result.success unless result.success.nil?
+      raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_file_metadata_by_expr failed: unknown result')
+    end
+
+    def get_file_metadata(req)
+      send_get_file_metadata(req)
+      return recv_get_file_metadata()
+    end
+
+    def send_get_file_metadata(req)
+      send_message('get_file_metadata', Get_file_metadata_args, :req => req)
+    end
+
+    def recv_get_file_metadata()
+      result = receive_message(Get_file_metadata_result)
+      return result.success unless result.success.nil?
+      raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_file_metadata failed: unknown result')
+    end
+
+    def put_file_metadata(req)
+      send_put_file_metadata(req)
+      return recv_put_file_metadata()
+    end
+
+    def send_put_file_metadata(req)
+      send_message('put_file_metadata', Put_file_metadata_args, :req => req)
+    end
+
+    def recv_put_file_metadata()
+      result = receive_message(Put_file_metadata_result)
+      return result.success unless result.success.nil?
+      raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'put_file_metadata failed: unknown result')
+    end
+
+    def clear_file_metadata(req)
+      send_clear_file_metadata(req)
+      return recv_clear_file_metadata()
+    end
+
+    def send_clear_file_metadata(req)
+      send_message('clear_file_metadata', Clear_file_metadata_args, :req => req)
+    end
+
+    def recv_clear_file_metadata()
+      result = receive_message(Clear_file_metadata_result)
+      return result.success unless result.success.nil?
+      raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'clear_file_metadata failed: unknown result')
+    end
+
   end
 
   class Processor < ::FacebookService::Processor 
@@ -3702,6 +3762,34 @@ module ThriftHiveMetastore
       write_result(result, oprot, 'flushCache', seqid)
     end
 
+    def process_get_file_metadata_by_expr(seqid, iprot, oprot)
+      args = read_args(iprot, Get_file_metadata_by_expr_args)
+      result = Get_file_metadata_by_expr_result.new()
+      result.success = @handler.get_file_metadata_by_expr(args.req)
+      write_result(result, oprot, 'get_file_metadata_by_expr', seqid)
+    end
+
+    def process_get_file_metadata(seqid, iprot, oprot)
+      args = read_args(iprot, Get_file_metadata_args)
+      result = Get_file_metadata_result.new()
+      result.success = @handler.get_file_metadata(args.req)
+      write_result(result, oprot, 'get_file_metadata', seqid)
+    end
+
+    def process_put_file_metadata(seqid, iprot, oprot)
+      args = read_args(iprot, Put_file_metadata_args)
+      result = Put_file_metadata_result.new()
+      result.success = @handler.put_file_metadata(args.req)
+      write_result(result, oprot, 'put_file_metadata', seqid)
+    end
+
+    def process_clear_file_metadata(seqid, iprot, oprot)
+      args = read_args(iprot, Clear_file_metadata_args)
+      result = Clear_file_metadata_result.new()
+      result.success = @handler.clear_file_metadata(args.req)
+      write_result(result, oprot, 'clear_file_metadata', seqid)
+    end
+
   end
 
   # HELPER FUNCTIONS AND STRUCTURES
@@ -8466,5 +8554,133 @@ module ThriftHiveMetastore
     ::Thrift::Struct.generate_accessors self
   end
 
+  class Get_file_metadata_by_expr_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    REQ = 1
+
+    FIELDS = {
+      REQ => {:type => ::Thrift::Types::STRUCT, :name => 'req', :class => ::GetFileMetadataByExprRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Get_file_metadata_by_expr_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    SUCCESS = 0
+
+    FIELDS = {
+      SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::GetFileMetadataByExprResult}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Get_file_metadata_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    REQ = 1
+
+    FIELDS = {
+      REQ => {:type => ::Thrift::Types::STRUCT, :name => 'req', :class => ::GetFileMetadataRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Get_file_metadata_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    SUCCESS = 0
+
+    FIELDS = {
+      SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::GetFileMetadataResult}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Put_file_metadata_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    REQ = 1
+
+    FIELDS = {
+      REQ => {:type => ::Thrift::Types::STRUCT, :name => 'req', :class => ::PutFileMetadataRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Put_file_metadata_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    SUCCESS = 0
+
+    FIELDS = {
+      SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::PutFileMetadataResult}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Clear_file_metadata_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    REQ = 1
+
+    FIELDS = {
+      REQ => {:type => ::Thrift::Types::STRUCT, :name => 'req', :class => ::ClearFileMetadataRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Clear_file_metadata_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    SUCCESS = 0
+
+    FIELDS = {
+      SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::ClearFileMetadataResult}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
 end
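The same four calls are also exposed through the generated Java client. A rough client-side usage sketch follows; the no-arg constructors and setter names assume the standard Thrift Java conventions, since the regenerated Java classes are not shown in this mail:

    // Rough client-side sketch (assumes the usual Thrift-generated Java accessors
    // such as setFileIds/setMetadata; not part of this diff).
    import java.nio.ByteBuffer;
    import java.util.Arrays;
    import java.util.List;

    public class FileMetadataClientSketch {
      static void roundTrip(ThriftHiveMetastore.Iface client) throws Exception {
        List<Long> fileIds = Arrays.asList(1L, 2L);

        // Cache serialized metadata, one binary blob per file id (same order as fileIds).
        PutFileMetadataRequest put = new PutFileMetadataRequest();
        put.setFileIds(fileIds);
        put.setMetadata(Arrays.asList(ByteBuffer.wrap(new byte[] {10}), ByteBuffer.wrap(new byte[] {20})));
        client.put_file_metadata(put);

        // Read the cached metadata back as a fileId -> buffer map.
        GetFileMetadataRequest get = new GetFileMetadataRequest();
        get.setFileIds(fileIds);
        GetFileMetadataResult res = client.get_file_metadata(get);

        // Drop the cached entries again (implemented server-side as a put with null metadata).
        ClearFileMetadataRequest clear = new ClearFileMetadataRequest();
        clear.setFileIds(fileIds);
        client.clear_file_metadata(clear);
      }
    }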
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index da3d278..a06efc6 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -52,6 +52,8 @@ import org.apache.hadoop.hive.metastore.api.AddPartitionsResult;
 import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.CheckLockRequest;
+import org.apache.hadoop.hive.metastore.api.ClearFileMetadataRequest;
+import org.apache.hadoop.hive.metastore.api.ClearFileMetadataResult;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
@@ -69,6 +71,10 @@ import org.apache.hadoop.hive.metastore.api.FireEventRequest;
 import org.apache.hadoop.hive.metastore.api.FireEventResponse;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.GetAllFunctionsResponse;
+import org.apache.hadoop.hive.metastore.api.GetFileMetadataByExprRequest;
+import org.apache.hadoop.hive.metastore.api.GetFileMetadataByExprResult;
+import org.apache.hadoop.hive.metastore.api.GetFileMetadataRequest;
+import org.apache.hadoop.hive.metastore.api.GetFileMetadataResult;
 import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
 import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
 import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest;
@@ -114,6 +120,8 @@ import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
+import org.apache.hadoop.hive.metastore.api.PutFileMetadataRequest;
+import org.apache.hadoop.hive.metastore.api.PutFileMetadataResult;
 import org.apache.hadoop.hive.metastore.api.RequestPartsSpec;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
@@ -167,14 +175,6 @@ import org.apache.hadoop.hive.metastore.events.PreEventContext;
 import org.apache.hadoop.hive.metastore.events.PreLoadPartitionDoneEvent;
 import org.apache.hadoop.hive.metastore.events.PreReadDatabaseEvent;
 import org.apache.hadoop.hive.metastore.events.PreReadTableEvent;
-import org.apache.hadoop.hive.metastore.model.MDBPrivilege;
-import org.apache.hadoop.hive.metastore.model.MGlobalPrivilege;
-import org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege;
-import org.apache.hadoop.hive.metastore.model.MPartitionPrivilege;
-import org.apache.hadoop.hive.metastore.model.MRole;
-import org.apache.hadoop.hive.metastore.model.MRoleMap;
-import org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege;
-import org.apache.hadoop.hive.metastore.model.MTablePrivilege;
 import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
 import org.apache.hadoop.hive.metastore.txn.TxnHandler;
 import org.apache.hadoop.hive.serde2.Deserializer;
@@ -208,6 +208,7 @@ import org.apache.thrift.transport.TTransportFactory;
 import javax.jdo.JDOException;
 
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.AbstractMap;
@@ -292,8 +293,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
     }
   }
 
-  public static class HMSHandler extends FacebookBase implements
-      IHMSHandler {
+  public static class HMSHandler extends FacebookBase implements IHMSHandler {
     public static final Log LOG = HiveMetaStore.LOG;
     private String rawStoreClassName;
     private final HiveConf hiveConf; // stores datastore (jpox) properties,
@@ -5700,6 +5700,40 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       }
 
     }
+
+    @Override
+    public GetFileMetadataByExprResult get_file_metadata_by_expr(GetFileMetadataByExprRequest req)
+        throws TException {
+      throw new UnsupportedOperationException("Not implemented yet");
+    }
+
+    @Override
+    public GetFileMetadataResult get_file_metadata(GetFileMetadataRequest req) throws TException {
+      List<Long> fileIds = req.getFileIds();
+      ByteBuffer[] metadatas = getMS().getFileMetadata(fileIds);
+      GetFileMetadataResult result = new GetFileMetadataResult();
+      result.setIsSupported(metadatas != null);
+      if (metadatas != null) {
+        assert metadatas.length == fileIds.size();
+        for (int i = 0; i < metadatas.length; ++i) {
+          result.putToMetadata(fileIds.get(i), metadatas[i]);
+        }
+      }
+      return result;
+    }
+
+    @Override
+    public PutFileMetadataResult put_file_metadata(PutFileMetadataRequest req) throws TException {
+      getMS().putFileMetadata(req.getFileIds(), req.getMetadata());
+      return new PutFileMetadataResult();
+    }
+
+    @Override
+    public ClearFileMetadataResult clear_file_metadata(ClearFileMetadataRequest req)
+        throws TException {
+      getMS().putFileMetadata(req.getFileIds(), null);
+      return new ClearFileMetadataResult();
+    }
   }
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 59378db..1b2700a 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -23,6 +23,7 @@ import static org.apache.commons.lang.StringUtils.join;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.URI;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -7615,4 +7616,14 @@ public class ObjectStore implements RawStore, Configurable {
     event.setMessage((dbEvent.getMessage()));
     return event;
   }
+
+  @Override
+  public ByteBuffer[] getFileMetadata(List<Long> fileIds) {
+    return null; // Not supported for now; callers have to handle this accordingly.
+  }
+
+  @Override
+  public void putFileMetadata(List<Long> fileIds, List<ByteBuffer> metadata) {
+    // Not supported for now.
+  }
 }
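ObjectStore (the JDO/RDBMS-backed store) does not implement the feature yet: it returns null, and HMSHandler turns that into isSupported=false on the response. "Callers have to handle this accordingly" then means checking the flag and falling back; a short fragment in the spirit of the client sketch above (accessor names again assume the usual Thrift Java naming):

    // Caller-side fallback sketch (fragment; assumes standard Thrift-generated accessors).
    GetFileMetadataRequest req = new GetFileMetadataRequest();
    req.setFileIds(fileIds);
    GetFileMetadataResult res = client.get_file_metadata(req);
    if (res.isIsSupported()) {
      Map<Long, ByteBuffer> cached = res.getMetadata(); // fileId -> serialized metadata
    } else {
      // backing RawStore (e.g. ObjectStore) cannot serve it; read file footers directly instead
    }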

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
index 9db1907..1968256 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -22,6 +22,7 @@ import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
+import java.nio.ByteBuffer;
 import java.util.List;
 import java.util.Map;
 
@@ -592,4 +593,8 @@ public interface RawStore extends Configurable {
    * flush statistics objects.  This should be called at the beginning of each query.
    */
   public void flushCache();
+
+  ByteBuffer[] getFileMetadata(List<Long> fileIds) throws MetaException;
+
+  void putFileMetadata(List<Long> fileIds, List<ByteBuffer> metadata) throws MetaException;
 }
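Taken together with the HBase implementation below, the two RawStore methods form a simple by-file-id cache: getFileMetadata returns one buffer per requested id (null entries for ids with nothing stored, or null overall when the store does not support the feature), and putFileMetadata with a null metadata list clears the given ids, which is how clear_file_metadata is implemented in HiveMetaStore above. A minimal in-memory sketch of that contract (illustration only, not part of the patch; class name and HashMap choice are assumptions):

    // Minimal in-memory sketch of the RawStore file-metadata contract (illustration only).
    import java.nio.ByteBuffer;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class InMemoryFileMetadataStore {
      private final Map<Long, ByteBuffer> cache = new HashMap<>();

      ByteBuffer[] getFileMetadata(List<Long> fileIds) {
        ByteBuffer[] result = new ByteBuffer[fileIds.size()];
        for (int i = 0; i < result.length; ++i) {
          result[i] = cache.get(fileIds.get(i)); // null when nothing is stored for this id
        }
        return result;
      }

      void putFileMetadata(List<Long> fileIds, List<ByteBuffer> metadata) {
        for (int i = 0; i < fileIds.size(); ++i) {
          ByteBuffer value = (metadata == null) ? null : metadata.get(i);
          if (value == null) {
            cache.remove(fileIds.get(i)); // null metadata means "clear", mirroring multiModify's Delete path
          } else {
            cache.put(fileIds.get(i), value);
          }
        }
      }
    }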

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
index 66c46a5..f1336dc 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
@@ -27,17 +27,20 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Row;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.RegexStringComparator;
 import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.AggrStats;
@@ -58,6 +61,7 @@ import org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator;
 import org.apache.hive.common.util.BloomFilter;
 
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
@@ -88,6 +92,7 @@ public class HBaseReadWrite {
   @VisibleForTesting final static String SEQUENCES_TABLE = "HBMS_SEQUENCES";
   @VisibleForTesting final static String TABLE_TABLE = "HBMS_TBLS";
   @VisibleForTesting final static String USER_TO_ROLE_TABLE = "HBMS_USER_TO_ROLE";
+  @VisibleForTesting final static String FILE_METADATA_TABLE = "HBMS_FILE_METADATA";
   @VisibleForTesting final static byte[] CATALOG_CF = "c".getBytes(HBaseUtils.ENCODING);
   @VisibleForTesting final static byte[] STATS_CF = "s".getBytes(HBaseUtils.ENCODING);
   @VisibleForTesting final static String NO_CACHE_CONF = "no.use.cache";
@@ -96,7 +101,8 @@ public class HBaseReadWrite {
    */
   public final static String[] tableNames = { AGGR_STATS_TABLE, DB_TABLE, FUNC_TABLE, GLOBAL_PRIVS_TABLE,
                                        PART_TABLE, USER_TO_ROLE_TABLE, ROLE_TABLE, SD_TABLE,
-                                       SECURITY_TABLE, SEQUENCES_TABLE, TABLE_TABLE};
+                                       SECURITY_TABLE, SEQUENCES_TABLE, TABLE_TABLE,
+                                       FILE_METADATA_TABLE };
   public final static Map<String, List<byte[]>> columnFamilies =
       new HashMap<String, List<byte[]>> (tableNames.length);
 
@@ -112,6 +118,8 @@ public class HBaseReadWrite {
     columnFamilies.put(SECURITY_TABLE, Arrays.asList(CATALOG_CF));
     columnFamilies.put(SEQUENCES_TABLE, Arrays.asList(CATALOG_CF));
     columnFamilies.put(TABLE_TABLE, Arrays.asList(CATALOG_CF, STATS_CF));
+    // Stats CF will contain PPD stats.
+    columnFamilies.put(FILE_METADATA_TABLE, Arrays.asList(CATALOG_CF, STATS_CF));
   }
 
   /**
@@ -1714,6 +1722,37 @@ public class HBaseReadWrite {
   }
 
   /**********************************************************************************************
+   * File metadata related methods
+   *********************************************************************************************/
+
+  /**
+   * @param fileIds file ID list.
+   * @return Serialized file metadata.
+   */
+  ByteBuffer[] getFileMetadata(List<Long> fileIds) throws IOException {
+    byte[][] keys = new byte[fileIds.size()][];
+    for (int i = 0; i < fileIds.size(); ++i) {
+      keys[i] = HBaseUtils.makeLongKey(fileIds.get(i));
+    }
+    ByteBuffer[] result = new ByteBuffer[keys.length];
+    multiRead(FILE_METADATA_TABLE, CATALOG_CF, CATALOG_COL, keys, result);
+    return result;
+  }
+
+  /**
+   * @param fileIds file ID list.
+   * @param metadata Serialized file metadata.
+   */
+  void storeFileMetadata(List<Long> fileIds, List<ByteBuffer> metadata)
+      throws IOException, InterruptedException {
+    byte[][] keys = new byte[fileIds.size()][];
+    for (int i = 0; i < fileIds.size(); ++i) {
+      keys[i] = HBaseUtils.makeLongKey(fileIds.get(i));
+    }
+    multiModify(FILE_METADATA_TABLE, keys, CATALOG_CF, CATALOG_COL, metadata);
+  }
+
+  /**********************************************************************************************
    * Security related methods
    *********************************************************************************************/
 
@@ -1899,6 +1938,49 @@ public class HBaseReadWrite {
     return res.getValue(colFam, colName);
   }
 
+  private void multiRead(String table, byte[] colFam, byte[] colName,
+      byte[][] keys, ByteBuffer[] resultDest) throws IOException {
+    assert keys.length == resultDest.length;
+    @SuppressWarnings("deprecation")
+    HTableInterface htab = conn.getHBaseTable(table);
+    List<Get> gets = new ArrayList<>(keys.length);
+    for (byte[] key : keys) {
+      Get g = new Get(key);
+      g.addColumn(colFam, colName);
+      gets.add(g);
+    }
+    Result[] results = htab.get(gets);
+    for (int i = 0; i < results.length; ++i) {
+      Result r = results[i];
+      resultDest[i] = (r.isEmpty() ? null : r.getValueAsByteBuffer(colFam, colName));
+    }
+  }
+
+  private void multiModify(String table, byte[][] keys, byte[] colFam,
+      byte[] colName, List<ByteBuffer> values) throws IOException, InterruptedException {
+    assert values == null || keys.length == values.size();
+    // HBase APIs are weird. To supply bytebuffer value, you have to also have bytebuffer
+    // column name, but not column family. So there. Perhaps we should add these to constants too.
+    ByteBuffer colNameBuf = ByteBuffer.wrap(colName);
+    @SuppressWarnings("deprecation")
+    HTableInterface htab = conn.getHBaseTable(table);
+    List<Row> actions = new ArrayList<>(keys.length);
+    for (int i = 0; i < keys.length; ++i) {
+      ByteBuffer value = (values != null) ? values.get(i) : null;
+      if (value == null) {
+        actions.add(new Delete(keys[i]));
+      } else {
+        Put p = new Put(keys[i]);
+        p.addColumn(colFam, colNameBuf, HConstants.LATEST_TIMESTAMP, value);
+        actions.add(p);
+      }
+    }
+    Object[] results = new Object[keys.length];
+    htab.batch(actions, results);
+    // TODO: should we check results array? we don't care about partial results
+    conn.flush(htab);
+  }
+
   private Result read(String table, byte[] key, byte[] colFam, byte[][] colNames)
       throws IOException {
     HTableInterface htab = conn.getHBaseTable(table);

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
index 717e094..f30fcab 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
@@ -67,6 +67,7 @@ import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
 import org.apache.thrift.TException;
 
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -2239,4 +2240,34 @@ public class HBaseStore implements RawStore {
   @VisibleForTesting HBaseReadWrite backdoor() {
     return getHBase();
   }
+
+  @Override
+  public ByteBuffer[] getFileMetadata(List<Long> fileIds) throws MetaException {
+    openTransaction();
+    boolean commit = true;
+    try {
+      return getHBase().getFileMetadata(fileIds);
+    } catch (IOException e) {
+      commit = false;
+      LOG.error("Unable to get file metadata", e);
+      throw new MetaException("Error reading file metadata " + e.getMessage());
+    } finally {
+      commitOrRoleBack(commit);
+    }
+  }
+
+  @Override
+  public void putFileMetadata(List<Long> fileIds, List<ByteBuffer> metadata) throws MetaException {
+    openTransaction();
+    boolean commit = false;
+    try {
+      getHBase().storeFileMetadata(fileIds, metadata);
+      commit = true;
+    } catch (IOException | InterruptedException e) {
+      LOG.error("Unable to store file metadata", e);
+      throw new MetaException("Error storing file metadata " + e.getMessage());
+    } finally {
+      commitOrRoleBack(commit);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
index b6fa591..841afd4 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
@@ -1302,4 +1302,17 @@ class HBaseUtils {
     keyEnd[keyEnd.length - 1]++;
     return keyEnd;
   }
+
+  static byte[] makeLongKey(long v) {
+    byte[] b = new byte[8];
+    b[0] = (byte)(v >>> 56);
+    b[1] = (byte)(v >>> 48);
+    b[2] = (byte)(v >>> 40);
+    b[3] = (byte)(v >>> 32);
+    b[4] = (byte)(v >>> 24);
+    b[5] = (byte)(v >>> 16);
+    b[6] = (byte)(v >>>  8);
+    b[7] = (byte)(v >>>  0);
+    return b;
+  }
 }

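For reference, makeLongKey above is the standard big-endian encoding of a long; a small check (not part of the patch, class name invented) showing it produces the same bytes as java.nio.ByteBuffer.putLong:

    import java.nio.ByteBuffer;
    import java.util.Arrays;

    public class LongKeySketch {
      // Same shift-based big-endian encoding as HBaseUtils.makeLongKey above.
      static byte[] makeLongKey(long v) {
        byte[] b = new byte[8];
        for (int i = 0; i < 8; ++i) {
          b[i] = (byte) (v >>> (56 - 8 * i));
        }
        return b;
      }

      public static void main(String[] args) {
        long id = 123456789L;
        byte[] viaShifts = makeLongKey(id);
        byte[] viaBuffer = ByteBuffer.allocate(8).putLong(id).array();  // big-endian by default
        System.out.println(Arrays.equals(viaShifts, viaBuffer));        // prints true
      }
    }

One consequence of this layout is that non-negative ids sort in numeric order under HBase's unsigned lexicographic row-key comparison.
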
http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
index 6efadba..0f3331a 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.metastore;
 
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -759,4 +760,13 @@ public class DummyRawStoreControlledCommit implements RawStore, Configurable {
   public void flushCache() {
     objectStore.flushCache();
   }
+
+  @Override
+  public ByteBuffer[] getFileMetadata(List<Long> fileIds) {
+    return null;
+  }
+
+  @Override
+  public void putFileMetadata(List<Long> fileIds, List<ByteBuffer> metadata) {
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
index 00fca8c..126a2c2 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.metastore;
 
+import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -777,6 +778,14 @@ public class DummyRawStoreForJdoConnection implements RawStore {
 
   }
 
+  @Override
+  public ByteBuffer[] getFileMetadata(List<Long> fileIds) {
+    return null;
+  }
+
+  @Override
+  public void putFileMetadata(List<Long> fileIds, List<ByteBuffer> metadata) {
+  }
 }
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Adjacency.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Adjacency.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Adjacency.java
index b26ab96..2153f0e 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Adjacency.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Adjacency.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class Adjacency implements org.apache.thrift.TBase<Adjacency, Adjacency._Fields>, java.io.Serializable, Cloneable, Comparable<Adjacency> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Adjacency");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java
index 0a13175..f864c18 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class Graph implements org.apache.thrift.TBase<Graph, Graph._Fields>, java.io.Serializable, Cloneable, Comparable<Graph> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Graph");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
index 991974c..a7ec4e4 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class Operator implements org.apache.thrift.TBase<Operator, Operator._Fields>, java.io.Serializable, Cloneable, Comparable<Operator> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Operator");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
index f98a7e1..2f64123 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class Query implements org.apache.thrift.TBase<Query, Query._Fields>, java.io.Serializable, Cloneable, Comparable<Query> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Query");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java
index 0994fda..5ccceb1 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class QueryPlan implements org.apache.thrift.TBase<QueryPlan, QueryPlan._Fields>, java.io.Serializable, Cloneable, Comparable<QueryPlan> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("QueryPlan");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java
index e0cd86c..706e335 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class Stage implements org.apache.thrift.TBase<Stage, Stage._Fields>, java.io.Serializable, Cloneable, Comparable<Stage> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Stage");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java
----------------------------------------------------------------------
diff --git a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java
index 66e5e30..2d55d7a 100644
--- a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java
+++ b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class Task implements org.apache.thrift.TBase<Task, Task._Fields>, java.io.Serializable, Cloneable, Comparable<Task> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Task");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java
index 68bb885..eed53fa 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/InnerStruct.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class InnerStruct implements org.apache.thrift.TBase<InnerStruct, InnerStruct._Fields>, java.io.Serializable, Cloneable, Comparable<InnerStruct> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InnerStruct");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
index 208fa82..4410307 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class ThriftTestObj implements org.apache.thrift.TBase<ThriftTestObj, ThriftTestObj._Fields>, java.io.Serializable, Cloneable, Comparable<ThriftTestObj> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ThriftTestObj");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
index 6d32947..59a1f7e 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields>, java.io.Serializable, Cloneable, Comparable<Complex> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Complex");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/IntString.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/IntString.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/IntString.java
index 26e7b38..901fc4b 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/IntString.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/IntString.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class IntString implements org.apache.thrift.TBase<IntString, IntString._Fields>, java.io.Serializable, Cloneable, Comparable<IntString> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("IntString");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
index d937a9c..cc3f375 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class MegaStruct implements org.apache.thrift.TBase<MegaStruct, MegaStruct._Fields>, java.io.Serializable, Cloneable, Comparable<MegaStruct> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("MegaStruct");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MiniStruct.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MiniStruct.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MiniStruct.java
index c25156a..e7498f4 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MiniStruct.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MiniStruct.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class MiniStruct implements org.apache.thrift.TBase<MiniStruct, MiniStruct._Fields>, java.io.Serializable, Cloneable, Comparable<MiniStruct> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("MiniStruct");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
index d1bd61d..a2cbda2 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class SetIntString implements org.apache.thrift.TBase<SetIntString, SetIntString._Fields>, java.io.Serializable, Cloneable, Comparable<SetIntString> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SetIntString");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveClusterStatus.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveClusterStatus.java b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveClusterStatus.java
index d4b6972..7396d02 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveClusterStatus.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveClusterStatus.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class HiveClusterStatus implements org.apache.thrift.TBase<HiveClusterStatus, HiveClusterStatus._Fields>, java.io.Serializable, Cloneable, Comparable<HiveClusterStatus> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HiveClusterStatus");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java
index 760c81e..e15a9e0 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveServerException.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class HiveServerException extends TException implements org.apache.thrift.TBase<HiveServerException, HiveServerException._Fields>, java.io.Serializable, Cloneable, Comparable<HiveServerException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HiveServerException");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
index df793b1..2a7fd9b 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class ThriftHive {
 
   public interface Iface extends org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface {

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java
index 5625516..841139b 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TArrayTypeEntry.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class TArrayTypeEntry implements org.apache.thrift.TBase<TArrayTypeEntry, TArrayTypeEntry._Fields>, java.io.Serializable, Cloneable, Comparable<TArrayTypeEntry> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TArrayTypeEntry");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java
index 202399a..bfea569 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class TBinaryColumn implements org.apache.thrift.TBase<TBinaryColumn, TBinaryColumn._Fields>, java.io.Serializable, Cloneable, Comparable<TBinaryColumn> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TBinaryColumn");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolColumn.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolColumn.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolColumn.java
index 921e9de..5c10fde 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolColumn.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolColumn.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class TBoolColumn implements org.apache.thrift.TBase<TBoolColumn, TBoolColumn._Fields>, java.io.Serializable, Cloneable, Comparable<TBoolColumn> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TBoolColumn");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolValue.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolValue.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolValue.java
index 201c9fb..86b5ce3 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolValue.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolValue.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class TBoolValue implements org.apache.thrift.TBase<TBoolValue, TBoolValue._Fields>, java.io.Serializable, Cloneable, Comparable<TBoolValue> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TBoolValue");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteColumn.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteColumn.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteColumn.java
index cd9b6da..3d42927 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteColumn.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteColumn.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class TByteColumn implements org.apache.thrift.TBase<TByteColumn, TByteColumn._Fields>, java.io.Serializable, Cloneable, Comparable<TByteColumn> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TByteColumn");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteValue.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteValue.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteValue.java
index 42b5bd5..04f8e7c 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteValue.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteValue.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class TByteValue implements org.apache.thrift.TBase<TByteValue, TByteValue._Fields>, java.io.Serializable, Cloneable, Comparable<TByteValue> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TByteValue");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIService.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIService.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIService.java
index 6bdd53d..2630215 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIService.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIService.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class TCLIService {
 
   public interface Iface {

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenReq.java
----------------------------------------------------------------------
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenReq.java
index 1097869..cdabe7d 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenReq.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class TCancelDelegationTokenReq implements org.apache.thrift.TBase<TCancelDelegationTokenReq, TCancelDelegationTokenReq._Fields>, java.io.Serializable, Cloneable, Comparable<TCancelDelegationTokenReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCancelDelegationTokenReq");