You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by gu...@apache.org on 2014/08/22 23:37:21 UTC
svn commit: r1619936 [12/49] - in /hive/branches/cbo: ./ accumulo-handler/
ant/src/org/apache/hadoop/hive/ant/ bin/
common/src/java/org/apache/hadoop/hive/ant/
common/src/java/org/apache/hadoop/hive/common/type/
common/src/java/org/apache/hadoop/hive/c...
Modified: hive/branches/cbo/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py (original)
+++ hive/branches/cbo/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py Fri Aug 22 21:36:47 2014
@@ -4233,6 +4233,77 @@ class AggrStats:
def __ne__(self, other):
return not (self == other)
+class SetPartitionsStatsRequest:
+ """
+ Attributes:
+ - colStats
+ """
+
+ thrift_spec = (
+ None, # 0
+ (1, TType.LIST, 'colStats', (TType.STRUCT,(ColumnStatistics, ColumnStatistics.thrift_spec)), None, ), # 1
+ )
+
+ def __init__(self, colStats=None,):
+ self.colStats = colStats
+
+ def read(self, iprot):
+ if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+ fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+ return
+ iprot.readStructBegin()
+ while True:
+ (fname, ftype, fid) = iprot.readFieldBegin()
+ if ftype == TType.STOP:
+ break
+ if fid == 1:
+ if ftype == TType.LIST:
+ self.colStats = []
+ (_etype226, _size223) = iprot.readListBegin()
+ for _i227 in xrange(_size223):
+ _elem228 = ColumnStatistics()
+ _elem228.read(iprot)
+ self.colStats.append(_elem228)
+ iprot.readListEnd()
+ else:
+ iprot.skip(ftype)
+ else:
+ iprot.skip(ftype)
+ iprot.readFieldEnd()
+ iprot.readStructEnd()
+
+ def write(self, oprot):
+ if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+ oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+ return
+ oprot.writeStructBegin('SetPartitionsStatsRequest')
+ if self.colStats is not None:
+ oprot.writeFieldBegin('colStats', TType.LIST, 1)
+ oprot.writeListBegin(TType.STRUCT, len(self.colStats))
+ for iter229 in self.colStats:
+ iter229.write(oprot)
+ oprot.writeListEnd()
+ oprot.writeFieldEnd()
+ oprot.writeFieldStop()
+ oprot.writeStructEnd()
+
+ def validate(self):
+ if self.colStats is None:
+ raise TProtocol.TProtocolException(message='Required field colStats is unset!')
+ return
+
+
+ def __repr__(self):
+ L = ['%s=%r' % (key, value)
+ for key, value in self.__dict__.iteritems()]
+ return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+ def __ne__(self, other):
+ return not (self == other)
+
class Schema:
"""
Attributes:
@@ -4262,22 +4333,22 @@ class Schema:
if fid == 1:
if ftype == TType.LIST:
self.fieldSchemas = []
- (_etype226, _size223) = iprot.readListBegin()
- for _i227 in xrange(_size223):
- _elem228 = FieldSchema()
- _elem228.read(iprot)
- self.fieldSchemas.append(_elem228)
+ (_etype233, _size230) = iprot.readListBegin()
+ for _i234 in xrange(_size230):
+ _elem235 = FieldSchema()
+ _elem235.read(iprot)
+ self.fieldSchemas.append(_elem235)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.MAP:
self.properties = {}
- (_ktype230, _vtype231, _size229 ) = iprot.readMapBegin()
- for _i233 in xrange(_size229):
- _key234 = iprot.readString();
- _val235 = iprot.readString();
- self.properties[_key234] = _val235
+ (_ktype237, _vtype238, _size236 ) = iprot.readMapBegin()
+ for _i240 in xrange(_size236):
+ _key241 = iprot.readString();
+ _val242 = iprot.readString();
+ self.properties[_key241] = _val242
iprot.readMapEnd()
else:
iprot.skip(ftype)
@@ -4294,16 +4365,16 @@ class Schema:
if self.fieldSchemas is not None:
oprot.writeFieldBegin('fieldSchemas', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.fieldSchemas))
- for iter236 in self.fieldSchemas:
- iter236.write(oprot)
+ for iter243 in self.fieldSchemas:
+ iter243.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.properties is not None:
oprot.writeFieldBegin('properties', TType.MAP, 2)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.properties))
- for kiter237,viter238 in self.properties.items():
- oprot.writeString(kiter237)
- oprot.writeString(viter238)
+ for kiter244,viter245 in self.properties.items():
+ oprot.writeString(kiter244)
+ oprot.writeString(viter245)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -4350,11 +4421,11 @@ class EnvironmentContext:
if fid == 1:
if ftype == TType.MAP:
self.properties = {}
- (_ktype240, _vtype241, _size239 ) = iprot.readMapBegin()
- for _i243 in xrange(_size239):
- _key244 = iprot.readString();
- _val245 = iprot.readString();
- self.properties[_key244] = _val245
+ (_ktype247, _vtype248, _size246 ) = iprot.readMapBegin()
+ for _i250 in xrange(_size246):
+ _key251 = iprot.readString();
+ _val252 = iprot.readString();
+ self.properties[_key251] = _val252
iprot.readMapEnd()
else:
iprot.skip(ftype)
@@ -4371,9 +4442,9 @@ class EnvironmentContext:
if self.properties is not None:
oprot.writeFieldBegin('properties', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.properties))
- for kiter246,viter247 in self.properties.items():
- oprot.writeString(kiter246)
- oprot.writeString(viter247)
+ for kiter253,viter254 in self.properties.items():
+ oprot.writeString(kiter253)
+ oprot.writeString(viter254)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -4423,11 +4494,11 @@ class PartitionsByExprResult:
if fid == 1:
if ftype == TType.LIST:
self.partitions = []
- (_etype251, _size248) = iprot.readListBegin()
- for _i252 in xrange(_size248):
- _elem253 = Partition()
- _elem253.read(iprot)
- self.partitions.append(_elem253)
+ (_etype258, _size255) = iprot.readListBegin()
+ for _i259 in xrange(_size255):
+ _elem260 = Partition()
+ _elem260.read(iprot)
+ self.partitions.append(_elem260)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -4449,8 +4520,8 @@ class PartitionsByExprResult:
if self.partitions is not None:
oprot.writeFieldBegin('partitions', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.partitions))
- for iter254 in self.partitions:
- iter254.write(oprot)
+ for iter261 in self.partitions:
+ iter261.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.hasUnknownPartitions is not None:
@@ -4619,11 +4690,11 @@ class TableStatsResult:
if fid == 1:
if ftype == TType.LIST:
self.tableStats = []
- (_etype258, _size255) = iprot.readListBegin()
- for _i259 in xrange(_size255):
- _elem260 = ColumnStatisticsObj()
- _elem260.read(iprot)
- self.tableStats.append(_elem260)
+ (_etype265, _size262) = iprot.readListBegin()
+ for _i266 in xrange(_size262):
+ _elem267 = ColumnStatisticsObj()
+ _elem267.read(iprot)
+ self.tableStats.append(_elem267)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -4640,8 +4711,8 @@ class TableStatsResult:
if self.tableStats is not None:
oprot.writeFieldBegin('tableStats', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.tableStats))
- for iter261 in self.tableStats:
- iter261.write(oprot)
+ for iter268 in self.tableStats:
+ iter268.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -4690,17 +4761,17 @@ class PartitionsStatsResult:
if fid == 1:
if ftype == TType.MAP:
self.partStats = {}
- (_ktype263, _vtype264, _size262 ) = iprot.readMapBegin()
- for _i266 in xrange(_size262):
- _key267 = iprot.readString();
- _val268 = []
- (_etype272, _size269) = iprot.readListBegin()
- for _i273 in xrange(_size269):
- _elem274 = ColumnStatisticsObj()
- _elem274.read(iprot)
- _val268.append(_elem274)
+ (_ktype270, _vtype271, _size269 ) = iprot.readMapBegin()
+ for _i273 in xrange(_size269):
+ _key274 = iprot.readString();
+ _val275 = []
+ (_etype279, _size276) = iprot.readListBegin()
+ for _i280 in xrange(_size276):
+ _elem281 = ColumnStatisticsObj()
+ _elem281.read(iprot)
+ _val275.append(_elem281)
iprot.readListEnd()
- self.partStats[_key267] = _val268
+ self.partStats[_key274] = _val275
iprot.readMapEnd()
else:
iprot.skip(ftype)
@@ -4717,11 +4788,11 @@ class PartitionsStatsResult:
if self.partStats is not None:
oprot.writeFieldBegin('partStats', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.LIST, len(self.partStats))
- for kiter275,viter276 in self.partStats.items():
- oprot.writeString(kiter275)
- oprot.writeListBegin(TType.STRUCT, len(viter276))
- for iter277 in viter276:
- iter277.write(oprot)
+ for kiter282,viter283 in self.partStats.items():
+ oprot.writeString(kiter282)
+ oprot.writeListBegin(TType.STRUCT, len(viter283))
+ for iter284 in viter283:
+ iter284.write(oprot)
oprot.writeListEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
@@ -4787,10 +4858,10 @@ class TableStatsRequest:
elif fid == 3:
if ftype == TType.LIST:
self.colNames = []
- (_etype281, _size278) = iprot.readListBegin()
- for _i282 in xrange(_size278):
- _elem283 = iprot.readString();
- self.colNames.append(_elem283)
+ (_etype288, _size285) = iprot.readListBegin()
+ for _i289 in xrange(_size285):
+ _elem290 = iprot.readString();
+ self.colNames.append(_elem290)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -4815,8 +4886,8 @@ class TableStatsRequest:
if self.colNames is not None:
oprot.writeFieldBegin('colNames', TType.LIST, 3)
oprot.writeListBegin(TType.STRING, len(self.colNames))
- for iter284 in self.colNames:
- oprot.writeString(iter284)
+ for iter291 in self.colNames:
+ oprot.writeString(iter291)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -4888,20 +4959,20 @@ class PartitionsStatsRequest:
elif fid == 3:
if ftype == TType.LIST:
self.colNames = []
- (_etype288, _size285) = iprot.readListBegin()
- for _i289 in xrange(_size285):
- _elem290 = iprot.readString();
- self.colNames.append(_elem290)
+ (_etype295, _size292) = iprot.readListBegin()
+ for _i296 in xrange(_size292):
+ _elem297 = iprot.readString();
+ self.colNames.append(_elem297)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.LIST:
self.partNames = []
- (_etype294, _size291) = iprot.readListBegin()
- for _i295 in xrange(_size291):
- _elem296 = iprot.readString();
- self.partNames.append(_elem296)
+ (_etype301, _size298) = iprot.readListBegin()
+ for _i302 in xrange(_size298):
+ _elem303 = iprot.readString();
+ self.partNames.append(_elem303)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -4926,15 +4997,15 @@ class PartitionsStatsRequest:
if self.colNames is not None:
oprot.writeFieldBegin('colNames', TType.LIST, 3)
oprot.writeListBegin(TType.STRING, len(self.colNames))
- for iter297 in self.colNames:
- oprot.writeString(iter297)
+ for iter304 in self.colNames:
+ oprot.writeString(iter304)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.partNames is not None:
oprot.writeFieldBegin('partNames', TType.LIST, 4)
oprot.writeListBegin(TType.STRING, len(self.partNames))
- for iter298 in self.partNames:
- oprot.writeString(iter298)
+ for iter305 in self.partNames:
+ oprot.writeString(iter305)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -4989,11 +5060,11 @@ class AddPartitionsResult:
if fid == 1:
if ftype == TType.LIST:
self.partitions = []
- (_etype302, _size299) = iprot.readListBegin()
- for _i303 in xrange(_size299):
- _elem304 = Partition()
- _elem304.read(iprot)
- self.partitions.append(_elem304)
+ (_etype309, _size306) = iprot.readListBegin()
+ for _i310 in xrange(_size306):
+ _elem311 = Partition()
+ _elem311.read(iprot)
+ self.partitions.append(_elem311)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -5010,8 +5081,8 @@ class AddPartitionsResult:
if self.partitions is not None:
oprot.writeFieldBegin('partitions', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.partitions))
- for iter305 in self.partitions:
- iter305.write(oprot)
+ for iter312 in self.partitions:
+ iter312.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -5080,11 +5151,11 @@ class AddPartitionsRequest:
elif fid == 3:
if ftype == TType.LIST:
self.parts = []
- (_etype309, _size306) = iprot.readListBegin()
- for _i310 in xrange(_size306):
- _elem311 = Partition()
- _elem311.read(iprot)
- self.parts.append(_elem311)
+ (_etype316, _size313) = iprot.readListBegin()
+ for _i317 in xrange(_size313):
+ _elem318 = Partition()
+ _elem318.read(iprot)
+ self.parts.append(_elem318)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -5119,8 +5190,8 @@ class AddPartitionsRequest:
if self.parts is not None:
oprot.writeFieldBegin('parts', TType.LIST, 3)
oprot.writeListBegin(TType.STRUCT, len(self.parts))
- for iter312 in self.parts:
- iter312.write(oprot)
+ for iter319 in self.parts:
+ iter319.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.ifNotExists is not None:
@@ -5183,11 +5254,11 @@ class DropPartitionsResult:
if fid == 1:
if ftype == TType.LIST:
self.partitions = []
- (_etype316, _size313) = iprot.readListBegin()
- for _i317 in xrange(_size313):
- _elem318 = Partition()
- _elem318.read(iprot)
- self.partitions.append(_elem318)
+ (_etype323, _size320) = iprot.readListBegin()
+ for _i324 in xrange(_size320):
+ _elem325 = Partition()
+ _elem325.read(iprot)
+ self.partitions.append(_elem325)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -5204,8 +5275,8 @@ class DropPartitionsResult:
if self.partitions is not None:
oprot.writeFieldBegin('partitions', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.partitions))
- for iter319 in self.partitions:
- iter319.write(oprot)
+ for iter326 in self.partitions:
+ iter326.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -5329,21 +5400,21 @@ class RequestPartsSpec:
if fid == 1:
if ftype == TType.LIST:
self.names = []
- (_etype323, _size320) = iprot.readListBegin()
- for _i324 in xrange(_size320):
- _elem325 = iprot.readString();
- self.names.append(_elem325)
+ (_etype330, _size327) = iprot.readListBegin()
+ for _i331 in xrange(_size327):
+ _elem332 = iprot.readString();
+ self.names.append(_elem332)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.exprs = []
- (_etype329, _size326) = iprot.readListBegin()
- for _i330 in xrange(_size326):
- _elem331 = DropPartitionsExpr()
- _elem331.read(iprot)
- self.exprs.append(_elem331)
+ (_etype336, _size333) = iprot.readListBegin()
+ for _i337 in xrange(_size333):
+ _elem338 = DropPartitionsExpr()
+ _elem338.read(iprot)
+ self.exprs.append(_elem338)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -5360,15 +5431,15 @@ class RequestPartsSpec:
if self.names is not None:
oprot.writeFieldBegin('names', TType.LIST, 1)
oprot.writeListBegin(TType.STRING, len(self.names))
- for iter332 in self.names:
- oprot.writeString(iter332)
+ for iter339 in self.names:
+ oprot.writeString(iter339)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.exprs is not None:
oprot.writeFieldBegin('exprs', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.exprs))
- for iter333 in self.exprs:
- iter333.write(oprot)
+ for iter340 in self.exprs:
+ iter340.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -5695,11 +5766,11 @@ class Function:
elif fid == 8:
if ftype == TType.LIST:
self.resourceUris = []
- (_etype337, _size334) = iprot.readListBegin()
- for _i338 in xrange(_size334):
- _elem339 = ResourceUri()
- _elem339.read(iprot)
- self.resourceUris.append(_elem339)
+ (_etype344, _size341) = iprot.readListBegin()
+ for _i345 in xrange(_size341):
+ _elem346 = ResourceUri()
+ _elem346.read(iprot)
+ self.resourceUris.append(_elem346)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -5744,8 +5815,8 @@ class Function:
if self.resourceUris is not None:
oprot.writeFieldBegin('resourceUris', TType.LIST, 8)
oprot.writeListBegin(TType.STRUCT, len(self.resourceUris))
- for iter340 in self.resourceUris:
- iter340.write(oprot)
+ for iter347 in self.resourceUris:
+ iter347.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -5904,11 +5975,11 @@ class GetOpenTxnsInfoResponse:
elif fid == 2:
if ftype == TType.LIST:
self.open_txns = []
- (_etype344, _size341) = iprot.readListBegin()
- for _i345 in xrange(_size341):
- _elem346 = TxnInfo()
- _elem346.read(iprot)
- self.open_txns.append(_elem346)
+ (_etype351, _size348) = iprot.readListBegin()
+ for _i352 in xrange(_size348):
+ _elem353 = TxnInfo()
+ _elem353.read(iprot)
+ self.open_txns.append(_elem353)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -5929,8 +6000,8 @@ class GetOpenTxnsInfoResponse:
if self.open_txns is not None:
oprot.writeFieldBegin('open_txns', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.open_txns))
- for iter347 in self.open_txns:
- iter347.write(oprot)
+ for iter354 in self.open_txns:
+ iter354.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -5989,10 +6060,10 @@ class GetOpenTxnsResponse:
elif fid == 2:
if ftype == TType.SET:
self.open_txns = set()
- (_etype351, _size348) = iprot.readSetBegin()
- for _i352 in xrange(_size348):
- _elem353 = iprot.readI64();
- self.open_txns.add(_elem353)
+ (_etype358, _size355) = iprot.readSetBegin()
+ for _i359 in xrange(_size355):
+ _elem360 = iprot.readI64();
+ self.open_txns.add(_elem360)
iprot.readSetEnd()
else:
iprot.skip(ftype)
@@ -6013,8 +6084,8 @@ class GetOpenTxnsResponse:
if self.open_txns is not None:
oprot.writeFieldBegin('open_txns', TType.SET, 2)
oprot.writeSetBegin(TType.I64, len(self.open_txns))
- for iter354 in self.open_txns:
- oprot.writeI64(iter354)
+ for iter361 in self.open_txns:
+ oprot.writeI64(iter361)
oprot.writeSetEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -6155,10 +6226,10 @@ class OpenTxnsResponse:
if fid == 1:
if ftype == TType.LIST:
self.txn_ids = []
- (_etype358, _size355) = iprot.readListBegin()
- for _i359 in xrange(_size355):
- _elem360 = iprot.readI64();
- self.txn_ids.append(_elem360)
+ (_etype365, _size362) = iprot.readListBegin()
+ for _i366 in xrange(_size362):
+ _elem367 = iprot.readI64();
+ self.txn_ids.append(_elem367)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -6175,8 +6246,8 @@ class OpenTxnsResponse:
if self.txn_ids is not None:
oprot.writeFieldBegin('txn_ids', TType.LIST, 1)
oprot.writeListBegin(TType.I64, len(self.txn_ids))
- for iter361 in self.txn_ids:
- oprot.writeI64(iter361)
+ for iter368 in self.txn_ids:
+ oprot.writeI64(iter368)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -6472,11 +6543,11 @@ class LockRequest:
if fid == 1:
if ftype == TType.LIST:
self.component = []
- (_etype365, _size362) = iprot.readListBegin()
- for _i366 in xrange(_size362):
- _elem367 = LockComponent()
- _elem367.read(iprot)
- self.component.append(_elem367)
+ (_etype372, _size369) = iprot.readListBegin()
+ for _i373 in xrange(_size369):
+ _elem374 = LockComponent()
+ _elem374.read(iprot)
+ self.component.append(_elem374)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -6508,8 +6579,8 @@ class LockRequest:
if self.component is not None:
oprot.writeFieldBegin('component', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.component))
- for iter368 in self.component:
- iter368.write(oprot)
+ for iter375 in self.component:
+ iter375.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.txnid is not None:
@@ -7010,11 +7081,11 @@ class ShowLocksResponse:
if fid == 1:
if ftype == TType.LIST:
self.locks = []
- (_etype372, _size369) = iprot.readListBegin()
- for _i373 in xrange(_size369):
- _elem374 = ShowLocksResponseElement()
- _elem374.read(iprot)
- self.locks.append(_elem374)
+ (_etype379, _size376) = iprot.readListBegin()
+ for _i380 in xrange(_size376):
+ _elem381 = ShowLocksResponseElement()
+ _elem381.read(iprot)
+ self.locks.append(_elem381)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -7031,8 +7102,8 @@ class ShowLocksResponse:
if self.locks is not None:
oprot.writeFieldBegin('locks', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.locks))
- for iter375 in self.locks:
- iter375.write(oprot)
+ for iter382 in self.locks:
+ iter382.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -7230,20 +7301,20 @@ class HeartbeatTxnRangeResponse:
if fid == 1:
if ftype == TType.SET:
self.aborted = set()
- (_etype379, _size376) = iprot.readSetBegin()
- for _i380 in xrange(_size376):
- _elem381 = iprot.readI64();
- self.aborted.add(_elem381)
+ (_etype386, _size383) = iprot.readSetBegin()
+ for _i387 in xrange(_size383):
+ _elem388 = iprot.readI64();
+ self.aborted.add(_elem388)
iprot.readSetEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.SET:
self.nosuch = set()
- (_etype385, _size382) = iprot.readSetBegin()
- for _i386 in xrange(_size382):
- _elem387 = iprot.readI64();
- self.nosuch.add(_elem387)
+ (_etype392, _size389) = iprot.readSetBegin()
+ for _i393 in xrange(_size389):
+ _elem394 = iprot.readI64();
+ self.nosuch.add(_elem394)
iprot.readSetEnd()
else:
iprot.skip(ftype)
@@ -7260,15 +7331,15 @@ class HeartbeatTxnRangeResponse:
if self.aborted is not None:
oprot.writeFieldBegin('aborted', TType.SET, 1)
oprot.writeSetBegin(TType.I64, len(self.aborted))
- for iter388 in self.aborted:
- oprot.writeI64(iter388)
+ for iter395 in self.aborted:
+ oprot.writeI64(iter395)
oprot.writeSetEnd()
oprot.writeFieldEnd()
if self.nosuch is not None:
oprot.writeFieldBegin('nosuch', TType.SET, 2)
oprot.writeSetBegin(TType.I64, len(self.nosuch))
- for iter389 in self.nosuch:
- oprot.writeI64(iter389)
+ for iter396 in self.nosuch:
+ oprot.writeI64(iter396)
oprot.writeSetEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -7635,11 +7706,11 @@ class ShowCompactResponse:
if fid == 1:
if ftype == TType.LIST:
self.compacts = []
- (_etype393, _size390) = iprot.readListBegin()
- for _i394 in xrange(_size390):
- _elem395 = ShowCompactResponseElement()
- _elem395.read(iprot)
- self.compacts.append(_elem395)
+ (_etype400, _size397) = iprot.readListBegin()
+ for _i401 in xrange(_size397):
+ _elem402 = ShowCompactResponseElement()
+ _elem402.read(iprot)
+ self.compacts.append(_elem402)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -7656,8 +7727,8 @@ class ShowCompactResponse:
if self.compacts is not None:
oprot.writeFieldBegin('compacts', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.compacts))
- for iter396 in self.compacts:
- iter396.write(oprot)
+ for iter403 in self.compacts:
+ iter403.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
Modified: hive/branches/cbo/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb (original)
+++ hive/branches/cbo/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb Fri Aug 22 21:36:47 2014
@@ -1028,6 +1028,23 @@ class AggrStats
::Thrift::Struct.generate_accessors self
end
+class SetPartitionsStatsRequest
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ COLSTATS = 1
+
+ FIELDS = {
+ COLSTATS => {:type => ::Thrift::Types::LIST, :name => 'colStats', :element => {:type => ::Thrift::Types::STRUCT, :class => ::ColumnStatistics}}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field colStats is unset!') unless @colStats
+ end
+
+ ::Thrift::Struct.generate_accessors self
+end
+
class Schema
include ::Thrift::Struct, ::Thrift::Struct_Union
FIELDSCHEMAS = 1
Modified: hive/branches/cbo/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb (original)
+++ hive/branches/cbo/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb Fri Aug 22 21:36:47 2014
@@ -12,6 +12,37 @@ module ThriftHiveMetastore
class Client < ::FacebookService::Client
include ::Thrift::Client
+ def getMetaConf(key)
+ send_getMetaConf(key)
+ return recv_getMetaConf()
+ end
+
+ def send_getMetaConf(key)
+ send_message('getMetaConf', GetMetaConf_args, :key => key)
+ end
+
+ def recv_getMetaConf()
+ result = receive_message(GetMetaConf_result)
+ return result.success unless result.success.nil?
+ raise result.o1 unless result.o1.nil?
+ raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'getMetaConf failed: unknown result')
+ end
+
+ def setMetaConf(key, value)
+ send_setMetaConf(key, value)
+ recv_setMetaConf()
+ end
+
+ def send_setMetaConf(key, value)
+ send_message('setMetaConf', SetMetaConf_args, :key => key, :value => value)
+ end
+
+ def recv_setMetaConf()
+ result = receive_message(SetMetaConf_result)
+ raise result.o1 unless result.o1.nil?
+ return
+ end
+
def create_database(database)
send_create_database(database)
recv_create_database()
@@ -1248,6 +1279,25 @@ module ThriftHiveMetastore
raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_aggr_stats_for failed: unknown result')
end
+ def set_aggr_stats_for(request)
+ send_set_aggr_stats_for(request)
+ return recv_set_aggr_stats_for()
+ end
+
+ def send_set_aggr_stats_for(request)
+ send_message('set_aggr_stats_for', Set_aggr_stats_for_args, :request => request)
+ end
+
+ def recv_set_aggr_stats_for()
+ result = receive_message(Set_aggr_stats_for_result)
+ return result.success unless result.success.nil?
+ raise result.o1 unless result.o1.nil?
+ raise result.o2 unless result.o2.nil?
+ raise result.o3 unless result.o3.nil?
+ raise result.o4 unless result.o4.nil?
+ raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'set_aggr_stats_for failed: unknown result')
+ end
+
def delete_partition_column_statistics(db_name, tbl_name, part_name, col_name)
send_delete_partition_column_statistics(db_name, tbl_name, part_name, col_name)
return recv_delete_partition_column_statistics()
@@ -1864,6 +1914,28 @@ module ThriftHiveMetastore
class Processor < ::FacebookService::Processor
include ::Thrift::Processor
+ def process_getMetaConf(seqid, iprot, oprot)
+ args = read_args(iprot, GetMetaConf_args)
+ result = GetMetaConf_result.new()
+ begin
+ result.success = @handler.getMetaConf(args.key)
+ rescue ::MetaException => o1
+ result.o1 = o1
+ end
+ write_result(result, oprot, 'getMetaConf', seqid)
+ end
+
+ def process_setMetaConf(seqid, iprot, oprot)
+ args = read_args(iprot, SetMetaConf_args)
+ result = SetMetaConf_result.new()
+ begin
+ @handler.setMetaConf(args.key, args.value)
+ rescue ::MetaException => o1
+ result.o1 = o1
+ end
+ write_result(result, oprot, 'setMetaConf', seqid)
+ end
+
def process_create_database(seqid, iprot, oprot)
args = read_args(iprot, Create_database_args)
result = Create_database_result.new()
@@ -2854,6 +2926,23 @@ module ThriftHiveMetastore
write_result(result, oprot, 'get_aggr_stats_for', seqid)
end
+ def process_set_aggr_stats_for(seqid, iprot, oprot)
+ args = read_args(iprot, Set_aggr_stats_for_args)
+ result = Set_aggr_stats_for_result.new()
+ begin
+ result.success = @handler.set_aggr_stats_for(args.request)
+ rescue ::NoSuchObjectException => o1
+ result.o1 = o1
+ rescue ::InvalidObjectException => o2
+ result.o2 = o2
+ rescue ::MetaException => o3
+ result.o3 = o3
+ rescue ::InvalidInputException => o4
+ result.o4 = o4
+ end
+ write_result(result, oprot, 'set_aggr_stats_for', seqid)
+ end
+
def process_delete_partition_column_statistics(seqid, iprot, oprot)
args = read_args(iprot, Delete_partition_column_statistics_args)
result = Delete_partition_column_statistics_result.new()
@@ -3286,6 +3375,74 @@ module ThriftHiveMetastore
# HELPER FUNCTIONS AND STRUCTURES
+ class GetMetaConf_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ KEY = 1
+
+ FIELDS = {
+ KEY => {:type => ::Thrift::Types::STRING, :name => 'key'}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class GetMetaConf_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ SUCCESS = 0
+ O1 = 1
+
+ FIELDS = {
+ SUCCESS => {:type => ::Thrift::Types::STRING, :name => 'success'},
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::MetaException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class SetMetaConf_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ KEY = 1
+ VALUE = 2
+
+ FIELDS = {
+ KEY => {:type => ::Thrift::Types::STRING, :name => 'key'},
+ VALUE => {:type => ::Thrift::Types::STRING, :name => 'value'}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class SetMetaConf_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ O1 = 1
+
+ FIELDS = {
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::MetaException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
class Create_database_args
include ::Thrift::Struct, ::Thrift::Struct_Union
DATABASE = 1
@@ -6143,6 +6300,46 @@ module ThriftHiveMetastore
::Thrift::Struct.generate_accessors self
end
+ class Set_aggr_stats_for_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ REQUEST = 1
+
+ FIELDS = {
+ REQUEST => {:type => ::Thrift::Types::STRUCT, :name => 'request', :class => ::SetPartitionsStatsRequest}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Set_aggr_stats_for_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ SUCCESS = 0
+ O1 = 1
+ O2 = 2
+ O3 = 3
+ O4 = 4
+
+ FIELDS = {
+ SUCCESS => {:type => ::Thrift::Types::BOOL, :name => 'success'},
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
+ O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::InvalidObjectException},
+ O3 => {:type => ::Thrift::Types::STRUCT, :name => 'o3', :class => ::MetaException},
+ O4 => {:type => ::Thrift::Types::STRUCT, :name => 'o4', :class => ::InvalidInputException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
class Delete_partition_column_statistics_args
include ::Thrift::Struct, ::Thrift::Struct_Union
DB_NAME = 1
Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java Fri Aug 22 21:36:47 2014
@@ -222,10 +222,10 @@ public class HiveAlterHandler implements
if (success && moveData) {
// change the file name in hdfs
// check that src exists otherwise there is no need to copy the data
+ // rename the src to destination
try {
- if (srcFs.exists(srcPath)) {
- // rename the src to destination
- srcFs.rename(srcPath, destPath);
+ if (srcFs.exists(srcPath) && !srcFs.rename(srcPath, destPath)) {
+ throw new IOException("Renaming " + srcPath + " to " + destPath + " failed");
}
} catch (IOException e) {
boolean revertMetaDataTransaction = false;
Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Fri Aug 22 21:36:47 2014
@@ -122,6 +122,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.RequestPartsSpec;
import org.apache.hadoop.hive.metastore.api.Role;
import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
+import org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest;
import org.apache.hadoop.hive.metastore.api.ShowCompactRequest;
import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
import org.apache.hadoop.hive.metastore.api.ShowLocksRequest;
@@ -142,6 +143,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
+import org.apache.hadoop.hive.metastore.events.ConfigChangeEvent;
import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
@@ -348,15 +350,19 @@ public class HiveMetaStore extends Thrif
}
public HMSHandler(String name) throws MetaException {
- super(name);
- hiveConf = new HiveConf(this.getClass());
- init();
+ this(name, new HiveConf(HMSHandler.class), true);
}
public HMSHandler(String name, HiveConf conf) throws MetaException {
+ this(name, conf, true);
+ }
+
+ public HMSHandler(String name, HiveConf conf, boolean init) throws MetaException {
super(name);
hiveConf = conf;
- init();
+ if (init) {
+ init();
+ }
}
public HiveConf getHiveConf() {
@@ -378,7 +384,7 @@ public class HiveMetaStore extends Thrif
}
}
- private boolean init() throws MetaException {
+ public void init() throws MetaException {
rawStoreClassName = hiveConf.getVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL);
initListeners = MetaStoreUtils.getMetaStoreListeners(
MetaStoreInitListener.class, hiveConf,
@@ -436,7 +442,6 @@ public class HiveMetaStore extends Thrif
Timer cleaner = new Timer("Metastore Events Cleaner Thread", true);
cleaner.schedule(new EventCleanerTask(this), cleanFreq, cleanFreq);
}
- return true;
}
private String addPrefix(String s) {
@@ -448,10 +453,11 @@ public class HiveMetaStore extends Thrif
threadLocalConf.set(conf);
RawStore ms = threadLocalMS.get();
if (ms != null) {
- ms.setConf(conf);
+ ms.setConf(conf); // reload if DS related configuration is changed
}
}
+ @Override
public Configuration getConf() {
Configuration conf = threadLocalConf.get();
if (conf == null) {
@@ -465,6 +471,35 @@ public class HiveMetaStore extends Thrif
return wh;
}
+ @Override
+ public void setMetaConf(String key, String value) throws MetaException {
+ ConfVars confVar = HiveConf.getMetaConf(key);
+ if (confVar == null) {
+ throw new MetaException("Invalid configuration key " + key);
+ }
+ String validate = confVar.validate(value);
+ if (validate != null) {
+ throw new MetaException("Invalid configuration value " + value + " for key " + key +
+ " by " + validate);
+ }
+ Configuration configuration = getConf();
+ String oldValue = configuration.get(key);
+ configuration.set(key, value);
+
+ for (MetaStoreEventListener listener : listeners) {
+ listener.onConfigChange(new ConfigChangeEvent(this, key, oldValue, value));
+ }
+ }
+
+ @Override
+ public String getMetaConf(String key) throws MetaException {
+ ConfVars confVar = HiveConf.getMetaConf(key);
+ if (confVar == null) {
+ throw new MetaException("Invalid configuration key " + key);
+ }
+ return getConf().get(key);
+ }
+
/**
* Get a cached RawStore.
*
@@ -676,8 +711,11 @@ public class HiveMetaStore extends Thrif
logInfo("Shutting down the object store...");
RawStore ms = threadLocalMS.get();
if (ms != null) {
- ms.shutdown();
- threadLocalMS.remove();
+ try {
+ ms.shutdown();
+ } finally {
+ threadLocalMS.remove();
+ }
}
logInfo("Metastore shutdown complete.");
}
@@ -4999,12 +5037,27 @@ public class HiveMetaStore extends Thrif
}
+ @Override
+ public boolean set_aggr_stats_for(SetPartitionsStatsRequest request)
+ throws NoSuchObjectException, InvalidObjectException, MetaException,
+ InvalidInputException, TException {
+ boolean ret = true;
+ for (ColumnStatistics colStats : request.getColStats()) {
+ ret = ret && update_partition_column_statistics(colStats);
+ }
+ return ret;
+ }
+
}
public static IHMSHandler newHMSHandler(String name, HiveConf hiveConf) throws MetaException {
- return RetryingHMSHandler.getProxy(hiveConf, name);
+ return newHMSHandler(name, hiveConf, false);
}
+ public static IHMSHandler newHMSHandler(String name, HiveConf hiveConf, boolean local)
+ throws MetaException {
+ return RetryingHMSHandler.getProxy(hiveConf, name, local);
+ }
/**
* Discard a current delegation token.
Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Fri Aug 22 21:36:47 2014
@@ -105,6 +105,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.RequestPartsSpec;
import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest;
import org.apache.hadoop.hive.metastore.api.ShowCompactRequest;
import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
import org.apache.hadoop.hive.metastore.api.ShowLocksRequest;
@@ -173,7 +174,7 @@ public class HiveMetaStoreClient impleme
if (localMetaStore) {
// instantiate the metastore server handler directly instead of connecting
// through the network
- client = HiveMetaStore.newHMSHandler("hive client", conf);
+ client = HiveMetaStore.newHMSHandler("hive client", conf, true);
isConnected = true;
snapshotActiveConf();
return;
@@ -443,6 +444,16 @@ public class HiveMetaStoreClient impleme
}
}
+ @Override
+ public void setMetaConf(String key, String value) throws TException {
+ client.setMetaConf(key, value);
+ }
+
+ @Override
+ public String getMetaConf(String key) throws TException {
+ return client.getMetaConf(key);
+ }
+
/**
* @param new_part
* @return the added partition
@@ -1254,6 +1265,13 @@ public class HiveMetaStoreClient impleme
InvalidInputException{
return client.update_partition_column_statistics(statsObj);
}
+
+ /** {@inheritDoc} */
+ public boolean setPartitionColumnStatistics(SetPartitionsStatsRequest request)
+ throws NoSuchObjectException, InvalidObjectException, MetaException, TException,
+ InvalidInputException{
+ return client.set_aggr_stats_for(request);
+ }
/** {@inheritDoc} */
@Override
Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/IHMSHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/IHMSHandler.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/IHMSHandler.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/IHMSHandler.java Fri Aug 22 21:36:47 2014
@@ -18,10 +18,11 @@
package org.apache.hadoop.hive.metastore;
-import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
-public interface IHMSHandler extends ThriftHiveMetastore.Iface {
+public interface IHMSHandler extends ThriftHiveMetastore.Iface, Configurable {
- public abstract void setConf(Configuration conf);
+ void init() throws MetaException;
}
Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java Fri Aug 22 21:36:47 2014
@@ -65,6 +65,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.UnknownDBException;
import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
@@ -77,17 +78,30 @@ import org.apache.hadoop.hive.metastore.
public interface IMetaStoreClient {
/**
- * Returns whether current client is convertible with conf or not
+ * Returns whether current client is compatible with conf argument or not
* @return
*/
- public boolean isCompatibleWith(HiveConf conf);
+ boolean isCompatibleWith(HiveConf conf);
/**
* Tries to reconnect this MetaStoreClient to the MetaStore.
*/
- public void reconnect() throws MetaException;
+ void reconnect() throws MetaException;
- public void close();
+ /**
+ * Close the connection to the metastore.
+ */
+ void close();
+
+ /**
+ * Set a metastore configuration variable that is exposed to end users.
+ */
+ void setMetaConf(String key, String value) throws MetaException, TException;
+
+ /**
+ * get current meta variable
+ */
+ String getMetaConf(String key) throws MetaException, TException;
/**
* Get the names of all databases in the MetaStore that match the given pattern.
@@ -96,8 +110,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public List<String> getDatabases(String databasePattern)
- throws MetaException, TException;
+ List<String> getDatabases(String databasePattern) throws MetaException, TException;
/**
* Get the names of all databases in the MetaStore.
@@ -105,8 +118,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public List<String> getAllDatabases()
- throws MetaException, TException;
+ List<String> getAllDatabases() throws MetaException, TException;
/**
* Get the names of all tables in the specified database that satisfy the supplied
@@ -118,7 +130,7 @@ public interface IMetaStoreClient {
* @throws TException
* @throws UnknownDBException
*/
- public List<String> getTables(String dbName, String tablePattern)
+ List<String> getTables(String dbName, String tablePattern)
throws MetaException, TException, UnknownDBException;
/**
@@ -129,8 +141,7 @@ public interface IMetaStoreClient {
* @throws TException
* @throws UnknownDBException
*/
- public List<String> getAllTables(String dbName)
- throws MetaException, TException, UnknownDBException;
+ List<String> getAllTables(String dbName) throws MetaException, TException, UnknownDBException;
/**
* Get a list of table names that match a filter.
@@ -168,7 +179,7 @@ public interface IMetaStoreClient {
* The maximum number of tables returned
* @return A list of table names that match the desired filter
*/
- public List<String> listTableNamesByFilter(String dbName, String filter, short maxTables)
+ List<String> listTableNamesByFilter(String dbName, String filter, short maxTables)
throws MetaException, TException, InvalidOperationException, UnknownDBException;
@@ -186,7 +197,7 @@ public interface IMetaStoreClient {
* @throws TException
* A thrift communication error occurred
*/
- public void dropTable(String dbname, String tableName, boolean deleteData,
+ void dropTable(String dbname, String tableName, boolean deleteData,
boolean ignoreUknownTab) throws MetaException, TException,
NoSuchObjectException;
@@ -210,14 +221,13 @@ public interface IMetaStoreClient {
* This method will be removed in release 0.7.0.
*/
@Deprecated
- public void dropTable(String tableName, boolean deleteData)
- throws MetaException, UnknownTableException, TException,
- NoSuchObjectException;
+ void dropTable(String tableName, boolean deleteData)
+ throws MetaException, UnknownTableException, TException, NoSuchObjectException;
- public void dropTable(String dbname, String tableName)
+ void dropTable(String dbname, String tableName)
throws MetaException, TException, NoSuchObjectException;
- public boolean tableExists(String databaseName, String tableName) throws MetaException,
+ boolean tableExists(String databaseName, String tableName) throws MetaException,
TException, UnknownDBException;
/**
@@ -231,7 +241,7 @@ public interface IMetaStoreClient {
* This method will be removed in release 0.7.0.
*/
@Deprecated
- public boolean tableExists(String tableName) throws MetaException,
+ boolean tableExists(String tableName) throws MetaException,
TException, UnknownDBException;
/**
@@ -250,8 +260,7 @@ public interface IMetaStoreClient {
* This method will be removed in release 0.7.0.
*/
@Deprecated
- public Table getTable(String tableName) throws MetaException, TException,
- NoSuchObjectException;
+ Table getTable(String tableName) throws MetaException, TException, NoSuchObjectException;
/**
* Get a Database Object
@@ -261,8 +270,8 @@ public interface IMetaStoreClient {
* @throws MetaException Could not fetch the database
* @throws TException A thrift communication error occurred
*/
- public Database getDatabase(String databaseName)
- throws NoSuchObjectException, MetaException, TException;
+ Database getDatabase(String databaseName)
+ throws NoSuchObjectException, MetaException, TException;
/**
@@ -280,7 +289,7 @@ public interface IMetaStoreClient {
* @throws NoSuchObjectException
* In case the table wasn't found.
*/
- public Table getTable(String dbName, String tableName) throws MetaException,
+ Table getTable(String dbName, String tableName) throws MetaException,
TException, NoSuchObjectException;
/**
@@ -302,7 +311,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* Any other errors
*/
- public List<Table> getTableObjectsByName(String dbName, List<String> tableNames)
+ List<Table> getTableObjectsByName(String dbName, List<String> tableNames)
throws MetaException, InvalidOperationException, UnknownDBException, TException;
/**
@@ -317,11 +326,11 @@ public interface IMetaStoreClient {
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#append_partition(java.lang.String,
* java.lang.String, java.util.List)
*/
- public Partition appendPartition(String tableName, String dbName,
+ Partition appendPartition(String tableName, String dbName,
List<String> partVals) throws InvalidObjectException,
AlreadyExistsException, MetaException, TException;
- public Partition appendPartition(String tableName, String dbName, String name)
+ Partition appendPartition(String tableName, String dbName, String name)
throws InvalidObjectException, AlreadyExistsException, MetaException, TException;
/**
@@ -339,9 +348,8 @@ public interface IMetaStoreClient {
* @throws TException
* Thrift exception
*/
- public Partition add_partition(Partition partition)
- throws InvalidObjectException, AlreadyExistsException, MetaException,
- TException;
+ Partition add_partition(Partition partition)
+ throws InvalidObjectException, AlreadyExistsException, MetaException, TException;
/**
* Add partitions to the table.
@@ -357,7 +365,7 @@ public interface IMetaStoreClient {
* @throws TException
* Thrift exception
*/
- public int add_partitions(List<Partition> partitions)
+ int add_partitions(List<Partition> partitions)
throws InvalidObjectException, AlreadyExistsException, MetaException, TException;
/**
@@ -368,7 +376,7 @@ public interface IMetaStoreClient {
* @param needResults Whether the results are needed
* @return the partitions that were added, or null if !needResults
*/
- public List<Partition> add_partitions(
+ List<Partition> add_partitions(
List<Partition> partitions, boolean ifNotExists, boolean needResults)
throws InvalidObjectException, AlreadyExistsException, MetaException, TException;
@@ -382,7 +390,7 @@ public interface IMetaStoreClient {
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_partition(java.lang.String,
* java.lang.String, java.util.List)
*/
- public Partition getPartition(String tblName, String dbName,
+ Partition getPartition(String tblName, String dbName,
List<String> partVals) throws NoSuchObjectException, MetaException, TException;
/**
@@ -393,7 +401,7 @@ public interface IMetaStoreClient {
* @param destTableName
* @return partition object
*/
- public Partition exchange_partition(Map<String, String> partitionSpecs,
+ Partition exchange_partition(Map<String, String> partitionSpecs,
String sourceDb, String sourceTable, String destdb,
String destTableName) throws MetaException, NoSuchObjectException,
InvalidObjectException, TException;
@@ -408,7 +416,7 @@ public interface IMetaStoreClient {
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_partition(java.lang.String,
* java.lang.String, java.util.List)
*/
- public Partition getPartition(String dbName, String tblName,
+ Partition getPartition(String dbName, String tblName,
String name) throws MetaException, UnknownTableException, NoSuchObjectException, TException;
@@ -424,7 +432,7 @@ public interface IMetaStoreClient {
* @throws NoSuchObjectException
* @throws TException
*/
- public Partition getPartitionWithAuthInfo(String dbName, String tableName,
+ Partition getPartitionWithAuthInfo(String dbName, String tableName,
List<String> pvals, String userName, List<String> groupNames)
throws MetaException, UnknownTableException, NoSuchObjectException, TException;
@@ -437,16 +445,16 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public List<Partition> listPartitions(String db_name, String tbl_name,
+ List<Partition> listPartitions(String db_name, String tbl_name,
short max_parts) throws NoSuchObjectException, MetaException, TException;
- public List<Partition> listPartitions(String db_name, String tbl_name,
+ List<Partition> listPartitions(String db_name, String tbl_name,
List<String> part_vals, short max_parts) throws NoSuchObjectException, MetaException, TException;
- public List<String> listPartitionNames(String db_name, String tbl_name,
+ List<String> listPartitionNames(String db_name, String tbl_name,
short max_parts) throws MetaException, TException;
- public List<String> listPartitionNames(String db_name, String tbl_name,
+ List<String> listPartitionNames(String db_name, String tbl_name,
List<String> part_vals, short max_parts)
throws MetaException, TException, NoSuchObjectException;
@@ -464,7 +472,7 @@ public interface IMetaStoreClient {
* @throws NoSuchObjectException
* @throws TException
*/
- public List<Partition> listPartitionsByFilter(String db_name, String tbl_name,
+ List<Partition> listPartitionsByFilter(String db_name, String tbl_name,
String filter, short max_parts) throws MetaException,
NoSuchObjectException, TException;
@@ -481,7 +489,7 @@ public interface IMetaStoreClient {
* @param result the resulting list of partitions
* @return whether the resulting list contains partitions which may or may not match the expr
*/
- public boolean listPartitionsByExpr(String db_name, String tbl_name,
+ boolean listPartitionsByExpr(String db_name, String tbl_name,
byte[] expr, String default_partition_name, short max_parts, List<Partition> result)
throws TException;
@@ -494,7 +502,7 @@ public interface IMetaStoreClient {
* @return the list of partitions
* @throws NoSuchObjectException
*/
- public List<Partition> listPartitionsWithAuthInfo(String dbName,
+ List<Partition> listPartitionsWithAuthInfo(String dbName,
String tableName, short s, String userName, List<String> groupNames)
throws MetaException, TException, NoSuchObjectException;
@@ -508,7 +516,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public List<Partition> getPartitionsByNames(String db_name, String tbl_name,
+ List<Partition> getPartitionsByNames(String db_name, String tbl_name,
List<String> part_names) throws NoSuchObjectException, MetaException, TException;
/**
@@ -521,7 +529,7 @@ public interface IMetaStoreClient {
* @return the list of paritions
* @throws NoSuchObjectException
*/
- public List<Partition> listPartitionsWithAuthInfo(String dbName,
+ List<Partition> listPartitionsWithAuthInfo(String dbName,
String tableName, List<String> partialPvals, short s, String userName,
List<String> groupNames) throws MetaException, TException, NoSuchObjectException;
@@ -538,7 +546,7 @@ public interface IMetaStoreClient {
* @throws UnknownPartitionException
* @throws InvalidPartitionException
*/
- public void markPartitionForEvent(String db_name, String tbl_name, Map<String,String> partKVs,
+ void markPartitionForEvent(String db_name, String tbl_name, Map<String,String> partKVs,
PartitionEventType eventType) throws MetaException, NoSuchObjectException, TException,
UnknownTableException, UnknownDBException, UnknownPartitionException, InvalidPartitionException;
@@ -555,7 +563,7 @@ public interface IMetaStoreClient {
* @throws UnknownPartitionException
* @throws InvalidPartitionException
*/
- public boolean isPartitionMarkedForEvent(String db_name, String tbl_name, Map<String,String> partKVs,
+ boolean isPartitionMarkedForEvent(String db_name, String tbl_name, Map<String,String> partKVs,
PartitionEventType eventType) throws MetaException, NoSuchObjectException, TException,
UnknownTableException, UnknownDBException, UnknownPartitionException, InvalidPartitionException;
@@ -564,9 +572,7 @@ public interface IMetaStoreClient {
* @throws TException
* @throws MetaException
*/
- public void validatePartitionNameCharacters(List<String> partVals)
- throws TException, MetaException;
-
+ void validatePartitionNameCharacters(List<String> partVals) throws TException, MetaException;
/**
* @param tbl
@@ -578,25 +584,25 @@ public interface IMetaStoreClient {
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_table(org.apache.hadoop.hive.metastore.api.Table)
*/
- public void createTable(Table tbl) throws AlreadyExistsException,
+ void createTable(Table tbl) throws AlreadyExistsException,
InvalidObjectException, MetaException, NoSuchObjectException, TException;
- public void alter_table(String defaultDatabaseName, String tblName,
+ void alter_table(String defaultDatabaseName, String tblName,
Table table) throws InvalidOperationException, MetaException, TException;
- public void createDatabase(Database db)
+ void createDatabase(Database db)
throws InvalidObjectException, AlreadyExistsException, MetaException, TException;
- public void dropDatabase(String name)
+ void dropDatabase(String name)
throws NoSuchObjectException, InvalidOperationException, MetaException, TException;
- public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb)
+ void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb)
throws NoSuchObjectException, InvalidOperationException, MetaException, TException;
- public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb, boolean cascade)
+ void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb, boolean cascade)
throws NoSuchObjectException, InvalidOperationException, MetaException, TException;
- public void alterDatabase(String name, Database db)
+ void alterDatabase(String name, Database db)
throws NoSuchObjectException, MetaException, TException;
/**
@@ -612,7 +618,7 @@ public interface IMetaStoreClient {
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_partition(java.lang.String,
* java.lang.String, java.util.List, boolean)
*/
- public boolean dropPartition(String db_name, String tbl_name,
+ boolean dropPartition(String db_name, String tbl_name,
List<String> part_vals, boolean deleteData) throws NoSuchObjectException,
MetaException, TException;
@@ -620,7 +626,7 @@ public interface IMetaStoreClient {
List<ObjectPair<Integer, byte[]>> partExprs, boolean deleteData, boolean ignoreProtection,
boolean ifExists) throws NoSuchObjectException, MetaException, TException;
- public boolean dropPartition(String db_name, String tbl_name,
+ boolean dropPartition(String db_name, String tbl_name,
String name, boolean deleteData) throws NoSuchObjectException,
MetaException, TException;
/**
@@ -639,7 +645,7 @@ public interface IMetaStoreClient {
* @throws TException
* if error in communicating with metastore server
*/
- public void alter_partition(String dbName, String tblName, Partition newPart)
+ void alter_partition(String dbName, String tblName, Partition newPart)
throws InvalidOperationException, MetaException, TException;
/**
@@ -658,7 +664,7 @@ public interface IMetaStoreClient {
* @throws TException
* if error in communicating with metastore server
*/
- public void alter_partitions(String dbName, String tblName, List<Partition> newParts)
+ void alter_partitions(String dbName, String tblName, List<Partition> newParts)
throws InvalidOperationException, MetaException, TException;
/**
@@ -679,7 +685,7 @@ public interface IMetaStoreClient {
* @throws TException
* if error in communicating with metastore server
*/
- public void renamePartition(final String dbname, final String name, final List<String> part_vals, final Partition newPart)
+ void renamePartition(final String dbname, final String name, final List<String> part_vals, final Partition newPart)
throws InvalidOperationException, MetaException, TException;
/**
@@ -692,7 +698,7 @@ public interface IMetaStoreClient {
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_fields(java.lang.String,
* java.lang.String)
*/
- public List<FieldSchema> getFields(String db, String tableName)
+ List<FieldSchema> getFields(String db, String tableName)
throws MetaException, TException, UnknownTableException,
UnknownDBException;
@@ -706,7 +712,7 @@ public interface IMetaStoreClient {
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_schema(java.lang.String,
* java.lang.String)
*/
- public List<FieldSchema> getSchema(String db, String tableName)
+ List<FieldSchema> getSchema(String db, String tableName)
throws MetaException, TException, UnknownTableException,
UnknownDBException;
@@ -719,7 +725,7 @@ public interface IMetaStoreClient {
* @throws TException
* @throws ConfigValSecurityException
*/
- public String getConfigValue(String name, String defaultValue)
+ String getConfigValue(String name, String defaultValue)
throws TException, ConfigValSecurityException;
/**
@@ -730,7 +736,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public List<String> partitionNameToVals(String name)
+ List<String> partitionNameToVals(String name)
throws MetaException, TException;
/**
*
@@ -740,7 +746,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public Map<String, String> partitionNameToSpec(String name)
+ Map<String, String> partitionNameToSpec(String name)
throws MetaException, TException;
/**
@@ -752,10 +758,10 @@ public interface IMetaStoreClient {
* @throws TException
* @throws AlreadyExistsException
*/
- public void createIndex(Index index, Table indexTable) throws InvalidObjectException,
+ void createIndex(Index index, Table indexTable) throws InvalidObjectException,
MetaException, NoSuchObjectException, TException, AlreadyExistsException;
- public void alter_index(String dbName, String tblName, String indexName,
+ void alter_index(String dbName, String tblName, String indexName,
Index index) throws InvalidOperationException, MetaException, TException;
/**
@@ -769,7 +775,7 @@ public interface IMetaStoreClient {
* @throws NoSuchObjectException
* @throws TException
*/
- public Index getIndex(String dbName, String tblName, String indexName)
+ Index getIndex(String dbName, String tblName, String indexName)
throws MetaException, UnknownTableException, NoSuchObjectException,
TException;
@@ -784,7 +790,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public List<Index> listIndexes(String db_name, String tbl_name,
+ List<Index> listIndexes(String db_name, String tbl_name,
short max) throws NoSuchObjectException, MetaException, TException;
/**
@@ -797,7 +803,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public List<String> listIndexNames(String db_name, String tbl_name,
+ List<String> listIndexNames(String db_name, String tbl_name,
short max) throws MetaException, TException;
/**
@@ -810,7 +816,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public boolean dropIndex(String db_name, String tbl_name,
+ boolean dropIndex(String db_name, String tbl_name,
String name, boolean deleteData) throws NoSuchObjectException,
MetaException, TException;
@@ -825,7 +831,7 @@ public interface IMetaStoreClient {
* @throws InvalidInputException
*/
- public boolean updateTableColumnStatistics(ColumnStatistics statsObj)
+ boolean updateTableColumnStatistics(ColumnStatistics statsObj)
throws NoSuchObjectException, InvalidObjectException, MetaException, TException,
InvalidInputException;
@@ -840,7 +846,7 @@ public interface IMetaStoreClient {
* @throws InvalidInputException
*/
- public boolean updatePartitionColumnStatistics(ColumnStatistics statsObj)
+ boolean updatePartitionColumnStatistics(ColumnStatistics statsObj)
throws NoSuchObjectException, InvalidObjectException, MetaException, TException,
InvalidInputException;
@@ -848,14 +854,14 @@ public interface IMetaStoreClient {
* Get table column statistics given dbName, tableName and multiple colName-s
* @return ColumnStatistics struct for a given db, table and columns
*/
- public List<ColumnStatisticsObj> getTableColumnStatistics(String dbName, String tableName,
+ List<ColumnStatisticsObj> getTableColumnStatistics(String dbName, String tableName,
List<String> colNames) throws NoSuchObjectException, MetaException, TException;
/**
* Get partitions column statistics given dbName, tableName, multiple partitions and colName-s
* @return ColumnStatistics struct for a given db, table and columns
*/
- public Map<String, List<ColumnStatisticsObj>> getPartitionColumnStatistics(String dbName,
+ Map<String, List<ColumnStatisticsObj>> getPartitionColumnStatistics(String dbName,
String tableName, List<String> partNames, List<String> colNames)
throws NoSuchObjectException, MetaException, TException;
@@ -873,24 +879,23 @@ public interface IMetaStoreClient {
* @throws InvalidInputException
*/
- public boolean deletePartitionColumnStatistics(String dbName, String tableName,
+ boolean deletePartitionColumnStatistics(String dbName, String tableName,
String partName, String colName) throws NoSuchObjectException, MetaException,
InvalidObjectException, TException, InvalidInputException;
- /**
- * Delete table level column statistics given dbName, tableName and colName
- * @param dbName
- * @param tableName
- * @param colName
- * @return boolean indicating the outcome of the operation
- * @throws NoSuchObjectException
- * @throws MetaException
- * @throws InvalidObjectException
- * @throws TException
- * @throws InvalidInputException
- */
-
- public boolean deleteTableColumnStatistics(String dbName, String tableName, String colName) throws
+ /**
+ * Delete table level column statistics given dbName, tableName and colName
+ * @param dbName
+ * @param tableName
+ * @param colName
+ * @return boolean indicating the outcome of the operation
+ * @throws NoSuchObjectException
+ * @throws MetaException
+ * @throws InvalidObjectException
+ * @throws TException
+ * @throws InvalidInputException
+ */
+ boolean deleteTableColumnStatistics(String dbName, String tableName, String colName) throws
NoSuchObjectException, MetaException, InvalidObjectException, TException, InvalidInputException;
/**
@@ -900,7 +905,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public boolean create_role(Role role)
+ boolean create_role(Role role)
throws MetaException, TException;
/**
@@ -911,7 +916,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public boolean drop_role(String role_name) throws MetaException, TException;
+ boolean drop_role(String role_name) throws MetaException, TException;
/**
* list all role names
@@ -919,7 +924,7 @@ public interface IMetaStoreClient {
* @throws TException
* @throws MetaException
*/
- public List<String> listRoleNames() throws MetaException, TException;
+ List<String> listRoleNames() throws MetaException, TException;
/**
*
@@ -933,7 +938,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public boolean grant_role(String role_name, String user_name,
+ boolean grant_role(String role_name, String user_name,
PrincipalType principalType, String grantor, PrincipalType grantorType,
boolean grantOption) throws MetaException, TException;
@@ -948,7 +953,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public boolean revoke_role(String role_name, String user_name,
+ boolean revoke_role(String role_name, String user_name,
PrincipalType principalType, boolean grantOption) throws MetaException, TException;
/**
@@ -959,7 +964,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public List<Role> list_roles(String principalName, PrincipalType principalType)
+ List<Role> list_roles(String principalName, PrincipalType principalType)
throws MetaException, TException;
/**
@@ -972,7 +977,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject,
+ PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject,
String user_name, List<String> group_names) throws MetaException,
TException;
@@ -985,7 +990,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public List<HiveObjectPrivilege> list_privileges(String principal_name,
+ List<HiveObjectPrivilege> list_privileges(String principal_name,
PrincipalType principal_type, HiveObjectRef hiveObject)
throws MetaException, TException;
@@ -995,7 +1000,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public boolean grant_privileges(PrivilegeBag privileges)
+ boolean grant_privileges(PrivilegeBag privileges)
throws MetaException, TException;
/**
@@ -1004,7 +1009,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public boolean revoke_privileges(PrivilegeBag privileges, boolean grantOption)
+ boolean revoke_privileges(PrivilegeBag privileges, boolean grantOption)
throws MetaException, TException;
/**
@@ -1014,7 +1019,7 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public String getDelegationToken(String owner, String renewerKerberosPrincipalName)
+ String getDelegationToken(String owner, String renewerKerberosPrincipalName)
throws MetaException, TException;
/**
@@ -1023,28 +1028,28 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws TException
*/
- public long renewDelegationToken(String tokenStrForm) throws MetaException, TException;
+ long renewDelegationToken(String tokenStrForm) throws MetaException, TException;
/**
* @param tokenStrForm
* @throws MetaException
* @throws TException
*/
- public void cancelDelegationToken(String tokenStrForm) throws MetaException, TException;
+ void cancelDelegationToken(String tokenStrForm) throws MetaException, TException;
- public void createFunction(Function func)
+ void createFunction(Function func)
throws InvalidObjectException, MetaException, TException;
- public void alterFunction(String dbName, String funcName, Function newFunction)
+ void alterFunction(String dbName, String funcName, Function newFunction)
throws InvalidObjectException, MetaException, TException;
- public void dropFunction(String dbName, String funcName) throws MetaException,
+ void dropFunction(String dbName, String funcName) throws MetaException,
NoSuchObjectException, InvalidObjectException, InvalidInputException, TException;
- public Function getFunction(String dbName, String funcName)
+ Function getFunction(String dbName, String funcName)
throws MetaException, TException;
- public List<String> getFunctions(String dbName, String pattern)
+ List<String> getFunctions(String dbName, String pattern)
throws MetaException, TException;
/**
@@ -1052,7 +1057,7 @@ public interface IMetaStoreClient {
* @return list of valid transactions
* @throws TException
*/
- public ValidTxnList getValidTxns() throws TException;
+ ValidTxnList getValidTxns() throws TException;
/**
* Initiate a transaction.
@@ -1062,7 +1067,7 @@ public interface IMetaStoreClient {
* @return transaction identifier
* @throws TException
*/
- public long openTxn(String user) throws TException;
+ long openTxn(String user) throws TException;
/**
* Initiate a batch of transactions. It is not guaranteed that the
@@ -1089,7 +1094,7 @@ public interface IMetaStoreClient {
* optimistically assuming that the result matches the request.
* @throws TException
*/
- public OpenTxnsResponse openTxns(String user, int numTxns) throws TException;
+ OpenTxnsResponse openTxns(String user, int numTxns) throws TException;
/**
* Rollback a transaction. This will also unlock any locks associated with
@@ -1100,7 +1105,7 @@ public interface IMetaStoreClient {
* deleted.
* @throws TException
*/
- public void rollbackTxn(long txnid) throws NoSuchTxnException, TException;
+ void rollbackTxn(long txnid) throws NoSuchTxnException, TException;
/**
* Commit a transaction. This will also unlock any locks associated with
@@ -1113,7 +1118,7 @@ public interface IMetaStoreClient {
* aborted. This can result from the transaction timing out.
* @throws TException
*/
- public void commitTxn(long txnid)
+ void commitTxn(long txnid)
throws NoSuchTxnException, TxnAbortedException, TException;
/**
@@ -1123,7 +1128,7 @@ public interface IMetaStoreClient {
* @return List of currently opened transactions, included aborted ones.
* @throws TException
*/
- public GetOpenTxnsInfoResponse showTxns() throws TException;
+ GetOpenTxnsInfoResponse showTxns() throws TException;
/**
* Request a set of locks. All locks needed for a particular query, DML,
@@ -1153,7 +1158,7 @@ public interface IMetaStoreClient {
* aborted. This can result from the transaction timing out.
* @throws TException
*/
- public LockResponse lock(LockRequest request)
+ LockResponse lock(LockRequest request)
throws NoSuchTxnException, TxnAbortedException, TException;
/**
@@ -1177,7 +1182,7 @@ public interface IMetaStoreClient {
* This can result from the lock timing out and being unlocked by the system.
* @throws TException
*/
- public LockResponse checkLock(long lockid)
+ LockResponse checkLock(long lockid)
throws NoSuchTxnException, TxnAbortedException, NoSuchLockException,
TException;
@@ -1192,7 +1197,7 @@ public interface IMetaStoreClient {
* transaction.
* @throws TException
*/
- public void unlock(long lockid)
+ void unlock(long lockid)
throws NoSuchLockException, TxnOpenException, TException;
/**
@@ -1200,7 +1205,7 @@ public interface IMetaStoreClient {
* @return List of currently held and waiting locks.
* @throws TException
*/
- public ShowLocksResponse showLocks() throws TException;
+ ShowLocksResponse showLocks() throws TException;
/**
* Send a heartbeat to indicate that the client holding these locks (if
@@ -1222,7 +1227,7 @@ public interface IMetaStoreClient {
* This can result from the lock timing out and being unlocked by the system.
* @throws TException
*/
- public void heartbeat(long txnid, long lockid)
+ void heartbeat(long txnid, long lockid)
throws NoSuchLockException, NoSuchTxnException, TxnAbortedException,
TException;
@@ -1236,7 +1241,7 @@ public interface IMetaStoreClient {
* have already been closed) and which were aborted.
* @throws TException
*/
- public HeartbeatTxnRangeResponse heartbeatTxnRange(long min, long max) throws TException;
+ HeartbeatTxnRangeResponse heartbeatTxnRange(long min, long max) throws TException;
/**
* Send a request to compact a table or partition. This will not block until the compaction is
@@ -1251,7 +1256,7 @@ public interface IMetaStoreClient {
* @param type Whether this is a major or minor compaction.
* @throws TException
*/
- public void compact(String dbname, String tableName, String partitionName, CompactionType type)
+ void compact(String dbname, String tableName, String partitionName, CompactionType type)
throws TException;
/**
@@ -1260,10 +1265,10 @@ public interface IMetaStoreClient {
* in progress, and finished but waiting to clean the existing files.
* @throws TException
*/
- public ShowCompactResponse showCompactions() throws TException;
+ ShowCompactResponse showCompactions() throws TException;
- public class IncompatibleMetastoreException extends MetaException {
- public IncompatibleMetastoreException(String message) {
+ class IncompatibleMetastoreException extends MetaException {
+ public IncompatibleMetastoreException(String message) {
super(message);
}
}
@@ -1294,4 +1299,6 @@ public interface IMetaStoreClient {
public AggrStats getAggrColStatsFor(String dbName, String tblName,
List<String> colNames, List<String> partName) throws NoSuchObjectException, MetaException, TException;
+
+ boolean setPartitionColumnStatistics(SetPartitionsStatsRequest request) throws NoSuchObjectException, InvalidObjectException, MetaException, TException, InvalidInputException;
}
Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java Fri Aug 22 21:36:47 2014
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
+import org.apache.hadoop.hive.metastore.events.ConfigChangeEvent;
import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
@@ -50,6 +51,13 @@ public abstract class MetaStoreEventList
* @param tableEvent table event.
* @throws MetaException
*/
+ public void onConfigChange(ConfigChangeEvent tableEvent) throws MetaException {
+ }
+
+ /**
+ * @param tableEvent table event.
+ * @throws MetaException
+ */
public void onCreateTable (CreateTableEvent tableEvent) throws MetaException {
}
Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java Fri Aug 22 21:36:47 2014
@@ -2564,13 +2564,13 @@ public class ObjectStore implements RawS
}
// For now only alter name, owner, paramters, cols, bucketcols are allowed
+ oldt.setDatabase(newt.getDatabase());
oldt.setTableName(newt.getTableName().toLowerCase());
oldt.setParameters(newt.getParameters());
oldt.setOwner(newt.getOwner());
// Fully copy over the contents of the new SD into the old SD,
// so we don't create an extra SD in the metastore db that has no references.
copyMSD(newt.getSd(), oldt.getSd());
- oldt.setDatabase(newt.getDatabase());
oldt.setRetention(newt.getRetention());
oldt.setPartitionKeys(newt.getPartitionKeys());
oldt.setTableType(newt.getTableType());