You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by gu...@apache.org on 2014/02/25 08:59:20 UTC
svn commit: r1571600 [14/46] - in /hive/branches/tez: ./
ant/src/org/apache/hadoop/hive/ant/
common/src/java/org/apache/hadoop/hive/common/
common/src/java/org/apache/hadoop/hive/common/type/
common/src/java/org/apache/hadoop/hive/conf/ common/src/java...
Modified: hive/branches/tez/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py (original)
+++ hive/branches/tez/metastore/src/gen/thrift/gen-py/hive_metastore/constants.py Tue Feb 25 07:58:52 2014
@@ -15,6 +15,7 @@ HIVE_FILTER_FIELD_PARAMS = "hive_filter_
HIVE_FILTER_FIELD_LAST_ACCESS = "hive_filter_field_last_access__"
IS_ARCHIVED = "is_archived"
ORIGINAL_LOCATION = "original_location"
+IS_IMMUTABLE = "immutable"
META_TABLE_COLUMNS = "columns"
META_TABLE_COLUMN_TYPES = "columns.types"
BUCKET_FIELD_NAME = "bucket_field_name"
Modified: hive/branches/tez/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py (original)
+++ hive/branches/tez/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py Tue Feb 25 07:58:52 2014
@@ -69,6 +69,34 @@ class PartitionEventType:
"LOAD_DONE": 1,
}
+class FunctionType:
+ JAVA = 1
+
+ _VALUES_TO_NAMES = {
+ 1: "JAVA",
+ }
+
+ _NAMES_TO_VALUES = {
+ "JAVA": 1,
+ }
+
+class ResourceType:
+ JAR = 1
+ FILE = 2
+ ARCHIVE = 3
+
+ _VALUES_TO_NAMES = {
+ 1: "JAR",
+ 2: "FILE",
+ 3: "ARCHIVE",
+ }
+
+ _NAMES_TO_VALUES = {
+ "JAR": 1,
+ "FILE": 2,
+ "ARCHIVE": 3,
+ }
+
class Version:
"""
@@ -1015,6 +1043,8 @@ class Database:
- locationUri
- parameters
- privileges
+ - ownerName
+ - ownerType
"""
thrift_spec = (
@@ -1024,14 +1054,18 @@ class Database:
(3, TType.STRING, 'locationUri', None, None, ), # 3
(4, TType.MAP, 'parameters', (TType.STRING,None,TType.STRING,None), None, ), # 4
(5, TType.STRUCT, 'privileges', (PrincipalPrivilegeSet, PrincipalPrivilegeSet.thrift_spec), None, ), # 5
+ (6, TType.STRING, 'ownerName', None, None, ), # 6
+ (7, TType.I32, 'ownerType', None, None, ), # 7
)
- def __init__(self, name=None, description=None, locationUri=None, parameters=None, privileges=None,):
+ def __init__(self, name=None, description=None, locationUri=None, parameters=None, privileges=None, ownerName=None, ownerType=None,):
self.name = name
self.description = description
self.locationUri = locationUri
self.parameters = parameters
self.privileges = privileges
+ self.ownerName = ownerName
+ self.ownerType = ownerType
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1074,6 +1108,16 @@ class Database:
self.privileges.read(iprot)
else:
iprot.skip(ftype)
+ elif fid == 6:
+ if ftype == TType.STRING:
+ self.ownerName = iprot.readString();
+ else:
+ iprot.skip(ftype)
+ elif fid == 7:
+ if ftype == TType.I32:
+ self.ownerType = iprot.readI32();
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -1108,6 +1152,14 @@ class Database:
oprot.writeFieldBegin('privileges', TType.STRUCT, 5)
self.privileges.write(oprot)
oprot.writeFieldEnd()
+ if self.ownerName is not None:
+ oprot.writeFieldBegin('ownerName', TType.STRING, 6)
+ oprot.writeString(self.ownerName)
+ oprot.writeFieldEnd()
+ if self.ownerType is not None:
+ oprot.writeFieldBegin('ownerType', TType.I32, 7)
+ oprot.writeI32(self.ownerType)
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
@@ -4420,6 +4472,231 @@ class DropPartitionsRequest:
def __ne__(self, other):
return not (self == other)
+class ResourceUri:
+ """
+ Attributes:
+ - resourceType
+ - uri
+ """
+
+ thrift_spec = (
+ None, # 0
+ (1, TType.I32, 'resourceType', None, None, ), # 1
+ (2, TType.STRING, 'uri', None, None, ), # 2
+ )
+
+ def __init__(self, resourceType=None, uri=None,):
+ self.resourceType = resourceType
+ self.uri = uri
+
+ def read(self, iprot):
+ if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+ fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+ return
+ iprot.readStructBegin()
+ while True:
+ (fname, ftype, fid) = iprot.readFieldBegin()
+ if ftype == TType.STOP:
+ break
+ if fid == 1:
+ if ftype == TType.I32:
+ self.resourceType = iprot.readI32();
+ else:
+ iprot.skip(ftype)
+ elif fid == 2:
+ if ftype == TType.STRING:
+ self.uri = iprot.readString();
+ else:
+ iprot.skip(ftype)
+ else:
+ iprot.skip(ftype)
+ iprot.readFieldEnd()
+ iprot.readStructEnd()
+
+ def write(self, oprot):
+ if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+ oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+ return
+ oprot.writeStructBegin('ResourceUri')
+ if self.resourceType is not None:
+ oprot.writeFieldBegin('resourceType', TType.I32, 1)
+ oprot.writeI32(self.resourceType)
+ oprot.writeFieldEnd()
+ if self.uri is not None:
+ oprot.writeFieldBegin('uri', TType.STRING, 2)
+ oprot.writeString(self.uri)
+ oprot.writeFieldEnd()
+ oprot.writeFieldStop()
+ oprot.writeStructEnd()
+
+ def validate(self):
+ return
+
+
+ def __repr__(self):
+ L = ['%s=%r' % (key, value)
+ for key, value in self.__dict__.iteritems()]
+ return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+ def __ne__(self, other):
+ return not (self == other)
+
+class Function:
+ """
+ Attributes:
+ - functionName
+ - dbName
+ - className
+ - ownerName
+ - ownerType
+ - createTime
+ - functionType
+ - resourceUris
+ """
+
+ thrift_spec = (
+ None, # 0
+ (1, TType.STRING, 'functionName', None, None, ), # 1
+ (2, TType.STRING, 'dbName', None, None, ), # 2
+ (3, TType.STRING, 'className', None, None, ), # 3
+ (4, TType.STRING, 'ownerName', None, None, ), # 4
+ (5, TType.I32, 'ownerType', None, None, ), # 5
+ (6, TType.I32, 'createTime', None, None, ), # 6
+ (7, TType.I32, 'functionType', None, None, ), # 7
+ (8, TType.LIST, 'resourceUris', (TType.STRUCT,(ResourceUri, ResourceUri.thrift_spec)), None, ), # 8
+ )
+
+ def __init__(self, functionName=None, dbName=None, className=None, ownerName=None, ownerType=None, createTime=None, functionType=None, resourceUris=None,):
+ self.functionName = functionName
+ self.dbName = dbName
+ self.className = className
+ self.ownerName = ownerName
+ self.ownerType = ownerType
+ self.createTime = createTime
+ self.functionType = functionType
+ self.resourceUris = resourceUris
+
+ def read(self, iprot):
+ if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+ fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+ return
+ iprot.readStructBegin()
+ while True:
+ (fname, ftype, fid) = iprot.readFieldBegin()
+ if ftype == TType.STOP:
+ break
+ if fid == 1:
+ if ftype == TType.STRING:
+ self.functionName = iprot.readString();
+ else:
+ iprot.skip(ftype)
+ elif fid == 2:
+ if ftype == TType.STRING:
+ self.dbName = iprot.readString();
+ else:
+ iprot.skip(ftype)
+ elif fid == 3:
+ if ftype == TType.STRING:
+ self.className = iprot.readString();
+ else:
+ iprot.skip(ftype)
+ elif fid == 4:
+ if ftype == TType.STRING:
+ self.ownerName = iprot.readString();
+ else:
+ iprot.skip(ftype)
+ elif fid == 5:
+ if ftype == TType.I32:
+ self.ownerType = iprot.readI32();
+ else:
+ iprot.skip(ftype)
+ elif fid == 6:
+ if ftype == TType.I32:
+ self.createTime = iprot.readI32();
+ else:
+ iprot.skip(ftype)
+ elif fid == 7:
+ if ftype == TType.I32:
+ self.functionType = iprot.readI32();
+ else:
+ iprot.skip(ftype)
+ elif fid == 8:
+ if ftype == TType.LIST:
+ self.resourceUris = []
+ (_etype316, _size313) = iprot.readListBegin()
+ for _i317 in xrange(_size313):
+ _elem318 = ResourceUri()
+ _elem318.read(iprot)
+ self.resourceUris.append(_elem318)
+ iprot.readListEnd()
+ else:
+ iprot.skip(ftype)
+ else:
+ iprot.skip(ftype)
+ iprot.readFieldEnd()
+ iprot.readStructEnd()
+
+ def write(self, oprot):
+ if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+ oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+ return
+ oprot.writeStructBegin('Function')
+ if self.functionName is not None:
+ oprot.writeFieldBegin('functionName', TType.STRING, 1)
+ oprot.writeString(self.functionName)
+ oprot.writeFieldEnd()
+ if self.dbName is not None:
+ oprot.writeFieldBegin('dbName', TType.STRING, 2)
+ oprot.writeString(self.dbName)
+ oprot.writeFieldEnd()
+ if self.className is not None:
+ oprot.writeFieldBegin('className', TType.STRING, 3)
+ oprot.writeString(self.className)
+ oprot.writeFieldEnd()
+ if self.ownerName is not None:
+ oprot.writeFieldBegin('ownerName', TType.STRING, 4)
+ oprot.writeString(self.ownerName)
+ oprot.writeFieldEnd()
+ if self.ownerType is not None:
+ oprot.writeFieldBegin('ownerType', TType.I32, 5)
+ oprot.writeI32(self.ownerType)
+ oprot.writeFieldEnd()
+ if self.createTime is not None:
+ oprot.writeFieldBegin('createTime', TType.I32, 6)
+ oprot.writeI32(self.createTime)
+ oprot.writeFieldEnd()
+ if self.functionType is not None:
+ oprot.writeFieldBegin('functionType', TType.I32, 7)
+ oprot.writeI32(self.functionType)
+ oprot.writeFieldEnd()
+ if self.resourceUris is not None:
+ oprot.writeFieldBegin('resourceUris', TType.LIST, 8)
+ oprot.writeListBegin(TType.STRUCT, len(self.resourceUris))
+ for iter319 in self.resourceUris:
+ iter319.write(oprot)
+ oprot.writeListEnd()
+ oprot.writeFieldEnd()
+ oprot.writeFieldStop()
+ oprot.writeStructEnd()
+
+ def validate(self):
+ return
+
+
+ def __repr__(self):
+ L = ['%s=%r' % (key, value)
+ for key, value in self.__dict__.iteritems()]
+ return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+ def __ne__(self, other):
+ return not (self == other)
+
class MetaException(TException):
"""
Attributes:
Modified: hive/branches/tez/metastore/src/gen/thrift/gen-rb/hive_metastore_constants.rb
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/gen/thrift/gen-rb/hive_metastore_constants.rb?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/gen/thrift/gen-rb/hive_metastore_constants.rb (original)
+++ hive/branches/tez/metastore/src/gen/thrift/gen-rb/hive_metastore_constants.rb Tue Feb 25 07:58:52 2014
@@ -19,6 +19,8 @@ IS_ARCHIVED = %q"is_archived"
ORIGINAL_LOCATION = %q"original_location"
+IS_IMMUTABLE = %q"immutable"
+
META_TABLE_COLUMNS = %q"columns"
META_TABLE_COLUMN_TYPES = %q"columns.types"
Modified: hive/branches/tez/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb (original)
+++ hive/branches/tez/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb Tue Feb 25 07:58:52 2014
@@ -32,6 +32,20 @@ module PartitionEventType
VALID_VALUES = Set.new([LOAD_DONE]).freeze
end
+module FunctionType
+ JAVA = 1
+ VALUE_MAP = {1 => "JAVA"}
+ VALID_VALUES = Set.new([JAVA]).freeze
+end
+
+module ResourceType
+ JAR = 1
+ FILE = 2
+ ARCHIVE = 3
+ VALUE_MAP = {1 => "JAR", 2 => "FILE", 3 => "ARCHIVE"}
+ VALID_VALUES = Set.new([JAR, FILE, ARCHIVE]).freeze
+end
+
class Version
include ::Thrift::Struct, ::Thrift::Struct_Union
VERSION = 1
@@ -244,18 +258,25 @@ class Database
LOCATIONURI = 3
PARAMETERS = 4
PRIVILEGES = 5
+ OWNERNAME = 6
+ OWNERTYPE = 7
FIELDS = {
NAME => {:type => ::Thrift::Types::STRING, :name => 'name'},
DESCRIPTION => {:type => ::Thrift::Types::STRING, :name => 'description'},
LOCATIONURI => {:type => ::Thrift::Types::STRING, :name => 'locationUri'},
PARAMETERS => {:type => ::Thrift::Types::MAP, :name => 'parameters', :key => {:type => ::Thrift::Types::STRING}, :value => {:type => ::Thrift::Types::STRING}},
- PRIVILEGES => {:type => ::Thrift::Types::STRUCT, :name => 'privileges', :class => ::PrincipalPrivilegeSet, :optional => true}
+ PRIVILEGES => {:type => ::Thrift::Types::STRUCT, :name => 'privileges', :class => ::PrincipalPrivilegeSet, :optional => true},
+ OWNERNAME => {:type => ::Thrift::Types::STRING, :name => 'ownerName', :optional => true},
+ OWNERTYPE => {:type => ::Thrift::Types::I32, :name => 'ownerType', :optional => true, :enum_class => ::PrincipalType}
}
def struct_fields; FIELDS; end
def validate
+ unless @ownerType.nil? || ::PrincipalType::VALID_VALUES.include?(@ownerType)
+ raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Invalid value of field ownerType!')
+ end
end
::Thrift::Struct.generate_accessors self
@@ -1007,6 +1028,63 @@ class DropPartitionsRequest
::Thrift::Struct.generate_accessors self
end
+class ResourceUri
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ RESOURCETYPE = 1
+ URI = 2
+
+ FIELDS = {
+ RESOURCETYPE => {:type => ::Thrift::Types::I32, :name => 'resourceType', :enum_class => ::ResourceType},
+ URI => {:type => ::Thrift::Types::STRING, :name => 'uri'}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ unless @resourceType.nil? || ::ResourceType::VALID_VALUES.include?(@resourceType)
+ raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Invalid value of field resourceType!')
+ end
+ end
+
+ ::Thrift::Struct.generate_accessors self
+end
+
+class Function
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ FUNCTIONNAME = 1
+ DBNAME = 2
+ CLASSNAME = 3
+ OWNERNAME = 4
+ OWNERTYPE = 5
+ CREATETIME = 6
+ FUNCTIONTYPE = 7
+ RESOURCEURIS = 8
+
+ FIELDS = {
+ FUNCTIONNAME => {:type => ::Thrift::Types::STRING, :name => 'functionName'},
+ DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
+ CLASSNAME => {:type => ::Thrift::Types::STRING, :name => 'className'},
+ OWNERNAME => {:type => ::Thrift::Types::STRING, :name => 'ownerName'},
+ OWNERTYPE => {:type => ::Thrift::Types::I32, :name => 'ownerType', :enum_class => ::PrincipalType},
+ CREATETIME => {:type => ::Thrift::Types::I32, :name => 'createTime'},
+ FUNCTIONTYPE => {:type => ::Thrift::Types::I32, :name => 'functionType', :enum_class => ::FunctionType},
+ RESOURCEURIS => {:type => ::Thrift::Types::LIST, :name => 'resourceUris', :element => {:type => ::Thrift::Types::STRUCT, :class => ::ResourceUri}}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ unless @ownerType.nil? || ::PrincipalType::VALID_VALUES.include?(@ownerType)
+ raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Invalid value of field ownerType!')
+ end
+ unless @functionType.nil? || ::FunctionType::VALID_VALUES.include?(@functionType)
+ raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Invalid value of field functionType!')
+ end
+ end
+
+ ::Thrift::Struct.generate_accessors self
+end
+
class MetaException < ::Thrift::Exception
include ::Thrift::Struct, ::Thrift::Struct_Union
def initialize(message=nil)
Modified: hive/branches/tez/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb (original)
+++ hive/branches/tez/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb Tue Feb 25 07:58:52 2014
@@ -1269,6 +1269,89 @@ module ThriftHiveMetastore
raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'delete_table_column_statistics failed: unknown result')
end
+ def create_function(func)
+ send_create_function(func)
+ recv_create_function()
+ end
+
+ def send_create_function(func)
+ send_message('create_function', Create_function_args, :func => func)
+ end
+
+ def recv_create_function()
+ result = receive_message(Create_function_result)
+ raise result.o1 unless result.o1.nil?
+ raise result.o2 unless result.o2.nil?
+ raise result.o3 unless result.o3.nil?
+ raise result.o4 unless result.o4.nil?
+ return
+ end
+
+ def drop_function(dbName, funcName)
+ send_drop_function(dbName, funcName)
+ recv_drop_function()
+ end
+
+ def send_drop_function(dbName, funcName)
+ send_message('drop_function', Drop_function_args, :dbName => dbName, :funcName => funcName)
+ end
+
+ def recv_drop_function()
+ result = receive_message(Drop_function_result)
+ raise result.o1 unless result.o1.nil?
+ raise result.o3 unless result.o3.nil?
+ return
+ end
+
+ def alter_function(dbName, funcName, newFunc)
+ send_alter_function(dbName, funcName, newFunc)
+ recv_alter_function()
+ end
+
+ def send_alter_function(dbName, funcName, newFunc)
+ send_message('alter_function', Alter_function_args, :dbName => dbName, :funcName => funcName, :newFunc => newFunc)
+ end
+
+ def recv_alter_function()
+ result = receive_message(Alter_function_result)
+ raise result.o1 unless result.o1.nil?
+ raise result.o2 unless result.o2.nil?
+ return
+ end
+
+ def get_functions(dbName, pattern)
+ send_get_functions(dbName, pattern)
+ return recv_get_functions()
+ end
+
+ def send_get_functions(dbName, pattern)
+ send_message('get_functions', Get_functions_args, :dbName => dbName, :pattern => pattern)
+ end
+
+ def recv_get_functions()
+ result = receive_message(Get_functions_result)
+ return result.success unless result.success.nil?
+ raise result.o1 unless result.o1.nil?
+ raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_functions failed: unknown result')
+ end
+
+ def get_function(dbName, funcName)
+ send_get_function(dbName, funcName)
+ return recv_get_function()
+ end
+
+ def send_get_function(dbName, funcName)
+ send_message('get_function', Get_function_args, :dbName => dbName, :funcName => funcName)
+ end
+
+ def recv_get_function()
+ result = receive_message(Get_function_result)
+ return result.success unless result.success.nil?
+ raise result.o1 unless result.o1.nil?
+ raise result.o2 unless result.o2.nil?
+ raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_function failed: unknown result')
+ end
+
def create_role(role)
send_create_role(role)
return recv_create_role()
@@ -2508,6 +2591,73 @@ module ThriftHiveMetastore
write_result(result, oprot, 'delete_table_column_statistics', seqid)
end
+ def process_create_function(seqid, iprot, oprot)
+ args = read_args(iprot, Create_function_args)
+ result = Create_function_result.new()
+ begin
+ @handler.create_function(args.func)
+ rescue ::AlreadyExistsException => o1
+ result.o1 = o1
+ rescue ::InvalidObjectException => o2
+ result.o2 = o2
+ rescue ::MetaException => o3
+ result.o3 = o3
+ rescue ::NoSuchObjectException => o4
+ result.o4 = o4
+ end
+ write_result(result, oprot, 'create_function', seqid)
+ end
+
+ def process_drop_function(seqid, iprot, oprot)
+ args = read_args(iprot, Drop_function_args)
+ result = Drop_function_result.new()
+ begin
+ @handler.drop_function(args.dbName, args.funcName)
+ rescue ::NoSuchObjectException => o1
+ result.o1 = o1
+ rescue ::MetaException => o3
+ result.o3 = o3
+ end
+ write_result(result, oprot, 'drop_function', seqid)
+ end
+
+ def process_alter_function(seqid, iprot, oprot)
+ args = read_args(iprot, Alter_function_args)
+ result = Alter_function_result.new()
+ begin
+ @handler.alter_function(args.dbName, args.funcName, args.newFunc)
+ rescue ::InvalidOperationException => o1
+ result.o1 = o1
+ rescue ::MetaException => o2
+ result.o2 = o2
+ end
+ write_result(result, oprot, 'alter_function', seqid)
+ end
+
+ def process_get_functions(seqid, iprot, oprot)
+ args = read_args(iprot, Get_functions_args)
+ result = Get_functions_result.new()
+ begin
+ result.success = @handler.get_functions(args.dbName, args.pattern)
+ rescue ::MetaException => o1
+ result.o1 = o1
+ end
+ write_result(result, oprot, 'get_functions', seqid)
+ end
+
+ def process_get_function(seqid, iprot, oprot)
+ args = read_args(iprot, Get_function_args)
+ result = Get_function_result.new()
+ begin
+ result.success = @handler.get_function(args.dbName, args.funcName)
+ rescue ::MetaException => o1
+ result.o1 = o1
+ rescue ::NoSuchObjectException => o2
+ result.o2 = o2
+ end
+ write_result(result, oprot, 'get_function', seqid)
+ end
+
def process_create_role(seqid, iprot, oprot)
args = read_args(iprot, Create_role_args)
result = Create_role_result.new()
@@ -5577,6 +5727,192 @@ module ThriftHiveMetastore
::Thrift::Struct.generate_accessors self
end
+ class Create_function_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ FUNC = 1
+
+ FIELDS = {
+ FUNC => {:type => ::Thrift::Types::STRUCT, :name => 'func', :class => ::Function}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Create_function_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ O1 = 1
+ O2 = 2
+ O3 = 3
+ O4 = 4
+
+ FIELDS = {
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::AlreadyExistsException},
+ O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::InvalidObjectException},
+ O3 => {:type => ::Thrift::Types::STRUCT, :name => 'o3', :class => ::MetaException},
+ O4 => {:type => ::Thrift::Types::STRUCT, :name => 'o4', :class => ::NoSuchObjectException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Drop_function_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ DBNAME = 1
+ FUNCNAME = 2
+
+ FIELDS = {
+ DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
+ FUNCNAME => {:type => ::Thrift::Types::STRING, :name => 'funcName'}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Drop_function_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ O1 = 1
+ O3 = 2
+
+ FIELDS = {
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
+ O3 => {:type => ::Thrift::Types::STRUCT, :name => 'o3', :class => ::MetaException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Alter_function_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ DBNAME = 1
+ FUNCNAME = 2
+ NEWFUNC = 3
+
+ FIELDS = {
+ DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
+ FUNCNAME => {:type => ::Thrift::Types::STRING, :name => 'funcName'},
+ NEWFUNC => {:type => ::Thrift::Types::STRUCT, :name => 'newFunc', :class => ::Function}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Alter_function_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ O1 = 1
+ O2 = 2
+
+ FIELDS = {
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::InvalidOperationException},
+ O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::MetaException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Get_functions_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ DBNAME = 1
+ PATTERN = 2
+
+ FIELDS = {
+ DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
+ PATTERN => {:type => ::Thrift::Types::STRING, :name => 'pattern'}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Get_functions_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ SUCCESS = 0
+ O1 = 1
+
+ FIELDS = {
+ SUCCESS => {:type => ::Thrift::Types::LIST, :name => 'success', :element => {:type => ::Thrift::Types::STRING}},
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::MetaException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Get_function_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ DBNAME = 1
+ FUNCNAME = 2
+
+ FIELDS = {
+ DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
+ FUNCNAME => {:type => ::Thrift::Types::STRING, :name => 'funcName'}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Get_function_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ SUCCESS = 0
+ O1 = 1
+ O2 = 2
+
+ FIELDS = {
+ SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::Function},
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::MetaException},
+ O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::NoSuchObjectException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
class Create_role_args
include ::Thrift::Struct, ::Thrift::Struct_Union
ROLE = 1
Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Tue Feb 25 07:58:52 2014
@@ -51,6 +51,7 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.common.LogUtils;
import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
import org.apache.hadoop.hive.common.classification.InterfaceAudience;
@@ -72,6 +73,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.DropPartitionsResult;
import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
@@ -453,9 +455,11 @@ public class HiveMetaStore extends Thrif
try {
ms.getDatabase(DEFAULT_DATABASE_NAME);
} catch (NoSuchObjectException e) {
- ms.createDatabase(
- new Database(DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT,
- wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(), null));
+ Database db = new Database(DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT,
+ wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(), null);
+ db.setOwnerName(PUBLIC);
+ db.setOwnerType(PrincipalType.ROLE);
+ ms.createDatabase(db);
}
HMSHandler.createDefaultDB = true;
}
@@ -795,8 +799,17 @@ public class HiveMetaStore extends Thrif
firePreEvent(new PreDropDatabaseEvent(db, this));
List<String> allTables = get_all_tables(db.getName());
- if (!cascade && !allTables.isEmpty()) {
- throw new InvalidOperationException("Database " + db.getName() + " is not empty");
+ List<String> allFunctions = get_functions(db.getName(), "*");
+
+ if (!cascade) {
+ if (!allTables.isEmpty()) {
+ throw new InvalidOperationException(
+ "Database " + db.getName() + " is not empty. One or more tables exist.");
+ }
+ if (!allFunctions.isEmpty()) {
+ throw new InvalidOperationException(
+ "Database " + db.getName() + " is not empty. One or more functions exist.");
+ }
}
Path path = new Path(db.getLocationUri()).getParent();
if (!wh.isWritable(path)) {
@@ -807,7 +820,12 @@ public class HiveMetaStore extends Thrif
Path databasePath = wh.getDnsPath(wh.getDatabasePath(db));
- // first drop tables
+ // drop any functions before dropping db
+ for (String funcName : allFunctions) {
+ drop_function(name, funcName);
+ }
+
+ // drop tables before dropping db
int tableBatchSize = HiveConf.getIntVar(hiveConf,
ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
@@ -4555,6 +4573,136 @@ public class HiveMetaStore extends Thrif
me.initCause(e);
return me;
}
+
+ private void validateFunctionInfo(Function func) throws InvalidObjectException, MetaException {
+ if (!MetaStoreUtils.validateName(func.getFunctionName())) {
+ throw new InvalidObjectException(func.getFunctionName() + " is not a valid object name");
+ }
+ String className = func.getClassName();
+ if (className == null) {
+ throw new InvalidObjectException("Function class name cannot be null");
+ }
+ }
+
+ @Override
+ public void create_function(Function func) throws AlreadyExistsException,
+ InvalidObjectException, MetaException, NoSuchObjectException,
+ TException {
+ validateFunctionInfo(func);
+
+ boolean success = false;
+ RawStore ms = getMS();
+ try {
+ ms.openTransaction();
+
+ Database db = ms.getDatabase(func.getDbName());
+ if (db == null) {
+ throw new NoSuchObjectException("The database " + func.getDbName() + " does not exist");
+ }
+ Function existingFunc = ms.getFunction(func.getDbName(), func.getFunctionName());
+ if (existingFunc != null) {
+ throw new AlreadyExistsException(
+ "Function " + func.getFunctionName() + " already exists");
+ }
+
+ // set create time
+ long time = System.currentTimeMillis() / 1000;
+ func.setCreateTime((int) time);
+ ms.createFunction(func);
+ success = ms.commitTransaction();
+ } finally {
+ if (!success) {
+ ms.rollbackTransaction();
+ }
+ }
+ }
+
+ @Override
+ public void drop_function(String dbName, String funcName)
+ throws NoSuchObjectException, MetaException,
+ InvalidObjectException, InvalidInputException {
+ boolean success = false;
+ Function func = null;
+ RawStore ms = getMS();
+
+ try {
+ ms.openTransaction();
+
+ func = ms.getFunction(dbName, funcName);
+ if (func == null) {
+ throw new NoSuchObjectException("Function " + funcName + " does not exist");
+ }
+ ms.dropFunction(dbName, funcName);
+ success = ms.commitTransaction();
+ } finally {
+ if (!success) {
+ ms.rollbackTransaction();
+ }
+ }
+ }
+
+ @Override
+ public void alter_function(String dbName, String funcName, Function newFunc)
+ throws InvalidOperationException, MetaException, TException {
+ validateFunctionInfo(newFunc);
+ boolean success = false;
+ RawStore ms = getMS();
+ try {
+ ms.openTransaction();
+ ms.alterFunction(dbName, funcName, newFunc);
+ success = ms.commitTransaction();
+ } finally {
+ if (!success) {
+ ms.rollbackTransaction();
+ }
+ }
+ }
+
+ @Override
+ public List<String> get_functions(String dbName, String pattern)
+ throws MetaException {
+ startFunction("get_functions", ": db=" + dbName + " pat=" + pattern);
+
+ RawStore ms = getMS();
+ Exception ex = null;
+ List<String> funcNames = null;
+
+ try {
+ funcNames = ms.getFunctions(dbName, pattern);
+ } catch (Exception e) {
+ ex = e;
+ throw newMetaException(e);
+ } finally {
+ endFunction("get_functions", funcNames != null, ex);
+ }
+
+ return funcNames;
+ }
+
+ @Override
+ public Function get_function(String dbName, String funcName)
+ throws MetaException, NoSuchObjectException, TException {
+ startFunction("get_function", ": " + dbName + "." + funcName);
+
+ RawStore ms = getMS();
+ Function func = null;
+ Exception ex = null;
+
+ try {
+ func = ms.getFunction(dbName, funcName);
+ if (func == null) {
+ throw new NoSuchObjectException(
+ "Function " + dbName + "." + funcName + " does not exist");
+ }
+ } catch (Exception e) {
+ ex = e;
+ throw newMetaException(e);
+ } finally {
+ endFunction("get_database", func != null, ex);
+ }
+
+ return func;
+ }
}
public static IHMSHandler newHMSHandler(String name, HiveConf hiveConf) throws MetaException {
Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Tue Feb 25 07:58:52 2014
@@ -58,6 +58,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.DropPartitionsResult;
import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.Index;
@@ -1289,6 +1290,14 @@ public class HiveMetaStoreClient impleme
return copy;
}
+ // Defensive copy of a thrift Function so callers cannot mutate shared
+ // client-side state; null-safe (returns null for null input), mirroring
+ // the other deepCopy overloads in this class.
+ private Function deepCopy(Function func) {
+ Function copy = null;
+ if (func != null) {
+ copy = new Function(func);
+ }
+ return copy;
+ }
+
private List<Partition> deepCopyPartitions(List<Partition> partitions) {
return deepCopyPartitions(partitions, null);
}
@@ -1491,4 +1500,35 @@ public class HiveMetaStoreClient impleme
assert partKVs != null;
return client.isPartitionMarkedForEvent(db_name, tbl_name, partKVs, eventType);
}
+
+ // Thin delegating wrappers for the function API: each forwards directly to
+ // the thrift client; getFunction additionally deep-copies the result so the
+ // caller owns its object (consistent with the other get* methods here).
+ @Override
+ public void createFunction(Function func) throws InvalidObjectException,
+ MetaException, TException {
+ client.create_function(func);
+ }
+
+ @Override
+ public void alterFunction(String dbName, String funcName, Function newFunction)
+ throws InvalidObjectException, MetaException, TException {
+ client.alter_function(dbName, funcName, newFunction);
+ }
+
+ @Override
+ public void dropFunction(String dbName, String funcName)
+ throws MetaException, NoSuchObjectException, InvalidObjectException,
+ InvalidInputException, TException {
+ client.drop_function(dbName, funcName);
+ }
+
+ @Override
+ public Function getFunction(String dbName, String funcName)
+ throws MetaException, TException {
+ return deepCopy(client.get_function(dbName, funcName));
+ }
+
+ @Override
+ public List<String> getFunctions(String dbName, String pattern)
+ throws MetaException, TException {
+ // No deep copy needed: a List<String> of names is already caller-safe.
+ return client.get_functions(dbName, pattern);
+ }
}
Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java Tue Feb 25 07:58:52 2014
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.Index;
@@ -1004,6 +1005,21 @@ public interface IMetaStoreClient {
*/
public void cancelDelegationToken(String tokenStrForm) throws MetaException, TException;
+ /**
+ * Register a new function with the metastore.
+ */
+ public void createFunction(Function func)
+ throws InvalidObjectException, MetaException, TException;
+
+ /**
+ * Replace the metadata of an existing function.
+ */
+ public void alterFunction(String dbName, String funcName, Function newFunction)
+ throws InvalidObjectException, MetaException, TException;
+
+ /**
+ * Remove a function from the metastore.
+ */
+ public void dropFunction(String dbName, String funcName) throws MetaException,
+ NoSuchObjectException, InvalidObjectException, InvalidInputException, TException;
+
+ /**
+ * Retrieve one function by database and name.
+ */
+ public Function getFunction(String dbName, String funcName)
+ throws MetaException, TException;
+
+ /**
+ * List names of functions in the database matching the given pattern.
+ */
+ public List<String> getFunctions(String dbName, String pattern)
+ throws MetaException, TException;
+
+
public class IncompatibleMetastoreException extends MetaException {
public IncompatibleMetastoreException(String message) {
Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Tue Feb 25 07:58:52 2014
@@ -44,6 +44,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -1180,6 +1181,29 @@ public class MetaStoreUtils {
return "TRUE".equalsIgnoreCase(params.get("EXTERNAL"));
}
+ /**
+ * Determines whether a table is an immutable table.
+ * Immutable tables are write-once/replace, and do not support append. Partitioned
+ * immutable tables do support additions by way of creation of new partitions, but
+ * do not allow the partitions themselves to be appended to. "INSERT INTO" will not
+ * work for Immutable tables.
+ *
+ * @param table table of interest
+ *
+ * @return true if immutable
+ */
+ public static boolean isImmutableTable(Table table) {
+ // Null table or missing parameter map means "not immutable".
+ if (table == null){
+ return false;
+ }
+ Map<String, String> params = table.getParameters();
+ if (params == null) {
+ return false;
+ }
+
+ // Case-insensitive match, consistent with isExternalTable's "EXTERNAL" check.
+ return "TRUE".equalsIgnoreCase(params.get(hive_metastoreConstants.IS_IMMUTABLE));
+ }
+
public static boolean isArchived(
org.apache.hadoop.hive.metastore.api.Partition part) {
Map<String, String> params = part.getParameters();
@@ -1208,6 +1232,35 @@ public class MetaStoreUtils {
}
/**
+ * Filter that filters out hidden files
+ */
+ private static final PathFilter hiddenFileFilter = new PathFilter() {
+ public boolean accept(Path p) {
+ String name = p.getName();
+ // Hidden by convention: names starting with '_' or '.'
+ // (e.g. _SUCCESS markers, .staging directories).
+ return !name.startsWith("_") && !name.startsWith(".");
+ }
+ };
+
+ /**
+ * Utility method that determines if a specified directory already has
+ * contents (non-hidden files) or not - useful to determine if an
+ * immutable table already has contents, for example.
+ *
+ * @param fs filesystem the path lives on
+ * @param path directory to check
+ * @return true if the path does not exist or holds only hidden entries
+ * @throws IOException
+ */
+ public static boolean isDirEmpty(FileSystem fs, Path path) throws IOException {
+
+ if (fs.exists(path)) {
+ FileStatus[] status = fs.globStatus(new Path(path, "*"), hiddenFileFilter);
+ // globStatus may return null (e.g. the path vanished between the
+ // exists() check and the glob); treat that the same as "no entries"
+ // instead of NPE-ing on status.length.
+ if (status != null && status.length > 0) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
* Returns true if partial has the same values as full for all values that
* aren't empty in partial.
*/
Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java Tue Feb 25 07:58:52 2014
@@ -39,7 +39,6 @@ import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.jdo.JDODataStoreException;
-import javax.jdo.JDOEnhanceException;
import javax.jdo.JDOHelper;
import javax.jdo.JDOObjectNotFoundException;
import javax.jdo.PersistenceManager;
@@ -49,7 +48,6 @@ import javax.jdo.Transaction;
import javax.jdo.datastore.DataStoreCache;
import javax.jdo.identity.IntIdentity;
-import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.apache.commons.logging.Log;
@@ -62,15 +60,13 @@ import org.apache.hadoop.hive.common.cla
import org.apache.hadoop.hive.common.classification.InterfaceStability;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.FunctionType;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
@@ -78,7 +74,6 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
-import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Order;
@@ -88,11 +83,12 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
+import org.apache.hadoop.hive.metastore.api.ResourceType;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.hadoop.hive.metastore.api.Role;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.SkewedInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.Type;
import org.apache.hadoop.hive.metastore.api.UnknownDBException;
@@ -103,6 +99,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.model.MDatabase;
import org.apache.hadoop.hive.metastore.model.MDelegationToken;
import org.apache.hadoop.hive.metastore.model.MFieldSchema;
+import org.apache.hadoop.hive.metastore.model.MFunction;
import org.apache.hadoop.hive.metastore.model.MGlobalPrivilege;
import org.apache.hadoop.hive.metastore.model.MIndex;
import org.apache.hadoop.hive.metastore.model.MMasterKey;
@@ -112,6 +109,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics;
import org.apache.hadoop.hive.metastore.model.MPartitionEvent;
import org.apache.hadoop.hive.metastore.model.MPartitionPrivilege;
+import org.apache.hadoop.hive.metastore.model.MResourceUri;
import org.apache.hadoop.hive.metastore.model.MRole;
import org.apache.hadoop.hive.metastore.model.MRoleMap;
import org.apache.hadoop.hive.metastore.model.MSerDeInfo;
@@ -134,8 +132,6 @@ import org.apache.hadoop.util.StringUtil
import org.apache.thrift.TException;
import org.datanucleus.store.rdbms.exceptions.MissingTableException;
-import org.antlr.runtime.Token;
-
import com.google.common.collect.Lists;
@@ -184,6 +180,7 @@ public class ObjectStore implements RawS
public ObjectStore() {
}
+ @Override
public Configuration getConf() {
return hiveConf;
}
@@ -193,6 +190,7 @@ public class ObjectStore implements RawS
* on connection retries. In cases of connection retries, conf will usually
* contain modified values.
*/
+ @Override
@SuppressWarnings("nls")
public void setConf(Configuration conf) {
// Although an instance of ObjectStore is accessed by one thread, there may
@@ -267,7 +265,7 @@ public class ObjectStore implements RawS
@SuppressWarnings("unchecked")
Class<? extends PartitionExpressionProxy> clazz =
(Class<? extends PartitionExpressionProxy>)MetaStoreUtils.getClass(className);
- return (PartitionExpressionProxy)MetaStoreUtils.newInstance(
+ return MetaStoreUtils.newInstance(
clazz, new Class<?>[0], new Object[0]);
} catch (MetaException e) {
LOG.error("Error loading PartitionExpressionProxy", e);
@@ -340,6 +338,7 @@ public class ObjectStore implements RawS
return getPMF().getPersistenceManager();
}
+ @Override
public void shutdown() {
if (pm != null) {
pm.close();
@@ -353,6 +352,7 @@ public class ObjectStore implements RawS
* @return an active transaction
*/
+ @Override
public boolean openTransaction() {
openTrasactionCalls++;
if (openTrasactionCalls == 1) {
@@ -376,6 +376,7 @@ public class ObjectStore implements RawS
*
* @return Always returns true
*/
+ @Override
@SuppressWarnings("nls")
public boolean commitTransaction() {
if (TXN_STATUS.ROLLBACK == transactionStatus) {
@@ -421,6 +422,7 @@ public class ObjectStore implements RawS
/**
* Rolls back the current transaction if it is active
*/
+ @Override
public void rollbackTransaction() {
if (openTrasactionCalls < 1) {
debugLog("rolling back transaction: no open transactions: " + openTrasactionCalls);
@@ -440,6 +442,7 @@ public class ObjectStore implements RawS
}
}
+ @Override
public void createDatabase(Database db) throws InvalidObjectException, MetaException {
boolean commited = false;
MDatabase mdb = new MDatabase();
@@ -447,6 +450,9 @@ public class ObjectStore implements RawS
mdb.setLocationUri(db.getLocationUri());
mdb.setDescription(db.getDescription());
mdb.setParameters(db.getParameters());
+ mdb.setOwnerName(db.getOwnerName());
+ PrincipalType ownerType = db.getOwnerType();
+ mdb.setOwnerType((null == ownerType ? PrincipalType.USER.name() : ownerType.name()));
try {
openTransaction();
pm.makePersistent(mdb);
@@ -482,6 +488,7 @@ public class ObjectStore implements RawS
return mdb;
}
+ @Override
public Database getDatabase(String name) throws NoSuchObjectException {
MDatabase mdb = null;
boolean commited = false;
@@ -499,6 +506,9 @@ public class ObjectStore implements RawS
db.setDescription(mdb.getDescription());
db.setLocationUri(mdb.getLocationUri());
db.setParameters(mdb.getParameters());
+ db.setOwnerName(mdb.getOwnerName());
+ String type = mdb.getOwnerType();
+ db.setOwnerType((null == type || type.trim().isEmpty()) ? null : PrincipalType.valueOf(type));
return db;
}
@@ -510,6 +520,7 @@ public class ObjectStore implements RawS
* @throws MetaException
* @throws NoSuchObjectException
*/
+ @Override
public boolean alterDatabase(String dbName, Database db)
throws MetaException, NoSuchObjectException {
@@ -531,6 +542,7 @@ public class ObjectStore implements RawS
return true;
}
+ @Override
public boolean dropDatabase(String dbname) throws NoSuchObjectException, MetaException {
boolean success = false;
LOG.info("Dropping database " + dbname + " along with all tables");
@@ -558,6 +570,7 @@ public class ObjectStore implements RawS
}
+ @Override
public List<String> getDatabases(String pattern) throws MetaException {
boolean commited = false;
List<String> databases = null;
@@ -595,6 +608,7 @@ public class ObjectStore implements RawS
return databases;
}
+ @Override
public List<String> getAllDatabases() throws MetaException {
return getDatabases(".*");
}
@@ -626,6 +640,7 @@ public class ObjectStore implements RawS
return ret;
}
+ @Override
public boolean createType(Type type) {
boolean success = false;
MType mtype = getMType(type);
@@ -643,6 +658,7 @@ public class ObjectStore implements RawS
return success;
}
+ @Override
public Type getType(String typeName) {
Type type = null;
boolean commited = false;
@@ -665,6 +681,7 @@ public class ObjectStore implements RawS
return type;
}
+ @Override
public boolean dropType(String typeName) {
boolean success = false;
try {
@@ -689,6 +706,7 @@ public class ObjectStore implements RawS
return success;
}
+ @Override
public void createTable(Table tbl) throws InvalidObjectException, MetaException {
boolean commited = false;
try {
@@ -751,6 +769,7 @@ public class ObjectStore implements RawS
}
}
+ @Override
public boolean dropTable(String dbName, String tableName) throws MetaException,
NoSuchObjectException, InvalidObjectException, InvalidInputException {
boolean success = false;
@@ -801,6 +820,7 @@ public class ObjectStore implements RawS
return success;
}
+ @Override
public Table getTable(String dbName, String tableName) throws MetaException {
boolean commited = false;
Table tbl = null;
@@ -816,6 +836,7 @@ public class ObjectStore implements RawS
return tbl;
}
+ @Override
public List<String> getTables(String dbName, String pattern)
throws MetaException {
boolean commited = false;
@@ -858,6 +879,7 @@ public class ObjectStore implements RawS
return tbls;
}
+ @Override
public List<String> getAllTables(String dbName) throws MetaException {
return getTables(dbName, ".*");
}
@@ -883,6 +905,7 @@ public class ObjectStore implements RawS
return mtbl;
}
+ @Override
public List<Table> getTableObjectsByName(String db, List<String> tbl_names)
throws MetaException, UnknownDBException {
List<Table> tables = new ArrayList<Table>();
@@ -1296,6 +1319,7 @@ public class ObjectStore implements RawS
return success;
}
+ @Override
public Partition getPartition(String dbName, String tableName,
List<String> part_vals) throws NoSuchObjectException, MetaException {
openTransaction();
@@ -1511,6 +1535,7 @@ public class ObjectStore implements RawS
return success;
}
+ @Override
public List<Partition> getPartitions(
String dbName, String tableName, int maxParts) throws MetaException, NoSuchObjectException {
return getPartitionsInternal(dbName, tableName, maxParts, true, true);
@@ -1520,10 +1545,12 @@ public class ObjectStore implements RawS
String dbName, String tblName, final int maxParts, boolean allowSql, boolean allowJdo)
throws MetaException, NoSuchObjectException {
return new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
+ @Override
protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
Integer max = (maxParts < 0) ? null : maxParts;
return directSql.getPartitions(dbName, tblName, max);
}
+ @Override
protected List<Partition> getJdoResult(
GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException {
return convertToParts(listMPartitions(dbName, tblName, maxParts));
@@ -1626,6 +1653,7 @@ public class ObjectStore implements RawS
}
// TODO:pc implement max
+ @Override
public List<String> listPartitionNames(String dbName, String tableName,
short max) throws MetaException {
List<String> pns = null;
@@ -1824,9 +1852,11 @@ public class ObjectStore implements RawS
final List<String> partNames, boolean allowSql, boolean allowJdo)
throws MetaException, NoSuchObjectException {
return new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
+ @Override
protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
return directSql.getPartitionsViaSqlFilter(dbName, tblName, partNames, null);
}
+ @Override
protected List<Partition> getJdoResult(
GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException {
return getPartitionsViaOrmFilter(dbName, tblName, partNames);
@@ -1865,6 +1895,7 @@ public class ObjectStore implements RawS
final AtomicBoolean hasUnknownPartitions = new AtomicBoolean(false);
result.addAll(new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
+ @Override
protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
// If we have some sort of expression tree, try SQL filter pushdown.
List<Partition> result = null;
@@ -1880,6 +1911,7 @@ public class ObjectStore implements RawS
}
return result;
}
+ @Override
protected List<Partition> getJdoResult(
GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException {
// If we have some sort of expression tree, try JDOQL filter pushdown.
@@ -2271,6 +2303,7 @@ public class ObjectStore implements RawS
? getFilterParser(filter).tree : ExpressionTree.EMPTY_TREE;
return new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
+ @Override
protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
List<Partition> parts = directSql.getPartitionsViaSqlFilter(
ctx.getTable(), tree, (maxParts < 0) ? null : (int)maxParts);
@@ -2281,6 +2314,7 @@ public class ObjectStore implements RawS
}
return parts;
}
+ @Override
protected List<Partition> getJdoResult(
GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException {
return getPartitionsViaOrmFilter(ctx.getTable(), tree, maxParts, true);
@@ -2499,6 +2533,7 @@ public class ObjectStore implements RawS
return partNames;
}
+ @Override
public void alterTable(String dbname, String name, Table newTable)
throws InvalidObjectException, MetaException {
boolean success = false;
@@ -2540,6 +2575,7 @@ public class ObjectStore implements RawS
}
}
+ @Override
public void alterIndex(String dbname, String baseTblName, String name, Index newIndex)
throws InvalidObjectException, MetaException {
boolean success = false;
@@ -2593,6 +2629,7 @@ public class ObjectStore implements RawS
}
}
+ @Override
public void alterPartition(String dbname, String name, List<String> part_vals, Partition newPart)
throws InvalidObjectException, MetaException {
boolean success = false;
@@ -2617,6 +2654,7 @@ public class ObjectStore implements RawS
}
}
+ @Override
public void alterPartitions(String dbname, String name, List<List<String>> part_vals,
List<Partition> newParts) throws InvalidObjectException, MetaException {
boolean success = false;
@@ -3116,7 +3154,13 @@ public class ObjectStore implements RawS
return success;
}
- private List<MRoleMap> listRoles(String userName,
+ /**
+ * Get all the roles in the role hierarchy that this user and groupNames belongs to
+ * @param userName
+ * @param groupNames
+ * @return
+ */
+ private Set<String> listAllRolesInHierarchy(String userName,
List<String> groupNames) {
List<MRoleMap> ret = new ArrayList<MRoleMap>();
if(userName != null) {
@@ -3127,7 +3171,29 @@ public class ObjectStore implements RawS
ret.addAll(listRoles(groupName, PrincipalType.GROUP));
}
}
- return ret;
+ // get names of these roles and its ancestors
+ Set<String> roleNames = new HashSet<String>();
+ getAllRoleAncestors(roleNames, ret);
+ return roleNames;
+ }
+
+ /**
+ * Add role names of parentRoles and its parents to processedRoles
+ *
+ * @param processedRoleNames
+ * @param parentRoles
+ */
+ private void getAllRoleAncestors(Set<String> processedRoleNames, List<MRoleMap> parentRoles) {
+ for (MRoleMap parentRole : parentRoles) {
+ String parentRoleName = parentRole.getRole().getRoleName();
+ if (!processedRoleNames.contains(parentRoleName)) {
+ // unprocessed role: get its parents, add it to processed, and call this
+ // function recursively
+ List<MRoleMap> nextParentRoles = listRoles(parentRoleName, PrincipalType.ROLE);
+ // Marking the role as processed before recursing also guards against
+ // infinite recursion if the role graph contains a cycle.
+ processedRoleNames.add(parentRoleName);
+ getAllRoleAncestors(processedRoleNames, nextParentRoles);
+ }
+ }
 }
@SuppressWarnings("unchecked")
@@ -3187,6 +3253,7 @@ public class ObjectStore implements RawS
return mRoleMemebership;
}
+ @Override
public Role getRole(String roleName) throws NoSuchObjectException {
MRole mRole = this.getMRole(roleName);
if (mRole == null) {
@@ -3216,6 +3283,7 @@ public class ObjectStore implements RawS
return mrole;
}
+ @Override
public List<String> listRoleNames() {
boolean success = false;
try {
@@ -3333,13 +3401,12 @@ public class ObjectStore implements RawS
}
ret.setGroupPrivileges(dbGroupPriv);
}
- List<MRoleMap> roles = listRoles(userName, groupNames);
- if (roles != null && roles.size() > 0) {
+ Set<String> roleNames = listAllRolesInHierarchy(userName, groupNames);
+ if (roleNames != null && roleNames.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> dbRolePriv = new HashMap<String, List<PrivilegeGrantInfo>>();
- for (MRoleMap role : roles) {
- String name = role.getRole().getRoleName();
+ for (String roleName : roleNames) {
dbRolePriv
- .put(name, getDBPrivilege(dbName, name, PrincipalType.ROLE));
+ .put(roleName, getDBPrivilege(dbName, roleName, PrincipalType.ROLE));
}
ret.setRolePrivileges(dbRolePriv);
}
@@ -3377,11 +3444,10 @@ public class ObjectStore implements RawS
}
ret.setGroupPrivileges(partGroupPriv);
}
- List<MRoleMap> roles = listRoles(userName, groupNames);
- if (roles != null && roles.size() > 0) {
+ Set<String> roleNames = listAllRolesInHierarchy(userName, groupNames);
+ if (roleNames != null && roleNames.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> partRolePriv = new HashMap<String, List<PrivilegeGrantInfo>>();
- for (MRoleMap role : roles) {
- String roleName = role.getRole().getRoleName();
+ for (String roleName : roleNames) {
partRolePriv.put(roleName, getPartitionPrivilege(dbName, tableName,
partition, roleName, PrincipalType.ROLE));
}
@@ -3421,11 +3487,10 @@ public class ObjectStore implements RawS
}
ret.setGroupPrivileges(tableGroupPriv);
}
- List<MRoleMap> roles = listRoles(userName, groupNames);
- if (roles != null && roles.size() > 0) {
+ Set<String> roleNames = listAllRolesInHierarchy(userName, groupNames);
+ if (roleNames != null && roleNames.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> tableRolePriv = new HashMap<String, List<PrivilegeGrantInfo>>();
- for (MRoleMap role : roles) {
- String roleName = role.getRole().getRoleName();
+ for (String roleName : roleNames) {
tableRolePriv.put(roleName, getTablePrivilege(dbName, tableName,
roleName, PrincipalType.ROLE));
}
@@ -3467,11 +3532,10 @@ public class ObjectStore implements RawS
}
ret.setGroupPrivileges(columnGroupPriv);
}
- List<MRoleMap> roles = listRoles(userName, groupNames);
- if (roles != null && roles.size() > 0) {
+ Set<String> roleNames = listAllRolesInHierarchy(userName, groupNames);
+ if (roleNames != null && roleNames.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> columnRolePriv = new HashMap<String, List<PrivilegeGrantInfo>>();
- for (MRoleMap role : roles) {
- String roleName = role.getRole().getRoleName();
+ for (String roleName : roleNames) {
columnRolePriv.put(roleName, getColumnPrivilege(dbName, tableName,
columnName, partitionName, roleName, PrincipalType.ROLE));
}
@@ -4388,6 +4452,7 @@ public class ObjectStore implements RawS
return new ObjectPair<Query, Object[]>(query, params);
}
+ @Override
@SuppressWarnings("unchecked")
public List<MTablePrivilege> listAllTableGrants(
String principalName, PrincipalType principalType, String dbName,
@@ -4489,6 +4554,7 @@ public class ObjectStore implements RawS
return mSecurityColList;
}
+ @Override
@SuppressWarnings("unchecked")
public List<MPartitionColumnPrivilege> listPrincipalPartitionColumnGrants(
String principalName, PrincipalType principalType, String dbName,
@@ -5493,6 +5559,7 @@ public class ObjectStore implements RawS
}
}
+ @Override
public boolean updateTableColumnStatistics(ColumnStatistics colStats)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
boolean committed = false;
@@ -5520,6 +5587,7 @@ public class ObjectStore implements RawS
}
}
+ @Override
public boolean updatePartitionColumnStatistics(ColumnStatistics colStats, List<String> partVals)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
boolean committed = false;
@@ -5611,6 +5679,7 @@ public class ObjectStore implements RawS
}
}
+ @Override
public ColumnStatistics getTableColumnStatistics(String dbName, String tableName,
List<String> colNames) throws MetaException, NoSuchObjectException {
return getTableColumnStatisticsInternal(dbName, tableName, colNames, true, true);
@@ -5620,9 +5689,11 @@ public class ObjectStore implements RawS
String dbName, String tableName, final List<String> colNames, boolean allowSql,
boolean allowJdo) throws MetaException, NoSuchObjectException {
return new GetStatHelper(dbName.toLowerCase(), tableName.toLowerCase(), allowSql, allowJdo) {
+ @Override
protected ColumnStatistics getSqlResult(GetHelper<ColumnStatistics> ctx) throws MetaException {
return directSql.getTableStats(dbName, tblName, colNames);
}
+ @Override
protected ColumnStatistics getJdoResult(
GetHelper<ColumnStatistics> ctx) throws MetaException, NoSuchObjectException {
List<MTableColumnStatistics> mStats = getMTableColumnStatistics(getTable(), colNames);
@@ -5642,6 +5713,7 @@ public class ObjectStore implements RawS
}.run(true);
}
+ @Override
public List<ColumnStatistics> getPartitionColumnStatistics(String dbName, String tableName,
List<String> partNames, List<String> colNames) throws MetaException, NoSuchObjectException {
return getPartitionColumnStatisticsInternal(
@@ -5652,10 +5724,12 @@ public class ObjectStore implements RawS
String dbName, String tableName, final List<String> partNames, final List<String> colNames,
boolean allowSql, boolean allowJdo) throws MetaException, NoSuchObjectException {
return new GetListHelper<ColumnStatistics>(dbName, tableName, allowSql, allowJdo) {
+ @Override
protected List<ColumnStatistics> getSqlResult(
GetHelper<List<ColumnStatistics>> ctx) throws MetaException {
return directSql.getPartitionStats(dbName, tblName, partNames, colNames);
}
+ @Override
protected List<ColumnStatistics> getJdoResult(
GetHelper<List<ColumnStatistics>> ctx) throws MetaException, NoSuchObjectException {
List<MPartitionColumnStatistics> mStats =
@@ -5749,6 +5823,7 @@ public class ObjectStore implements RawS
queryWithParams.getFirst().deletePersistentAll(queryWithParams.getSecond());
}
+ @Override
public boolean deletePartitionColumnStatistics(String dbName, String tableName,
String partName, List<String> partVals, String colName)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
@@ -5837,6 +5912,7 @@ public class ObjectStore implements RawS
return ret;
}
+ @Override
public boolean deleteTableColumnStatistics(String dbName, String tableName, String colName)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException
{
@@ -6318,4 +6394,218 @@ public class ObjectStore implements RawS
}
return sb.toString();
}
+
+ private Function convertToFunction(MFunction mfunc) {
+ if (mfunc == null) {
+ return null;
+ }
+
+ Function func = new Function(mfunc.getFunctionName(),
+ mfunc.getDatabase().getName(),
+ mfunc.getClassName(),
+ mfunc.getOwnerName(),
+ PrincipalType.valueOf(mfunc.getOwnerType()),
+ mfunc.getCreateTime(),
+ FunctionType.findByValue(mfunc.getFunctionType()),
+ convertToResourceUriList(mfunc.getResourceUris()));
+ return func;
+ }
+
+ private MFunction convertToMFunction(Function func) throws InvalidObjectException {
+ if (func == null) {
+ return null;
+ }
+
+ MDatabase mdb = null;
+ try {
+ mdb = getMDatabase(func.getDbName());
+ } catch (NoSuchObjectException e) {
+ LOG.error(StringUtils.stringifyException(e));
+ throw new InvalidObjectException("Database " + func.getDbName() + " doesn't exist.");
+ }
+
+ MFunction mfunc = new MFunction(func.getFunctionName(),
+ mdb,
+ func.getClassName(),
+ func.getOwnerName(),
+ func.getOwnerType().name(),
+ func.getCreateTime(),
+ func.getFunctionType().getValue(),
+ convertToMResourceUriList(func.getResourceUris()));
+ return mfunc;
+ }
+
+ private List<ResourceUri> convertToResourceUriList(List<MResourceUri> mresourceUriList) {
+ List<ResourceUri> resourceUriList = null;
+ if (mresourceUriList != null) {
+ resourceUriList = new ArrayList<ResourceUri>(mresourceUriList.size());
+ for (MResourceUri mres : mresourceUriList) {
+ resourceUriList.add(
+ new ResourceUri(ResourceType.findByValue(mres.getResourceType()), mres.getUri()));
+ }
+ }
+ return resourceUriList;
+ }
+
+ private List<MResourceUri> convertToMResourceUriList(List<ResourceUri> resourceUriList) {
+ List<MResourceUri> mresourceUriList = null;
+ if (resourceUriList != null) {
+ mresourceUriList = new ArrayList<MResourceUri>(resourceUriList.size());
+ for (ResourceUri res : resourceUriList) {
+ mresourceUriList.add(new MResourceUri(res.getResourceType().getValue(), res.getUri()));
+ }
+ }
+ return mresourceUriList;
+ }
+
+ @Override
+ public void createFunction(Function func) throws InvalidObjectException, MetaException {
+ boolean committed = false;
+ try {
+ openTransaction();
+ MFunction mfunc = convertToMFunction(func);
+ pm.makePersistent(mfunc);
+ committed = commitTransaction();
+ } finally {
+ if (!committed) {
+ rollbackTransaction();
+ }
+ }
+ }
+
+ @Override
+ public void alterFunction(String dbName, String funcName, Function newFunction)
+ throws InvalidObjectException, MetaException {
+ boolean success = false;
+ try {
+ openTransaction();
+ funcName = funcName.toLowerCase();
+ dbName = dbName.toLowerCase();
+ MFunction newf = convertToMFunction(newFunction);
+ if (newf == null) {
+ throw new InvalidObjectException("new function is invalid");
+ }
+
+ MFunction oldf = getMFunction(dbName, funcName);
+ if (oldf == null) {
+ throw new MetaException("function " + funcName + " doesn't exist");
+ }
+
+ // For now only alter name, owner, class name, type
+ oldf.setFunctionName(newf.getFunctionName().toLowerCase());
+ oldf.setDatabase(newf.getDatabase());
+ oldf.setOwnerName(newf.getOwnerName());
+ oldf.setOwnerType(newf.getOwnerType());
+ oldf.setClassName(newf.getClassName());
+ oldf.setFunctionType(newf.getFunctionType());
+
+ // commit the changes
+ success = commitTransaction();
+ } finally {
+ if (!success) {
+ rollbackTransaction();
+ }
+ }
+ }
+
+ @Override
+ public void dropFunction(String dbName, String funcName) throws MetaException,
+ NoSuchObjectException, InvalidObjectException, InvalidInputException {
+ boolean success = false;
+ try {
+ openTransaction();
+ MFunction mfunc = getMFunction(dbName, funcName);
+ pm.retrieve(mfunc);
+ if (mfunc != null) {
+ // TODO: When function privileges are implemented, they should be deleted here.
+ pm.deletePersistentAll(mfunc);
+ }
+ success = commitTransaction();
+ } finally {
+ if (!success) {
+ rollbackTransaction();
+ }
+ }
+ }
+
+ private MFunction getMFunction(String db, String function) {
+ MFunction mfunc = null;
+ boolean commited = false;
+ try {
+ openTransaction();
+ db = db.toLowerCase().trim();
+ function = function.toLowerCase().trim();
+ Query query = pm.newQuery(MFunction.class, "functionName == function && database.name == db");
+ query.declareParameters("java.lang.String function, java.lang.String db");
+ query.setUnique(true);
+ mfunc = (MFunction) query.execute(function, db);
+ pm.retrieve(mfunc);
+ commited = commitTransaction();
+ } finally {
+ if (!commited) {
+ rollbackTransaction();
+ }
+ }
+ return mfunc;
+ }
+
+ @Override
+ public Function getFunction(String dbName, String funcName) throws MetaException {
+ boolean commited = false;
+ Function func = null;
+ try {
+ openTransaction();
+ func = convertToFunction(getMFunction(dbName, funcName));
+ commited = commitTransaction();
+ } finally {
+ if (!commited) {
+ rollbackTransaction();
+ }
+ }
+ return func;
+ }
+
+ @Override
+ public List<String> getFunctions(String dbName, String pattern)
+ throws MetaException {
+ boolean commited = false;
+ List<String> funcs = null;
+ try {
+ openTransaction();
+ dbName = dbName.toLowerCase().trim();
+ // Take the pattern and split it on the | to get all the composing
+ // patterns
+ String[] subpatterns = pattern.trim().split("\\|");
+ String query =
+ "select functionName from org.apache.hadoop.hive.metastore.model.MFunction "
+ + "where database.name == dbName && (";
+ boolean first = true;
+ for (String subpattern : subpatterns) {
+ subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*");
+ if (!first) {
+ query = query + " || ";
+ }
+ query = query + " functionName.matches(\"" + subpattern + "\")";
+ first = false;
+ }
+ query = query + ")";
+
+ Query q = pm.newQuery(query);
+ q.declareParameters("java.lang.String dbName");
+ q.setResult("functionName");
+ q.setOrdering("functionName ascending");
+ Collection names = (Collection) q.execute(dbName);
+ funcs = new ArrayList<String>();
+ for (Iterator i = names.iterator(); i.hasNext();) {
+ funcs.add((String) i.next());
+ }
+ commited = commitTransaction();
+ } finally {
+ if (!commited) {
+ rollbackTransaction();
+ }
+ }
+ return funcs;
+ }
+
}
Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java Tue Feb 25 07:58:52 2014
@@ -29,6 +29,7 @@ import java.util.Set;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
@@ -486,4 +487,56 @@ public interface RawStore extends Config
List<HiveObjectPrivilege> listTableColumnGrantsAll(
String dbName, String tableName, String columnName);
+
+ /**
+ * Register a user-defined function based on the function specification passed in.
+ * @param func
+ * @throws InvalidObjectException
+ * @throws MetaException
+ */
+ public void createFunction(Function func)
+ throws InvalidObjectException, MetaException;
+
+ /**
+ * Alter function based on new function specs.
+ * @param dbName
+ * @param funcName
+ * @param newFunction
+ * @throws InvalidObjectException
+ * @throws MetaException
+ */
+ public void alterFunction(String dbName, String funcName, Function newFunction)
+ throws InvalidObjectException, MetaException;
+
+ /**
+ * Drop a function definition.
+ * @param dbName
+ * @param funcName
+ *
+ * @throws MetaException
+ * @throws NoSuchObjectException
+ * @throws InvalidObjectException
+ * @throws InvalidInputException
+ */
+ public void dropFunction(String dbName, String funcName)
+ throws MetaException, NoSuchObjectException, InvalidObjectException, InvalidInputException;
+
+ /**
+ * Retrieve function by name.
+ * @param dbName
+ * @param funcName
+ * @return
+ * @throws MetaException
+ */
+ public Function getFunction(String dbName, String funcName) throws MetaException;
+
+ /**
+ * Retrieve list of function names based on name pattern.
+ * @param dbName
+ * @param pattern
+ * @return
+ * @throws MetaException
+ */
+ public List<String> getFunctions(String dbName, String pattern) throws MetaException;
+
}
Modified: hive/branches/tez/metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java (original)
+++ hive/branches/tez/metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java Tue Feb 25 07:58:52 2014
@@ -32,6 +32,8 @@ public class MDatabase {
private String locationUri;
private String description;
private Map<String, String> parameters;
+ private String ownerName;
+ private String ownerType;
/**
* Default construction to keep jpox/jdo happy
@@ -107,4 +109,20 @@ public class MDatabase {
public void setParameters(Map<String, String> parameters) {
this.parameters = parameters;
}
+
+ public String getOwnerName() {
+ return ownerName;
+ }
+
+ public void setOwnerName(String ownerName) {
+ this.ownerName = ownerName;
+ }
+
+ public String getOwnerType() {
+ return ownerType;
+ }
+
+ public void setOwnerType(String ownerType) {
+ this.ownerType = ownerType;
+ }
}
Modified: hive/branches/tez/metastore/src/model/package.jdo
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/model/package.jdo?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/model/package.jdo (original)
+++ hive/branches/tez/metastore/src/model/package.jdo Tue Feb 25 07:58:52 2014
@@ -53,6 +53,12 @@
<column name="PARAM_VALUE" length="4000" jdbc-type="VARCHAR"/>
</value>
</field>
+ <field name="ownerName">
+ <column name="OWNER_NAME" length="128" jdbc-type="VARCHAR" allows-null="true"/>
+ </field>
+ <field name="ownerType">
+ <column name="OWNER_TYPE" length="10" jdbc-type="VARCHAR" allows-null="true"/>
+ </field>
</class>
<class name="MFieldSchema" embedded-only="true" table="TYPE_FIELDS" detachable="true">
@@ -911,6 +917,62 @@
</field>
</class>
+ <class name="MResourceUri" embedded-only="true" table="RESOURCE_URI" detachable="true">
+ <field name="resourceType">
+ <column name="RESOURCE_TYPE" jdbc-type="INTEGER"/>
+ </field>
+ <field name="uri">
+ <column name="RESOURCE_URI" length="4000" jdbc-type="VARCHAR"/>
+ </field>
+ </class>
+
+ <class name="MFunction" table="FUNCS" identity-type="datastore" detachable="true">
+ <datastore-identity>
+ <column name="FUNC_ID"/>
+ </datastore-identity>
+ <index name="UniqueFunction" unique="true">
+ <column name="FUNC_NAME"/>
+ <column name="DB_ID"/>
+ </index>
+ <field name="functionName">
+ <column name="FUNC_NAME" length="128" jdbc-type="VARCHAR"/>
+ </field>
+ <field name="database">
+ <column name="DB_ID"/>
+ </field>
+ <field name="functionType">
+ <column name="FUNC_TYPE" jdbc-type="integer"/>
+ </field>
+ <field name="className">
+ <column name="CLASS_NAME" length="4000" jdbc-type="VARCHAR"/>
+ </field>
+ <field name="ownerName">
+ <column name="OWNER_NAME" length="128" jdbc-type="VARCHAR"/>
+ </field>
+ <field name="ownerType">
+ <column name="OWNER_TYPE" length="10" jdbc-type="VARCHAR"/>
+ </field>
+ <field name="createTime">
+ <column name="CREATE_TIME" jdbc-type="integer"/>
+ </field>
+ <field name="resourceUris" table="FUNC_RU">
+ <collection element-type="MResourceUri"/>
+ <join>
+ <column name="FUNC_ID"/>
+ </join>
+ <element>
+ <embedded>
+ <field name="resourceType">
+ <column name="RESOURCE_TYPE" jdbc-type="INTEGER"/>
+ </field>
+ <field name="uri">
+ <column name="RESOURCE_URI" length="4000" jdbc-type="VARCHAR"/>
+ </field>
+ </embedded>
+ </element>
+ </field>
+ </class>
+
</package>
</jdo>
Modified: hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java (original)
+++ hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java Tue Feb 25 07:58:52 2014
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configurab
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
@@ -662,4 +663,35 @@ public class DummyRawStoreControlledComm
throws MetaException, NoSuchObjectException {
objectStore.dropPartitions(dbName, tblName, partNames);
}
+
+ public void createFunction(Function func) throws InvalidObjectException,
+ MetaException {
+ objectStore.createFunction(func);
+ }
+
+ @Override
+ public void alterFunction(String dbName, String funcName, Function newFunction)
+ throws InvalidObjectException, MetaException {
+ objectStore.alterFunction(dbName, funcName, newFunction);
+ }
+
+ @Override
+ public void dropFunction(String dbName, String funcName)
+ throws MetaException, NoSuchObjectException, InvalidObjectException,
+ InvalidInputException {
+ objectStore.dropFunction(dbName, funcName);
+ }
+
+ @Override
+ public Function getFunction(String dbName, String funcName)
+ throws MetaException {
+ return objectStore.getFunction(dbName, funcName);
+ }
+
+ @Override
+ public List<String> getFunctions(String dbName, String pattern)
+ throws MetaException {
+ return objectStore.getFunctions(dbName, pattern);
+ }
+
}
Modified: hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java (original)
+++ hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java Tue Feb 25 07:58:52 2014
@@ -28,6 +28,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
@@ -690,6 +691,33 @@ public class DummyRawStoreForJdoConnecti
@Override
public void dropPartitions(String dbName, String tblName, List<String> partNames) {
}
+
+ public void createFunction(Function func) throws InvalidObjectException,
+ MetaException {
+ }
+
+ @Override
+ public void alterFunction(String dbName, String funcName, Function newFunction)
+ throws InvalidObjectException, MetaException {
+ }
+
+ @Override
+ public void dropFunction(String dbName, String funcName)
+ throws MetaException, NoSuchObjectException, InvalidObjectException,
+ InvalidInputException {
+ }
+
+ @Override
+ public Function getFunction(String dbName, String funcName)
+ throws MetaException {
+ return null;
+ }
+
+ @Override
+ public List<String> getFunctions(String dbName, String pattern)
+ throws MetaException {
+ return null;
+ }
}
Modified: hive/branches/tez/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/pom.xml?rev=1571600&r1=1571599&r2=1571600&view=diff
==============================================================================
--- hive/branches/tez/pom.xml (original)
+++ hive/branches/tez/pom.xml Tue Feb 25 07:58:52 2014
@@ -755,6 +755,41 @@
</systemPropertyVariables>
</configuration>
</plugin>
+ <plugin>
+ <groupId>org.apache.rat</groupId>
+ <artifactId>apache-rat-plugin</artifactId>
+ <version>0.10</version>
+ <configuration>
+ <excludes>
+ <exclude>data/**</exclude>
+ <exclude>conf/**</exclude>
+ <exclude>checkstyle/**</exclude>
+ <exclude>bin/**</exclude>
+ <exclude>itests/**</exclude>
+ <exclude>docs/**</exclude>
+ <exclude>**/*.txt</exclude>
+ <exclude>**/*.log</exclude>
+ <exclude>**/*.arcconfig</exclude>
+ <exclude>**/package-info.java</exclude>
+ <exclude>**/*.properties</exclude>
+ <exclude>**/*.q</exclude>
+ <exclude>**/*.q.out</exclude>
+ <exclude>**/*.xml</exclude>
+ <exclude>**/gen/**</exclude>
+ <exclude>**/scripts/**</exclude>
+ <exclude>**/resources/**</exclude>
+ <exclude>**/*.rc</exclude>
+ <exclude>**/*.rcfile</exclude>
+ <exclude>**/*.qv</exclude>
+ <exclude>**/*.out</exclude>
+ <exclude>**/RecordTestObj.java</exclude>
+ <exclude>**/*.m</exclude>
+ <exclude>**/gen-java/**</exclude>
+ <exclude>**/testdata/**</exclude>
+ <exclude>**/ptest2/*.md</exclude>
+ </excludes>
+ </configuration>
+ </plugin>
</plugins>
</build>