You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by jv...@apache.org on 2010/07/15 02:44:51 UTC
svn commit: r964270 - in /hadoop/hive/trunk: ./
common/src/java/org/apache/hadoop/hive/conf/ metastore/if/
metastore/src/gen-cpp/
metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/
metastore/src/gen-php/ metastore/src/gen-py/hive_metastor...
Author: jvs
Date: Thu Jul 15 00:44:50 2010
New Revision: 964270
URL: http://svn.apache.org/viewvc?rev=964270&view=rev
Log:
HIVE-1428. ALTER TABLE ADD PARTITION fails with a remote Thrift
metastore
(Pradeep Kamath via jvs)
Added:
hadoop/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java
Modified:
hadoop/hive/trunk/CHANGES.txt
hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
hadoop/hive/trunk/metastore/if/hive_metastore.thrift
hadoop/hive/trunk/metastore/src/gen-cpp/ThriftHiveMetastore.cpp
hadoop/hive/trunk/metastore/src/gen-cpp/ThriftHiveMetastore.h
hadoop/hive/trunk/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
hadoop/hive/trunk/metastore/src/gen-php/ThriftHiveMetastore.php
hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
hadoop/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Thu Jul 15 00:44:50 2010
@@ -51,6 +51,10 @@ Trunk - Unreleased
HIVE-1056. Predicate push down does not work with UDTF's
(Paul Yang via He Yongqiang)
+ HIVE-1428. ALTER TABLE ADD PARTITION fails with a remote Thrift
+ metastore
+ (Pradeep Kamath via jvs)
+
Release 0.6.0 - Unreleased
INCOMPATIBLE CHANGES
Modified: hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Thu Jul 15 00:44:50 2010
@@ -111,7 +111,7 @@ public class HiveConf extends Configurat
METASTOREWAREHOUSE("hive.metastore.warehouse.dir", ""),
METASTOREURIS("hive.metastore.uris", ""),
// Number of times to retry a connection to a Thrift metastore server
- METATORETHRIFTRETRIES("hive.metastore.connect.retries", ""),
+ METATORETHRIFTRETRIES("hive.metastore.connect.retries", 3),
METASTOREPWD("javax.jdo.option.ConnectionPassword", ""),
// Class name of JDO connection url hook
METASTORECONNECTURLHOOK("hive.metastore.ds.connection.url.hook", ""),
Modified: hadoop/hive/trunk/metastore/if/hive_metastore.thrift
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/if/hive_metastore.thrift?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/if/hive_metastore.thrift (original)
+++ hadoop/hive/trunk/metastore/if/hive_metastore.thrift Thu Jul 15 00:44:50 2010
@@ -201,7 +201,7 @@ service ThriftHiveMetastore extends fb30
bool drop_partition_by_name(1:string db_name, 2:string tbl_name, 3:string part_name, 4:bool deleteData)
throws(1:NoSuchObjectException o1, 2:MetaException o2)
Partition get_partition(1:string db_name, 2:string tbl_name, 3:list<string> part_vals)
- throws(1:MetaException o1)
+ throws(1:MetaException o1, 2:NoSuchObjectException o2)
Partition get_partition_by_name(1:string db_name 2:string tbl_name, 3:string part_name)
throws(1:MetaException o1, 2:NoSuchObjectException o2)
Modified: hadoop/hive/trunk/metastore/src/gen-cpp/ThriftHiveMetastore.cpp
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-cpp/ThriftHiveMetastore.cpp?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-cpp/ThriftHiveMetastore.cpp (original)
+++ hadoop/hive/trunk/metastore/src/gen-cpp/ThriftHiveMetastore.cpp Thu Jul 15 00:44:50 2010
@@ -4576,6 +4576,14 @@ uint32_t ThriftHiveMetastore_get_partiti
xfer += iprot->skip(ftype);
}
break;
+ case 2:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o2.read(iprot);
+ this->__isset.o2 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
default:
xfer += iprot->skip(ftype);
break;
@@ -4602,6 +4610,10 @@ uint32_t ThriftHiveMetastore_get_partiti
xfer += oprot->writeFieldBegin("o1", apache::thrift::protocol::T_STRUCT, 1);
xfer += this->o1.write(oprot);
xfer += oprot->writeFieldEnd();
+ } else if (this->__isset.o2) {
+ xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 2);
+ xfer += this->o2.write(oprot);
+ xfer += oprot->writeFieldEnd();
}
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
@@ -4644,6 +4656,14 @@ uint32_t ThriftHiveMetastore_get_partiti
xfer += iprot->skip(ftype);
}
break;
+ case 2:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o2.read(iprot);
+ this->__isset.o2 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
default:
xfer += iprot->skip(ftype);
break;
@@ -8154,6 +8174,9 @@ void ThriftHiveMetastoreClient::recv_get
if (result.__isset.o1) {
throw result.o1;
}
+ if (result.__isset.o2) {
+ throw result.o2;
+ }
throw apache::thrift::TApplicationException(apache::thrift::TApplicationException::MISSING_RESULT, "get_partition failed: unknown result");
}
@@ -9476,6 +9499,9 @@ void ThriftHiveMetastoreProcessor::proce
} catch (MetaException &o1) {
result.o1 = o1;
result.__isset.o1 = true;
+ } catch (NoSuchObjectException &o2) {
+ result.o2 = o2;
+ result.__isset.o2 = true;
} catch (const std::exception& e) {
apache::thrift::TApplicationException x(e.what());
oprot->writeMessageBegin("get_partition", apache::thrift::protocol::T_EXCEPTION, seqid);
Modified: hadoop/hive/trunk/metastore/src/gen-cpp/ThriftHiveMetastore.h
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-cpp/ThriftHiveMetastore.h?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-cpp/ThriftHiveMetastore.h (original)
+++ hadoop/hive/trunk/metastore/src/gen-cpp/ThriftHiveMetastore.h Thu Jul 15 00:44:50 2010
@@ -2392,11 +2392,13 @@ class ThriftHiveMetastore_get_partition_
Partition success;
MetaException o1;
+ NoSuchObjectException o2;
struct __isset {
- __isset() : success(false), o1(false) {}
+ __isset() : success(false), o1(false), o2(false) {}
bool success;
bool o1;
+ bool o2;
} __isset;
bool operator == (const ThriftHiveMetastore_get_partition_result & rhs) const
@@ -2405,6 +2407,8 @@ class ThriftHiveMetastore_get_partition_
return false;
if (!(o1 == rhs.o1))
return false;
+ if (!(o2 == rhs.o2))
+ return false;
return true;
}
bool operator != (const ThriftHiveMetastore_get_partition_result &rhs) const {
@@ -2426,11 +2430,13 @@ class ThriftHiveMetastore_get_partition_
Partition* success;
MetaException o1;
+ NoSuchObjectException o2;
struct __isset {
- __isset() : success(false), o1(false) {}
+ __isset() : success(false), o1(false), o2(false) {}
bool success;
bool o1;
+ bool o2;
} __isset;
uint32_t read(apache::thrift::protocol::TProtocol* iprot);
Modified: hadoop/hive/trunk/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java (original)
+++ hadoop/hive/trunk/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java Thu Jul 15 00:44:50 2010
@@ -65,7 +65,7 @@ public class ThriftHiveMetastore {
public boolean drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData) throws NoSuchObjectException, MetaException, TException;
- public Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, TException;
+ public Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, NoSuchObjectException, TException;
public Partition get_partition_by_name(String db_name, String tbl_name, String part_name) throws MetaException, NoSuchObjectException, TException;
@@ -893,7 +893,7 @@ public class ThriftHiveMetastore {
throw new TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_by_name failed: unknown result");
}
- public Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, TException
+ public Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, NoSuchObjectException, TException
{
send_get_partition(db_name, tbl_name, part_vals);
return recv_get_partition();
@@ -911,7 +911,7 @@ public class ThriftHiveMetastore {
oprot_.getTransport().flush();
}
- public Partition recv_get_partition() throws MetaException, TException
+ public Partition recv_get_partition() throws MetaException, NoSuchObjectException, TException
{
TMessage msg = iprot_.readMessageBegin();
if (msg.type == TMessageType.EXCEPTION) {
@@ -928,6 +928,9 @@ public class ThriftHiveMetastore {
if (result.o1 != null) {
throw result.o1;
}
+ if (result.o2 != null) {
+ throw result.o2;
+ }
throw new TApplicationException(TApplicationException.MISSING_RESULT, "get_partition failed: unknown result");
}
@@ -1956,6 +1959,8 @@ public class ThriftHiveMetastore {
result.success = iface_.get_partition(args.db_name, args.tbl_name, args.part_vals);
} catch (MetaException o1) {
result.o1 = o1;
+ } catch (NoSuchObjectException o2) {
+ result.o2 = o2;
} catch (Throwable th) {
LOGGER.error("Internal error processing get_partition", th);
TApplicationException x = new TApplicationException(TApplicationException.INTERNAL_ERROR, "Internal error processing get_partition");
@@ -14407,11 +14412,14 @@ public class ThriftHiveMetastore {
private static final TStruct STRUCT_DESC = new TStruct("get_partition_result");
private static final TField SUCCESS_FIELD_DESC = new TField("success", TType.STRUCT, (short)0);
private static final TField O1_FIELD_DESC = new TField("o1", TType.STRUCT, (short)1);
+ private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)2);
private Partition success;
public static final int SUCCESS = 0;
private MetaException o1;
public static final int O1 = 1;
+ private NoSuchObjectException o2;
+ public static final int O2 = 2;
private final Isset __isset = new Isset();
private static final class Isset implements java.io.Serializable {
@@ -14422,6 +14430,8 @@ public class ThriftHiveMetastore {
new StructMetaData(TType.STRUCT, Partition.class)));
put(O1, new FieldMetaData("o1", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.STRUCT)));
+ put(O2, new FieldMetaData("o2", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRUCT)));
}});
static {
@@ -14433,11 +14443,13 @@ public class ThriftHiveMetastore {
public get_partition_result(
Partition success,
- MetaException o1)
+ MetaException o1,
+ NoSuchObjectException o2)
{
this();
this.success = success;
this.o1 = o1;
+ this.o2 = o2;
}
/**
@@ -14450,6 +14462,9 @@ public class ThriftHiveMetastore {
if (other.isSetO1()) {
this.o1 = new MetaException(other.o1);
}
+ if (other.isSetO2()) {
+ this.o2 = new NoSuchObjectException(other.o2);
+ }
}
@Override
@@ -14491,6 +14506,23 @@ public class ThriftHiveMetastore {
return this.o1 != null;
}
+ public NoSuchObjectException getO2() {
+ return this.o2;
+ }
+
+ public void setO2(NoSuchObjectException o2) {
+ this.o2 = o2;
+ }
+
+ public void unsetO2() {
+ this.o2 = null;
+ }
+
+ // Returns true if field o2 is set (has been assigned a value) and false otherwise
+ public boolean isSetO2() {
+ return this.o2 != null;
+ }
+
public void setFieldValue(int fieldID, Object value) {
switch (fieldID) {
case SUCCESS:
@@ -14509,6 +14541,14 @@ public class ThriftHiveMetastore {
}
break;
+ case O2:
+ if (value == null) {
+ unsetO2();
+ } else {
+ setO2((NoSuchObjectException)value);
+ }
+ break;
+
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -14522,6 +14562,9 @@ public class ThriftHiveMetastore {
case O1:
return getO1();
+ case O2:
+ return getO2();
+
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -14534,6 +14577,8 @@ public class ThriftHiveMetastore {
return isSetSuccess();
case O1:
return isSetO1();
+ case O2:
+ return isSetO2();
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -14570,6 +14615,15 @@ public class ThriftHiveMetastore {
return false;
}
+ boolean this_present_o2 = true && this.isSetO2();
+ boolean that_present_o2 = true && that.isSetO2();
+ if (this_present_o2 || that_present_o2) {
+ if (!(this_present_o2 && that_present_o2))
+ return false;
+ if (!this.o2.equals(that.o2))
+ return false;
+ }
+
return true;
}
@@ -14605,6 +14659,14 @@ public class ThriftHiveMetastore {
TProtocolUtil.skip(iprot, field.type);
}
break;
+ case O2:
+ if (field.type == TType.STRUCT) {
+ this.o2 = new NoSuchObjectException();
+ this.o2.read(iprot);
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
default:
TProtocolUtil.skip(iprot, field.type);
break;
@@ -14627,6 +14689,10 @@ public class ThriftHiveMetastore {
oprot.writeFieldBegin(O1_FIELD_DESC);
this.o1.write(oprot);
oprot.writeFieldEnd();
+ } else if (this.isSetO2()) {
+ oprot.writeFieldBegin(O2_FIELD_DESC);
+ this.o2.write(oprot);
+ oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
@@ -14652,6 +14718,14 @@ public class ThriftHiveMetastore {
sb.append(this.o1);
}
first = false;
+ if (!first) sb.append(", ");
+ sb.append("o2:");
+ if (this.o2 == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.o2);
+ }
+ first = false;
sb.append(")");
return sb.toString();
}
Modified: hadoop/hive/trunk/metastore/src/gen-php/ThriftHiveMetastore.php
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-php/ThriftHiveMetastore.php?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-php/ThriftHiveMetastore.php (original)
+++ hadoop/hive/trunk/metastore/src/gen-php/ThriftHiveMetastore.php Thu Jul 15 00:44:50 2010
@@ -1255,6 +1255,9 @@ class ThriftHiveMetastoreClient extends
if ($result->o1 !== null) {
throw $result->o1;
}
+ if ($result->o2 !== null) {
+ throw $result->o2;
+ }
throw new Exception("get_partition failed: unknown result");
}
@@ -6260,6 +6263,7 @@ class metastore_ThriftHiveMetastore_get_
public $success = null;
public $o1 = null;
+ public $o2 = null;
public function __construct($vals=null) {
if (!isset(self::$_TSPEC)) {
@@ -6274,6 +6278,11 @@ class metastore_ThriftHiveMetastore_get_
'type' => TType::STRUCT,
'class' => 'metastore_MetaException',
),
+ 2 => array(
+ 'var' => 'o2',
+ 'type' => TType::STRUCT,
+ 'class' => 'metastore_NoSuchObjectException',
+ ),
);
}
if (is_array($vals)) {
@@ -6283,6 +6292,9 @@ class metastore_ThriftHiveMetastore_get_
if (isset($vals['o1'])) {
$this->o1 = $vals['o1'];
}
+ if (isset($vals['o2'])) {
+ $this->o2 = $vals['o2'];
+ }
}
}
@@ -6321,6 +6333,14 @@ class metastore_ThriftHiveMetastore_get_
$xfer += $input->skip($ftype);
}
break;
+ case 2:
+ if ($ftype == TType::STRUCT) {
+ $this->o2 = new metastore_NoSuchObjectException();
+ $xfer += $this->o2->read($input);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
default:
$xfer += $input->skip($ftype);
break;
@@ -6347,6 +6367,11 @@ class metastore_ThriftHiveMetastore_get_
$xfer += $this->o1->write($output);
$xfer += $output->writeFieldEnd();
}
+ if ($this->o2 !== null) {
+ $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2);
+ $xfer += $this->o2->write($output);
+ $xfer += $output->writeFieldEnd();
+ }
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
Modified: hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py (original)
+++ hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py Thu Jul 15 00:44:50 2010
@@ -1015,6 +1015,8 @@ class Client(fb303.FacebookService.Clien
return result.success
if result.o1 != None:
raise result.o1
+ if result.o2 != None:
+ raise result.o2
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition failed: unknown result");
def get_partition_by_name(self, db_name, tbl_name, part_name):
@@ -1722,6 +1724,8 @@ class Processor(fb303.FacebookService.Pr
result.success = self._handler.get_partition(args.db_name, args.tbl_name, args.part_vals)
except MetaException, o1:
result.o1 = o1
+ except NoSuchObjectException, o2:
+ result.o2 = o2
oprot.writeMessageBegin("get_partition", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
@@ -4970,16 +4974,19 @@ class get_partition_result:
Attributes:
- success
- o1
+ - o2
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
+ (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
)
- def __init__(self, success=None, o1=None,):
+ def __init__(self, success=None, o1=None, o2=None,):
self.success = success
self.o1 = o1
+ self.o2 = o2
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -5002,6 +5009,12 @@ class get_partition_result:
self.o1.read(iprot)
else:
iprot.skip(ftype)
+ elif fid == 2:
+ if ftype == TType.STRUCT:
+ self.o2 = NoSuchObjectException()
+ self.o2.read(iprot)
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -5020,6 +5033,10 @@ class get_partition_result:
oprot.writeFieldBegin('o1', TType.STRUCT, 1)
self.o1.write(oprot)
oprot.writeFieldEnd()
+ if self.o2 != None:
+ oprot.writeFieldBegin('o2', TType.STRUCT, 2)
+ self.o2.write(oprot)
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Thu Jul 15 00:44:50 2010
@@ -867,8 +867,14 @@ public class HiveMetaStore extends Thrif
.makePartName(tbl.getPartitionKeys(), part_vals));
part.getSd().setLocation(partLocation.toString());
- Partition old_part = get_partition(part.getDbName(), part
+ Partition old_part = null;
+ try {
+ old_part = get_partition(part.getDbName(), part
.getTableName(), part.getValues());
+ } catch (NoSuchObjectException e) {
+ // this means there is no existing partition
+ old_part = null;
+ }
if (old_part != null) {
throw new AlreadyExistsException("Partition already exists:" + part);
}
@@ -989,8 +995,14 @@ public class HiveMetaStore extends Thrif
Path partLocation = null;
try {
ms.openTransaction();
- Partition old_part = get_partition(part.getDbName(), part
+ Partition old_part = null;
+ try {
+ old_part = get_partition(part.getDbName(), part
.getTableName(), part.getValues());
+ } catch(NoSuchObjectException e) {
+ // this means there is no existing partition
+ old_part = null;
+ }
if (old_part != null) {
throw new AlreadyExistsException("Partition already exists:" + part);
}
@@ -1152,7 +1164,7 @@ public class HiveMetaStore extends Thrif
}
public Partition get_partition(final String db_name, final String tbl_name,
- final List<String> part_vals) throws MetaException {
+ final List<String> part_vals) throws MetaException, NoSuchObjectException {
incrementCounter("get_partition");
logStartFunction("get_partition", db_name, tbl_name);
@@ -1166,6 +1178,8 @@ public class HiveMetaStore extends Thrif
});
} catch (MetaException e) {
throw e;
+ } catch (NoSuchObjectException e) {
+ throw e;
} catch (Exception e) {
assert(e instanceof RuntimeException);
throw (RuntimeException)e;
Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Thu Jul 15 00:44:50 2010
@@ -511,7 +511,7 @@ public class HiveMetaStoreClient impleme
* java.lang.String, java.util.List)
*/
public Partition getPartition(String db_name, String tbl_name,
- List<String> part_vals) throws MetaException, TException {
+ List<String> part_vals) throws NoSuchObjectException, MetaException, TException {
return deepCopy(client.get_partition(db_name, tbl_name, part_vals));
}
Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java Thu Jul 15 00:44:50 2010
@@ -174,7 +174,7 @@ public interface IMetaStoreClient {
* java.lang.String, java.util.List)
*/
public Partition getPartition(String tblName, String dbName,
- List<String> partVals) throws MetaException, TException;
+ List<String> partVals) throws NoSuchObjectException, MetaException, TException;
/**
* @param dbName
Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java Thu Jul 15 00:44:50 2010
@@ -772,10 +772,13 @@ public class ObjectStore implements RawS
}
public Partition getPartition(String dbName, String tableName,
- List<String> part_vals) throws MetaException {
+ List<String> part_vals) throws NoSuchObjectException, MetaException {
openTransaction();
Partition part = convertToPart(getMPartition(dbName, tableName, part_vals));
commitTransaction();
+ if(part == null) {
+ throw new NoSuchObjectException();
+ }
return part;
}
Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java Thu Jul 15 00:44:50 2010
@@ -85,7 +85,7 @@ public interface RawStore extends Config
throws InvalidObjectException, MetaException;
public abstract Partition getPartition(String dbName, String tableName,
- List<String> part_vals) throws MetaException;
+ List<String> part_vals) throws MetaException, NoSuchObjectException;
public abstract boolean dropPartition(String dbName, String tableName,
List<String> part_vals) throws MetaException;
Modified: hadoop/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (original)
+++ hadoop/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java Thu Jul 15 00:44:50 2010
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.Partition;
@@ -117,6 +118,11 @@ public class TestHiveMetaStore extends T
* @throws Exception
*/
public void testPartition() throws Exception {
+ partitionTester(client, hiveConf, false);
+ }
+
+ public static void partitionTester(HiveMetaStoreClient client, HiveConf hiveConf,
+ boolean isThriftClient) throws Exception {
try {
String dbName = "compdb";
String tblName = "comptbl";
@@ -175,6 +181,14 @@ public class TestHiveMetaStore extends T
client.createTable(tbl);
+ if(isThriftClient) {
+ // the createTable() above does not update the location in the 'tbl'
+ // object when the client is a thrift client and the code below relies
+ // on the location being present in the 'tbl' object - so get the table
+ // from the metastore
+ tbl = client.getTable(dbName, tblName);
+ }
+
Partition part = new Partition();
part.setDbName(dbName);
part.setTableName(tblName);
@@ -202,6 +216,16 @@ public class TestHiveMetaStore extends T
part3.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
part3.getSd().setLocation(tbl.getSd().getLocation() + "/part2");
+ // check if the partition exists (it shouldn't)
+ boolean exceptionThrown = false;
+ try {
+ Partition p = client.getPartition(dbName, tblName, vals);
+ } catch(Exception e) {
+ assertEquals("partition should not have existed",
+ NoSuchObjectException.class, e.getClass());
+ exceptionThrown = true;
+ }
+ assertTrue("getPartition() should have thrown NoSuchObjectException", exceptionThrown);
Partition retp = client.add_partition(part);
assertNotNull("Unable to create partition " + part, retp);
Partition retp2 = client.add_partition(part2);
@@ -210,6 +234,15 @@ public class TestHiveMetaStore extends T
assertNotNull("Unable to create partition " + part3, retp3);
Partition part_get = client.getPartition(dbName, tblName, part.getValues());
+ if(isThriftClient) {
+ // since we are using thrift, 'part' will not have the create time and
+ // last DDL time set since it does not get updated in the add_partition()
+ // call - likewise part2 and part3 - set it correctly so that equals check
+ // doesn't fail
+ adjust(client, part, dbName, tblName);
+ adjust(client, part2, dbName, tblName);
+ adjust(client, part3, dbName, tblName);
+ }
assertTrue("Partitions are not same", part.equals(part_get));
String partName = "ds=2008-07-01 14%3A13%3A12/hr=14";
@@ -261,7 +294,7 @@ public class TestHiveMetaStore extends T
assertTrue("Not all part names returned", partialNames.containsAll(partNames));
// Verify escaped partition names don't return partitions
- boolean exceptionThrown = false;
+ exceptionThrown = false;
try {
String badPartName = "ds=2008-07-01 14%3A13%3A12/hrs=14";
client.getPartition(dbName, tblName, badPartName);
@@ -891,4 +924,12 @@ public class TestHiveMetaStore extends T
}
assert (threwException);
}
+
+ private static void adjust(HiveMetaStoreClient client, Partition part,
+ String dbName, String tblName)
+ throws NoSuchObjectException, MetaException, TException {
+ Partition part_get = client.getPartition(dbName, tblName, part.getValues());
+ part.setCreateTime(part_get.getCreateTime());
+ part.putToParameters(org.apache.hadoop.hive.metastore.api.Constants.DDL_TIME, Long.toString(part_get.getCreateTime()));
+ }
}
Added: hadoop/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java?rev=964270&view=auto
==============================================================================
--- hadoop/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java (added)
+++ hadoop/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java Thu Jul 15 00:44:50 2010
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+
+
+public class TestHiveMetaStoreRemote extends TestCase {
+ private static final String METASTORE_PORT = "29083";
+private HiveMetaStoreClient client;
+ private HiveConf hiveConf;
+ boolean isServerRunning = false;
+
+ private static class RunMS implements Runnable {
+
+ @Override
+ public void run() {
+ System.out.println("Running metastore!");
+ String [] args = new String [1];
+ args[0] = METASTORE_PORT;
+ HiveMetaStore.main(args);
+ }
+
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ if(isServerRunning) {
+ return;
+ }
+ Thread t = new Thread(new RunMS());
+ t.start();
+
+ // Wait a little bit for the metastore to start. Should probably have
+ // a better way of detecting if the metastore has started?
+ Thread.sleep(5000);
+
+ // Set conf to connect to the local metastore.
+ hiveConf = new HiveConf(this.getClass());
+ // hive.metastore.local should be defined in HiveConf
+ hiveConf.set("hive.metastore.local", "false");
+ hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + METASTORE_PORT);
+ hiveConf.setIntVar(HiveConf.ConfVars.METATORETHRIFTRETRIES, 3);
+
+ client = new HiveMetaStoreClient(hiveConf);
+ // Now you have the client - run necessary tests.
+ isServerRunning = true;
+ }
+
+ /**
+ * tests create table and partition and tries to drop the table without
+ * dropping the partition
+ *
+ * @throws Exception
+ */
+ public void testPartition() throws Exception {
+ TestHiveMetaStore.partitionTester(client, hiveConf, true);
+ }
+
+}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=964270&r1=964269&r2=964270&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Thu Jul 15 00:44:50 2010
@@ -765,6 +765,16 @@ public class Hive {
org.apache.hadoop.hive.metastore.api.Partition tpart = null;
try {
tpart = getMSC().getPartition(tbl.getDbName(), tbl.getTableName(), pvals);
+ } catch (NoSuchObjectException nsoe) {
+ // this means no partition exists for the given partition
+ // key value pairs - thrift cannot handle null return values, hence
+ // getPartition() throws NoSuchObjectException to indicate null partition
+ tpart = null;
+ } catch (Exception e) {
+ LOG.error(StringUtils.stringifyException(e));
+ throw new HiveException(e);
+ }
+ try {
if (forceCreate) {
if (tpart == null) {
LOG.debug("creating partition for table " + tbl.getTableName()