You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by cw...@apache.org on 2011/04/25 09:52:24 UTC
svn commit: r1096427 - in /hive/trunk: metastore/if/
metastore/src/gen/thrift/gen-cpp/
metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/
metastore/src/gen/thrift/gen-php/hive_metastore/
metastore/src/gen/thrift/gen-py/hive_met...
Author: cws
Date: Mon Apr 25 07:52:23 2011
New Revision: 1096427
URL: http://svn.apache.org/viewvc?rev=1096427&view=rev
Log:
Add 'DROP DATABASE ... CASCADE/RESTRICT' (Siying Dong via cws)
Added:
hive/trunk/ql/src/test/queries/clientnegative/database_drop_not_empty_restrict.q
hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out
Modified:
hive/trunk/metastore/if/hive_metastore.thrift
hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
hive/trunk/metastore/src/gen/thrift/gen-php/hive_metastore/ThriftHiveMetastore.php
hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote
hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropDatabaseDesc.java
hive/trunk/ql/src/test/queries/clientpositive/database.q
hive/trunk/ql/src/test/results/clientpositive/database.q.out
Modified: hive/trunk/metastore/if/hive_metastore.thrift
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/if/hive_metastore.thrift?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/metastore/if/hive_metastore.thrift (original)
+++ hive/trunk/metastore/if/hive_metastore.thrift Mon Apr 25 07:52:23 2011
@@ -209,7 +209,7 @@ service ThriftHiveMetastore extends fb30
{
void create_database(1:Database database) throws(1:AlreadyExistsException o1, 2:InvalidObjectException o2, 3:MetaException o3)
Database get_database(1:string name) throws(1:NoSuchObjectException o1, 2:MetaException o2)
- void drop_database(1:string name, 2:bool deleteData) throws(1:NoSuchObjectException o1, 2:InvalidOperationException o2, 3:MetaException o3)
+ void drop_database(1:string name, 2:bool deleteData, 3:bool cascade) throws(1:NoSuchObjectException o1, 2:InvalidOperationException o2, 3:MetaException o3)
list<string> get_databases(1:string pattern) throws(1:MetaException o1)
list<string> get_all_databases() throws(1:MetaException o1)
void alter_database(1:string dbname, 2:Database db) throws(1:MetaException o1, 2:NoSuchObjectException o2)
Modified: hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp Mon Apr 25 07:52:23 2011
@@ -439,6 +439,14 @@ uint32_t ThriftHiveMetastore_drop_databa
xfer += iprot->skip(ftype);
}
break;
+ case 3:
+ if (ftype == ::apache::thrift::protocol::T_BOOL) {
+ xfer += iprot->readBool(this->cascade);
+ this->__isset.cascade = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
default:
xfer += iprot->skip(ftype);
break;
@@ -460,6 +468,9 @@ uint32_t ThriftHiveMetastore_drop_databa
xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 2);
xfer += oprot->writeBool(this->deleteData);
xfer += oprot->writeFieldEnd();
+ xfer += oprot->writeFieldBegin("cascade", ::apache::thrift::protocol::T_BOOL, 3);
+ xfer += oprot->writeBool(this->cascade);
+ xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
return xfer;
@@ -474,6 +485,9 @@ uint32_t ThriftHiveMetastore_drop_databa
xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 2);
xfer += oprot->writeBool((*(this->deleteData)));
xfer += oprot->writeFieldEnd();
+ xfer += oprot->writeFieldBegin("cascade", ::apache::thrift::protocol::T_BOOL, 3);
+ xfer += oprot->writeBool((*(this->cascade)));
+ xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
return xfer;
@@ -13133,13 +13147,13 @@ void ThriftHiveMetastoreClient::recv_get
throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "get_database failed: unknown result");
}
-void ThriftHiveMetastoreClient::drop_database(const std::string& name, const bool deleteData)
+void ThriftHiveMetastoreClient::drop_database(const std::string& name, const bool deleteData, const bool cascade)
{
- send_drop_database(name, deleteData);
+ send_drop_database(name, deleteData, cascade);
recv_drop_database();
}
-void ThriftHiveMetastoreClient::send_drop_database(const std::string& name, const bool deleteData)
+void ThriftHiveMetastoreClient::send_drop_database(const std::string& name, const bool deleteData, const bool cascade)
{
int32_t cseqid = 0;
oprot_->writeMessageBegin("drop_database", ::apache::thrift::protocol::T_CALL, cseqid);
@@ -13147,6 +13161,7 @@ void ThriftHiveMetastoreClient::send_dro
ThriftHiveMetastore_drop_database_pargs args;
args.name = &name;
args.deleteData = &deleteData;
+ args.cascade = &cascade;
args.write(oprot_);
oprot_->writeMessageEnd();
@@ -16872,7 +16887,7 @@ void ThriftHiveMetastoreProcessor::proce
ThriftHiveMetastore_drop_database_result result;
try {
- iface_->drop_database(args.name, args.deleteData);
+ iface_->drop_database(args.name, args.deleteData, args.cascade);
} catch (NoSuchObjectException &o1) {
result.o1 = o1;
result.__isset.o1 = true;
Modified: hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h Mon Apr 25 07:52:23 2011
@@ -17,7 +17,7 @@ class ThriftHiveMetastoreIf : virtual pu
virtual ~ThriftHiveMetastoreIf() {}
virtual void create_database(const Database& database) = 0;
virtual void get_database(Database& _return, const std::string& name) = 0;
- virtual void drop_database(const std::string& name, const bool deleteData) = 0;
+ virtual void drop_database(const std::string& name, const bool deleteData, const bool cascade) = 0;
virtual void get_databases(std::vector<std::string> & _return, const std::string& pattern) = 0;
virtual void get_all_databases(std::vector<std::string> & _return) = 0;
virtual void alter_database(const std::string& dbname, const Database& db) = 0;
@@ -83,7 +83,7 @@ class ThriftHiveMetastoreNull : virtual
void get_database(Database& /* _return */, const std::string& /* name */) {
return;
}
- void drop_database(const std::string& /* name */, const bool /* deleteData */) {
+ void drop_database(const std::string& /* name */, const bool /* deleteData */, const bool /* cascade */) {
return;
}
void get_databases(std::vector<std::string> & /* _return */, const std::string& /* pattern */) {
@@ -487,21 +487,23 @@ class ThriftHiveMetastore_get_database_p
};
typedef struct _ThriftHiveMetastore_drop_database_args__isset {
- _ThriftHiveMetastore_drop_database_args__isset() : name(false), deleteData(false) {}
+ _ThriftHiveMetastore_drop_database_args__isset() : name(false), deleteData(false), cascade(false) {}
bool name;
bool deleteData;
+ bool cascade;
} _ThriftHiveMetastore_drop_database_args__isset;
class ThriftHiveMetastore_drop_database_args {
public:
- ThriftHiveMetastore_drop_database_args() : name(""), deleteData(0) {
+ ThriftHiveMetastore_drop_database_args() : name(""), deleteData(0), cascade(0) {
}
virtual ~ThriftHiveMetastore_drop_database_args() throw() {}
std::string name;
bool deleteData;
+ bool cascade;
_ThriftHiveMetastore_drop_database_args__isset __isset;
@@ -511,6 +513,8 @@ class ThriftHiveMetastore_drop_database_
return false;
if (!(deleteData == rhs.deleteData))
return false;
+ if (!(cascade == rhs.cascade))
+ return false;
return true;
}
bool operator != (const ThriftHiveMetastore_drop_database_args &rhs) const {
@@ -533,6 +537,7 @@ class ThriftHiveMetastore_drop_database_
const std::string* name;
const bool* deleteData;
+ const bool* cascade;
uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
@@ -6911,8 +6916,8 @@ class ThriftHiveMetastoreClient : virtua
void get_database(Database& _return, const std::string& name);
void send_get_database(const std::string& name);
void recv_get_database(Database& _return);
- void drop_database(const std::string& name, const bool deleteData);
- void send_drop_database(const std::string& name, const bool deleteData);
+ void drop_database(const std::string& name, const bool deleteData, const bool cascade);
+ void send_drop_database(const std::string& name, const bool deleteData, const bool cascade);
void recv_drop_database();
void get_databases(std::vector<std::string> & _return, const std::string& pattern);
void send_get_databases(const std::string& pattern);
@@ -7244,10 +7249,10 @@ class ThriftHiveMetastoreMultiface : vir
}
}
- void drop_database(const std::string& name, const bool deleteData) {
+ void drop_database(const std::string& name, const bool deleteData, const bool cascade) {
uint32_t sz = ifaces_.size();
for (uint32_t i = 0; i < sz; ++i) {
- ifaces_[i]->drop_database(name, deleteData);
+ ifaces_[i]->drop_database(name, deleteData, cascade);
}
}
Modified: hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp Mon Apr 25 07:52:23 2011
@@ -32,7 +32,7 @@ class ThriftHiveMetastoreHandler : virtu
printf("get_database\n");
}
- void drop_database(const std::string& name, const bool deleteData) {
+ void drop_database(const std::string& name, const bool deleteData, const bool cascade) {
// Your implementation goes here
printf("drop_database\n");
}
Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java Mon Apr 25 07:52:23 2011
@@ -37,7 +37,7 @@ public class ThriftHiveMetastore {
public Database get_database(String name) throws NoSuchObjectException, MetaException, TException;
- public void drop_database(String name, boolean deleteData) throws NoSuchObjectException, InvalidOperationException, MetaException, TException;
+ public void drop_database(String name, boolean deleteData, boolean cascade) throws NoSuchObjectException, InvalidOperationException, MetaException, TException;
public List<String> get_databases(String pattern) throws MetaException, TException;
@@ -155,7 +155,7 @@ public class ThriftHiveMetastore {
public void get_database(String name, AsyncMethodCallback<AsyncClient.get_database_call> resultHandler) throws TException;
- public void drop_database(String name, boolean deleteData, AsyncMethodCallback<AsyncClient.drop_database_call> resultHandler) throws TException;
+ public void drop_database(String name, boolean deleteData, boolean cascade, AsyncMethodCallback<AsyncClient.drop_database_call> resultHandler) throws TException;
public void get_databases(String pattern, AsyncMethodCallback<AsyncClient.get_databases_call> resultHandler) throws TException;
@@ -372,18 +372,19 @@ public class ThriftHiveMetastore {
throw new TApplicationException(TApplicationException.MISSING_RESULT, "get_database failed: unknown result");
}
- public void drop_database(String name, boolean deleteData) throws NoSuchObjectException, InvalidOperationException, MetaException, TException
+ public void drop_database(String name, boolean deleteData, boolean cascade) throws NoSuchObjectException, InvalidOperationException, MetaException, TException
{
- send_drop_database(name, deleteData);
+ send_drop_database(name, deleteData, cascade);
recv_drop_database();
}
- public void send_drop_database(String name, boolean deleteData) throws TException
+ public void send_drop_database(String name, boolean deleteData, boolean cascade) throws TException
{
oprot_.writeMessageBegin(new TMessage("drop_database", TMessageType.CALL, ++seqid_));
drop_database_args args = new drop_database_args();
args.setName(name);
args.setDeleteData(deleteData);
+ args.setCascade(cascade);
args.write(oprot_);
oprot_.writeMessageEnd();
oprot_.getTransport().flush();
@@ -2772,19 +2773,21 @@ public class ThriftHiveMetastore {
}
}
- public void drop_database(String name, boolean deleteData, AsyncMethodCallback<drop_database_call> resultHandler) throws TException {
+ public void drop_database(String name, boolean deleteData, boolean cascade, AsyncMethodCallback<drop_database_call> resultHandler) throws TException {
checkReady();
- drop_database_call method_call = new drop_database_call(name, deleteData, resultHandler, this, protocolFactory, transport);
+ drop_database_call method_call = new drop_database_call(name, deleteData, cascade, resultHandler, this, protocolFactory, transport);
manager.call(method_call);
}
public static class drop_database_call extends TAsyncMethodCall {
private String name;
private boolean deleteData;
- public drop_database_call(String name, boolean deleteData, AsyncMethodCallback<drop_database_call> resultHandler, TAsyncClient client, TProtocolFactory protocolFactory, TNonblockingTransport transport) throws TException {
+ private boolean cascade;
+ public drop_database_call(String name, boolean deleteData, boolean cascade, AsyncMethodCallback<drop_database_call> resultHandler, TAsyncClient client, TProtocolFactory protocolFactory, TNonblockingTransport transport) throws TException {
super(client, protocolFactory, transport, resultHandler, false);
this.name = name;
this.deleteData = deleteData;
+ this.cascade = cascade;
}
public void write_args(TProtocol prot) throws TException {
@@ -2792,6 +2795,7 @@ public class ThriftHiveMetastore {
drop_database_args args = new drop_database_args();
args.setName(name);
args.setDeleteData(deleteData);
+ args.setCascade(cascade);
args.write(prot);
prot.writeMessageEnd();
}
@@ -4901,7 +4905,7 @@ public class ThriftHiveMetastore {
iprot.readMessageEnd();
drop_database_result result = new drop_database_result();
try {
- iface_.drop_database(args.name, args.deleteData);
+ iface_.drop_database(args.name, args.deleteData, args.cascade);
} catch (NoSuchObjectException o1) {
result.o1 = o1;
} catch (InvalidOperationException o2) {
@@ -8537,14 +8541,17 @@ public class ThriftHiveMetastore {
private static final TField NAME_FIELD_DESC = new TField("name", TType.STRING, (short)1);
private static final TField DELETE_DATA_FIELD_DESC = new TField("deleteData", TType.BOOL, (short)2);
+ private static final TField CASCADE_FIELD_DESC = new TField("cascade", TType.BOOL, (short)3);
private String name;
private boolean deleteData;
+ private boolean cascade;
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements TFieldIdEnum {
NAME((short)1, "name"),
- DELETE_DATA((short)2, "deleteData");
+ DELETE_DATA((short)2, "deleteData"),
+ CASCADE((short)3, "cascade");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
@@ -8563,6 +8570,8 @@ public class ThriftHiveMetastore {
return NAME;
case 2: // DELETE_DATA
return DELETE_DATA;
+ case 3: // CASCADE
+ return CASCADE;
default:
return null;
}
@@ -8604,7 +8613,8 @@ public class ThriftHiveMetastore {
// isset id assignments
private static final int __DELETEDATA_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
+ private static final int __CASCADE_ISSET_ID = 1;
+ private BitSet __isset_bit_vector = new BitSet(2);
public static final Map<_Fields, FieldMetaData> metaDataMap;
static {
@@ -8613,6 +8623,8 @@ public class ThriftHiveMetastore {
new FieldValueMetaData(TType.STRING)));
tmpMap.put(_Fields.DELETE_DATA, new FieldMetaData("deleteData", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.BOOL)));
+ tmpMap.put(_Fields.CASCADE, new FieldMetaData("cascade", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.BOOL)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
FieldMetaData.addStructMetaDataMap(drop_database_args.class, metaDataMap);
}
@@ -8622,12 +8634,15 @@ public class ThriftHiveMetastore {
public drop_database_args(
String name,
- boolean deleteData)
+ boolean deleteData,
+ boolean cascade)
{
this();
this.name = name;
this.deleteData = deleteData;
setDeleteDataIsSet(true);
+ this.cascade = cascade;
+ setCascadeIsSet(true);
}
/**
@@ -8640,6 +8655,7 @@ public class ThriftHiveMetastore {
this.name = other.name;
}
this.deleteData = other.deleteData;
+ this.cascade = other.cascade;
}
public drop_database_args deepCopy() {
@@ -8651,6 +8667,8 @@ public class ThriftHiveMetastore {
this.name = null;
setDeleteDataIsSet(false);
this.deleteData = false;
+ setCascadeIsSet(false);
+ this.cascade = false;
}
public String getName() {
@@ -8698,6 +8716,28 @@ public class ThriftHiveMetastore {
__isset_bit_vector.set(__DELETEDATA_ISSET_ID, value);
}
+ public boolean isCascade() {
+ return this.cascade;
+ }
+
+ public void setCascade(boolean cascade) {
+ this.cascade = cascade;
+ setCascadeIsSet(true);
+ }
+
+ public void unsetCascade() {
+ __isset_bit_vector.clear(__CASCADE_ISSET_ID);
+ }
+
+ /** Returns true if field cascade is set (has been asigned a value) and false otherwise */
+ public boolean isSetCascade() {
+ return __isset_bit_vector.get(__CASCADE_ISSET_ID);
+ }
+
+ public void setCascadeIsSet(boolean value) {
+ __isset_bit_vector.set(__CASCADE_ISSET_ID, value);
+ }
+
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case NAME:
@@ -8716,6 +8756,14 @@ public class ThriftHiveMetastore {
}
break;
+ case CASCADE:
+ if (value == null) {
+ unsetCascade();
+ } else {
+ setCascade((Boolean)value);
+ }
+ break;
+
}
}
@@ -8727,6 +8775,9 @@ public class ThriftHiveMetastore {
case DELETE_DATA:
return new Boolean(isDeleteData());
+ case CASCADE:
+ return new Boolean(isCascade());
+
}
throw new IllegalStateException();
}
@@ -8742,6 +8793,8 @@ public class ThriftHiveMetastore {
return isSetName();
case DELETE_DATA:
return isSetDeleteData();
+ case CASCADE:
+ return isSetCascade();
}
throw new IllegalStateException();
}
@@ -8777,6 +8830,15 @@ public class ThriftHiveMetastore {
return false;
}
+ boolean this_present_cascade = true;
+ boolean that_present_cascade = true;
+ if (this_present_cascade || that_present_cascade) {
+ if (!(this_present_cascade && that_present_cascade))
+ return false;
+ if (this.cascade != that.cascade)
+ return false;
+ }
+
return true;
}
@@ -8813,6 +8875,16 @@ public class ThriftHiveMetastore {
return lastComparison;
}
}
+ lastComparison = Boolean.valueOf(isSetCascade()).compareTo(typedOther.isSetCascade());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetCascade()) {
+ lastComparison = TBaseHelper.compareTo(this.cascade, typedOther.cascade);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
return 0;
}
@@ -8845,6 +8917,14 @@ public class ThriftHiveMetastore {
TProtocolUtil.skip(iprot, field.type);
}
break;
+ case 3: // CASCADE
+ if (field.type == TType.BOOL) {
+ this.cascade = iprot.readBool();
+ setCascadeIsSet(true);
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
default:
TProtocolUtil.skip(iprot, field.type);
}
@@ -8866,6 +8946,9 @@ public class ThriftHiveMetastore {
oprot.writeFieldBegin(DELETE_DATA_FIELD_DESC);
oprot.writeBool(this.deleteData);
oprot.writeFieldEnd();
+ oprot.writeFieldBegin(CASCADE_FIELD_DESC);
+ oprot.writeBool(this.cascade);
+ oprot.writeFieldEnd();
oprot.writeFieldStop();
oprot.writeStructEnd();
}
@@ -8886,6 +8969,10 @@ public class ThriftHiveMetastore {
sb.append("deleteData:");
sb.append(this.deleteData);
first = false;
+ if (!first) sb.append(", ");
+ sb.append("cascade:");
+ sb.append(this.cascade);
+ first = false;
sb.append(")");
return sb.toString();
}
Modified: hive/trunk/metastore/src/gen/thrift/gen-php/hive_metastore/ThriftHiveMetastore.php
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-php/hive_metastore/ThriftHiveMetastore.php?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-php/hive_metastore/ThriftHiveMetastore.php (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-php/hive_metastore/ThriftHiveMetastore.php Mon Apr 25 07:52:23 2011
@@ -12,7 +12,7 @@ include_once $GLOBALS['THRIFT_ROOT'].'/p
interface ThriftHiveMetastoreIf extends FacebookServiceIf {
public function create_database($database);
public function get_database($name);
- public function drop_database($name, $deleteData);
+ public function drop_database($name, $deleteData, $cascade);
public function get_databases($pattern);
public function get_all_databases();
public function alter_database($dbname, $db);
@@ -188,17 +188,18 @@ class ThriftHiveMetastoreClient extends
throw new Exception("get_database failed: unknown result");
}
- public function drop_database($name, $deleteData)
+ public function drop_database($name, $deleteData, $cascade)
{
- $this->send_drop_database($name, $deleteData);
+ $this->send_drop_database($name, $deleteData, $cascade);
$this->recv_drop_database();
}
- public function send_drop_database($name, $deleteData)
+ public function send_drop_database($name, $deleteData, $cascade)
{
$args = new metastore_ThriftHiveMetastore_drop_database_args();
$args->name = $name;
$args->deleteData = $deleteData;
+ $args->cascade = $cascade;
$bin_accel = ($this->output_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_write_binary');
if ($bin_accel)
{
@@ -3730,6 +3731,7 @@ class metastore_ThriftHiveMetastore_drop
public $name = null;
public $deleteData = null;
+ public $cascade = null;
public function __construct($vals=null) {
if (!isset(self::$_TSPEC)) {
@@ -3742,6 +3744,10 @@ class metastore_ThriftHiveMetastore_drop
'var' => 'deleteData',
'type' => TType::BOOL,
),
+ 3 => array(
+ 'var' => 'cascade',
+ 'type' => TType::BOOL,
+ ),
);
}
if (is_array($vals)) {
@@ -3751,6 +3757,9 @@ class metastore_ThriftHiveMetastore_drop
if (isset($vals['deleteData'])) {
$this->deleteData = $vals['deleteData'];
}
+ if (isset($vals['cascade'])) {
+ $this->cascade = $vals['cascade'];
+ }
}
}
@@ -3787,6 +3796,13 @@ class metastore_ThriftHiveMetastore_drop
$xfer += $input->skip($ftype);
}
break;
+ case 3:
+ if ($ftype == TType::BOOL) {
+ $xfer += $input->readBool($this->cascade);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
default:
$xfer += $input->skip($ftype);
break;
@@ -3810,6 +3826,11 @@ class metastore_ThriftHiveMetastore_drop
$xfer += $output->writeBool($this->deleteData);
$xfer += $output->writeFieldEnd();
}
+ if ($this->cascade !== null) {
+ $xfer += $output->writeFieldBegin('cascade', TType::BOOL, 3);
+ $xfer += $output->writeBool($this->cascade);
+ $xfer += $output->writeFieldEnd();
+ }
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
Modified: hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote Mon Apr 25 07:52:23 2011
@@ -23,7 +23,7 @@ if len(sys.argv) <= 1 or sys.argv[1] ==
print 'Functions:'
print ' void create_database(Database database)'
print ' Database get_database(string name)'
- print ' void drop_database(string name, bool deleteData)'
+ print ' void drop_database(string name, bool deleteData, bool cascade)'
print ' get_databases(string pattern)'
print ' get_all_databases()'
print ' void alter_database(string dbname, Database db)'
@@ -141,10 +141,10 @@ elif cmd == 'get_database':
pp.pprint(client.get_database(args[0],))
elif cmd == 'drop_database':
- if len(args) != 2:
- print 'drop_database requires 2 args'
+ if len(args) != 3:
+ print 'drop_database requires 3 args'
sys.exit(1)
- pp.pprint(client.drop_database(args[0],eval(args[1]),))
+ pp.pprint(client.drop_database(args[0],eval(args[1]),eval(args[2]),))
elif cmd == 'get_databases':
if len(args) != 1:
Modified: hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py Mon Apr 25 07:52:23 2011
@@ -34,11 +34,12 @@ class Iface(fb303.FacebookService.Iface)
"""
pass
- def drop_database(self, name, deleteData):
+ def drop_database(self, name, deleteData, cascade):
"""
Parameters:
- name
- deleteData
+ - cascade
"""
pass
@@ -568,20 +569,22 @@ class Client(fb303.FacebookService.Clien
raise result.o2
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_database failed: unknown result");
- def drop_database(self, name, deleteData):
+ def drop_database(self, name, deleteData, cascade):
"""
Parameters:
- name
- deleteData
+ - cascade
"""
- self.send_drop_database(name, deleteData)
+ self.send_drop_database(name, deleteData, cascade)
self.recv_drop_database()
- def send_drop_database(self, name, deleteData):
+ def send_drop_database(self, name, deleteData, cascade):
self._oprot.writeMessageBegin('drop_database', TMessageType.CALL, self._seqid)
args = drop_database_args()
args.name = name
args.deleteData = deleteData
+ args.cascade = cascade
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
@@ -2661,7 +2664,7 @@ class Processor(fb303.FacebookService.Pr
iprot.readMessageEnd()
result = drop_database_result()
try:
- self._handler.drop_database(args.name, args.deleteData)
+ self._handler.drop_database(args.name, args.deleteData, args.cascade)
except NoSuchObjectException, o1:
result.o1 = o1
except InvalidOperationException, o2:
@@ -3803,17 +3806,20 @@ class drop_database_args:
Attributes:
- name
- deleteData
+ - cascade
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.BOOL, 'deleteData', None, None, ), # 2
+ (3, TType.BOOL, 'cascade', None, None, ), # 3
)
- def __init__(self, name=None, deleteData=None,):
+ def __init__(self, name=None, deleteData=None, cascade=None,):
self.name = name
self.deleteData = deleteData
+ self.cascade = cascade
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -3834,6 +3840,11 @@ class drop_database_args:
self.deleteData = iprot.readBool();
else:
iprot.skip(ftype)
+ elif fid == 3:
+ if ftype == TType.BOOL:
+ self.cascade = iprot.readBool();
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -3852,6 +3863,10 @@ class drop_database_args:
oprot.writeFieldBegin('deleteData', TType.BOOL, 2)
oprot.writeBool(self.deleteData)
oprot.writeFieldEnd()
+ if self.cascade != None:
+ oprot.writeFieldBegin('cascade', TType.BOOL, 3)
+ oprot.writeBool(self.cascade)
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
Modified: hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb Mon Apr 25 07:52:23 2011
@@ -46,13 +46,13 @@ module ThriftHiveMetastore
raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_database failed: unknown result')
end
- def drop_database(name, deleteData)
- send_drop_database(name, deleteData)
+ def drop_database(name, deleteData, cascade)
+ send_drop_database(name, deleteData, cascade)
recv_drop_database()
end
- def send_drop_database(name, deleteData)
- send_message('drop_database', Drop_database_args, :name => name, :deleteData => deleteData)
+ def send_drop_database(name, deleteData, cascade)
+ send_message('drop_database', Drop_database_args, :name => name, :deleteData => deleteData, :cascade => cascade)
end
def recv_drop_database()
@@ -995,7 +995,7 @@ module ThriftHiveMetastore
args = read_args(iprot, Drop_database_args)
result = Drop_database_result.new()
begin
- @handler.drop_database(args.name, args.deleteData)
+ @handler.drop_database(args.name, args.deleteData, args.cascade)
rescue NoSuchObjectException => o1
result.o1 = o1
rescue InvalidOperationException => o2
@@ -1756,10 +1756,12 @@ module ThriftHiveMetastore
include ::Thrift::Struct, ::Thrift::Struct_Union
NAME = 1
DELETEDATA = 2
+ CASCADE = 3
FIELDS = {
NAME => {:type => ::Thrift::Types::STRING, :name => 'name'},
- DELETEDATA => {:type => ::Thrift::Types::BOOL, :name => 'deleteData'}
+ DELETEDATA => {:type => ::Thrift::Types::BOOL, :name => 'deleteData'},
+ CASCADE => {:type => ::Thrift::Types::BOOL, :name => 'cascade'}
}
def struct_fields; FIELDS; end
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Mon Apr 25 07:52:23 2011
@@ -633,7 +633,7 @@ public class HiveMetaStore extends Thrif
}
private void drop_database_core(RawStore ms,
- final String name, final boolean deleteData)
+ final String name, final boolean deleteData, final boolean cascade)
throws NoSuchObjectException, InvalidOperationException, MetaException,
IOException {
boolean success = false;
@@ -641,7 +641,8 @@ public class HiveMetaStore extends Thrif
try {
ms.openTransaction();
db = ms.getDatabase(name);
- if (!get_all_tables(db.getName()).isEmpty()) {
+ List<String> allTables = get_all_tables(db.getName());
+ if (!cascade && !allTables.isEmpty()) {
throw new InvalidOperationException("Database " + db.getName() + " is not empty");
}
Path path = new Path(db.getLocationUri()).getParent();
@@ -663,7 +664,7 @@ public class HiveMetaStore extends Thrif
}
}
- public void drop_database(final String dbName, final boolean deleteData)
+ public void drop_database(final String dbName, final boolean deleteData, final boolean cascade)
throws NoSuchObjectException, InvalidOperationException, MetaException {
startFunction("drop_database", ": " + dbName);
@@ -676,7 +677,7 @@ public class HiveMetaStore extends Thrif
executeWithRetry(new Command<Boolean>() {
@Override
Boolean run(RawStore ms) throws Exception {
- drop_database_core(ms, dbName, deleteData);
+ drop_database_core(ms, dbName, deleteData, cascade);
return Boolean.TRUE;
}
});
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Mon Apr 25 07:52:23 2011
@@ -381,12 +381,16 @@ public class HiveMetaStoreClient impleme
*/
public void dropDatabase(String name)
throws NoSuchObjectException, InvalidOperationException, MetaException, TException {
- dropDatabase(name, true, false);
+ dropDatabase(name, true, false, false);
}
-
public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb)
throws NoSuchObjectException, InvalidOperationException, MetaException, TException {
+ dropDatabase(name, deleteData, ignoreUnknownDb, false);
+ }
+
+ public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb, boolean cascade)
+ throws NoSuchObjectException, InvalidOperationException, MetaException, TException {
try {
getDatabase(name);
} catch (NoSuchObjectException e) {
@@ -395,7 +399,7 @@ public class HiveMetaStoreClient impleme
}
return;
}
- client.drop_database(name, deleteData);
+ client.drop_database(name, deleteData, cascade);
}
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java Mon Apr 25 07:52:23 2011
@@ -389,6 +389,9 @@ public interface IMetaStoreClient {
public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb)
throws NoSuchObjectException, InvalidOperationException, MetaException, TException;
+ public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb, boolean cascade)
+ throws NoSuchObjectException, InvalidOperationException, MetaException, TException;
+
public void alterDatabase(String name, Database db)
throws NoSuchObjectException, MetaException, TException;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Mon Apr 25 07:52:23 2011
@@ -75,7 +75,6 @@ import org.apache.hadoop.hive.ql.DriverC
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
import org.apache.hadoop.hive.ql.io.rcfile.merge.BlockMergeTask;
import org.apache.hadoop.hive.ql.io.rcfile.merge.MergeWork;
import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
@@ -142,7 +141,6 @@ import org.apache.hadoop.hive.serde2.dyn
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.util.ToolRunner;
/**
@@ -359,7 +357,7 @@ public class DDLTask extends Task<DDLWor
if (showIndexes != null) {
return showIndexes(db, showIndexes);
}
-
+
AlterTablePartMergeFilesDesc mergeFilesDesc = work.getMergeFilesDesc();
if(mergeFilesDesc != null) {
return mergeFiles(db, mergeFilesDesc);
@@ -387,10 +385,10 @@ public class DDLTask extends Task<DDLWor
* First, make sure the source table/partition is not
* archived/indexes/non-rcfile. If either of these is true, throw an
* exception.
- *
+ *
* The way how it does the merge is to create a BlockMergeTask from the
* mergeFilesDesc.
- *
+ *
* @param db
* @param mergeFilesDesc
* @return
@@ -879,8 +877,8 @@ public class DDLTask extends Task<DDLWor
return 0;
}
-
-
+
+
if (addPartitionDesc.getLocation() == null) {
db.createPartition(tbl, addPartitionDesc.getPartSpec(), null,
addPartitionDesc.getPartParams(),
@@ -1439,7 +1437,7 @@ public class DDLTask extends Task<DDLWor
validateAlterTableType(tbl, alterType, false);
}
-
+
private void validateAlterTableType(
Table tbl, AlterTableDesc.AlterTableTypes alterType,
boolean expectView) throws HiveException {
@@ -2912,7 +2910,7 @@ public class DDLTask extends Task<DDLWor
tbl, AlterTableDesc.AlterTableTypes.DROPPARTITION,
dropTbl.getExpectView());
}
-
+
// get all partitions of the table
List<String> partitionNames =
db.getPartitionNames(dropTbl.getTableName(), (short) -1);
@@ -3030,7 +3028,7 @@ public class DDLTask extends Task<DDLWor
*/
private int dropDatabase(Hive db, DropDatabaseDesc dropDb)
throws HiveException, NoSuchObjectException {
- db.dropDatabase(dropDb.getDatabaseName(), true, dropDb.getIfExists());
+ db.dropDatabase(dropDb.getDatabaseName(), true, dropDb.getIfExists(), dropDb.isCasdade());
return 0;
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Mon Apr 25 07:52:23 2011
@@ -232,10 +232,9 @@ public class Hive {
* @see org.apache.hadoop.hive.metastore.HiveMetaStoreClient#dropDatabase(java.lang.String)
*/
public void dropDatabase(String name) throws HiveException, NoSuchObjectException {
- dropDatabase(name, true, false);
+ dropDatabase(name, true, false, false);
}
-
/**
* Drop a database
* @param name
@@ -247,8 +246,24 @@ public class Hive {
*/
public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb)
throws HiveException, NoSuchObjectException {
+ dropDatabase(name, deleteData, ignoreUnknownDb, false);
+ }
+
+ /**
+ * Drop a database
+ * @param name
+ * @param deleteData
+ * @param ignoreUnknownDb if true, will ignore NoSuchObjectException
+ * @param cascade if true, delete all tables in the DB if they exist. Otherwise, the query
+ * will fail if any table still exists.
+ * @return
+ * @throws HiveException
+ * @throws NoSuchObjectException
+ */
+ public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb, boolean cascade)
+ throws HiveException, NoSuchObjectException {
try {
- getMSC().dropDatabase(name, deleteData, ignoreUnknownDb);
+ getMSC().dropDatabase(name, deleteData, ignoreUnknownDb, cascade);
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Mon Apr 25 07:52:23 2011
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.parse;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_CASCADE;
import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASECOMMENT;
import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_IFEXISTS;
import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_IFNOTEXISTS;
@@ -653,12 +654,17 @@ public class DDLSemanticAnalyzer extends
private void analyzeDropDatabase(ASTNode ast) throws SemanticException {
String dbName = unescapeIdentifier(ast.getChild(0).getText());
boolean ifExists = false;
+ boolean ifCascade = false;
if (null != ast.getFirstChildWithType(TOK_IFEXISTS)) {
ifExists = true;
}
- DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbName, ifExists);
+ if (null != ast.getFirstChildWithType(TOK_CASCADE)) {
+ ifCascade = true;
+ }
+
+ DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbName, ifExists, ifCascade);
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDatabaseDesc), conf));
}
@@ -1174,6 +1180,7 @@ public class DDLSemanticAnalyzer extends
mergeDesc.setOutputDir(outputDir);
addInputsOutputsAlterTable(tableName, partSpec);
+
DDLWork ddlWork = new DDLWork(getInputs(), getOutputs(), mergeDesc);
ddlWork.setNeedLock(true);
Task<? extends Serializable> mergeTask = TaskFactory.get(ddlWork, conf);
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Mon Apr 25 07:52:23 2011
@@ -246,6 +246,8 @@ TOK_ALTERDATABASE_PROPERTIES;
TOK_ALTERTABLE_ALTERPARTS_MERGEFILES;
TOK_TABNAME;
TOK_TABSRC;
+TOK_RESTRICT;
+TOK_CASCADE;
}
@@ -348,6 +350,15 @@ ifExists
-> ^(TOK_IFEXISTS)
;
+restrictOrCascade
+@init { msgs.push("restrict or cascade clause"); }
+@after { msgs.pop(); }
+ : KW_RESTRICT
+ -> ^(TOK_RESTRICT)
+ | KW_CASCADE
+ -> ^(TOK_CASCADE)
+ ;
+
ifNotExists
@init { msgs.push("if not exists clause"); }
@after { msgs.pop(); }
@@ -392,8 +403,8 @@ switchDatabaseStatement
dropDatabaseStatement
@init { msgs.push("drop database statement"); }
@after { msgs.pop(); }
- : KW_DROP (KW_DATABASE|KW_SCHEMA) ifExists? Identifier
- -> ^(TOK_DROPDATABASE Identifier ifExists?)
+ : KW_DROP (KW_DATABASE|KW_SCHEMA) ifExists? Identifier restrictOrCascade?
+ -> ^(TOK_DROPDATABASE Identifier ifExists? restrictOrCascade?)
;
databaseComment
@@ -2246,6 +2257,8 @@ KW_OPTION: 'OPTION';
KW_CONCATENATE: 'CONCATENATE';
KW_SHOW_DATABASE: 'SHOW_DATABASE';
KW_UPDATE: 'UPDATE';
+KW_RESTRICT: 'RESTRICT';
+KW_CASCADE: 'CASCADE';
// Operators
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropDatabaseDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropDatabaseDesc.java?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropDatabaseDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropDatabaseDesc.java Mon Apr 25 07:52:23 2011
@@ -30,11 +30,17 @@ public class DropDatabaseDesc extends DD
String databaseName;
boolean ifExists;
+ boolean cascade;
public DropDatabaseDesc(String databaseName, boolean ifExists) {
+ this(databaseName, ifExists, false);
+ }
+
+ public DropDatabaseDesc(String databaseName, boolean ifExists, boolean cascade) {
super();
this.databaseName = databaseName;
this.ifExists = ifExists;
+ this.cascade = cascade;
}
@Explain(displayName = "database")
@@ -54,4 +60,12 @@ public class DropDatabaseDesc extends DD
public void setIfExists(boolean ifExists) {
this.ifExists = ifExists;
}
+
+ public boolean isCasdade() {
+ return cascade;
+ }
+
+ public void setIsCascade(boolean cascade) {
+ this.cascade = cascade;
+ }
}
Added: hive/trunk/ql/src/test/queries/clientnegative/database_drop_not_empty_restrict.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/database_drop_not_empty_restrict.q?rev=1096427&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/database_drop_not_empty_restrict.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/database_drop_not_empty_restrict.q Mon Apr 25 07:52:23 2011
@@ -0,0 +1,8 @@
+SHOW DATABASES;
+
+-- Try to drop a non-empty database in restrict mode
+CREATE DATABASE db_drop_non_empty_restrict;
+USE db_drop_non_empty_restrict;
+CREATE TABLE t(a INT);
+USE default;
+DROP DATABASE db_drop_non_empty_restrict;
Modified: hive/trunk/ql/src/test/queries/clientpositive/database.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/database.q?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/database.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/database.q Mon Apr 25 07:52:23 2011
@@ -93,6 +93,47 @@ USE default;
DROP DATABASE test_db;
SHOW DATABASES;
+-- DROP EMPTY DATABASE CASCADE
+CREATE DATABASE to_drop_db1;
+SHOW DATABASES;
+USE default;
+DROP DATABASE to_drop_db1 CASCADE;
+SHOW DATABASES;
+
+-- DROP NON-EMPTY DATABASE CASCADE
+CREATE DATABASE to_drop_db2;
+SHOW DATABASES;
+USE to_drop_db2;
+CREATE TABLE temp_tbl (c STRING);
+CREATE TABLE temp_tbl2 LIKE temp_tbl;
+INSERT OVERWRITE TABLE temp_tbl2 SELECT COUNT(*) FROM temp_tbl;
+USE default;
+DROP DATABASE to_drop_db2 CASCADE;
+SHOW DATABASES;
+
+-- DROP NON-EMPTY DATABASE CASCADE IF EXISTS
+CREATE DATABASE to_drop_db3;
+SHOW DATABASES;
+USE to_drop_db3;
+CREATE TABLE temp_tbl (c STRING);
+USE default;
+DROP DATABASE IF EXISTS to_drop_db3 CASCADE;
+SHOW DATABASES;
+
+-- DROP NON-EXISTING DATABASE CASCADE IF EXISTS
+DROP DATABASE IF EXISTS non_exists_db3 CASCADE;
+SHOW DATABASES;
+
+-- DROP NON-EXISTING DATABASE RESTRICT IF EXISTS
+DROP DATABASE IF EXISTS non_exists_db3 RESTRICT;
+
+-- DROP EMPTY DATABASE RESTRICT
+CREATE DATABASE to_drop_db4;
+SHOW DATABASES;
+DROP DATABASE to_drop_db4 RESTRICT;
+SHOW DATABASES;
+
+
--
-- Canonical Name Tests
--
Added: hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out?rev=1096427&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out Mon Apr 25 07:52:23 2011
@@ -0,0 +1,28 @@
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+PREHOOK: query: -- Try to drop a non-empty database in restrict mode
+CREATE DATABASE db_drop_non_empty_restrict
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: -- Try to drop a non-empty database in restrict mode
+CREATE DATABASE db_drop_non_empty_restrict
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: USE db_drop_non_empty_restrict
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE db_drop_non_empty_restrict
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: CREATE TABLE t(a INT)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE t(a INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: db_drop_non_empty_restrict@t
+PREHOOK: query: USE default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE default
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: DROP DATABASE db_drop_non_empty_restrict
+PREHOOK: type: DROPDATABASE
+FAILED: Error in metadata: InvalidOperationException(message:Database db_drop_non_empty_restrict is not empty)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Modified: hive/trunk/ql/src/test/results/clientpositive/database.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/database.q.out?rev=1096427&r1=1096426&r2=1096427&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/database.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/database.q.out Mon Apr 25 07:52:23 2011
@@ -138,7 +138,7 @@ DESCRIBE EXTENDED test_table
POSTHOOK: type: DESCTABLE
col1 string
-Detailed Table Information Table(tableName:test_table, dbName:test_db, owner:krishnak, createTime:1300687179, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/test_db.db/test_table, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1300687179}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:test_table, dbName:test_db, owner:sdong, createTime:1302825475, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/test_db.db/test_table, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1302825475}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: -- CREATE LIKE in non-default DB
CREATE TABLE test_table_like LIKE test_table
PREHOOK: type: CREATETABLE
@@ -158,7 +158,7 @@ POSTHOOK: query: DESCRIBE EXTENDED test_
POSTHOOK: type: DESCTABLE
col1 string
-Detailed Table Information Table(tableName:test_table_like, dbName:test_db, owner:krishnak, createTime:1300687180, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/test_db.db/test_table_like, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1300687180}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:test_table_like, dbName:test_db, owner:sdong, createTime:1302825476, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/test_db.db/test_table_like, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1302825476}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: -- LOAD and SELECT
LOAD DATA LOCAL INPATH '../data/files/test.dat'
OVERWRITE INTO TABLE test_table
@@ -172,11 +172,11 @@ POSTHOOK: Output: test_db@test_table
PREHOOK: query: SELECT * FROM test_table
PREHOOK: type: QUERY
PREHOOK: Input: test_db@test_table
-PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-43_836_1930679319090877378/-mr-10000
+PREHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-57-56_779_6558577544544202317/-mr-10000
POSTHOOK: query: SELECT * FROM test_table
POSTHOOK: type: QUERY
POSTHOOK: Input: test_db@test_table
-POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-43_836_1930679319090877378/-mr-10000
+POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-57-56_779_6558577544544202317/-mr-10000
1
2
3
@@ -212,11 +212,11 @@ test_table_like
PREHOOK: query: SELECT * FROM test_table
PREHOOK: type: QUERY
PREHOOK: Input: test_db@test_table
-PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-45_073_4649769445221560413/-mr-10000
+PREHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-57-58_217_230305294028870146/-mr-10000
POSTHOOK: query: SELECT * FROM test_table
POSTHOOK: type: QUERY
POSTHOOK: Input: test_db@test_table
-POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-45_073_4649769445221560413/-mr-10000
+POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-57-58_217_230305294028870146/-mr-10000
PREHOOK: query: -- CREATE table that already exists in DEFAULT
USE test_db
PREHOOK: type: SWITCHDATABASE
@@ -238,11 +238,11 @@ test_table_like
PREHOOK: query: SELECT * FROM src LIMIT 10
PREHOOK: type: QUERY
PREHOOK: Input: test_db@src
-PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-45_508_3776174430484572228/-mr-10000
+PREHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-57-58_462_6058704561576462238/-mr-10000
POSTHOOK: query: SELECT * FROM src LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: test_db@src
-POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-45_508_3776174430484572228/-mr-10000
+POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-57-58_462_6058704561576462238/-mr-10000
PREHOOK: query: USE default
PREHOOK: type: SWITCHDATABASE
POSTHOOK: query: USE default
@@ -250,11 +250,11 @@ POSTHOOK: type: SWITCHDATABASE
PREHOOK: query: SELECT * FROM src LIMIT 10
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-45_688_1944274183205884689/-mr-10000
+PREHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-57-58_586_3051622495608934510/-mr-10000
POSTHOOK: query: SELECT * FROM src LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-45_688_1944274183205884689/-mr-10000
+POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-57-58_586_3051622495608934510/-mr-10000
238 val_238
86 val_86
311 val_311
@@ -312,6 +312,168 @@ PREHOOK: type: SHOWDATABASES
POSTHOOK: query: SHOW DATABASES
POSTHOOK: type: SHOWDATABASES
default
+PREHOOK: query: -- DROP EMPTY DATABASE CASCADE
+CREATE DATABASE to_drop_db1
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: -- DROP EMPTY DATABASE CASCADE
+CREATE DATABASE to_drop_db1
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+to_drop_db1
+PREHOOK: query: USE default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE default
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: DROP DATABASE to_drop_db1 CASCADE
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: DROP DATABASE to_drop_db1 CASCADE
+POSTHOOK: type: DROPDATABASE
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+PREHOOK: query: -- DROP NON-EMPTY DATABASE CASCADE
+CREATE DATABASE to_drop_db2
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: -- DROP NON-EMPTY DATABASE CASCADE
+CREATE DATABASE to_drop_db2
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+to_drop_db2
+PREHOOK: query: USE to_drop_db2
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE to_drop_db2
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: CREATE TABLE temp_tbl (c STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE temp_tbl (c STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: to_drop_db2@temp_tbl
+PREHOOK: query: CREATE TABLE temp_tbl2 LIKE temp_tbl
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE temp_tbl2 LIKE temp_tbl
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: to_drop_db2@temp_tbl2
+PREHOOK: query: INSERT OVERWRITE TABLE temp_tbl2 SELECT COUNT(*) FROM temp_tbl
+PREHOOK: type: QUERY
+PREHOOK: Input: to_drop_db2@temp_tbl
+PREHOOK: Output: to_drop_db2@temp_tbl2
+POSTHOOK: query: INSERT OVERWRITE TABLE temp_tbl2 SELECT COUNT(*) FROM temp_tbl
+POSTHOOK: type: QUERY
+POSTHOOK: Input: to_drop_db2@temp_tbl
+POSTHOOK: Output: to_drop_db2@temp_tbl2
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+PREHOOK: query: USE default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+PREHOOK: query: DROP DATABASE to_drop_db2 CASCADE
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: DROP DATABASE to_drop_db2 CASCADE
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+default
+PREHOOK: query: -- DROP NON-EMPTY DATABASE CASCADE IF EXISTS
+CREATE DATABASE to_drop_db3
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: -- DROP NON-EMPTY DATABASE CASCADE IF EXISTS
+CREATE DATABASE to_drop_db3
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+default
+to_drop_db3
+PREHOOK: query: USE to_drop_db3
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE to_drop_db3
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+PREHOOK: query: CREATE TABLE temp_tbl (c STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE temp_tbl (c STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: to_drop_db3@temp_tbl
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+PREHOOK: query: USE default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+PREHOOK: query: DROP DATABASE IF EXISTS to_drop_db3 CASCADE
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: DROP DATABASE IF EXISTS to_drop_db3 CASCADE
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+default
+PREHOOK: query: -- DROP NON-EXISTING DATABASE CASCADE IF EXISTS
+DROP DATABASE IF EXISTS non_exists_db3 CASCADE
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: -- DROP NON-EXISTING DATABASE CASCADE IF EXISTS
+DROP DATABASE IF EXISTS non_exists_db3 CASCADE
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+default
+PREHOOK: query: -- DROP NON-EXISTING DATABASE RESTRICT IF EXISTS
+DROP DATABASE IF EXISTS non_exists_db3 RESTRICT
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: -- DROP NON-EXISTING DATABASE RESTRICT IF EXISTS
+DROP DATABASE IF EXISTS non_exists_db3 RESTRICT
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+PREHOOK: query: -- DROP EMPTY DATABASE RESTRICT
+CREATE DATABASE to_drop_db4
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: -- DROP EMPTY DATABASE RESTRICT
+CREATE DATABASE to_drop_db4
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+default
+to_drop_db4
+PREHOOK: query: DROP DATABASE to_drop_db4 RESTRICT
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: DROP DATABASE to_drop_db4 RESTRICT
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
+default
PREHOOK: query: --
-- Canonical Name Tests
--
@@ -324,10 +486,12 @@ POSTHOOK: query: --
CREATE DATABASE db1
POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: CREATE DATABASE db2
PREHOOK: type: CREATEDATABASE
POSTHOOK: query: CREATE DATABASE db2
POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: -- CREATE foreign table
CREATE TABLE db1.src(key STRING, value STRING)
STORED AS TEXTFILE
@@ -337,6 +501,7 @@ CREATE TABLE db1.src(key STRING, value S
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: db1@src
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: -- LOAD into foreign table
LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
OVERWRITE INTO TABLE db1.src
@@ -347,16 +512,18 @@ LOAD DATA LOCAL INPATH '../data/files/kv
OVERWRITE INTO TABLE db1.src
POSTHOOK: type: LOAD
POSTHOOK: Output: db1@src
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: -- SELECT from foreign table
SELECT * FROM db1.src
PREHOOK: type: QUERY
PREHOOK: Input: db1@src
-PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-48_509_3695761285330281039/-mr-10000
+PREHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-07_137_8852311184592507877/-mr-10000
POSTHOOK: query: -- SELECT from foreign table
SELECT * FROM db1.src
POSTHOOK: type: QUERY
POSTHOOK: Input: db1@src
-POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-48_509_3695761285330281039/-mr-10000
+POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-07_137_8852311184592507877/-mr-10000
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
238 val_238
86 val_86
311 val_311
@@ -868,6 +1035,7 @@ PARTITIONED BY (ds STRING, hr STRING)
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: db1@srcpart
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: -- LOAD data into Partitioned foreign table
LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
OVERWRITE INTO TABLE db1.srcpart
@@ -881,18 +1049,20 @@ PARTITION (ds='2008-04-08', hr='11')
POSTHOOK: type: LOAD
POSTHOOK: Output: db1@srcpart
POSTHOOK: Output: db1@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: -- SELECT from Partitioned foreign table
SELECT key, value FROM db1.srcpart
WHERE key < 100 AND ds='2008-04-08' AND hr='11'
PREHOOK: type: QUERY
PREHOOK: Input: db1@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-50_007_6983676544716069113/-mr-10000
+PREHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-07_948_3991317106720358085/-mr-10000
POSTHOOK: query: -- SELECT from Partitioned foreign table
SELECT key, value FROM db1.srcpart
WHERE key < 100 AND ds='2008-04-08' AND hr='11'
POSTHOOK: type: QUERY
POSTHOOK: Input: db1@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-50_007_6983676544716069113/-mr-10000
+POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-07_948_3991317106720358085/-mr-10000
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
86 val_86
27 val_27
98 val_98
@@ -983,18 +1153,20 @@ PREHOOK: type: SWITCHDATABASE
POSTHOOK: query: -- SELECT JOINed product of two foreign tables
USE db2
POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: SELECT a.* FROM db1.src a JOIN default.src1 b
ON (a.key = b.key)
PREHOOK: type: QUERY
PREHOOK: Input: db1@src
PREHOOK: Input: default@src1
-PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-56_370_3425232643487575292/-mr-10000
+PREHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-11_120_6942744088220032482/-mr-10000
POSTHOOK: query: SELECT a.* FROM db1.src a JOIN default.src1 b
ON (a.key = b.key)
POSTHOOK: type: QUERY
POSTHOOK: Input: db1@src
POSTHOOK: Input: default@src1
-POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_22-59-56_370_3425232643487575292/-mr-10000
+POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-11_120_6942744088220032482/-mr-10000
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
128 val_128
128 val_128
128 val_128
@@ -1043,6 +1215,7 @@ SELECT value FROM default.src WHERE key
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src
POSTHOOK: Output: db2@conflict_name
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: -- CREATE foreign table
CREATE TABLE db1.conflict_name AS
SELECT value FROM db1.src WHERE key = 8
@@ -1054,6 +1227,7 @@ SELECT value FROM db1.src WHERE key = 8
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: db1@src
POSTHOOK: Output: db1@conflict_name
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: -- query tables with the same names in different DBs
SELECT * FROM (
SELECT value FROM db1.conflict_name
@@ -1063,7 +1237,7 @@ UNION ALL
PREHOOK: type: QUERY
PREHOOK: Input: db1@conflict_name
PREHOOK: Input: db2@conflict_name
-PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_23-00-24_326_3288809132598871804/-mr-10000
+PREHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-20_546_4482330222868598236/-mr-10000
POSTHOOK: query: -- query tables with the same names in different DBs
SELECT * FROM (
SELECT value FROM db1.conflict_name
@@ -1073,13 +1247,15 @@ UNION ALL
POSTHOOK: type: QUERY
POSTHOOK: Input: db1@conflict_name
POSTHOOK: Input: db2@conflict_name
-POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_23-00-24_326_3288809132598871804/-mr-10000
+POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-20_546_4482330222868598236/-mr-10000
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
val_66
val_8
PREHOOK: query: USE default
PREHOOK: type: SWITCHDATABASE
POSTHOOK: query: USE default
POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: SELECT * FROM (
SELECT value FROM db1.conflict_name
UNION ALL
@@ -1088,7 +1264,7 @@ UNION ALL
PREHOOK: type: QUERY
PREHOOK: Input: db1@conflict_name
PREHOOK: Input: db2@conflict_name
-PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_23-00-35_498_5558293761378343191/-mr-10000
+PREHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-23_738_1416474990630728009/-mr-10000
POSTHOOK: query: SELECT * FROM (
SELECT value FROM db1.conflict_name
UNION ALL
@@ -1097,7 +1273,8 @@ UNION ALL
POSTHOOK: type: QUERY
POSTHOOK: Input: db1@conflict_name
POSTHOOK: Input: db2@conflict_name
-POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_23-00-35_498_5558293761378343191/-mr-10000
+POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-23_738_1416474990630728009/-mr-10000
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
val_66
val_8
PREHOOK: query: -- TABLESAMPLES
@@ -1109,6 +1286,7 @@ CREATE TABLE bucketized_src (key INT, va
CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucketized_src
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: INSERT OVERWRITE TABLE bucketized_src
SELECT key, value FROM src WHERE key=66
PREHOOK: type: QUERY
@@ -1121,16 +1299,18 @@ POSTHOOK: Input: default@src
POSTHOOK: Output: default@bucketized_src
POSTHOOK: Lineage: bucketized_src.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucketized_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: SELECT key FROM bucketized_src TABLESAMPLE(BUCKET 1 out of 1)
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketized_src
-PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_23-01-01_901_106000097823469804/-mr-10000
+PREHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-31_681_3025368059661975205/-mr-10000
POSTHOOK: query: SELECT key FROM bucketized_src TABLESAMPLE(BUCKET 1 out of 1)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketized_src
-POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_23-01-01_901_106000097823469804/-mr-10000
+POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-31_681_3025368059661975205/-mr-10000
POSTHOOK: Lineage: bucketized_src.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucketized_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
66
PREHOOK: query: -- CREATE TABLE LIKE
CREATE TABLE db2.src1 LIKE default.src
@@ -1141,56 +1321,62 @@ POSTHOOK: type: CREATETABLE
POSTHOOK: Output: db2@src1
POSTHOOK: Lineage: bucketized_src.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucketized_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: USE db2
PREHOOK: type: SWITCHDATABASE
POSTHOOK: query: USE db2
POSTHOOK: type: SWITCHDATABASE
POSTHOOK: Lineage: bucketized_src.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucketized_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: DESC EXTENDED src1
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESC EXTENDED src1
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: bucketized_src.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucketized_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
key string default
value string default
-Detailed Table Information Table(tableName:src1, dbName:db2, owner:null, createTime:1300687270, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/db2.db/src1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1300687270}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:src1, dbName:db2, owner:null, createTime:1302825514, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/db2.db/src1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1302825514}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: -- character escaping
SELECT key FROM `default`.src ORDER BY key LIMIT 1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_23-01-10_834_4275631588062414168/-mr-10000
+PREHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-34_830_693129214774849311/-mr-10000
POSTHOOK: query: -- character escaping
SELECT key FROM `default`.src ORDER BY key LIMIT 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_23-01-10_834_4275631588062414168/-mr-10000
+POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-34_830_693129214774849311/-mr-10000
POSTHOOK: Lineage: bucketized_src.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucketized_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
0
PREHOOK: query: SELECT key FROM `default`.`src` ORDER BY key LIMIT 1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_23-01-21_022_1499500667725149462/-mr-10000
+PREHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-37_763_726145307624380559/-mr-10000
POSTHOOK: query: SELECT key FROM `default`.`src` ORDER BY key LIMIT 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_23-01-21_022_1499500667725149462/-mr-10000
+POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-37_763_726145307624380559/-mr-10000
POSTHOOK: Lineage: bucketized_src.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucketized_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
0
PREHOOK: query: SELECT key FROM default.`src` ORDER BY key LIMIT 1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_23-01-33_065_1701859253890034058/-mr-10000
+PREHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-40_728_8732527538266665217/-mr-10000
POSTHOOK: query: SELECT key FROM default.`src` ORDER BY key LIMIT 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-20_23-01-33_065_1701859253890034058/-mr-10000
+POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-14_16-58-40_728_8732527538266665217/-mr-10000
POSTHOOK: Lineage: bucketized_src.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucketized_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
0
PREHOOK: query: USE default
PREHOOK: type: SWITCHDATABASE
@@ -1198,3 +1384,4 @@ POSTHOOK: query: USE default
POSTHOOK: type: SWITCHDATABASE
POSTHOOK: Lineage: bucketized_src.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: bucketized_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]