Posted to commits@hive.apache.org by xu...@apache.org on 2014/09/22 21:31:37 UTC

svn commit: r1626876 - in /hive/trunk: hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/ hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/ metastore/src/java/org/apache/hadoop/hive/metastore/ ql/src/java/org/apac...

Author: xuefu
Date: Mon Sep 22 19:31:37 2014
New Revision: 1626876

URL: http://svn.apache.org/r1626876
Log:
HIVE-7100: Users of hive should be able to specify skipTrash when dropping tables (David via Xuefu)

Added:
    hive/trunk/ql/src/test/queries/clientpositive/drop_table_purge.q
    hive/trunk/ql/src/test/results/clientpositive/drop_table_purge.q.out
Modified:
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
    hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFS.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java
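
For orientation, a minimal sketch of the programmatic entry point this patch adds. The HiveConf, database, and table names below are placeholders, not part of the commit; the five-argument Hive.dropTable overload is introduced in the Hive.java hunk further down.

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class DropTablePurgeSketch {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        SessionState.start(conf);          // same session setup TestHive uses below
        Hive db = Hive.get(conf);
        // New overload: deleteData=true, ignoreUnknownTab=true, ifPurge=true.
        // With ifPurge set, the table directory is removed without going to the
        // trash, matching DROP TABLE demo_tbl PURGE at the SQL level.
        db.dropTable("default", "demo_tbl", true, true, true);
      }
    }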

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java Mon Sep 22 19:31:37 2014
@@ -90,6 +90,7 @@ public class TestHCatPartitionPublish {
     File workDir = handleWorkDir();
     conf.set("yarn.scheduler.capacity.root.queues", "default");
     conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
+    conf.set("fs.pfile.impl", "org.apache.hadoop.fs.ProxyLocalFileSystem");
 
     fs = FileSystem.get(conf);
     System.setProperty("hadoop.log.dir", new File(workDir, "/logs").getAbsolutePath());

Modified: hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java (original)
+++ hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java Mon Sep 22 19:31:37 2014
@@ -320,6 +320,6 @@ public class TestTempletonUtils {
     result = TempletonUtils.findContainingJar(FileSystem.class, ".*hadoop.*\\.jar.*");
     Assert.assertNotNull(result);
     result = TempletonUtils.findContainingJar(HadoopShimsSecure.class, ".*unknownjar.*");
-    Assert.assertNull(result);
+    Assert.assertNull("unexpectedly found jar for HadoopShimsSecure class: " + result, result);
   }
 }

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Mon Sep 22 19:31:37 2014
@@ -1424,10 +1424,14 @@ public class HiveMetaStore extends Thrif
         if (!success) {
           ms.rollbackTransaction();
         } else if (deleteData && !isExternal) {
+          boolean ifPurge = false;
+          if (envContext != null){
+            ifPurge = Boolean.parseBoolean(envContext.getProperties().get("ifPurge"));
+          }
           // Delete the data in the partitions which have other locations
-          deletePartitionData(partPaths);
+          deletePartitionData(partPaths, ifPurge);
           // Delete the data in the table
-          deleteTableData(tblPath);
+          deleteTableData(tblPath, ifPurge);
           // ok even if the data is not deleted
         }
         for (MetaStoreEventListener listener : listeners) {
@@ -1444,9 +1448,21 @@ public class HiveMetaStore extends Thrif
      * @param tablePath
      */
     private void deleteTableData(Path tablePath) {
+      deleteTableData(tablePath, false);
+    }
+
+    /**
+     * Deletes the data in a table's location; if the delete fails, logs an error
+     *
+     * @param tablePath
+     * @param ifPurge completely purge the table (skipping trash) while removing
+     *                data from warehouse
+     */
+    private void deleteTableData(Path tablePath, boolean ifPurge) {
+
       if (tablePath != null) {
         try {
-          wh.deleteDir(tablePath, true);
+          wh.deleteDir(tablePath, true, ifPurge);
         } catch (Exception e) {
           LOG.error("Failed to delete table directory: " + tablePath +
               " " + e.getMessage());
@@ -1461,10 +1477,22 @@ public class HiveMetaStore extends Thrif
      * @param partPaths
      */
     private void deletePartitionData(List<Path> partPaths) {
+      deletePartitionData(partPaths, false);
+    }
+
+    /**
+    * Given a list of partitions' locations, tries to delete each one
+    * and for each that fails logs an error.
+    *
+    * @param partPaths
+    * @param ifPurge completely purge the partition (skipping trash) while
+    *                removing data from warehouse
+    */
+    private void deletePartitionData(List<Path> partPaths, boolean ifPurge) {
       if (partPaths != null && !partPaths.isEmpty()) {
         for (Path partPath : partPaths) {
           try {
-            wh.deleteDir(partPath, true);
+            wh.deleteDir(partPath, true, ifPurge);
           } catch (Exception e) {
             LOG.error("Failed to delete partition directory: " + partPath +
                 " " + e.getMessage());

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Mon Sep 22 19:31:37 2014
@@ -763,18 +763,34 @@ public class HiveMetaStoreClient impleme
   }
 
   /**
-   * @param name
-   * @param dbname
-   * @throws NoSuchObjectException
-   * @throws MetaException
-   * @throws TException
-   * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_table(java.lang.String,
-   *      java.lang.String, boolean)
+   * {@inheritDoc}
+   * @see #dropTable(String, String, boolean, boolean, EnvironmentContext)
    */
   @Override
-  public void dropTable(String dbname, String name)
-      throws NoSuchObjectException, MetaException, TException {
-    dropTable(dbname, name, true, true, null);
+  public void dropTable(String dbname, String name, boolean deleteData,
+      boolean ignoreUnknownTab) throws MetaException, TException,
+      NoSuchObjectException, UnsupportedOperationException {
+    dropTable(dbname, name, deleteData, ignoreUnknownTab, null);
+  }
+
+  /**
+   * Drop the table and choose whether to save the data in the trash.
+   * @param ifPurge completely purge the table (skipping trash) while removing
+   *                data from warehouse
+   * @see #dropTable(String, String, boolean, boolean, EnvironmentContext)
+   */
+  public void dropTable(String dbname, String name, boolean deleteData,
+      boolean ignoreUnknownTab, boolean ifPurge)
+      throws MetaException, TException, NoSuchObjectException, UnsupportedOperationException {
+    //build new environmentContext with ifPurge;
+    EnvironmentContext envContext = null;
+    if(ifPurge){
+      Map<String, String> warehouseOptions = null;
+      warehouseOptions = new HashMap<String, String>();
+      warehouseOptions.put("ifPurge", "TRUE");
+      envContext = new EnvironmentContext(warehouseOptions);
+    }
+    dropTable(dbname, name, deleteData, ignoreUnknownTab, envContext);
   }
 
   /** {@inheritDoc} */
@@ -786,23 +802,37 @@ public class HiveMetaStoreClient impleme
   }
 
   /**
+   * @see #dropTable(String, String, boolean, boolean, EnvironmentContext)
+   */
+  @Override
+  public void dropTable(String dbname, String name)
+      throws NoSuchObjectException, MetaException, TException {
+    dropTable(dbname, name, true, true, null);
+  }
+
+  /**
+   * Drop the table and choose whether to: delete the underlying table data;
+   * throw if the table doesn't exist; save the data in the trash.
+   *
    * @param dbname
    * @param name
    * @param deleteData
    *          delete the underlying data or just delete the table in metadata
-   * @throws NoSuchObjectException
+   * @param ignoreUnknownTab
+   *          don't throw if the requested table doesn't exist
+   * @param envContext
+   *          for communicating with thrift
    * @throws MetaException
+   *           could not drop table properly
+   * @throws NoSuchObjectException
+   *           the table wasn't found
    * @throws TException
+   *           a thrift communication error occurred
+   * @throws UnsupportedOperationException
+   *           dropping an index table is not allowed
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_table(java.lang.String,
    *      java.lang.String, boolean)
    */
-  @Override
-  public void dropTable(String dbname, String name, boolean deleteData,
-      boolean ignoreUnknownTab) throws MetaException, TException,
-      NoSuchObjectException, UnsupportedOperationException {
-    dropTable(dbname, name, deleteData, ignoreUnknownTab, null);
-  }
-
   public void dropTable(String dbname, String name, boolean deleteData,
       boolean ignoreUnknownTab, EnvironmentContext envContext) throws MetaException, TException,
       NoSuchObjectException, UnsupportedOperationException {
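
To make the wire contract concrete, a hedged sketch of driving the same purge path through the metastore client directly; it is equivalent to the new five-argument dropTable overload above, which builds the identical EnvironmentContext internally. The table name is a placeholder.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.EnvironmentContext;

    public class MetastorePurgeSketch {
      public static void main(String[] args) throws Exception {
        HiveMetaStoreClient msc = new HiveMetaStoreClient(new HiveConf());
        // The server-side drop path (HiveMetaStore.java hunk above) reads this
        // property and forwards ifPurge to deleteTableData/deletePartitionData.
        Map<String, String> opts = new HashMap<String, String>();
        opts.put("ifPurge", "TRUE");
        msc.dropTable("default", "demo_tbl", true, true, new EnvironmentContext(opts));
        msc.close();
      }
    }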

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java Mon Sep 22 19:31:37 2014
@@ -37,12 +37,14 @@ public class HiveMetaStoreFsImpl impleme
 
   @Override
   public boolean deleteDir(FileSystem fs, Path f, boolean recursive,
-      Configuration conf) throws MetaException {
+      boolean ifPurge, Configuration conf) throws MetaException {
     LOG.info("deleting  " + f);
     HadoopShims hadoopShim = ShimLoader.getHadoopShims();
 
     try {
-      if (hadoopShim.moveToAppropriateTrash(fs, f, conf)) {
+      if (ifPurge) {
+        LOG.info("Not moving "+ f +" to trash");
+      } else if (hadoopShim.moveToAppropriateTrash(fs, f, conf)) {
         LOG.info("Moved to trash: " + f);
         return true;
       }
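
The hunk stops mid-method, so as a hedged sketch (not the literal method body): when ifPurge is set the trash shim is bypassed and the path is deleted outright, otherwise the trash is tried first.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.shims.ShimLoader;

    public class TrashOrPurgeSketch {
      // Illustrative only; mirrors the branch added above rather than the
      // actual HiveMetaStoreFsImpl.deleteDir implementation.
      static boolean deleteDir(FileSystem fs, Path f, boolean recursive,
          boolean ifPurge, Configuration conf) throws IOException {
        if (!ifPurge && ShimLoader.getHadoopShims().moveToAppropriateTrash(fs, f, conf)) {
          return true;                   // recoverable from the trash checkpoint
        }
        return fs.delete(f, recursive);  // purge path: the data is gone for good
      }
    }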

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java Mon Sep 22 19:31:37 2014
@@ -192,6 +192,10 @@ public interface IMetaStoreClient {
    *          The database for this table
    * @param tableName
    *          The table to drop
+   * @param deleteData
+   *          Should we delete the underlying data
+   * @param ignoreUnknownTab
+   *          don't throw if the requested table doesn't exist
    * @throws MetaException
    *           Could not drop table properly.
    * @throws NoSuchObjectException
@@ -200,7 +204,16 @@ public interface IMetaStoreClient {
    *           A thrift communication error occurred
    */
   void dropTable(String dbname, String tableName, boolean deleteData,
-      boolean ignoreUknownTab) throws MetaException, TException,
+      boolean ignoreUnknownTab) throws MetaException, TException,
+      NoSuchObjectException;
+
+  /**
+   * @param ifPurge
+   *          completely purge the table (skipping trash) while removing data from warehouse
+   * @see #dropTable(String, String, boolean, boolean)
+   */
+  public void dropTable(String dbname, String tableName, boolean deleteData,
+      boolean ignoreUnknownTab, boolean ifPurge) throws MetaException, TException,
       NoSuchObjectException;
 
   /**
@@ -226,6 +239,9 @@ public interface IMetaStoreClient {
   void dropTable(String tableName, boolean deleteData)
       throws MetaException, UnknownTableException, TException, NoSuchObjectException;
 
+  /**
+   * @see #dropTable(String, String, boolean, boolean)
+   */
   void dropTable(String dbname, String tableName)
       throws MetaException, TException, NoSuchObjectException;
 

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFS.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFS.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFS.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFS.java Mon Sep 22 19:31:37 2014
@@ -32,11 +32,12 @@ public interface MetaStoreFS {
    * delete a directory
    *
    * @param f
+   * @param ifPurge
    * @param recursive
    * @return true on success
    * @throws MetaException
    */
   public boolean deleteDir(FileSystem fs, Path f, boolean recursive,
-      Configuration conf) throws MetaException;
+      boolean ifPurge, Configuration conf) throws MetaException;
 
 }

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java Mon Sep 22 19:31:37 2014
@@ -224,8 +224,12 @@ public class Warehouse {
   }
 
   public boolean deleteDir(Path f, boolean recursive) throws MetaException {
+    return deleteDir(f, recursive, false);
+  }
+
+  public boolean deleteDir(Path f, boolean recursive, boolean ifPurge) throws MetaException {
     FileSystem fs = getFs(f);
-    return fsHandler.deleteDir(fs, f, recursive, conf);
+    return fsHandler.deleteDir(fs, f, recursive, ifPurge, conf);
   }
 
   public boolean isEmpty(Path path) throws IOException, MetaException {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Mon Sep 22 19:31:37 2014
@@ -3689,7 +3689,7 @@ public class DDLTask extends Task<DDLWor
     }
 
     // drop the table
-    db.dropTable(dropTbl.getTableName());
+    db.dropTable(dropTbl.getTableName(), dropTbl.getIfPurge());
     if (tbl != null) {
       // We have already locked the table in DDLSemanticAnalyzer, don't do it again here
       work.getOutputs().add(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Mon Sep 22 19:31:37 2014
@@ -878,6 +878,23 @@ public class Hive {
 
   /**
    * Drops table along with the data in it. If the table doesn't exist then it
+   * is a no-op. If ifPurge option is specified it is passed to the
+   * hdfs command that removes table data from warehouse to make it skip trash.
+   *
+   * @param tableName
+   *          table to drop
+   * @param ifPurge
+   *          completely purge the table (skipping trash) while removing data from warehouse
+   * @throws HiveException
+   *           thrown if the drop fails
+   */
+  public void dropTable(String tableName, boolean ifPurge) throws HiveException {
+    String[] names = Utilities.getDbTableName(tableName);
+    dropTable(names[0], names[1], true, true, ifPurge);
+  }
+
+  /**
+   * Drops table along with the data in it. If the table doesn't exist then it
    * is a no-op
    *
    * @param tableName
@@ -886,8 +903,7 @@ public class Hive {
    *           thrown if the drop fails
    */
   public void dropTable(String tableName) throws HiveException {
-    String[] names = Utilities.getDbTableName(tableName);
-    dropTable(names[0], names[1], true, true);
+    dropTable(tableName, false);
   }
 
   /**
@@ -902,7 +918,7 @@ public class Hive {
    *           thrown if the drop fails
    */
   public void dropTable(String dbName, String tableName) throws HiveException {
-    dropTable(dbName, tableName, true, true);
+    dropTable(dbName, tableName, true, true, false);
   }
 
   /**
@@ -913,14 +929,31 @@ public class Hive {
    * @param deleteData
    *          deletes the underlying data along with metadata
    * @param ignoreUnknownTab
-   *          an exception if thrown if this is falser and table doesn't exist
+   *          an exception is thrown if this is false and the table doesn't exist
    * @throws HiveException
    */
   public void dropTable(String dbName, String tableName, boolean deleteData,
       boolean ignoreUnknownTab) throws HiveException {
+    dropTable(dbName, tableName, deleteData, ignoreUnknownTab, false);
+  }
 
+  /**
+   * Drops the table.
+   *
+   * @param dbName
+   * @param tableName
+   * @param deleteData
+   *          deletes the underlying data along with metadata
+   * @param ignoreUnknownTab
+   *          an exception is thrown if this is false and the table doesn't exist
+   * @param ifPurge
+   *          completely purge the table skipping trash while removing data from warehouse
+   * @throws HiveException
+   */
+  public void dropTable(String dbName, String tableName, boolean deleteData,
+      boolean ignoreUnknownTab, boolean ifPurge) throws HiveException {
     try {
-      getMSC().dropTable(dbName, tableName, deleteData, ignoreUnknownTab);
+      getMSC().dropTable(dbName, tableName, deleteData, ignoreUnknownTab, ifPurge);
     } catch (NoSuchObjectException e) {
       if (!ignoreUnknownTab) {
         throw new HiveException(e);
@@ -1736,6 +1769,7 @@ private void constructOneLBLocationMap(F
   public List<Partition> dropPartitions(String dbName, String tblName,
       List<DropTableDesc.PartSpec> partSpecs,  boolean deleteData, boolean ignoreProtection,
       boolean ifExists) throws HiveException {
+    //TODO: add support for ifPurge
     try {
       Table tbl = getTable(dbName, tblName);
       List<ObjectPair<Integer, byte[]>> partExprs =

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java Mon Sep 22 19:31:37 2014
@@ -451,7 +451,11 @@ public class SessionHiveMetaStoreClient 
     // Delete table data
     if (deleteData && !MetaStoreUtils.isExternalTable(table)) {
       try {
-        getWh().deleteDir(tablePath, true);
+        boolean ifPurge = false;
+        if (envContext != null){
+          ifPurge = Boolean.parseBoolean(envContext.getProperties().get("ifPurge"));
+        }
+        getWh().deleteDir(tablePath, true, ifPurge);
       } catch (Exception err) {
         LOG.error("Failed to delete temp table directory: " + tablePath, err);
         // Forgive error

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Mon Sep 22 19:31:37 2014
@@ -847,7 +847,8 @@ public class DDLSemanticAnalyzer extends
       outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_EXCLUSIVE));
     }
 
-    DropTableDesc dropTblDesc = new DropTableDesc(tableName, expectView, ifExists);
+    boolean ifPurge = (ast.getFirstChildWithType(HiveParser.KW_PURGE) != null);
+    DropTableDesc dropTblDesc = new DropTableDesc(tableName, expectView, ifExists, ifPurge);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         dropTblDesc), conf));
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Mon Sep 22 19:31:37 2014
@@ -479,8 +479,9 @@ import java.util.HashMap;
     xlateMap.put("KW_SUBQUERY", "SUBQUERY");
     xlateMap.put("KW_REWRITE", "REWRITE");
     xlateMap.put("KW_UPDATE", "UPDATE");
-
     xlateMap.put("KW_VALUES", "VALUES");
+    xlateMap.put("KW_PURGE", "PURGE");
+
 
     // Operators
     xlateMap.put("DOT", ".");
@@ -929,7 +930,7 @@ dropIndexStatement
 dropTableStatement
 @init { pushMsg("drop statement", state); }
 @after { popMsg(state); }
-    : KW_DROP KW_TABLE ifExists? tableName -> ^(TOK_DROPTABLE tableName ifExists?)
+    : KW_DROP KW_TABLE ifExists? tableName KW_PURGE? -> ^(TOK_DROPTABLE tableName ifExists? KW_PURGE?)
     ;
 
 alterStatement

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java Mon Sep 22 19:31:37 2014
@@ -52,6 +52,7 @@ public class DropTableDesc extends DDLDe
   ArrayList<PartSpec> partSpecs;
   boolean expectView;
   boolean ifExists;
+  boolean ifPurge;
   boolean ignoreProtection;
 
   public DropTableDesc() {
@@ -59,12 +60,14 @@ public class DropTableDesc extends DDLDe
 
   /**
    * @param tableName
+   * @param ifPurge
    */
-  public DropTableDesc(String tableName, boolean expectView, boolean ifExists) {
+  public DropTableDesc(String tableName, boolean expectView, boolean ifExists, boolean ifPurge) {
     this.tableName = tableName;
     this.partSpecs = null;
     this.expectView = expectView;
     this.ifExists = ifExists;
+    this.ifPurge = ifPurge;
     this.ignoreProtection = false;
   }
 
@@ -149,4 +152,19 @@ public class DropTableDesc extends DDLDe
   public void setIfExists(boolean ifExists) {
     this.ifExists = ifExists;
   }
+
+  /**
+   *  @return whether Purge was specified
+   */
+  public boolean getIfPurge() {
+      return ifPurge;
+  }
+
+  /**
+   * @param ifPurge
+   *          set whether Purge was specified
+   */
+  public void setIfPurge(boolean ifPurge) {
+      this.ifPurge = ifPurge;
+  }
 }

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java Mon Sep 22 19:31:37 2014
@@ -18,9 +18,12 @@
 
 package org.apache.hadoop.hive.ql.metadata;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_CHECKPOINT_INTERVAL_KEY;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
 import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
@@ -28,7 +31,10 @@ import java.util.Map;
 
 import junit.framework.TestCase;
 
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.TrashPolicy;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
@@ -63,6 +69,9 @@ public class TestHive extends TestCase {
   protected void setUp() throws Exception {
     super.setUp();
     hiveConf = new HiveConf(this.getClass());
+    // enable trash so it can be tested
+    hiveConf.setFloat(FS_TRASH_CHECKPOINT_INTERVAL_KEY, 30);
+    hiveConf.setFloat(FS_TRASH_INTERVAL_KEY, 30);
     SessionState.start(hiveConf);
     try {
       hm = Hive.get(hiveConf);
@@ -79,6 +88,9 @@ public class TestHive extends TestCase {
   protected void tearDown() throws Exception {
     try {
       super.tearDown();
+      // disable trash
+      hiveConf.setFloat(FS_TRASH_CHECKPOINT_INTERVAL_KEY, 30);
+      hiveConf.setFloat(FS_TRASH_INTERVAL_KEY, 30);
       Hive.closeCurrent();
     } catch (Exception e) {
       System.err.println(StringUtils.stringifyException(e));
@@ -294,7 +306,7 @@ public class TestHive extends TestCase {
     try {
       String dbName = "db_for_testgettables";
       String table1Name = "table1";
-      hm.dropDatabase(dbName, true, true);
+      hm.dropDatabase(dbName, true, true, true);
 
       Database db = new Database();
       db.setName(dbName);
@@ -330,16 +342,92 @@ public class TestHive extends TestCase {
 
       // Drop all tables
       for (String tableName : hm.getAllTables(dbName)) {
+        Table table = hm.getTable(dbName, tableName);
         hm.dropTable(dbName, tableName);
+        assertFalse(fs.exists(table.getPath()));
       }
       hm.dropDatabase(dbName);
     } catch (Throwable e) {
       System.err.println(StringUtils.stringifyException(e));
-      System.err.println("testGetTables() failed");
+      System.err.println("testGetAndDropTables() failed");
       throw e;
     }
   }
 
+  public void testDropTableTrash() throws Throwable {
+    try {
+      String dbName = "db_for_testdroptable";
+      hm.dropDatabase(dbName, true, true, true);
+
+      Database db = new Database();
+      db.setName(dbName);
+      hm.createDatabase(db);
+
+      List<String> ts = new ArrayList<String>(2);
+      String tableBaseName = "droptable";
+      ts.add(tableBaseName + "1");
+      ts.add(tableBaseName + "2");
+      Table tbl1 = createTestTable(dbName, ts.get(0));
+      hm.createTable(tbl1);
+      Table tbl2 = createTestTable(dbName, ts.get(1));
+      hm.createTable(tbl2);
+      // test dropping tables and trash behavior
+      Table table1 = hm.getTable(dbName, ts.get(0));
+      assertNotNull(table1);
+      assertEquals(ts.get(0), table1.getTableName());
+      Path path1 = table1.getPath();
+      FileSystem fs = path1.getFileSystem(hiveConf);
+      assertTrue(fs.exists(path1));
+      // drop table and check that trash works
+      TrashPolicy tp = TrashPolicy.getInstance(hiveConf, fs, fs.getHomeDirectory());
+      assertNotNull("TrashPolicy instance should not be null", tp);
+      assertTrue("TrashPolicy is not enabled for filesystem: " + fs.getUri(), tp.isEnabled());
+      Path trashDir = tp.getCurrentTrashDir();
+      assertNotNull("trash directory should not be null", trashDir);
+      Path trash1 = Path.mergePaths(trashDir, path1);
+      Path pathglob = trash1.suffix("*");
+      FileStatus before[] = fs.globStatus(pathglob);
+      hm.dropTable(dbName, ts.get(0));
+      assertFalse(fs.exists(path1));
+      FileStatus after[] = fs.globStatus(pathglob);
+      assertTrue("trash dir before and after DROP TABLE noPURGE are not different",
+                 before.length != after.length);
+
+      // drop a table without saving to trash by setting the purge option
+      Table table2 = hm.getTable(dbName, ts.get(1));
+      assertNotNull(table2);
+      assertEquals(ts.get(1), table2.getTableName());
+      Path path2 = table2.getPath();
+      assertTrue(fs.exists(path2));
+      Path trash2 = Path.mergePaths(trashDir, path2);
+      System.out.println("trashDir2 is " + trash2);
+      pathglob = trash2.suffix("*");
+      before = fs.globStatus(pathglob);
+      hm.dropTable(dbName, ts.get(1), true, true, true); // deleteData, ignoreUnknownTable, ifPurge
+      assertFalse(fs.exists(path2));
+      after = fs.globStatus(pathglob);
+      Arrays.sort(before);
+      Arrays.sort(after);
+      assertEquals("trash dir before and after DROP TABLE PURGE are different",
+                   before.length, after.length);
+      assertTrue("trash dir before and after DROP TABLE PURGE are different",
+                 Arrays.equals(before, after));
+
+      // Drop all tables
+      for (String tableName : hm.getAllTables(dbName)) {
+        Table table = hm.getTable(dbName, tableName);
+        hm.dropTable(dbName, tableName);
+        assertFalse(fs.exists(table.getPath()));
+      }
+      hm.dropDatabase(dbName);
+    } catch (Throwable e) {
+      System.err.println(StringUtils.stringifyException(e));
+      System.err.println("testDropTableTrash() failed");
+      throw e;
+    }
+  }
+
+
   public void testPartition() throws Throwable {
     try {
       String tableName = "table_for_testpartition";

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java?rev=1626876&r1=1626875&r2=1626876&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java Mon Sep 22 19:31:37 2014
@@ -84,6 +84,13 @@ public class TestHiveRemote extends Test
   }
 
   /**
+   * Cannot control trash in remote metastore, so skip this test
+   */
+  @Override
+  public void testDropTableTrash() {
+  }
+
+  /**
    * Finds a free port.
    *
    * @return a free port

Added: hive/trunk/ql/src/test/queries/clientpositive/drop_table_purge.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/drop_table_purge.q?rev=1626876&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/drop_table_purge.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/drop_table_purge.q Mon Sep 22 19:31:37 2014
@@ -0,0 +1,4 @@
+SET hive.metastore.batch.retrieve.max=1;
+CREATE TABLE IF NOT EXISTS temp(col STRING);
+
+DROP TABLE temp PURGE;

Added: hive/trunk/ql/src/test/results/clientpositive/drop_table_purge.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/drop_table_purge.q.out?rev=1626876&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/drop_table_purge.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/drop_table_purge.q.out Mon Sep 22 19:31:37 2014
@@ -0,0 +1,16 @@
+PREHOOK: query: CREATE TABLE IF NOT EXISTS temp(col STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@temp
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS temp(col STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@temp
+PREHOOK: query: DROP TABLE temp PURGE
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@temp
+PREHOOK: Output: default@temp
+POSTHOOK: query: DROP TABLE temp PURGE
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@temp
+POSTHOOK: Output: default@temp