Posted to commits@drill.apache.org by ad...@apache.org on 2015/09/09 01:25:25 UTC

[5/5] drill git commit: DRILL-3535: Add support for Drop Table

DRILL-3535: Add support for Drop Table

this closes #140


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/2a191847
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/2a191847
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/2a191847

Branch: refs/heads/master
Commit: 2a191847154203871454b229d8ef322766aa9ee4
Parents: 41fc9ca
Author: Mehant Baid <me...@gmail.com>
Authored: Wed Aug 5 12:03:01 2015 -0700
Committer: adeneche <ad...@gmail.com>
Committed: Tue Sep 8 16:23:38 2015 -0700

----------------------------------------------------------------------
 exec/java-exec/src/main/codegen/data/Parser.tdd |   3 +-
 .../src/main/codegen/includes/parserImpls.ftl   |  17 ++
 .../planner/sql/handlers/DropTableHandler.java  |  77 ++++++++
 .../sql/parser/CompoundIdentifierConverter.java |   1 +
 .../exec/planner/sql/parser/SqlDropTable.java   |  95 ++++++++++
 .../apache/drill/exec/store/AbstractSchema.java |   6 +
 .../exec/store/dfs/BasicFormatMatcher.java      |  11 +-
 .../drill/exec/store/dfs/DrillFileSystem.java   |   4 +
 .../drill/exec/store/dfs/DrillPathFilter.java   |   5 +-
 .../drill/exec/store/dfs/FormatMatcher.java     |   3 +
 .../exec/store/dfs/WorkspaceSchemaFactory.java  | 124 +++++++++++++
 .../exec/store/parquet/ParquetFormatPlugin.java |   2 +-
 .../java/org/apache/drill/TestDropTable.java    | 174 +++++++++++++++++++
 .../TestImpersonationMetadata.java              |  44 ++++-
 14 files changed, 558 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/main/codegen/data/Parser.tdd
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/data/Parser.tdd b/exec/java-exec/src/main/codegen/data/Parser.tdd
index 7c03ab0..6ff7fa4 100644
--- a/exec/java-exec/src/main/codegen/data/Parser.tdd
+++ b/exec/java-exec/src/main/codegen/data/Parser.tdd
@@ -46,7 +46,8 @@
     "SqlCreateOrReplaceView()",
     "SqlDropView()",
     "SqlShowFiles()",
-    "SqlCreateTable()"
+    "SqlCreateTable()",
+    "SqlDropTable()"
   ]
 
   # List of methods for parsing custom literals.

http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/main/codegen/includes/parserImpls.ftl
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/includes/parserImpls.ftl b/exec/java-exec/src/main/codegen/includes/parserImpls.ftl
index b1c22f2..c761d47 100644
--- a/exec/java-exec/src/main/codegen/includes/parserImpls.ftl
+++ b/exec/java-exec/src/main/codegen/includes/parserImpls.ftl
@@ -240,3 +240,20 @@ SqlNode SqlCreateTable() :
         return new SqlCreateTable(pos, tblName, fieldList, partitionFieldList, query);
     }
 }
+
+/**
+ * Parses a drop table statement.
+ * DROP TABLE table_name;
+ */
+SqlNode SqlDropTable() :
+{
+    SqlParserPos pos;
+}
+{
+    <DROP> { pos = getPos(); }
+    <TABLE>
+    {
+        return new SqlDropTable(pos, CompoundIdentifier());
+    }
+}
+

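For context, once this rule is generated into the parser, the new statement can be issued from any Drill client. The sketch below shows one way to run it over JDBC and is illustrative only: the connection URL, the table name dfs.tmp.drop_me, and the assumption of a locally running Drillbit are not part of this patch, while the result columns ok and summary match the tests added further down.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class DropTableExample {
      public static void main(String[] args) throws Exception {
        // Hypothetical URL; assumes the Drill JDBC driver is on the classpath and a Drillbit runs locally.
        try (Connection conn = DriverManager.getConnection("jdbc:drill:drillbit=localhost");
             Statement stmt = conn.createStatement();
             // dfs.tmp.drop_me is a made-up table in a writable (mutable) workspace.
             ResultSet rs = stmt.executeQuery("DROP TABLE dfs.tmp.drop_me")) {
          while (rs.next()) {
            // DropTableHandler returns a single row with an "ok" flag and a "summary" message.
            System.out.println(rs.getBoolean("ok") + ": " + rs.getString("summary"));
          }
        }
      }
    }
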
http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DropTableHandler.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DropTableHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DropTableHandler.java
new file mode 100644
index 0000000..211d256
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DropTableHandler.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.planner.sql.handlers;
+
+import org.apache.calcite.schema.SchemaPlus;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.tools.RelConversionException;
+import org.apache.calcite.tools.ValidationException;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.exec.physical.PhysicalPlan;
+import org.apache.drill.exec.planner.sql.DirectPlan;
+import org.apache.drill.exec.planner.sql.SchemaUtilites;
+import org.apache.drill.exec.planner.sql.parser.SqlDropTable;
+import org.apache.drill.exec.store.AbstractSchema;
+
+import java.io.IOException;
+
+// SqlHandler for dropping a table.
+public class DropTableHandler extends DefaultSqlHandler {
+
+  private static org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DropTableHandler.class);
+
+  public DropTableHandler(SqlHandlerConfig config) {
+    super(config);
+  }
+
+  /**
+   * Function resolves the schema and invokes the drop method. Raises an exception if the schema is
+   * immutable.
+   * @param sqlNode - SqlDropTable node holding the identifier of the table to drop
+   * @return - Single-row direct plan indicating the drop succeeded; an exception is raised otherwise
+   * @throws ValidationException
+   * @throws RelConversionException
+   * @throws IOException
+   */
+  @Override
+  public PhysicalPlan getPlan(SqlNode sqlNode) throws ValidationException, RelConversionException, IOException {
+
+    SqlDropTable dropTableNode = ((SqlDropTable) sqlNode);
+    SqlIdentifier tableIdentifier = dropTableNode.getTableIdentifier();
+
+    SchemaPlus defaultSchema = context.getNewDefaultSchema();
+    AbstractSchema drillSchema = null;
+
+    if (tableIdentifier != null) {
+      drillSchema = SchemaUtilites.resolveToMutableDrillSchema(defaultSchema, dropTableNode.getSchema());
+    }
+
+    String tableName = ((SqlDropTable) sqlNode).getName();
+    if (drillSchema == null) {
+      throw UserException.validationError()
+          .message("Invalid table_name [%s]", tableName)
+          .build(logger);
+    }
+
+    drillSchema.dropTable(tableName);
+
+    return DirectPlan.createDirectPlan(context, true,
+        String.format("Table [%s] %s", tableName, "dropped"));
+  }
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/CompoundIdentifierConverter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/CompoundIdentifierConverter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/CompoundIdentifierConverter.java
index f9032a4..ebe6d39 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/CompoundIdentifierConverter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/CompoundIdentifierConverter.java
@@ -160,6 +160,7 @@ public class CompoundIdentifierConverter extends SqlShuttle {
     rules.put(SqlUseSchema.class, R(D));
     rules.put(SqlJoin.class, R(D, D, D, D, D, E));
     rules.put(SqlOrderBy.class, R(D, E, D, D));
+    rules.put(SqlDropTable.class, R(D));
     REWRITE_RULES = ImmutableMap.copyOf(rules);
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDropTable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDropTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDropTable.java
new file mode 100644
index 0000000..de272a1
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/SqlDropTable.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.planner.sql.parser;
+
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.drill.exec.planner.sql.handlers.AbstractSqlHandler;
+import org.apache.drill.exec.planner.sql.handlers.DropTableHandler;
+import org.apache.drill.exec.planner.sql.handlers.SqlHandlerConfig;
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlLiteral;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.SqlSpecialOperator;
+import org.apache.calcite.sql.SqlWriter;
+import org.apache.calcite.sql.parser.SqlParserPos;
+
+import com.google.common.collect.ImmutableList;
+
+public class SqlDropTable extends DrillSqlCall {
+  public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("DROP_TABLE", SqlKind.OTHER) {
+    @Override
+    public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) {
+      return new SqlDropTable(pos, (SqlIdentifier) operands[0]);
+    }
+  };
+
+  private SqlIdentifier tableName;
+
+  public SqlDropTable(SqlParserPos pos, SqlIdentifier tableName) {
+    super(pos);
+    this.tableName = tableName;
+  }
+
+  @Override
+  public SqlOperator getOperator() {
+    return OPERATOR;
+  }
+
+  @Override
+  public List<SqlNode> getOperandList() {
+    return Collections.singletonList((SqlNode) tableName);
+  }
+
+  @Override
+  public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
+    writer.keyword("DROP");
+    writer.keyword("TABLE");
+    tableName.unparse(writer, leftPrec, rightPrec);
+  }
+
+  @Override
+  public AbstractSqlHandler getSqlHandler(SqlHandlerConfig config) {
+    return new DropTableHandler(config);
+  }
+
+  public List<String> getSchema() {
+    if (tableName.isSimple()) {
+      return ImmutableList.of();
+    }
+
+    return tableName.names.subList(0, tableName.names.size()-1);
+  }
+
+  public String getName() {
+    if (tableName.isSimple()) {
+      return tableName.getSimple();
+    }
+
+    return tableName.names.get(tableName.names.size() - 1);
+  }
+
+  public SqlIdentifier getTableIdentifier() {
+    return tableName;
+  }
+
+}

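SqlDropTable splits a possibly compound identifier into a schema path and a table name: getSchema() returns every name component except the last, and getName() returns the last one. A small, Calcite-free sketch of that split, purely for illustration (the identifier dfs.tmp.my_table is made up):

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class IdentifierSplitExample {
      public static void main(String[] args) {
        // A compound identifier such as dfs.tmp.my_table parses into these name components.
        List<String> names = Arrays.asList("dfs", "tmp", "my_table");

        // Mirrors SqlDropTable.getSchema(): everything except the last component.
        List<String> schema = names.size() == 1
            ? Collections.<String>emptyList()
            : names.subList(0, names.size() - 1);

        // Mirrors SqlDropTable.getName(): the last component.
        String table = names.get(names.size() - 1);

        System.out.println(schema + " -> " + table);  // prints [dfs, tmp] -> my_table
      }
    }
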
http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
index a952cc2..0db51ab1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
@@ -190,4 +190,10 @@ public abstract class AbstractSchema implements Schema, SchemaPartitionExplorer,
   public void close() throws Exception {
     // no-op: default implementation for most implementations.
   }
+
+  public void dropTable(String tableName) {
+    throw UserException.unsupportedError()
+        .message("Dropping tables is not supported in schema [%s]", getSchemaPath())
+        .build(logger);
+  }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
index 1cb9f82..7f4a8d4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
@@ -72,7 +72,7 @@ public class BasicFormatMatcher extends FormatMatcher{
 
   @Override
   public FormatSelection isReadable(DrillFileSystem fs, FileSelection selection) throws IOException {
-    if (isReadable(fs, selection.getFirstPath(fs))) {
+    if (isFileReadable(fs, selection.getFirstPath(fs))) {
       if (plugin.getName() != null) {
         NamedFormatPluginConfig namedConfig = new NamedFormatPluginConfig();
         namedConfig.name = plugin.getName();
@@ -84,8 +84,12 @@ public class BasicFormatMatcher extends FormatMatcher{
     return null;
   }
 
-  protected final boolean isReadable(DrillFileSystem fs, FileStatus status) throws IOException {
-    CompressionCodec codec = null;
+  /*
+   * Function returns true if the file extension matches the pattern
+   */
+  @Override
+  public boolean isFileReadable(DrillFileSystem fs, FileStatus status) throws IOException {
+    CompressionCodec codec = null;
     if (compressible) {
       codec = codecFactory.getCodec(status.getPath());
     }
@@ -111,7 +115,6 @@ public class BasicFormatMatcher extends FormatMatcher{
     return false;
   }
 
-
   @Override
   @JsonIgnore
   public FormatPlugin getFormatPlugin() {

http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFileSystem.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFileSystem.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFileSystem.java
index 25dd811..5e720a2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFileSystem.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFileSystem.java
@@ -71,6 +71,10 @@ import com.google.common.collect.Maps;
 public class DrillFileSystem extends FileSystem implements OpenFileTracker {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillFileSystem.class);
   private final static boolean TRACKING_ENABLED = AssertionUtil.isAssertionsEnabled();
+
+  public static final String HIDDEN_FILE_PREFIX = "_";
+  public static final String DOT_FILE_PREFIX = ".";
+
   private final ConcurrentMap<DrillFSDataInputStream, DebugStackTrace> openedFiles = Maps.newConcurrentMap();
 
   private final FileSystem underlyingFs;

http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillPathFilter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillPathFilter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillPathFilter.java
index 81c8779..00f463d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillPathFilter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillPathFilter.java
@@ -23,7 +23,10 @@ import org.apache.hadoop.mapred.Utils;
 public class DrillPathFilter extends Utils.OutputFileUtils.OutputFilesFilter {
   @Override
   public boolean accept(Path path) {
-    if (path.toString().contains("_metadata")) {
+    if (path.getName().startsWith(DrillFileSystem.HIDDEN_FILE_PREFIX)) {
+      return false;
+    }
+    if (path.getName().startsWith(DrillFileSystem.DOT_FILE_PREFIX)) {
       return false;
     }
     return super.accept(path);

http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatMatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatMatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatMatcher.java
index 0b8c7a8..c36e17c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatMatcher.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatMatcher.java
@@ -17,6 +17,8 @@
  */
 package org.apache.drill.exec.store.dfs;
 
+import org.apache.hadoop.fs.FileStatus;
+
 import java.io.IOException;
 
 public abstract class FormatMatcher {
@@ -24,5 +26,6 @@ public abstract class FormatMatcher {
 
   public abstract boolean supportDirectoryReads();
   public abstract FormatSelection isReadable(DrillFileSystem fs, FileSelection selection) throws IOException;
+  public abstract boolean isFileReadable(DrillFileSystem fs, FileStatus status) throws IOException;
   public abstract FormatPlugin getFormatPlugin();
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
index a7e83f6..22d00a2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
@@ -20,8 +20,11 @@ package org.apache.drill.exec.store.dfs;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.Collections;
+import java.util.LinkedList;
 import java.util.List;
+import java.util.Queue;
 import java.util.Set;
+import java.util.concurrent.ThreadLocalRandom;
 import java.util.regex.Pattern;
 
 import com.google.common.base.Strings;
@@ -62,6 +65,7 @@ public class WorkspaceSchemaFactory {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(WorkspaceSchemaFactory.class);
 
   private final List<FormatMatcher> fileMatchers;
+  private final List<FormatMatcher> dropFileMatchers;
   private final List<FormatMatcher> dirMatchers;
 
   private final WorkspaceConfig config;
@@ -104,6 +108,9 @@ public class WorkspaceSchemaFactory {
       final FormatMatcher fallbackMatcher = new BasicFormatMatcher(formatPlugin,
           ImmutableList.of(Pattern.compile(".*")), ImmutableList.<MagicString>of());
       fileMatchers.add(fallbackMatcher);
+      dropFileMatchers = fileMatchers.subList(0, fileMatchers.size() - 1);
+    } else {
+      dropFileMatchers = fileMatchers.subList(0, fileMatchers.size());
     }
   }
 
@@ -321,8 +328,125 @@ public class WorkspaceSchemaFactory {
       return null;
     }
 
+    private FormatMatcher findMatcher(FileStatus file) {
+      FormatMatcher matcher = null;
+      try {
+        for (FormatMatcher m : dropFileMatchers) {
+          if (m.isFileReadable(fs, file)) {
+            return m;
+          }
+        }
+      } catch (IOException e) {
+        logger.debug("Failed to find format matcher for file: {}", file, e);
+      }
+      return matcher;
+    }
+
     @Override
     public void destroy(DrillTable value) {
     }
+
+    /**
+     * Check whether the table consists of homogeneous files that Drill can read, e.g. parquet, json, csv.
+     * If it contains a mix of these formats, or a file format that Drill cannot understand,
+     * the table is not considered homogeneous and the drop is rejected.
+     * @param tableName - name of the table to be checked for the homogeneous property
+     * @return true if all files in the table share a single Drill-readable format, false otherwise
+     * @throws IOException
+     */
+    private boolean isHomogeneous(String tableName) throws IOException {
+      FileSelection fileSelection = FileSelection.create(fs, config.getLocation(), tableName);
+
+      if (fileSelection == null) {
+        throw UserException
+            .validationError()
+            .message(String.format("Table [%s] not found", tableName))
+            .build(logger);
+      }
+
+      FormatMatcher matcher = null;
+      Queue<FileStatus> listOfFiles = new LinkedList<>();
+      listOfFiles.addAll(fileSelection.getFileStatusList(fs));
+
+      while (!listOfFiles.isEmpty()) {
+        FileStatus currentFile = listOfFiles.poll();
+        if (currentFile.isDirectory()) {
+          listOfFiles.addAll(fs.list(true, currentFile.getPath()));
+        } else {
+          if (matcher != null) {
+            if (!matcher.isFileReadable(fs, currentFile)) {
+              return false;
+            }
+          } else {
+            matcher = findMatcher(currentFile);
+            // Did not match any of the file patterns, exit
+            if (matcher == null) {
+              return false;
+            }
+          }
+        }
+      }
+      return true;
+    }
+
+    /**
+     * We check if the table contains homogeneous file formats that Drill can read. Once the checks pass,
+     * we rename the table to start with an "_" (so it is treated as hidden) and then issue a recursive delete.
+     * @param table - name of the table to be dropped, relative to the workspace location
+     */
+    @Override
+    public void dropTable(String table) {
+      DrillFileSystem fs = getFS();
+      String defaultLocation = getDefaultLocation();
+      try {
+        if (!isHomogeneous(table)) {
+          throw UserException
+              .validationError()
+              .message("Table contains different file formats. \n" +
+                  "Drop Table is only supported for directories that contain homogeneous file formats consumable by Drill")
+              .build(logger);
+        }
+
+        StringBuilder tableRenameBuilder = new StringBuilder();
+        int lastSlashIndex = table.lastIndexOf(Path.SEPARATOR);
+        if (lastSlashIndex != -1) {
+          tableRenameBuilder.append(table.substring(0, lastSlashIndex + 1));
+        }
+        // Generate unique identifier which will be added as a suffix to the table name
+        ThreadLocalRandom r = ThreadLocalRandom.current();
+        long time =  (System.currentTimeMillis()/1000);
+        Long p1 = ((Integer.MAX_VALUE - time) << 32) + r.nextInt();
+        Long p2 = r.nextLong();
+        final String fileNameDelimiter = DrillFileSystem.HIDDEN_FILE_PREFIX;
+        String[] pathSplit = table.split(Path.SEPARATOR);
+        /*
+         * Builds the name for the renamed table:
+         * prefixes the table name with an underscore (so that it is treated as a hidden file)
+         * and suffixes it with the unique identifiers (similar to how we generate query ids),
+         * separated by underscores.
+         */
+        tableRenameBuilder
+            .append(DrillFileSystem.HIDDEN_FILE_PREFIX)
+            .append(pathSplit[pathSplit.length - 1])
+            .append(fileNameDelimiter)
+            .append(p1.toString())
+            .append(fileNameDelimiter)
+            .append(p2.toString());
+
+        String tableRename = tableRenameBuilder.toString();
+        fs.rename(new Path(defaultLocation, table), new Path(defaultLocation, tableRename));
+        fs.delete(new Path(defaultLocation, tableRename), true);
+      } catch (AccessControlException e) {
+        throw UserException
+            .permissionError()
+            .message("Unauthorized to drop table", e)
+            .build(logger);
+      } catch (IOException e) {
+        throw UserException
+            .dataWriteError()
+            .message("Failed to drop table", e)
+            .build(logger);
+      }
+    }
   }
 }

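The drop itself is a rename-then-delete: the table is first renamed to a name that starts with "_" (so DrillPathFilter treats it as hidden) and ends with two pseudo-random identifiers, and the renamed path is then deleted recursively. A standalone sketch of just the name construction, mirroring the code above; the printed value is illustrative and changes on every run:

    import java.util.concurrent.ThreadLocalRandom;

    public class RenamedTableNameExample {
      // Mirrors the suffix construction in WorkspaceSchemaFactory.dropTable() above.
      static String hiddenNameFor(String table) {
        ThreadLocalRandom r = ThreadLocalRandom.current();
        long time = System.currentTimeMillis() / 1000;
        long p1 = ((Integer.MAX_VALUE - time) << 32) + r.nextInt();
        long p2 = r.nextLong();
        // The "_" prefix keeps the directory hidden from queries while the recursive delete runs.
        return "_" + table + "_" + p1 + "_" + p2;
      }

      public static void main(String[] args) {
        System.out.println(hiddenNameFor("simple_json"));  // e.g. _simple_json_<p1>_<p2>
      }
    }
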
http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
index 56a1f00..446e12a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
@@ -223,7 +223,7 @@ public class ParquetFormatPlugin implements FormatPlugin{
           if (files.length == 0) {
             return false;
           }
-          return super.isReadable(fs, files[0]);
+          return super.isFileReadable(fs, files[0]);
         }
       } catch (IOException e) {
         logger.info("Failure while attempting to check for Parquet metadata file.", e);

http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java b/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java
new file mode 100644
index 0000000..4f8fe1a
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java
@@ -0,0 +1,174 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill;
+
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.hadoop.fs.Path;
+import org.junit.Test;
+import org.junit.Assert;
+
+
+public class TestDropTable extends PlanTestBase {
+
+  private static final String CREATE_SIMPLE_TABLE = "create table %s as select 1 from cp.`employee.json`";
+  private static final String DROP_TABLE = "drop table %s";
+  private static final String BACK_TICK = "`";
+
+  @Test
+  public void testDropJsonTable() throws Exception {
+    test("use dfs_test.tmp");
+    test("alter session set `store.format` = 'json'");
+
+    final String tableName = "simple_json";
+    // create a json table
+    test(String.format(CREATE_SIMPLE_TABLE, tableName));
+
+    // drop the table
+    final String dropSql = String.format(DROP_TABLE, tableName);
+    testBuilder()
+        .sqlQuery(dropSql)
+        .unOrdered()
+        .baselineColumns("ok", "summary")
+        .baselineValues(true, String.format("Table [%s] dropped", tableName))
+        .go();
+  }
+
+  @Test
+  public void testDropParquetTable() throws Exception {
+    test("use dfs_test.tmp");
+    final String tableName = "simple_parquet";
+
+    // create a parquet table
+    test(String.format(CREATE_SIMPLE_TABLE, tableName));
+
+    // drop the table
+    final String dropSql = String.format(DROP_TABLE, tableName);
+    testBuilder()
+        .sqlQuery(dropSql)
+        .unOrdered()
+        .baselineColumns("ok", "summary")
+        .baselineValues(true, String.format("Table [%s] dropped", tableName))
+        .go();
+  }
+
+  @Test
+  public void testDropTextTable() throws Exception {
+    test("use dfs_test.tmp");
+
+    test("alter session set `store.format` = 'csv'");
+    final String csvTable = "simple_csv";
+
+    // create a csv table
+    test(String.format(CREATE_SIMPLE_TABLE, csvTable));
+
+    // drop the table
+    String dropSql = String.format(DROP_TABLE, csvTable);
+    testBuilder()
+        .sqlQuery(dropSql)
+        .unOrdered()
+        .baselineColumns("ok", "summary")
+        .baselineValues(true, String.format("Table [%s] dropped", csvTable))
+        .go();
+
+    test("alter session set `store.format` = 'psv'");
+    final String psvTable = "simple_psv";
+
+    // create a psv table
+    test(String.format(CREATE_SIMPLE_TABLE, psvTable));
+
+    // drop the table
+    dropSql = String.format(DROP_TABLE, psvTable);
+    testBuilder()
+        .sqlQuery(dropSql)
+        .unOrdered()
+        .baselineColumns("ok", "summary")
+        .baselineValues(true, String.format("Table [%s] dropped", psvTable))
+        .go();
+
+    test("alter session set `store.format` = 'tsv'");
+    final String tsvTable = "simple_tsv";
+
+    // create a tsv table
+    test(String.format(CREATE_SIMPLE_TABLE, tsvTable));
+
+    // drop the table
+    dropSql = String.format(DROP_TABLE, tsvTable);
+    testBuilder()
+        .sqlQuery(dropSql)
+        .unOrdered()
+        .baselineColumns("ok", "summary")
+        .baselineValues(true, String.format("Table [%s] dropped", tsvTable))
+        .go();
+  }
+
+  @Test
+  public void testNonHomogenousDrop() throws Exception {
+    test("use dfs_test.tmp");
+    final String tableName = "homogenous_table";
+
+    // create a parquet table
+    test(String.format(CREATE_SIMPLE_TABLE, tableName));
+
+    // create a json table within the same directory
+    test("alter session set `store.format` = 'json'");
+    final String nestedJsonTable = tableName + Path.SEPARATOR + "json_table";
+    test(String.format(CREATE_SIMPLE_TABLE, BACK_TICK + nestedJsonTable + BACK_TICK));
+
+    test("show files from " + tableName);
+
+    boolean dropFailed = false;
+    // this should fail, because the directory contains non-homogeneous files
+    try {
+      test(String.format(DROP_TABLE, tableName));
+    } catch (UserException e) {
+      Assert.assertTrue(e.getMessage().contains("VALIDATION ERROR"));
+      dropFailed = true;
+    }
+
+    Assert.assertTrue("Dropping of non-homogeneous table should have failed", dropFailed);
+
+    // drop the individual json table
+    testBuilder()
+        .sqlQuery(String.format(DROP_TABLE, BACK_TICK + nestedJsonTable + BACK_TICK))
+        .unOrdered()
+        .baselineColumns("ok", "summary")
+        .baselineValues(true, String.format("Table [%s] dropped", nestedJsonTable))
+        .go();
+
+    // Now drop should succeed
+    testBuilder()
+        .sqlQuery(String.format(DROP_TABLE, tableName))
+        .unOrdered()
+        .baselineColumns("ok", "summary")
+        .baselineValues(true, String.format("Table [%s] dropped", tableName))
+        .go();
+  }
+
+  @Test
+  public void testDropOnImmutableSchema() throws Exception {
+    boolean dropFailed = false;
+    try {
+      test("drop table dfs.`/tmp`");
+    } catch (UserException e) {
+      Assert.assertTrue(e.getMessage().contains("PARSE ERROR"));
+      dropFailed = true;
+    }
+
+    Assert.assertTrue("Dropping a table on an immutable schema should have failed", dropFailed);
+  }
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/2a191847/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
index ef3765e..998e35d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
@@ -19,6 +19,7 @@ package org.apache.drill.exec.impersonation;
 
 import com.google.common.base.Joiner;
 import com.google.common.collect.Maps;
+import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.exec.store.dfs.WorkspaceConfig;
 import org.apache.hadoop.fs.FileSystem;
@@ -26,6 +27,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.AfterClass;
+import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -36,7 +38,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertThat;
 
 /**
- * Tests impersonation on metadata related queries as SHOW FILES, SHOW TABLES, CREATE VIEW and CREATE TABLE
+ * Tests impersonation on metadata related queries such as SHOW FILES, SHOW TABLES, CREATE VIEW, CREATE TABLE and DROP TABLE
  */
 public class TestImpersonationMetadata extends BaseTestImpersonation {
   private static final String user1 = "drillTestUser1";
@@ -83,9 +85,49 @@ public class TestImpersonationMetadata extends BaseTestImpersonation {
     // Create /drillTestGrp1_700 directory with permissions 700 (owned by user1)
     createAndAddWorkspace("drillTestGrp1_700", "/drillTestGrp1_700", (short)0700, user1, group1, workspaces);
 
+    // create /user2_workspace1 with 775 permissions (owned by user2)
+    createAndAddWorkspace("user2_workspace1", "/user2_workspace1", (short)0775, user2, group1, workspaces);
+
+    // create /user2_workspace2 with 755 permissions (owned by user2)
+    createAndAddWorkspace("user2_workspace2", "/user2_workspace2", (short)0755, user2, group1, workspaces);
+
     return workspaces;
   }
 
+  @Test
+  public void testDropTable() throws Exception {
+
+    // create tables as user2
+    updateClient(user2);
+    test(String.format("use `%s.user2_workspace1`", MINIDFS_STORAGE_PLUGIN_NAME));
+    // create a table that can be dropped by another user in a different group
+    test("create table parquet_table_775 as select * from cp.`employee.json`");
+
+    // create a table that cannot be dropped by another user
+    test(String.format("use `%s.user2_workspace2`", MINIDFS_STORAGE_PLUGIN_NAME));
+    test("create table parquet_table_700 as select * from cp.`employee.json`");
+
+    // Drop tables as user1
+    updateClient(user1);
+    test(String.format("use `%s.user2_workspace1`", MINIDFS_STORAGE_PLUGIN_NAME));
+    testBuilder()
+        .sqlQuery("drop table parquet_table_775")
+        .unOrdered()
+        .baselineColumns("ok", "summary")
+        .baselineValues(true, String.format("Table [%s] dropped", "parquet_table_775"))
+        .go();
+
+    test(String.format("use `%s.user2_workspace2`", MINIDFS_STORAGE_PLUGIN_NAME));
+    boolean dropFailed = false;
+    try {
+      test("drop table parquet_table_700");
+    } catch (UserException e) {
+      Assert.assertTrue(e.getMessage().contains("PERMISSION ERROR"));
+      dropFailed = true;
+    }
+    Assert.assertTrue("Permission checking failed during drop table", dropFailed);
+  }
+
   @Test // DRILL-3037
   public void testImpersonatingProcessUser() throws Exception {
     updateClient(processUser);