You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by kg...@apache.org on 2018/07/27 11:39:11 UTC

hive git commit: HIVE-20184: Backport HIVE-20085 to branch-3 (Daniel Voros via Zoltan Haindrich)

Repository: hive
Updated Branches:
  refs/heads/branch-3 8f4bf93a3 -> 96b5ceb62


HIVE-20184: Backport HIVE-20085 to branch-3 (Daniel Voros via Zoltan Haindrich)

Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/96b5ceb6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/96b5ceb6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/96b5ceb6

Branch: refs/heads/branch-3
Commit: 96b5ceb62aaead46830a7440cf12cdd0979b0bba
Parents: 8f4bf93
Author: Daniel Voros <da...@gmail.com>
Authored: Fri Jul 27 13:32:53 2018 +0200
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Fri Jul 27 13:32:53 2018 +0200

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  2 +
 .../hadoop/hive/druid/DruidStorageHandler.java  | 71 ++++++--------------
 .../hadoop/hive/druid/serde/DruidSerDe.java     |  3 +-
 .../hive/druid/TestDruidStorageHandler.java     |  4 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  2 +-
 ql/src/test/queries/clientnegative/ctas.q       |  2 +-
 .../queries/clientnegative/druid_datasource2.q  |  1 +
 .../queries/clientpositive/druid_timestamptz.q  |  5 +-
 .../queries/clientpositive/druid_timestamptz2.q | 14 ++--
 .../clientpositive/druidkafkamini_basic.q       |  2 +-
 .../druidmini_dynamic_partition.q               | 10 +--
 .../clientpositive/druidmini_expressions.q      |  3 +-
 .../clientpositive/druidmini_extractTime.q      |  6 +-
 .../clientpositive/druidmini_floorTime.q        |  4 +-
 .../queries/clientpositive/druidmini_joins.q    |  5 +-
 .../queries/clientpositive/druidmini_masking.q  |  4 +-
 .../queries/clientpositive/druidmini_test1.q    |  5 +-
 .../clientpositive/druidmini_test_alter.q       |  5 +-
 .../clientpositive/druidmini_test_insert.q      |  8 ++-
 .../clientnegative/druid_datasource2.q.out      |  2 +-
 .../druid/druid_timestamptz.q.out               |  4 +-
 .../druid/druid_timestamptz2.q.out              | 20 +++---
 .../druid/druidkafkamini_basic.q.out            |  6 +-
 .../druid/druidmini_dynamic_partition.q.out     | 18 ++---
 .../druid/druidmini_expressions.q.out           |  6 +-
 .../druid/druidmini_extractTime.q.out           |  8 +--
 .../druid/druidmini_floorTime.q.out             |  4 +-
 .../clientpositive/druid/druidmini_joins.q.out  |  4 +-
 .../druid/druidmini_masking.q.out               |  4 +-
 .../clientpositive/druid/druidmini_test1.q.out  |  4 +-
 .../druid/druidmini_test_alter.q.out            |  4 +-
 .../druid/druidmini_test_insert.q.out           | 12 ++--
 .../hive/metastore/utils/MetaStoreUtils.java    | 19 ++++++
 33 files changed, 143 insertions(+), 128 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 6c2d0ca..89171ef 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2966,6 +2966,8 @@ public class HiveConf extends Configuration {
     HIVE_INSERT_INTO_MULTILEVEL_DIRS("hive.insert.into.multilevel.dirs", false,
         "Where to insert into multilevel directories like\n" +
         "\"insert directory '/HIVEFT25686/chinna/' from table\""),
+    HIVE_CTAS_EXTERNAL_TABLES("hive.ctas.external.tables", true,
+            "whether CTAS for external tables is allowed"),
     HIVE_INSERT_INTO_EXTERNAL_TABLES("hive.insert.into.external.tables", true,
         "whether insert into external tables is allowed"),
     HIVE_TEMPORARY_TABLE_STORAGE(

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
index 57e4800..53d93e1 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
@@ -215,12 +215,10 @@ public class DruidStorageHandler extends DefaultHiveMetaHook implements HiveStor
 
   @Override
   public void preCreateTable(Table table) throws MetaException {
-    // Do safety checks
-    if (MetaStoreUtils.isExternalTable(table) && !StringUtils
-            .isEmpty(table.getSd().getLocation())) {
+    if(!StringUtils
+        .isEmpty(table.getSd().getLocation())) {
       throw new MetaException("LOCATION may not be specified for Druid");
     }
-
     if (table.getPartitionKeysSize() != 0) {
       throw new MetaException("PARTITIONED BY may not be specified for Druid");
     }
@@ -228,25 +226,17 @@ public class DruidStorageHandler extends DefaultHiveMetaHook implements HiveStor
       throw new MetaException("CLUSTERED BY may not be specified for Druid");
     }
     String dataSourceName = table.getParameters().get(Constants.DRUID_DATA_SOURCE);
-    if (MetaStoreUtils.isExternalTable(table)) {
-      if (dataSourceName == null) {
-        throw new MetaException(
-            String.format("Datasource name should be specified using [%s] for external tables "
-                + "using Druid", Constants.DRUID_DATA_SOURCE));
-      }
-      // If it is an external table, we are done
+    if(dataSourceName != null){
+      // Already Existing datasource in Druid.
       return;
     }
-    // It is not an external table
-    // We need to check that datasource was not specified by user
-    if (dataSourceName != null) {
-      throw new MetaException(
-          String.format("Datasource name cannot be specified using [%s] for managed tables "
-              + "using Druid", Constants.DRUID_DATA_SOURCE));
-    }
-    // We need to check the Druid metadata
+
+    // create dataSourceName based on Hive Table name
     dataSourceName = Warehouse.getQualifiedName(table);
     try {
+      // NOTE: This just creates the druid_segments table in the Druid metastore.
+      // This is needed for the case when hive is started before any of druid services
+      // and druid_segments table has not been created yet.
       getConnector().createSegmentTable();
     } catch (Exception e) {
       LOG.error("Exception while trying to create druid segments table", e);
@@ -255,6 +245,7 @@ public class DruidStorageHandler extends DefaultHiveMetaHook implements HiveStor
     Collection<String> existingDataSources = DruidStorageHandlerUtils
             .getAllDataSourceNames(getConnector(), getDruidMetadataStorageTablesConfig());
     LOG.debug("pre-create data source with name {}", dataSourceName);
+    // Check for existence of the datasource we are going to create in druid_segments table.
     if (existingDataSources.contains(dataSourceName)) {
       throw new MetaException(String.format("Data source [%s] already existing", dataSourceName));
     }
@@ -263,38 +254,17 @@ public class DruidStorageHandler extends DefaultHiveMetaHook implements HiveStor
 
   @Override
   public void rollbackCreateTable(Table table) {
-    if (MetaStoreUtils.isExternalTable(table)) {
-      return;
-    }
-    final Path segmentDescriptorDir = getSegmentDescriptorDir();
-    try {
-      List<DataSegment> dataSegmentList = DruidStorageHandlerUtils
-              .getCreatedSegments(segmentDescriptorDir, getConf());
-      for (DataSegment dataSegment : dataSegmentList) {
-        try {
-          deleteSegment(dataSegment);
-        } catch (SegmentLoadingException e) {
-          LOG.error(String.format("Error while trying to clean the segment [%s]", dataSegment), e);
-        }
-      }
-    } catch (IOException e) {
-      LOG.error("Exception while rollback", e);
-      throw Throwables.propagate(e);
-    } finally {
-      cleanWorkingDir();
-    }
+    cleanWorkingDir();
   }
 
   @Override
   public void commitCreateTable(Table table) throws MetaException {
-    if (MetaStoreUtils.isExternalTable(table)) {
-      // For external tables, we do not need to do anything else
-      return;
-    }
     if(isKafkaStreamingTable(table)){
       updateKafkaIngestion(table);
     }
-    this.commitInsertTable(table, true);
+    // For CTAS queries when user has explicitly specified the datasource.
+    // We will append the data to the existing druid datasource.
+    this.commitInsertTable(table, false);
   }
 
   private void updateKafkaIngestion(Table table){
@@ -762,9 +732,6 @@ public class DruidStorageHandler extends DefaultHiveMetaHook implements HiveStor
 
   @Override
   public void commitDropTable(Table table, boolean deleteData) {
-    if (MetaStoreUtils.isExternalTable(table)) {
-      return;
-    }
     if(isKafkaStreamingTable(table)) {
       // Stop Kafka Ingestion first
       final String overlordAddress = Preconditions.checkNotNull(HiveConf
@@ -775,12 +742,15 @@ public class DruidStorageHandler extends DefaultHiveMetaHook implements HiveStor
               "Druid Datasource name is null");
       stopKafkaIngestion(overlordAddress, dataSourceName);
     }
+
     String dataSourceName = Preconditions
             .checkNotNull(table.getParameters().get(Constants.DRUID_DATA_SOURCE),
                     "DataSource name is null !"
             );
-
-    if (deleteData == true) {
+    // TODO: Move MetaStoreUtils.isExternalTablePurge(table) calls to a common place for all StorageHandlers
+    // deleteData flag passed down to StorageHandler should be true only if
+    // MetaStoreUtils.isExternalTablePurge(table) returns true.
+    if (deleteData == true && MetaStoreUtils.isExternalTablePurge(table)) {
       LOG.info("Dropping with purge all the data for data source {}", dataSourceName);
       List<DataSegment> dataSegmentList = DruidStorageHandlerUtils
               .getDataSegmentList(getConnector(), getDruidMetadataStorageTablesConfig(), dataSourceName);
@@ -806,9 +776,6 @@ public class DruidStorageHandler extends DefaultHiveMetaHook implements HiveStor
   public void commitInsertTable(Table table, boolean overwrite) throws MetaException {
     LOG.debug("commit insert into table {} overwrite {}", table.getTableName(),
             overwrite);
-    if (MetaStoreUtils.isExternalTable(table)) {
-      throw new MetaException("Cannot insert data into external table backed by Druid");
-    }
     try {
       // Check if there segments to load
       final Path segmentDescriptorDir = getSegmentDescriptorDir();

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
index f0e12a2..df9049e 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
@@ -483,7 +483,6 @@ import static org.joda.time.format.ISODateTimeFormat.dateOptionalTimeParser;
   }
 
   @Override public boolean shouldStoreFieldsInMetastore(Map<String, String> tableParams) {
-    // If Druid table is not an external table store the schema in metadata store.
-    return !MetaStoreUtils.isExternal(tableParams);
+    return true;
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java
index b96a13f..510330d 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java
@@ -106,7 +106,9 @@ public class TestDruidStorageHandler {
   public void before() throws Throwable {
     tableWorkingPath = temporaryFolder.newFolder().getAbsolutePath();
     segmentsTable = derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable();
-    Mockito.when(tableMock.getParameters()).thenReturn(new HashMap<>());
+    HashMap<String, String> params = new HashMap<>();
+    params.put("external.table.purge", "TRUE");
+    Mockito.when(tableMock.getParameters()).thenReturn(params);
     Mockito.when(tableMock.getPartitionKeysSize()).thenReturn(0);
     StorageDescriptor storageDes = Mockito.mock(StorageDescriptor.class);
     Mockito.when(storageDes.getBucketColsSize()).thenReturn(0);

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 2bd6c5f..a804c15 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -13070,7 +13070,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
             throw new SemanticException(ErrorMsg.CTAS_PARCOL_COEXISTENCE.getMsg());
           }
         }
-        if (isExt) {
+        if (!conf.getBoolVar(ConfVars.HIVE_CTAS_EXTERNAL_TABLES) && isExt) {
           throw new SemanticException(ErrorMsg.CTAS_EXTTBL_COEXISTENCE.getMsg());
         }
         command_type = CTAS;

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientnegative/ctas.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/ctas.q b/ql/src/test/queries/clientnegative/ctas.q
index 507a7a7..1d0afaf 100644
--- a/ql/src/test/queries/clientnegative/ctas.q
+++ b/ql/src/test/queries/clientnegative/ctas.q
@@ -1,5 +1,5 @@
 
-
+SET hive.ctas.external.tables=false;
 create external table nzhang_ctas4 as select key, value from src;
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientnegative/druid_datasource2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/druid_datasource2.q b/ql/src/test/queries/clientnegative/druid_datasource2.q
index cc20931..2e7400f 100644
--- a/ql/src/test/queries/clientnegative/druid_datasource2.q
+++ b/ql/src/test/queries/clientnegative/druid_datasource2.q
@@ -1,3 +1,4 @@
+SET metastore.strict.managed.tables=true;
 CREATE TABLE druid_table_1
 STORED BY 'org.apache.hadoop.hive.druid.QTestDruidStorageHandler'
 TBLPROPERTIES ("property" = "localhost", "druid.datasource" = "mydatasource");

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientpositive/druid_timestamptz.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druid_timestamptz.q b/ql/src/test/queries/clientpositive/druid_timestamptz.q
index 4830044..605d240 100644
--- a/ql/src/test/queries/clientpositive/druid_timestamptz.q
+++ b/ql/src/test/queries/clientpositive/druid_timestamptz.q
@@ -1,9 +1,10 @@
 set hive.fetch.task.conversion=more;
-
+SET hive.ctas.external.tables=true;
+SET hive.external.table.purge.default = true;
 
 drop table tstz1_n0;
 
-create table tstz1_n0(`__time` timestamp with local time zone, n string, v integer)
+create external table tstz1_n0(`__time` timestamp with local time zone, n string, v integer)
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientpositive/druid_timestamptz2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druid_timestamptz2.q b/ql/src/test/queries/clientpositive/druid_timestamptz2.q
index 8f573c8..8b2c092 100644
--- a/ql/src/test/queries/clientpositive/druid_timestamptz2.q
+++ b/ql/src/test/queries/clientpositive/druid_timestamptz2.q
@@ -1,9 +1,9 @@
+SET hive.ctas.external.tables=true;
+SET hive.external.table.purge.default = true;
 CREATE database druid_test_dst;
 use druid_test_dst;
 
-
-
-create table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double);
+create external table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double);
 insert into test_base_table values ('2015-03-08 00:00:00', 'i1-start', 4);
 insert into test_base_table values ('2015-03-08 23:59:59', 'i1-end', 1);
 insert into test_base_table values ('2015-03-09 00:00:00', 'i2-start', 4);
@@ -11,7 +11,7 @@ insert into test_base_table values ('2015-03-09 23:59:59', 'i2-end', 1);
 insert into test_base_table values ('2015-03-10 00:00:00', 'i3-start', 2);
 insert into test_base_table values ('2015-03-10 23:59:59', 'i3-end', 2);
 
-CREATE TABLE druid_test_table_1
+CREATE EXTERNAL TABLE druid_test_table_1
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 AS
@@ -20,7 +20,7 @@ FROM druid_test_dst.test_base_table;
 
 select * FROM druid_test_table_1;
 
-CREATE TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
+CREATE EXTERNAL TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY");
 
@@ -36,7 +36,7 @@ select * FROM druid_test_table_2;
 
 SET TIME ZONE UTC;
 
-CREATE TABLE druid_test_table_utc
+CREATE EXTERNAL TABLE druid_test_table_utc
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 AS
@@ -45,7 +45,7 @@ FROM druid_test_dst.test_base_table;
 
 select * FROM druid_test_table_utc;
 
-CREATE TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
+CREATE EXTERNAL TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientpositive/druidkafkamini_basic.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidkafkamini_basic.q b/ql/src/test/queries/clientpositive/druidkafkamini_basic.q
index 814890a..4cdabf2 100644
--- a/ql/src/test/queries/clientpositive/druidkafkamini_basic.q
+++ b/ql/src/test/queries/clientpositive/druidkafkamini_basic.q
@@ -1,5 +1,5 @@
 SET hive.vectorized.execution.enabled=false;
-CREATE TABLE druid_kafka_test(`__time` timestamp, page string, `user` string, language string, added int, deleted int)
+CREATE EXTERNAL TABLE druid_kafka_test(`__time` timestamp, page string, `user` string, language string, added int, deleted int)
         STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
         TBLPROPERTIES (
         "druid.segment.granularity" = "MONTH",

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientpositive/druidmini_dynamic_partition.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidmini_dynamic_partition.q b/ql/src/test/queries/clientpositive/druidmini_dynamic_partition.q
index 4443af2..de42900 100644
--- a/ql/src/test/queries/clientpositive/druidmini_dynamic_partition.q
+++ b/ql/src/test/queries/clientpositive/druidmini_dynamic_partition.q
@@ -1,5 +1,7 @@
 SET hive.vectorized.execution.enabled=false;
-CREATE TABLE druid_partitioned_table_0
+SET hive.ctas.external.tables=true;
+SET hive.external.table.purge.default = true;
+CREATE EXTERNAL TABLE druid_partitioned_table_0
         STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
         TBLPROPERTIES (
         "druid.segment.granularity" = "HOUR",
@@ -20,7 +22,7 @@ CREATE TABLE druid_partitioned_table_0
           cboolean2
           FROM alltypesorc where ctimestamp1 IS NOT NULL;
 
-EXPLAIN CREATE TABLE druid_partitioned_table
+EXPLAIN CREATE EXTERNAL TABLE druid_partitioned_table
         STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
         TBLPROPERTIES (
         "druid.segment.granularity" = "HOUR",
@@ -43,7 +45,7 @@ EXPLAIN CREATE TABLE druid_partitioned_table
 
 
 
-CREATE TABLE druid_partitioned_table
+CREATE EXTERNAL TABLE druid_partitioned_table
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES (
 "druid.segment.granularity" = "HOUR",
@@ -154,7 +156,7 @@ SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table ;
 
 set hive.druid.indexer.partition.size.max=10;
 
-CREATE TABLE druid_max_size_partition
+CREATE EXTERNAL TABLE druid_max_size_partition
         STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
         TBLPROPERTIES (
         "druid.segment.granularity" = "HOUR",

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientpositive/druidmini_expressions.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidmini_expressions.q b/ql/src/test/queries/clientpositive/druidmini_expressions.q
index fad8f73..7857973 100644
--- a/ql/src/test/queries/clientpositive/druidmini_expressions.q
+++ b/ql/src/test/queries/clientpositive/druidmini_expressions.q
@@ -1,7 +1,8 @@
 --! qt:dataset:alltypesorc
+SET hive.ctas.external.tables=true;
 
 SET hive.vectorized.execution.enabled=false;
-CREATE TABLE druid_table_n0
+CREATE EXTERNAL TABLE druid_table_n0
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientpositive/druidmini_extractTime.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidmini_extractTime.q b/ql/src/test/queries/clientpositive/druidmini_extractTime.q
index 9541361..0dbcd25 100644
--- a/ql/src/test/queries/clientpositive/druidmini_extractTime.q
+++ b/ql/src/test/queries/clientpositive/druidmini_extractTime.q
@@ -1,7 +1,9 @@
 --! qt:dataset:alltypesorc
 
 SET hive.vectorized.execution.enabled=false;
-CREATE TABLE druid_table
+SET hive.ctas.external.tables=true;
+SET hive.external.table.purge.default = true;
+CREATE EXTERNAL TABLE druid_table
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS
@@ -181,7 +183,7 @@ SELECT CAST(`__time` AS DATE) AS `x_date` FROM druid_table ORDER BY `x_date` LIM
 create table test_extract_from_string_base_table(`timecolumn` timestamp, `date_c` string, `timestamp_c` string,  `metric_c` double);
 insert into test_extract_from_string_base_table values ('2015-03-08 00:00:00', '2015-03-10', '2015-03-08 05:30:20', 5.0);
 
-CREATE TABLE druid_test_extract_from_string_table
+CREATE EXTERNAL TABLE druid_test_extract_from_string_table
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 AS select

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientpositive/druidmini_floorTime.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidmini_floorTime.q b/ql/src/test/queries/clientpositive/druidmini_floorTime.q
index a526413..3b5334f 100644
--- a/ql/src/test/queries/clientpositive/druidmini_floorTime.q
+++ b/ql/src/test/queries/clientpositive/druidmini_floorTime.q
@@ -1,7 +1,9 @@
 --! qt:dataset:alltypesorc
 
 SET hive.vectorized.execution.enabled=false;
-CREATE TABLE druid_table_n2
+SET hive.ctas.external.tables=true;
+SET hive.external.table.purge.default = true;
+CREATE EXTERNAL TABLE druid_table_n2
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientpositive/druidmini_joins.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidmini_joins.q b/ql/src/test/queries/clientpositive/druidmini_joins.q
index 720127e..1f92a0d 100644
--- a/ql/src/test/queries/clientpositive/druidmini_joins.q
+++ b/ql/src/test/queries/clientpositive/druidmini_joins.q
@@ -1,11 +1,12 @@
 SET hive.vectorized.execution.enabled=false;
 SET hive.explain.user=false;
-
+SET hive.ctas.external.tables=true;
+SET hive.external.table.purge.default = true;
 --SET hive.execution.mode=llap;
 
 DROP TABLE druid_table_with_nulls;
 
-CREATE TABLE druid_table_with_nulls
+CREATE EXTERNAL TABLE druid_table_with_nulls
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
 AS

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientpositive/druidmini_masking.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidmini_masking.q b/ql/src/test/queries/clientpositive/druidmini_masking.q
index f0f2c0c..824611f 100644
--- a/ql/src/test/queries/clientpositive/druidmini_masking.q
+++ b/ql/src/test/queries/clientpositive/druidmini_masking.q
@@ -1,7 +1,9 @@
 set hive.mapred.mode=nonstrict;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
+SET hive.ctas.external.tables=true;
+SET hive.external.table.purge.default = true;
 
-CREATE TABLE masking_test_druid
+CREATE EXTERNAL TABLE masking_test_druid
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
 AS

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientpositive/druidmini_test1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidmini_test1.q b/ql/src/test/queries/clientpositive/druidmini_test1.q
index f93665e..30abf3c 100644
--- a/ql/src/test/queries/clientpositive/druidmini_test1.q
+++ b/ql/src/test/queries/clientpositive/druidmini_test1.q
@@ -1,5 +1,8 @@
 --! qt:dataset:alltypesorc
-CREATE TABLE druid_table_n3
+SET hive.ctas.external.tables=true;
+SET hive.external.table.purge.default = true;
+
+CREATE EXTERNAL TABLE druid_table_n3
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientpositive/druidmini_test_alter.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidmini_test_alter.q b/ql/src/test/queries/clientpositive/druidmini_test_alter.q
index e19a006..0d8623d 100644
--- a/ql/src/test/queries/clientpositive/druidmini_test_alter.q
+++ b/ql/src/test/queries/clientpositive/druidmini_test_alter.q
@@ -1,4 +1,7 @@
-CREATE TABLE druid_alltypesorc_n0
+--! qt:dataset:alltypesorc
+SET hive.ctas.external.tables=true;
+SET hive.external.table.purge.default = true;
+CREATE EXTERNAL TABLE druid_alltypesorc_n0
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/queries/clientpositive/druidmini_test_insert.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidmini_test_insert.q b/ql/src/test/queries/clientpositive/druidmini_test_insert.q
index cdd4c23..dd1ba1a 100644
--- a/ql/src/test/queries/clientpositive/druidmini_test_insert.q
+++ b/ql/src/test/queries/clientpositive/druidmini_test_insert.q
@@ -1,4 +1,6 @@
-CREATE TABLE druid_alltypesorc
+SET hive.ctas.external.tables=true;
+SET hive.external.table.purge.default = true;
+CREATE EXTERNAL TABLE druid_alltypesorc
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS
@@ -62,7 +64,7 @@ DROP TABLE druid_alltypesorc;
  insert into test_table_n9 values ('2015-01-08 00:00:00', 'i1-start', 4);
  insert into test_table_n9 values ('2015-01-08 23:59:59', 'i1-end', 1);
  
- CREATE TABLE druid_table_n1 (`__time` timestamp with local time zone, `userid` string, `num_l` float)
+ CREATE EXTERNAL TABLE druid_table_n1 (`__time` timestamp with local time zone, `userid` string, `num_l` float)
  STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
  TBLPROPERTIES ("druid.segment.granularity" = "DAY");
  
@@ -89,7 +91,7 @@ insert into test_base_table values ('2015-03-09 23:59:59', 'i2-end', 1);
 insert into test_base_table values ('2015-03-10 00:00:00', 'i3-start', 2);
 insert into test_base_table values ('2015-03-10 23:59:59', 'i3-end', 2);
 
-CREATE TABLE druid_test_table_n9
+CREATE EXTERNAL TABLE druid_test_table_n9
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 AS

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/results/clientnegative/druid_datasource2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/druid_datasource2.q.out b/ql/src/test/results/clientnegative/druid_datasource2.q.out
index 1b74b06..2f783fe 100644
--- a/ql/src/test/results/clientnegative/druid_datasource2.q.out
+++ b/ql/src/test/results/clientnegative/druid_datasource2.q.out
@@ -4,4 +4,4 @@ TBLPROPERTIES ("property" = "localhost", "druid.datasource" = "mydatasource")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_table_1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Datasource name cannot be specified using [druid.datasource] for managed tables using Druid)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Table default.druid_table_1 failed strict managed table checks due to the following reason: Table is marked as a managed table but is not transactional.)

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/results/clientpositive/druid/druid_timestamptz.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druid_timestamptz.q.out b/ql/src/test/results/clientpositive/druid/druid_timestamptz.q.out
index ed1a6ea..fa9583a 100644
--- a/ql/src/test/results/clientpositive/druid/druid_timestamptz.q.out
+++ b/ql/src/test/results/clientpositive/druid/druid_timestamptz.q.out
@@ -2,13 +2,13 @@ PREHOOK: query: drop table tstz1_n0
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table tstz1_n0
 POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table tstz1_n0(`__time` timestamp with local time zone, n string, v integer)
+PREHOOK: query: create external table tstz1_n0(`__time` timestamp with local time zone, n string, v integer)
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@tstz1_n0
-POSTHOOK: query: create table tstz1_n0(`__time` timestamp with local time zone, n string, v integer)
+POSTHOOK: query: create external table tstz1_n0(`__time` timestamp with local time zone, n string, v integer)
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
 POSTHOOK: type: CREATETABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out b/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out
index c71a435..b62095a 100644
--- a/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out
+++ b/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out
@@ -10,11 +10,11 @@ PREHOOK: Input: database:druid_test_dst
 POSTHOOK: query: use druid_test_dst
 POSTHOOK: type: SWITCHDATABASE
 POSTHOOK: Input: database:druid_test_dst
-PREHOOK: query: create table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double)
+PREHOOK: query: create external table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:druid_test_dst
 PREHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: query: create table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double)
+POSTHOOK: query: create external table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:druid_test_dst
 POSTHOOK: Output: druid_test_dst@test_base_table
@@ -84,7 +84,7 @@ POSTHOOK: Output: druid_test_dst@test_base_table
 POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT []
 POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
 POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
-PREHOOK: query: CREATE TABLE druid_test_table_1
+PREHOOK: query: CREATE EXTERNAL TABLE druid_test_table_1
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 AS
@@ -94,7 +94,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: druid_test_dst@test_base_table
 PREHOOK: Output: database:druid_test_dst
 PREHOOK: Output: druid_test_dst@druid_test_table_1
-POSTHOOK: query: CREATE TABLE druid_test_table_1
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_test_table_1
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 AS
@@ -121,13 +121,13 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 2015-03-09 23:59:59.0 US/Pacific	i2-end	1.0
 2015-03-10 00:00:00.0 US/Pacific	i3-start	2.0
 2015-03-10 23:59:59.0 US/Pacific	i3-end	2.0
-PREHOOK: query: CREATE TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
+PREHOOK: query: CREATE EXTERNAL TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:druid_test_dst
 PREHOOK: Output: druid_test_dst@druid_test_table_2
-POSTHOOK: query: CREATE TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 POSTHOOK: type: CREATETABLE
@@ -195,7 +195,7 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 2015-03-09 23:59:59.0 US/Pacific	i2-end	1.0
 2015-03-10 00:00:00.0 US/Pacific	i3-start	2.0
 2015-03-10 23:59:59.0 US/Pacific	i3-end	2.0
-PREHOOK: query: CREATE TABLE druid_test_table_utc
+PREHOOK: query: CREATE EXTERNAL TABLE druid_test_table_utc
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 AS
@@ -205,7 +205,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: druid_test_dst@test_base_table
 PREHOOK: Output: database:druid_test_dst
 PREHOOK: Output: druid_test_dst@druid_test_table_utc
-POSTHOOK: query: CREATE TABLE druid_test_table_utc
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_test_table_utc
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 AS
@@ -232,13 +232,13 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 2015-03-09 23:59:59.0 UTC	i2-end	1.0
 2015-03-10 00:00:00.0 UTC	i3-start	2.0
 2015-03-10 23:59:59.0 UTC	i3-end	2.0
-PREHOOK: query: CREATE TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
+PREHOOK: query: CREATE EXTERNAL TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:druid_test_dst
 PREHOOK: Output: druid_test_dst@druid_test_table_utc2
-POSTHOOK: query: CREATE TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 POSTHOOK: type: CREATETABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/results/clientpositive/druid/druidkafkamini_basic.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidkafkamini_basic.q.out b/ql/src/test/results/clientpositive/druid/druidkafkamini_basic.q.out
index 0743974..ffd9279 100644
--- a/ql/src/test/results/clientpositive/druid/druidkafkamini_basic.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidkafkamini_basic.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: CREATE TABLE druid_kafka_test(`__time` timestamp, page string, `user` string, language string, added int, deleted int)
+PREHOOK: query: CREATE EXTERNAL TABLE druid_kafka_test(`__time` timestamp, page string, `user` string, language string, added int, deleted int)
         STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
         TBLPROPERTIES (
         "druid.segment.granularity" = "MONTH",
@@ -15,7 +15,7 @@ PREHOOK: query: CREATE TABLE druid_kafka_test(`__time` timestamp, page string, `
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_kafka_test
-POSTHOOK: query: CREATE TABLE druid_kafka_test(`__time` timestamp, page string, `user` string, language string, added int, deleted int)
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_kafka_test(`__time` timestamp, page string, `user` string, language string, added int, deleted int)
         STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
         TBLPROPERTIES (
         "druid.segment.granularity" = "MONTH",
@@ -339,6 +339,7 @@ STAGE PLANS:
                   output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat
                   properties:
                     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"__time":"true","added":"true","deleted":"true","language":"true","page":"true","user":"true"}}
+                    EXTERNAL TRUE
                     bucket_count -1
                     bucketing_version 2
                     column.name.delimiter ,
@@ -379,6 +380,7 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat
                     properties:
                       COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"__time":"true","added":"true","deleted":"true","language":"true","page":"true","user":"true"}}
+                      EXTERNAL TRUE
                       bucket_count -1
                       bucketing_version 2
                       column.name.delimiter ,

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out b/ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out
index 28deb79..8b1e446 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: CREATE TABLE druid_partitioned_table_0
+PREHOOK: query: CREATE EXTERNAL TABLE druid_partitioned_table_0
         STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
         TBLPROPERTIES (
         "druid.segment.granularity" = "HOUR",
@@ -22,7 +22,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_partitioned_table_0
-POSTHOOK: query: CREATE TABLE druid_partitioned_table_0
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_partitioned_table_0
         STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
         TBLPROPERTIES (
         "druid.segment.granularity" = "HOUR",
@@ -57,7 +57,7 @@ POSTHOOK: Lineage: druid_partitioned_table_0.csmallint SIMPLE [(alltypesorc)allt
 POSTHOOK: Lineage: druid_partitioned_table_0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
 POSTHOOK: Lineage: druid_partitioned_table_0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ]
 POSTHOOK: Lineage: druid_partitioned_table_0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
-PREHOOK: query: EXPLAIN CREATE TABLE druid_partitioned_table
+PREHOOK: query: EXPLAIN CREATE EXTERNAL TABLE druid_partitioned_table
         STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
         TBLPROPERTIES (
         "druid.segment.granularity" = "HOUR",
@@ -78,7 +78,7 @@ PREHOOK: query: EXPLAIN CREATE TABLE druid_partitioned_table
           cboolean2
           FROM alltypesorc where ctimestamp1 IS NOT NULL
 PREHOOK: type: CREATETABLE_AS_SELECT
-POSTHOOK: query: EXPLAIN CREATE TABLE druid_partitioned_table
+POSTHOOK: query: EXPLAIN CREATE EXTERNAL TABLE druid_partitioned_table
         STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
         TBLPROPERTIES (
         "druid.segment.granularity" = "HOUR",
@@ -165,6 +165,8 @@ STAGE PLANS:
             druid.query.granularity MINUTE
             druid.segment.granularity HOUR
             druid.segment.targetShardsPerGranularity 6
+            external.table.purge true
+          isExternal: true
 
   Stage: Stage-3
     Stats Work
@@ -176,7 +178,7 @@ STAGE PLANS:
           hdfs directory: true
           destination: hdfs://### HDFS PATH ###
 
-PREHOOK: query: CREATE TABLE druid_partitioned_table
+PREHOOK: query: CREATE EXTERNAL TABLE druid_partitioned_table
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES (
 "druid.segment.granularity" = "HOUR",
@@ -200,7 +202,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_partitioned_table
-POSTHOOK: query: CREATE TABLE druid_partitioned_table
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_partitioned_table
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES (
 "druid.segment.granularity" = "HOUR",
@@ -567,7 +569,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@druid_partitioned_table
 POSTHOOK: Output: hdfs://### HDFS PATH ###
 1408069801800	10992545287
-PREHOOK: query: CREATE TABLE druid_max_size_partition
+PREHOOK: query: CREATE EXTERNAL TABLE druid_max_size_partition
         STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
         TBLPROPERTIES (
         "druid.segment.granularity" = "HOUR",
@@ -590,7 +592,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_max_size_partition
-POSTHOOK: query: CREATE TABLE druid_max_size_partition
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_max_size_partition
         STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
         TBLPROPERTIES (
         "druid.segment.granularity" = "HOUR",

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out b/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out
index fd77a91..e322d8f 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: CREATE TABLE druid_table_n0
+PREHOOK: query: CREATE EXTERNAL TABLE druid_table_n0
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS
@@ -18,7 +18,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_table_n0
-POSTHOOK: query: CREATE TABLE druid_table_n0
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_table_n0
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS
@@ -270,6 +270,7 @@ STAGE PLANS:
                   output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat
                   properties:
                     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+                    EXTERNAL TRUE
                     bucket_count -1
                     bucketing_version 2
                     column.name.delimiter ,
@@ -301,6 +302,7 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat
                     properties:
                       COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+                      EXTERNAL TRUE
                       bucket_count -1
                       bucketing_version 2
                       column.name.delimiter ,

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/results/clientpositive/druid/druidmini_extractTime.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_extractTime.q.out b/ql/src/test/results/clientpositive/druid/druidmini_extractTime.q.out
index 30e273b..116c326 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_extractTime.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_extractTime.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: CREATE TABLE druid_table
+PREHOOK: query: CREATE EXTERNAL TABLE druid_table
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS
@@ -18,7 +18,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_table
-POSTHOOK: query: CREATE TABLE druid_table
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_table
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS
@@ -776,7 +776,7 @@ POSTHOOK: Lineage: test_extract_from_string_base_table.date_c SCRIPT []
 POSTHOOK: Lineage: test_extract_from_string_base_table.metric_c SCRIPT []
 POSTHOOK: Lineage: test_extract_from_string_base_table.timecolumn SCRIPT []
 POSTHOOK: Lineage: test_extract_from_string_base_table.timestamp_c SCRIPT []
-PREHOOK: query: CREATE TABLE druid_test_extract_from_string_table
+PREHOOK: query: CREATE EXTERNAL TABLE druid_test_extract_from_string_table
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 AS select
@@ -786,7 +786,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@test_extract_from_string_base_table
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_test_extract_from_string_table
-POSTHOOK: query: CREATE TABLE druid_test_extract_from_string_table
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_test_extract_from_string_table
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 AS select

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out b/ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out
index c54fd93..1c9e9c6 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: CREATE TABLE druid_table_n2
+PREHOOK: query: CREATE EXTERNAL TABLE druid_table_n2
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS
@@ -18,7 +18,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_table_n2
-POSTHOOK: query: CREATE TABLE druid_table_n2
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_table_n2
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/results/clientpositive/druid/druidmini_joins.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_joins.q.out b/ql/src/test/results/clientpositive/druid/druidmini_joins.q.out
index 73a3c9f..014c7b5 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_joins.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_joins.q.out
@@ -2,7 +2,7 @@ PREHOOK: query: DROP TABLE druid_table_with_nulls
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: DROP TABLE druid_table_with_nulls
 POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE druid_table_with_nulls
+PREHOOK: query: CREATE EXTERNAL TABLE druid_table_with_nulls
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
 AS
@@ -22,7 +22,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: _dummy_database@_dummy_table
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_table_with_nulls
-POSTHOOK: query: CREATE TABLE druid_table_with_nulls
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_table_with_nulls
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
 AS

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/results/clientpositive/druid/druidmini_masking.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_masking.q.out b/ql/src/test/results/clientpositive/druid/druidmini_masking.q.out
index 1aad967..e3b50a4 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_masking.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_masking.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: CREATE TABLE masking_test_druid
+PREHOOK: query: CREATE EXTERNAL TABLE masking_test_druid
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
 AS
@@ -17,7 +17,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: _dummy_database@_dummy_table
 PREHOOK: Output: database:default
 PREHOOK: Output: default@masking_test_druid
-POSTHOOK: query: CREATE TABLE masking_test_druid
+POSTHOOK: query: CREATE EXTERNAL TABLE masking_test_druid
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
 AS

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out b/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
index 4e078aa..7f6c6b0 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: CREATE TABLE druid_table_n3
+PREHOOK: query: CREATE EXTERNAL TABLE druid_table_n3
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS
@@ -18,7 +18,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_table_n3
-POSTHOOK: query: CREATE TABLE druid_table_n3
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_table_n3
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/results/clientpositive/druid/druidmini_test_alter.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_test_alter.q.out b/ql/src/test/results/clientpositive/druid/druidmini_test_alter.q.out
index 439dc9c..a175f76 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_test_alter.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_test_alter.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: CREATE TABLE druid_alltypesorc_n0
+PREHOOK: query: CREATE EXTERNAL TABLE druid_alltypesorc_n0
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS
@@ -16,7 +16,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_alltypesorc_n0
-POSTHOOK: query: CREATE TABLE druid_alltypesorc_n0
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_alltypesorc_n0
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out b/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out
index c675aeb..0da7610 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: CREATE TABLE druid_alltypesorc
+PREHOOK: query: CREATE EXTERNAL TABLE druid_alltypesorc
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS
@@ -18,7 +18,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_alltypesorc
-POSTHOOK: query: CREATE TABLE druid_alltypesorc
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_alltypesorc
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
 AS
@@ -194,13 +194,13 @@ POSTHOOK: Output: druid_test_create_then_insert@test_table_n9
 POSTHOOK: Lineage: test_table_n9.num_l SCRIPT []
 POSTHOOK: Lineage: test_table_n9.timecolumn SCRIPT []
 POSTHOOK: Lineage: test_table_n9.userid SCRIPT []
-PREHOOK: query: CREATE TABLE druid_table_n1 (`__time` timestamp with local time zone, `userid` string, `num_l` float)
+PREHOOK: query: CREATE EXTERNAL TABLE druid_table_n1 (`__time` timestamp with local time zone, `userid` string, `num_l` float)
  STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
  TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:druid_test_create_then_insert
 PREHOOK: Output: druid_test_create_then_insert@druid_table_n1
-POSTHOOK: query: CREATE TABLE druid_table_n1 (`__time` timestamp with local time zone, `userid` string, `num_l` float)
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_table_n1 (`__time` timestamp with local time zone, `userid` string, `num_l` float)
  STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
  TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 POSTHOOK: type: CREATETABLE
@@ -335,7 +335,7 @@ POSTHOOK: Output: druid_test_dst@test_base_table
 POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
 POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
 POSTHOOK: Lineage: test_base_table.userid SCRIPT []
-PREHOOK: query: CREATE TABLE druid_test_table_n9
+PREHOOK: query: CREATE EXTERNAL TABLE druid_test_table_n9
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 AS
@@ -344,7 +344,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: druid_test_dst@test_base_table
 PREHOOK: Output: database:druid_test_dst
 PREHOOK: Output: druid_test_dst@druid_test_table_n9
-POSTHOOK: query: CREATE TABLE druid_test_table_n9
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_test_table_n9
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "DAY")
 AS

http://git-wip-us.apache.org/repos/asf/hive/blob/96b5ceb6/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
index cbe89b6..4bc819f 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
@@ -568,6 +568,25 @@ public class MetaStoreUtils {
     return isExternal(params);
   }
 
+  /**
+   * Determines whether a table needs to be purged or not.
+   *
+   * @param table table of interest
+   *
+   * @return true if external table needs to be purged
+   */
+  public static boolean isExternalTablePurge(Table table) {
+    if (table == null) {
+      return false;
+    }
+    Map<String, String> params = table.getParameters();
+    if (params == null) {
+      return false;
+    }
+
+    return isPropertyTrue(params, EXTERNAL_TABLE_PURGE);
+  }
+
   public static boolean isExternal(Map<String, String> tableParams){
     return isPropertyTrue(tableParams, "EXTERNAL");
   }