You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by se...@apache.org on 2018/04/23 18:25:04 UTC

[1/2] hive git commit: HIVE-17970 : MM LOAD DATA with OVERWRITE doesn't use base_n directory concept (Sergey Shelukhin, reviewed by Eugene Koifman)

Repository: hive
Updated Branches:
  refs/heads/branch-3 0e1c66759 -> 25912f7b5
  refs/heads/master 622440199 -> 4f67bebe1


HIVE-17970 : MM LOAD DATA with OVERWRITE doesn't use base_n directory concept (Sergey Shelukhin, reviewed by Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4f67bebe
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4f67bebe
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4f67bebe

Branch: refs/heads/master
Commit: 4f67bebe1916b77a8366a2f1627d59bb2d800522
Parents: 6224401
Author: sergey <se...@apache.org>
Authored: Mon Apr 23 11:18:51 2018 -0700
Committer: sergey <se...@apache.org>
Committed: Mon Apr 23 11:18:51 2018 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/common/JavaUtils.java    |  25 +-
 .../hadoop/hive/ql/history/TestHiveHistory.java |   2 +-
 .../test/resources/testconfiguration.properties |   1 +
 .../apache/hadoop/hive/ql/exec/MoveTask.java    |  14 +-
 .../apache/hadoop/hive/ql/exec/Utilities.java   |   6 +-
 .../apache/hadoop/hive/ql/metadata/Hive.java    | 112 +++----
 .../hive/ql/parse/LoadSemanticAnalyzer.java     |  18 +-
 .../hadoop/hive/ql/exec/TestExecDriver.java     |   2 +-
 .../clientpositive/llap/mm_loaddata.q.out       | 296 +++++++++++++++++++
 .../results/clientpositive/mm_loaddata.q.out    | 296 -------------------
 10 files changed, 361 insertions(+), 411 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/4f67bebe/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
index 7894ec1..45abd2f 100644
--- a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
@@ -188,39 +188,26 @@ public final class JavaUtils {
 
   public static class IdPathFilter implements PathFilter {
     private String baseDirName, deltaDirName;
-    private final boolean isMatch, isIgnoreTemp, isDeltaPrefix;
+    private final boolean isDeltaPrefix;
 
-    public IdPathFilter(long writeId, int stmtId, boolean isMatch) {
-      this(writeId, stmtId, isMatch, false);
-    }
-
-    public IdPathFilter(long writeId, int stmtId, boolean isMatch, boolean isIgnoreTemp) {
+    public IdPathFilter(long writeId, int stmtId) {
       String deltaDirName = null;
       deltaDirName = DELTA_PREFIX + "_" + String.format(DELTA_DIGITS, writeId) + "_" +
-              String.format(DELTA_DIGITS, writeId) + "_";
+              String.format(DELTA_DIGITS, writeId);
       isDeltaPrefix = (stmtId < 0);
       if (!isDeltaPrefix) {
-        deltaDirName += String.format(STATEMENT_DIGITS, stmtId);
+        deltaDirName += "_" + String.format(STATEMENT_DIGITS, stmtId);
       }
 
       this.baseDirName = BASE_PREFIX + "_" + String.format(DELTA_DIGITS, writeId);
       this.deltaDirName = deltaDirName;
-      this.isMatch = isMatch;
-      this.isIgnoreTemp = isIgnoreTemp;
     }
 
     @Override
     public boolean accept(Path path) {
       String name = path.getName();
-      if (name.equals(baseDirName) || (isDeltaPrefix && name.startsWith(deltaDirName))
-          || (!isDeltaPrefix && name.equals(deltaDirName))) {
-        return isMatch;
-      }
-      if (isIgnoreTemp && name.length() > 0) {
-        char c = name.charAt(0);
-        if (c == '.' || c == '_') return false; // Regardless of isMatch, ignore this.
-      }
-      return !isMatch;
+      return name.equals(baseDirName) || (isDeltaPrefix && name.startsWith(deltaDirName))
+          || (!isDeltaPrefix && name.equals(deltaDirName));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/4f67bebe/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
index 0168472..9b50fd4 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
@@ -107,7 +107,7 @@ public class TestHiveHistory extends TestCase {
         db.createTable(src, cols, null, TextInputFormat.class,
             IgnoreKeyTextOutputFormat.class);
         db.loadTable(hadoopDataFile[i], src,
-          LoadFileType.KEEP_EXISTING, false, false, false, false, null, 0);
+          LoadFileType.KEEP_EXISTING, false, false, false, false, null, 0, false);
         i++;
       }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/4f67bebe/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index ed161da..e43c7d4 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -582,6 +582,7 @@ minillaplocal.query.files=\
   mergejoin_3way.q,\
   mm_conversions.q,\
   mm_exim.q,\
+  mm_loaddata.q,\
   mrr.q,\
   multiMapJoin1.q,\
   multiMapJoin2.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/4f67bebe/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index 6fff7e7..dbda5fd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -372,7 +372,7 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
           }
           db.loadTable(tbd.getSourcePath(), tbd.getTable().getTableName(), tbd.getLoadFileType(),
               work.isSrcLocal(), isSkewedStoredAsDirs(tbd), isFullAcidOp, hasFollowingStatsTask(),
-              tbd.getWriteId(), tbd.getStmtId());
+              tbd.getWriteId(), tbd.getStmtId(), tbd.isInsertOverwrite());
           if (work.getOutputs() != null) {
             DDLTask.addIfAbsentByName(new WriteEntity(table,
               getWriteType(tbd, work.getLoadTableWork().getWriteType())), work.getOutputs());
@@ -467,12 +467,14 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
       Utilities.FILE_OP_LOGGER.trace("loadPartition called from " + tbd.getSourcePath()
         + " into " + tbd.getTable().getTableName());
     }
-    db.loadPartition(tbd.getSourcePath(), tbd.getTable().getTableName(),
-        tbd.getPartitionSpec(), tbd.getLoadFileType(),
-        tbd.getInheritTableSpecs(), isSkewedStoredAsDirs(tbd), work.isSrcLocal(),
-        work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID &&
+
+    db.loadPartition(tbd.getSourcePath(), db.getTable(tbd.getTable().getTableName()),
+        tbd.getPartitionSpec(), tbd.getLoadFileType(), tbd.getInheritTableSpecs(),
+        isSkewedStoredAsDirs(tbd), work.isSrcLocal(),
+         work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID &&
             !tbd.isMmTable(),
-        hasFollowingStatsTask(), tbd.getWriteId(), tbd.getStmtId());
+         hasFollowingStatsTask(),
+        tbd.getWriteId(), tbd.getStmtId(), tbd.isInsertOverwrite());
     Partition partn = db.getPartition(table, tbd.getPartitionSpec(), false);
 
     // See the comment inside updatePartitionBucketSortColumns.

http://git-wip-us.apache.org/repos/asf/hive/blob/4f67bebe/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 5fbe045..6395c31 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -4076,7 +4076,7 @@ public final class Utilities {
       Boolean isBaseDir) throws IOException {
     int skipLevels = dpLevels + lbLevels;
     if (filter == null) {
-      filter = new JavaUtils.IdPathFilter(writeId, stmtId, true, false);
+      filter = new JavaUtils.IdPathFilter(writeId, stmtId);
     }
     if (skipLevels == 0) {
       return statusToPath(fs.listStatus(path, filter));
@@ -4250,7 +4250,7 @@ public final class Utilities {
     FileSystem fs = specPath.getFileSystem(hconf);
     Path manifestDir = getManifestDir(specPath, writeId, stmtId, unionSuffix, isInsertOverwrite);
     if (!success) {
-      JavaUtils.IdPathFilter filter = new JavaUtils.IdPathFilter(writeId, stmtId, true);
+      JavaUtils.IdPathFilter filter = new JavaUtils.IdPathFilter(writeId, stmtId);
       tryDeleteAllMmFiles(fs, specPath, manifestDir, dpLevels, lbLevels,
           filter, writeId, stmtId, hconf);
       return;
@@ -4275,7 +4275,7 @@ public final class Utilities {
     }
 
     Utilities.FILE_OP_LOGGER.debug("Looking for files in: {}", specPath);
-    JavaUtils.IdPathFilter filter = new JavaUtils.IdPathFilter(writeId, stmtId, true, false);
+    JavaUtils.IdPathFilter filter = new JavaUtils.IdPathFilter(writeId, stmtId);
     if (isMmCtas && !fs.exists(specPath)) {
       Utilities.FILE_OP_LOGGER.info("Creating table directory for CTAS with no output at {}", specPath);
       FileUtils.mkdir(fs, specPath, hconf);

http://git-wip-us.apache.org/repos/asf/hive/blob/4f67bebe/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index be98446..69d42e3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1617,31 +1617,6 @@ public class Hive {
   }
 
   /**
-   * @param loadPath
-   * @param tableName
-   * @param partSpec
-   * @param loadFileType
-   * @param inheritTableSpecs
-   * @param isSkewedStoreAsSubdir
-   * @param isSrcLocal
-   * @param isAcid
-   * @param hasFollowingStatsTask
-   * @param writeId
-   * @param stmtId
-   * @return
-   * @throws HiveException
-   */
-  public void loadPartition(Path loadPath, String tableName,
-      Map<String, String> partSpec, LoadFileType loadFileType, boolean inheritTableSpecs,
-      boolean isSkewedStoreAsSubdir,  boolean isSrcLocal, boolean isAcid,
-      boolean hasFollowingStatsTask, Long writeId, int stmtId)
-          throws HiveException {
-    Table tbl = getTable(tableName);
-    loadPartition(loadPath, tbl, partSpec, loadFileType, inheritTableSpecs,
-        isSkewedStoreAsSubdir, isSrcLocal, isAcid, hasFollowingStatsTask, writeId, stmtId);
-  }
-
-  /**
    * Load a directory into a Hive Table Partition - Alters existing content of
    * the partition with the contents of loadPath. - If the partition does not
    * exist - one is created - files in loadPath are moved into Hive. But the
@@ -1666,16 +1641,18 @@ public class Hive {
    *          true if there is a following task which updates the stats, so, this method need not update.
    * @param writeId write ID allocated for the current load operation
    * @param stmtId statement ID of the current load statement
+   * @param isInsertOverwrite 
    * @return Partition object being loaded with data
    */
   public Partition loadPartition(Path loadPath, Table tbl, Map<String, String> partSpec,
       LoadFileType loadFileType, boolean inheritTableSpecs, boolean isSkewedStoreAsSubdir,
-      boolean isSrcLocal, boolean isAcidIUDoperation, boolean hasFollowingStatsTask, Long writeId, int stmtId)
-          throws HiveException {
+      boolean isSrcLocal, boolean isAcidIUDoperation, boolean hasFollowingStatsTask, Long writeId,
+      int stmtId, boolean isInsertOverwrite) throws HiveException {
     Path tblDataLocationPath =  tbl.getDataLocation();
     boolean isMmTableWrite = AcidUtils.isInsertOnlyTable(tbl.getParameters());
     assert tbl.getPath() != null : "null==getPath() for " + tbl.getTableName();
     boolean isFullAcidTable = AcidUtils.isFullAcidTable(tbl);
+    boolean isTxnTable = AcidUtils.isTransactionalTable(tbl);
     try {
       // Get the partition object if it already exists
       Partition oldPart = getPartition(tbl, partSpec, false);
@@ -1742,35 +1719,31 @@ public class Hive {
         }
       } else {
         // Either a non-MM query, or a load into MM table from an external source.
-        PathFilter filter = FileUtils.HIDDEN_FILES_PATH_FILTER;
         Path destPath = newPartPath;
         if (isMmTableWrite) {
-          // We will load into MM directory, and delete from the parent if needed.
-          // TODO: this looks invalid after ACID integration. What about base dirs?
-          destPath = new Path(destPath, AcidUtils.deltaSubdir(writeId, writeId, stmtId));
-          // TODO: loadFileType for MM table will no longer be REPLACE_ALL
-          filter = (loadFileType == LoadFileType.REPLACE_ALL)
-            ? new JavaUtils.IdPathFilter(writeId, stmtId, false, true) : filter;
+          assert !isAcidIUDoperation;
+          // We will load into MM directory, and hide previous directories if needed.
+          destPath = new Path(destPath, isInsertOverwrite
+              ? AcidUtils.baseDir(writeId) : AcidUtils.deltaSubdir(writeId, writeId, stmtId));
         }
-        else if(!isAcidIUDoperation && isFullAcidTable) {
+        if (!isAcidIUDoperation && isFullAcidTable) {
           destPath = fixFullAcidPathForLoadData(loadFileType, destPath, writeId, stmtId, tbl);
         }
         if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
           Utilities.FILE_OP_LOGGER.trace("moving " + loadPath + " to " + destPath);
         }
-        //todo: why is "&& !isAcidIUDoperation" needed here?
-        if (!isFullAcidTable && ((loadFileType == LoadFileType.REPLACE_ALL) || (oldPart == null && !isAcidIUDoperation))) {
+        // TODO: why is "&& !isAcidIUDoperation" needed here?
+        if (!isTxnTable && ((loadFileType == LoadFileType.REPLACE_ALL) || (oldPart == null && !isAcidIUDoperation))) {
           //for fullAcid tables we don't delete files for commands with OVERWRITE - we create a new
           // base_x.  (there is Insert Overwrite and Load Data Overwrite)
           boolean isAutoPurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge"));
-          // TODO: this should never run for MM tables anymore. Remove the flag, and maybe the filter?
-          replaceFiles(tbl.getPath(), loadPath, destPath, oldPartPath, getConf(),
-              isSrcLocal, isAutoPurge, newFiles, filter, isMmTableWrite, !tbl.isTemporary());
+          replaceFiles(tbl.getPath(), loadPath, destPath, oldPartPath, getConf(), isSrcLocal,
+              isAutoPurge, newFiles, FileUtils.HIDDEN_FILES_PATH_FILTER, !tbl.isTemporary());
         } else {
           FileSystem fs = tbl.getDataLocation().getFileSystem(conf);
           copyFiles(conf, loadPath, destPath, fs, isSrcLocal, isAcidIUDoperation,
-            (loadFileType == LoadFileType.OVERWRITE_EXISTING), newFiles, tbl.getNumBuckets() > 0,
-                  isFullAcidTable);
+              (loadFileType == LoadFileType.OVERWRITE_EXISTING), newFiles,
+              tbl.getNumBuckets() > 0, isFullAcidTable);
         }
       }
       perfLogger.PerfLogEnd("MoveTask", "FileMoves");
@@ -1814,7 +1787,7 @@ public class Hive {
         }
         // Note: we are creating a brand new the partition, so this is going to be valid for ACID.
         List<FileStatus> filesForStats = null;
-        if (isFullAcidTable || isMmTableWrite) {
+        if (isTxnTable) {
           filesForStats = AcidUtils.getAcidFilesForStats(
               newTPart.getTable(), newPartPath, conf, null);
         } else {
@@ -2149,8 +2122,8 @@ private void constructOneLBLocationMap(FileStatus fSta,
   public Map<Map<String, String>, Partition> loadDynamicPartitions(final Path loadPath,
       final String tableName, final Map<String, String> partSpec, final LoadFileType loadFileType,
       final int numDP, final int numLB, final boolean isAcid, final long writeId, final int stmtId,
-      final boolean hasFollowingStatsTask, final AcidUtils.Operation operation, boolean isInsertOverwrite)
-      throws HiveException {
+      final boolean hasFollowingStatsTask, final AcidUtils.Operation operation,
+      boolean isInsertOverwrite) throws HiveException {
 
     final Map<Map<String, String>, Partition> partitionsMap =
         Collections.synchronizedMap(new LinkedHashMap<Map<String, String>, Partition>());
@@ -2197,9 +2170,9 @@ private void constructOneLBLocationMap(FileStatus fSta,
               LOG.info("New loading path = " + partPath + " with partSpec " + fullPartSpec);
 
               // load the partition
-              Partition newPartition = loadPartition(partPath, tbl, fullPartSpec,
-                  loadFileType, true, numLB > 0,
-                  false, isAcid, hasFollowingStatsTask, writeId, stmtId);
+              Partition newPartition = loadPartition(partPath, tbl, fullPartSpec, loadFileType,
+                  true, numLB > 0, false, isAcid, hasFollowingStatsTask, writeId, stmtId,
+                  isInsertOverwrite);
               partitionsMap.put(fullPartSpec, newPartition);
 
               if (inPlaceEligible) {
@@ -2293,10 +2266,11 @@ private void constructOneLBLocationMap(FileStatus fSta,
    */
   public void loadTable(Path loadPath, String tableName, LoadFileType loadFileType, boolean isSrcLocal,
       boolean isSkewedStoreAsSubdir, boolean isAcidIUDoperation, boolean hasFollowingStatsTask,
-      Long writeId, int stmtId) throws HiveException {
+      Long writeId, int stmtId, boolean isInsertOverwrite) throws HiveException {
     List<Path> newFiles = null;
     Table tbl = getTable(tableName);
     assert tbl.getPath() != null : "null==getPath() for " + tbl.getTableName();
+    boolean isTxnTable = AcidUtils.isTransactionalTable(tbl);
     boolean isMmTable = AcidUtils.isInsertOnlyTable(tbl);
     boolean isFullAcidTable = AcidUtils.isFullAcidTable(tbl);
     if (conf.getBoolVar(ConfVars.FIRE_EVENTS_FOR_DML) && !tbl.isTemporary()) {
@@ -2319,34 +2293,28 @@ private void constructOneLBLocationMap(FileStatus fSta,
       // Either a non-MM query, or a load into MM table from an external source.
       Path tblPath = tbl.getPath();
       Path destPath = tblPath;
-      PathFilter filter = FileUtils.HIDDEN_FILES_PATH_FILTER;
       if (isMmTable) {
         assert !isAcidIUDoperation;
-        // We will load into MM directory, and delete from the parent if needed.
-        // TODO: this looks invalid after ACID integration. What about base dirs?
-        destPath = new Path(destPath, AcidUtils.deltaSubdir(writeId, writeId, stmtId));
-        // TODO: loadFileType for MM table will no longer be REPLACE_ALL
-        filter = loadFileType == LoadFileType.REPLACE_ALL
-            ? new JavaUtils.IdPathFilter(writeId, stmtId, false, true) : filter;
+        // We will load into MM directory, and hide previous directories if needed.
+        destPath = new Path(destPath, isInsertOverwrite
+            ? AcidUtils.baseDir(writeId) : AcidUtils.deltaSubdir(writeId, writeId, stmtId));
       }
-      else if(!isAcidIUDoperation && isFullAcidTable) {
+      if (!isAcidIUDoperation && isFullAcidTable) {
         destPath = fixFullAcidPathForLoadData(loadFileType, destPath, writeId, stmtId, tbl);
       }
       Utilities.FILE_OP_LOGGER.debug("moving " + loadPath + " to " + tblPath
           + " (replace = " + loadFileType + ")");
-      if (loadFileType == LoadFileType.REPLACE_ALL && !isFullAcidTable) {
+      if (loadFileType == LoadFileType.REPLACE_ALL && !isTxnTable) {
         //for fullAcid we don't want to delete any files even for OVERWRITE see HIVE-14988/HIVE-17361
-        //todo:  should probably do the same for MM IOW
         boolean isAutopurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge"));
-        // TODO: this should never run for MM tables anymore. Remove the flag, and maybe the filter?
-        replaceFiles(tblPath, loadPath, destPath, tblPath,
-            conf, isSrcLocal, isAutopurge, newFiles, filter, isMmTable?true:false, !tbl.isTemporary());
+        replaceFiles(tblPath, loadPath, destPath, tblPath, conf, isSrcLocal, isAutopurge,
+            newFiles, FileUtils.HIDDEN_FILES_PATH_FILTER, !tbl.isTemporary());
       } else {
         try {
           FileSystem fs = tbl.getDataLocation().getFileSystem(conf);
           copyFiles(conf, loadPath, destPath, fs, isSrcLocal, isAcidIUDoperation,
-            loadFileType == LoadFileType.OVERWRITE_EXISTING, newFiles,
-                  tbl.getNumBuckets() > 0 ? true : false, isFullAcidTable);
+              loadFileType == LoadFileType.OVERWRITE_EXISTING, newFiles,
+              tbl.getNumBuckets() > 0 ? true : false, isFullAcidTable);
         } catch (IOException e) {
           throw new HiveException("addFiles: filesystem error in check phase", e);
         }
@@ -4006,7 +3974,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
    */
   protected void replaceFiles(Path tablePath, Path srcf, Path destf, Path oldPath, HiveConf conf,
           boolean isSrcLocal, boolean purge, List<Path> newFiles, PathFilter deletePathFilter,
-          boolean isMmTableOverwrite, boolean isNeedRecycle) throws HiveException {
+          boolean isNeedRecycle) throws HiveException {
     try {
 
       FileSystem destFs = destf.getFileSystem(conf);
@@ -4025,9 +3993,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
       }
 
       if (oldPath != null) {
-        // Note: we assume lbLevels is 0 here. Same as old code for non-MM.
-        //       For MM tables, this can only be a LOAD command. Does LOAD even support LB?
-        deleteOldPathForReplace(destf, oldPath, conf, purge, deletePathFilter, isMmTableOverwrite, 0, isNeedRecycle);
+        deleteOldPathForReplace(destf, oldPath, conf, purge, deletePathFilter, isNeedRecycle);
       }
 
       // first call FileUtils.mkdir to make sure that destf directory exists, if not, it creates
@@ -4073,7 +4039,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
   }
 
   private void deleteOldPathForReplace(Path destPath, Path oldPath, HiveConf conf, boolean purge,
-      PathFilter pathFilter, boolean isMmTableOverwrite, int lbLevels, boolean isNeedRecycle) throws HiveException {
+      PathFilter pathFilter, boolean isNeedRecycle) throws HiveException {
     Utilities.FILE_OP_LOGGER.debug("Deleting old paths for replace in " + destPath
         + " and old path " + oldPath);
     boolean isOldPathUnderDestf = false;
@@ -4085,13 +4051,11 @@ private void constructOneLBLocationMap(FileStatus fSta,
       // But not sure why we changed not to delete the oldPath in HIVE-8750 if it is
       // not the destf or its subdir?
       isOldPathUnderDestf = isSubDir(oldPath, destPath, oldFs, destFs, false);
-      if (isOldPathUnderDestf || isMmTableOverwrite) {
-        if (lbLevels == 0 || !isMmTableOverwrite) {
-          cleanUpOneDirectoryForReplace(oldPath, oldFs, pathFilter, conf, purge, isNeedRecycle);
-        }
+      if (isOldPathUnderDestf) {
+        cleanUpOneDirectoryForReplace(oldPath, oldFs, pathFilter, conf, purge, isNeedRecycle);
       }
     } catch (IOException e) {
-      if (isOldPathUnderDestf || isMmTableOverwrite) {
+      if (isOldPathUnderDestf) {
         // if oldPath is a subdir of destf but it could not be cleaned
         throw new HiveException("Directory " + oldPath.toString()
             + " could not be cleaned up.", e);

http://git-wip-us.apache.org/repos/asf/hive/blob/4f67bebe/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
index df2098b..c07991d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
@@ -319,7 +319,6 @@ public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
       ensureFileFormatsMatch(ts, files, fromURI);
     }
     inputs.add(toReadEntity(new Path(fromURI)));
-    Task<? extends Serializable> rTask = null;
 
     // create final load/move work
 
@@ -355,7 +354,8 @@ public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
 
     Long writeId = null;
     int stmtId = -1;
-    if (AcidUtils.isTransactionalTable(ts.tableHandle)) {
+    boolean isTxnTable = AcidUtils.isTransactionalTable(ts.tableHandle);
+    if (isTxnTable) {
       try {
         writeId = getTxnMgr().getTableWriteId(ts.tableHandle.getDbName(),
                 ts.tableHandle.getTableName());
@@ -368,10 +368,11 @@ public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
     // Note: this sets LoadFileType incorrectly for ACID; is that relevant for load?
     //       See setLoadFileType and setIsAcidIow calls elsewhere for an example.
     LoadTableDesc loadTableWork = new LoadTableDesc(new Path(fromURI),
-      Utilities.getTableDesc(ts.tableHandle), partSpec,
-      isOverWrite ? LoadFileType.REPLACE_ALL : LoadFileType.KEEP_EXISTING, writeId);
+      Utilities.getTableDesc(ts.tableHandle), partSpec, isOverWrite
+        ? LoadFileType.REPLACE_ALL : LoadFileType.KEEP_EXISTING, writeId);
     loadTableWork.setStmtId(stmtId);
-    if (preservePartitionSpecs){
+    loadTableWork.setInsertOverwrite(isOverWrite);
+    if (preservePartitionSpecs) {
       // Note : preservePartitionSpecs=true implies inheritTableSpecs=false but
       // but preservePartitionSpecs=false(default) here is not sufficient enough
       // info to set inheritTableSpecs=true
@@ -382,13 +383,8 @@ public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
         new MoveWork(getInputs(), getOutputs(), loadTableWork, null, true,
             isLocal)
     );
-    if (rTask != null) {
-      rTask.addDependentTask(childTask);
-    } else {
-      rTask = childTask;
-    }
 
-    rootTasks.add(rTask);
+    rootTasks.add(childTask);
 
     // The user asked for stats to be collected.
     // Some stats like number of rows require a scan of the data

http://git-wip-us.apache.org/repos/asf/hive/blob/4f67bebe/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index b0dfc48..e108684 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -142,7 +142,7 @@ public class TestExecDriver extends TestCase {
         db.createTable(src, cols, null, TextInputFormat.class,
             HiveIgnoreKeyTextOutputFormat.class);
         db.loadTable(hadoopDataFile[i], src, LoadFileType.KEEP_EXISTING,
-           true, false, false, false, null, 0);
+           true, false, false, false, null, 0, false);
         i++;
       }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/4f67bebe/ql/src/test/results/clientpositive/llap/mm_loaddata.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/mm_loaddata.q.out b/ql/src/test/results/clientpositive/llap/mm_loaddata.q.out
new file mode 100644
index 0000000..b849a88
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/mm_loaddata.q.out
@@ -0,0 +1,296 @@
+PREHOOK: query: drop table load0_mm
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table load0_mm
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table load0_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@load0_mm
+POSTHOOK: query: create table load0_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@load0_mm
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table load0_mm
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load0_mm
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table load0_mm
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load0_mm
+PREHOOK: query: select count(1) from load0_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load0_mm
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load0_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load0_mm
+#### A masked pattern was here ####
+500
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table load0_mm
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load0_mm
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table load0_mm
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load0_mm
+PREHOOK: query: select count(1) from load0_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load0_mm
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load0_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load0_mm
+#### A masked pattern was here ####
+1000
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' overwrite into table load0_mm
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load0_mm
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' overwrite into table load0_mm
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load0_mm
+PREHOOK: query: select count(1) from load0_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load0_mm
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load0_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load0_mm
+#### A masked pattern was here ####
+500
+PREHOOK: query: drop table load0_mm
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@load0_mm
+PREHOOK: Output: default@load0_mm
+POSTHOOK: query: drop table load0_mm
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@load0_mm
+POSTHOOK: Output: default@load0_mm
+PREHOOK: query: drop table intermediate2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table intermediate2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table intermediate2 (key string, value string) stored as textfile
+#### A masked pattern was here ####
+PREHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+PREHOOK: Output: database:default
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: create table intermediate2 (key string, value string) stored as textfile
+#### A masked pattern was here ####
+POSTHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: drop table load1_mm
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table load1_mm
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table load1_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@load1_mm
+POSTHOOK: query: create table load1_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@load1_mm
+#### A masked pattern was here ####
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load1_mm
+#### A masked pattern was here ####
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load1_mm
+PREHOOK: query: select count(1) from load1_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load1_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load1_mm
+#### A masked pattern was here ####
+1000
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+#### A masked pattern was here ####
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load1_mm
+PREHOOK: query: select count(1) from load1_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load1_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load1_mm
+#### A masked pattern was here ####
+1050
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+#### A masked pattern was here ####
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load1_mm
+PREHOOK: query: select count(1) from load1_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load1_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load1_mm
+#### A masked pattern was here ####
+500
+PREHOOK: query: drop table load1_mm
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@load1_mm
+PREHOOK: Output: default@load1_mm
+POSTHOOK: query: drop table load1_mm
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@load1_mm
+POSTHOOK: Output: default@load1_mm
+PREHOOK: query: drop table load2_mm
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table load2_mm
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table load2_mm (key string, value string)
+  partitioned by (k int, l int) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@load2_mm
+POSTHOOK: query: create table load2_mm (key string, value string)
+  partitioned by (k int, l int) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@load2_mm
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+#### A masked pattern was here ####
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load2_mm
+#### A masked pattern was here ####
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load2_mm
+POSTHOOK: Output: default@load2_mm@k=5/l=5
+PREHOOK: query: select count(1) from load2_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load2_mm
+PREHOOK: Input: default@load2_mm@k=5/l=5
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load2_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load2_mm
+POSTHOOK: Input: default@load2_mm@k=5/l=5
+#### A masked pattern was here ####
+1025
+PREHOOK: query: drop table load2_mm
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@load2_mm
+PREHOOK: Output: default@load2_mm
+POSTHOOK: query: drop table load2_mm
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@load2_mm
+POSTHOOK: Output: default@load2_mm
+PREHOOK: query: drop table intermediate2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@intermediate2
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: drop table intermediate2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@intermediate2
+POSTHOOK: Output: default@intermediate2

http://git-wip-us.apache.org/repos/asf/hive/blob/4f67bebe/ql/src/test/results/clientpositive/mm_loaddata.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/mm_loaddata.q.out b/ql/src/test/results/clientpositive/mm_loaddata.q.out
deleted file mode 100644
index b849a88..0000000
--- a/ql/src/test/results/clientpositive/mm_loaddata.q.out
+++ /dev/null
@@ -1,296 +0,0 @@
-PREHOOK: query: drop table load0_mm
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table load0_mm
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table load0_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@load0_mm
-POSTHOOK: query: create table load0_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@load0_mm
-PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table load0_mm
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load0_mm
-POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table load0_mm
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load0_mm
-PREHOOK: query: select count(1) from load0_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load0_mm
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load0_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load0_mm
-#### A masked pattern was here ####
-500
-PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table load0_mm
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load0_mm
-POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table load0_mm
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load0_mm
-PREHOOK: query: select count(1) from load0_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load0_mm
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load0_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load0_mm
-#### A masked pattern was here ####
-1000
-PREHOOK: query: load data local inpath '../../data/files/kv2.txt' overwrite into table load0_mm
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load0_mm
-POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' overwrite into table load0_mm
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load0_mm
-PREHOOK: query: select count(1) from load0_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load0_mm
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load0_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load0_mm
-#### A masked pattern was here ####
-500
-PREHOOK: query: drop table load0_mm
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@load0_mm
-PREHOOK: Output: default@load0_mm
-POSTHOOK: query: drop table load0_mm
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@load0_mm
-POSTHOOK: Output: default@load0_mm
-PREHOOK: query: drop table intermediate2
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table intermediate2
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table intermediate2 (key string, value string) stored as textfile
-#### A masked pattern was here ####
-PREHOOK: type: CREATETABLE
-#### A masked pattern was here ####
-PREHOOK: Output: database:default
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: create table intermediate2 (key string, value string) stored as textfile
-#### A masked pattern was here ####
-POSTHOOK: type: CREATETABLE
-#### A masked pattern was here ####
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: drop table load1_mm
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table load1_mm
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table load1_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@load1_mm
-POSTHOOK: query: create table load1_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@load1_mm
-#### A masked pattern was here ####
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load1_mm
-#### A masked pattern was here ####
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load1_mm
-PREHOOK: query: select count(1) from load1_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load1_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load1_mm
-#### A masked pattern was here ####
-1000
-PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-#### A masked pattern was here ####
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load1_mm
-PREHOOK: query: select count(1) from load1_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load1_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load1_mm
-#### A masked pattern was here ####
-1050
-PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-#### A masked pattern was here ####
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load1_mm
-PREHOOK: query: select count(1) from load1_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load1_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load1_mm
-#### A masked pattern was here ####
-500
-PREHOOK: query: drop table load1_mm
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@load1_mm
-PREHOOK: Output: default@load1_mm
-POSTHOOK: query: drop table load1_mm
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@load1_mm
-POSTHOOK: Output: default@load1_mm
-PREHOOK: query: drop table load2_mm
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table load2_mm
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table load2_mm (key string, value string)
-  partitioned by (k int, l int) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@load2_mm
-POSTHOOK: query: create table load2_mm (key string, value string)
-  partitioned by (k int, l int) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@load2_mm
-PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-#### A masked pattern was here ####
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load2_mm
-#### A masked pattern was here ####
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load2_mm
-POSTHOOK: Output: default@load2_mm@k=5/l=5
-PREHOOK: query: select count(1) from load2_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load2_mm
-PREHOOK: Input: default@load2_mm@k=5/l=5
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load2_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load2_mm
-POSTHOOK: Input: default@load2_mm@k=5/l=5
-#### A masked pattern was here ####
-1025
-PREHOOK: query: drop table load2_mm
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@load2_mm
-PREHOOK: Output: default@load2_mm
-POSTHOOK: query: drop table load2_mm
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@load2_mm
-POSTHOOK: Output: default@load2_mm
-PREHOOK: query: drop table intermediate2
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@intermediate2
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: drop table intermediate2
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@intermediate2
-POSTHOOK: Output: default@intermediate2


[2/2] hive git commit: HIVE-17970 : MM LOAD DATA with OVERWRITE doesn't use base_n directory concept (Sergey Shelukhin, reviewed by Eugene Koifman)

Posted by se...@apache.org.
HIVE-17970 : MM LOAD DATA with OVERWRITE doesn't use base_n directory concept (Sergey Shelukhin, reviewed by Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/25912f7b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/25912f7b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/25912f7b

Branch: refs/heads/branch-3
Commit: 25912f7b5530b7524a4e4739aea2a06dd6bc3249
Parents: 0e1c667
Author: sergey <se...@apache.org>
Authored: Mon Apr 23 11:18:51 2018 -0700
Committer: sergey <se...@apache.org>
Committed: Mon Apr 23 11:19:01 2018 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/common/JavaUtils.java    |  25 +-
 .../hadoop/hive/ql/history/TestHiveHistory.java |   2 +-
 .../test/resources/testconfiguration.properties |   1 +
 .../apache/hadoop/hive/ql/exec/MoveTask.java    |  14 +-
 .../apache/hadoop/hive/ql/exec/Utilities.java   |   6 +-
 .../apache/hadoop/hive/ql/metadata/Hive.java    | 112 +++----
 .../hive/ql/parse/LoadSemanticAnalyzer.java     |  18 +-
 .../hadoop/hive/ql/exec/TestExecDriver.java     |   2 +-
 .../clientpositive/llap/mm_loaddata.q.out       | 296 +++++++++++++++++++
 .../results/clientpositive/mm_loaddata.q.out    | 296 -------------------
 10 files changed, 361 insertions(+), 411 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/25912f7b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
index 7894ec1..45abd2f 100644
--- a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
@@ -188,39 +188,26 @@ public final class JavaUtils {
 
   public static class IdPathFilter implements PathFilter {
     private String baseDirName, deltaDirName;
-    private final boolean isMatch, isIgnoreTemp, isDeltaPrefix;
+    private final boolean isDeltaPrefix;
 
-    public IdPathFilter(long writeId, int stmtId, boolean isMatch) {
-      this(writeId, stmtId, isMatch, false);
-    }
-
-    public IdPathFilter(long writeId, int stmtId, boolean isMatch, boolean isIgnoreTemp) {
+    public IdPathFilter(long writeId, int stmtId) {
       String deltaDirName = null;
       deltaDirName = DELTA_PREFIX + "_" + String.format(DELTA_DIGITS, writeId) + "_" +
-              String.format(DELTA_DIGITS, writeId) + "_";
+              String.format(DELTA_DIGITS, writeId);
       isDeltaPrefix = (stmtId < 0);
       if (!isDeltaPrefix) {
-        deltaDirName += String.format(STATEMENT_DIGITS, stmtId);
+        deltaDirName += "_" + String.format(STATEMENT_DIGITS, stmtId);
       }
 
       this.baseDirName = BASE_PREFIX + "_" + String.format(DELTA_DIGITS, writeId);
       this.deltaDirName = deltaDirName;
-      this.isMatch = isMatch;
-      this.isIgnoreTemp = isIgnoreTemp;
     }
 
     @Override
     public boolean accept(Path path) {
       String name = path.getName();
-      if (name.equals(baseDirName) || (isDeltaPrefix && name.startsWith(deltaDirName))
-          || (!isDeltaPrefix && name.equals(deltaDirName))) {
-        return isMatch;
-      }
-      if (isIgnoreTemp && name.length() > 0) {
-        char c = name.charAt(0);
-        if (c == '.' || c == '_') return false; // Regardless of isMatch, ignore this.
-      }
-      return !isMatch;
+      return name.equals(baseDirName) || (isDeltaPrefix && name.startsWith(deltaDirName))
+          || (!isDeltaPrefix && name.equals(deltaDirName));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/25912f7b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
index 0168472..9b50fd4 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
@@ -107,7 +107,7 @@ public class TestHiveHistory extends TestCase {
         db.createTable(src, cols, null, TextInputFormat.class,
             IgnoreKeyTextOutputFormat.class);
         db.loadTable(hadoopDataFile[i], src,
-          LoadFileType.KEEP_EXISTING, false, false, false, false, null, 0);
+          LoadFileType.KEEP_EXISTING, false, false, false, false, null, 0, false);
         i++;
       }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/25912f7b/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 56595aa..ad7b3ad 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -582,6 +582,7 @@ minillaplocal.query.files=\
   mergejoin_3way.q,\
   mm_conversions.q,\
   mm_exim.q,\
+  mm_loaddata.q,\
   mrr.q,\
   multiMapJoin1.q,\
   multiMapJoin2.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/25912f7b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index 6fff7e7..dbda5fd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -372,7 +372,7 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
           }
           db.loadTable(tbd.getSourcePath(), tbd.getTable().getTableName(), tbd.getLoadFileType(),
               work.isSrcLocal(), isSkewedStoredAsDirs(tbd), isFullAcidOp, hasFollowingStatsTask(),
-              tbd.getWriteId(), tbd.getStmtId());
+              tbd.getWriteId(), tbd.getStmtId(), tbd.isInsertOverwrite());
           if (work.getOutputs() != null) {
             DDLTask.addIfAbsentByName(new WriteEntity(table,
               getWriteType(tbd, work.getLoadTableWork().getWriteType())), work.getOutputs());
@@ -467,12 +467,14 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
       Utilities.FILE_OP_LOGGER.trace("loadPartition called from " + tbd.getSourcePath()
         + " into " + tbd.getTable().getTableName());
     }
-    db.loadPartition(tbd.getSourcePath(), tbd.getTable().getTableName(),
-        tbd.getPartitionSpec(), tbd.getLoadFileType(),
-        tbd.getInheritTableSpecs(), isSkewedStoredAsDirs(tbd), work.isSrcLocal(),
-        work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID &&
+
+    db.loadPartition(tbd.getSourcePath(), db.getTable(tbd.getTable().getTableName()),
+        tbd.getPartitionSpec(), tbd.getLoadFileType(), tbd.getInheritTableSpecs(),
+        isSkewedStoredAsDirs(tbd), work.isSrcLocal(),
+         work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID &&
             !tbd.isMmTable(),
-        hasFollowingStatsTask(), tbd.getWriteId(), tbd.getStmtId());
+         hasFollowingStatsTask(),
+        tbd.getWriteId(), tbd.getStmtId(), tbd.isInsertOverwrite());
     Partition partn = db.getPartition(table, tbd.getPartitionSpec(), false);
 
     // See the comment inside updatePartitionBucketSortColumns.

http://git-wip-us.apache.org/repos/asf/hive/blob/25912f7b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 5fbe045..6395c31 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -4076,7 +4076,7 @@ public final class Utilities {
       Boolean isBaseDir) throws IOException {
     int skipLevels = dpLevels + lbLevels;
     if (filter == null) {
-      filter = new JavaUtils.IdPathFilter(writeId, stmtId, true, false);
+      filter = new JavaUtils.IdPathFilter(writeId, stmtId);
     }
     if (skipLevels == 0) {
       return statusToPath(fs.listStatus(path, filter));
@@ -4250,7 +4250,7 @@ public final class Utilities {
     FileSystem fs = specPath.getFileSystem(hconf);
     Path manifestDir = getManifestDir(specPath, writeId, stmtId, unionSuffix, isInsertOverwrite);
     if (!success) {
-      JavaUtils.IdPathFilter filter = new JavaUtils.IdPathFilter(writeId, stmtId, true);
+      JavaUtils.IdPathFilter filter = new JavaUtils.IdPathFilter(writeId, stmtId);
       tryDeleteAllMmFiles(fs, specPath, manifestDir, dpLevels, lbLevels,
           filter, writeId, stmtId, hconf);
       return;
@@ -4275,7 +4275,7 @@ public final class Utilities {
     }
 
     Utilities.FILE_OP_LOGGER.debug("Looking for files in: {}", specPath);
-    JavaUtils.IdPathFilter filter = new JavaUtils.IdPathFilter(writeId, stmtId, true, false);
+    JavaUtils.IdPathFilter filter = new JavaUtils.IdPathFilter(writeId, stmtId);
     if (isMmCtas && !fs.exists(specPath)) {
       Utilities.FILE_OP_LOGGER.info("Creating table directory for CTAS with no output at {}", specPath);
       FileUtils.mkdir(fs, specPath, hconf);

http://git-wip-us.apache.org/repos/asf/hive/blob/25912f7b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index be98446..69d42e3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1617,31 +1617,6 @@ public class Hive {
   }
 
   /**
-   * @param loadPath
-   * @param tableName
-   * @param partSpec
-   * @param loadFileType
-   * @param inheritTableSpecs
-   * @param isSkewedStoreAsSubdir
-   * @param isSrcLocal
-   * @param isAcid
-   * @param hasFollowingStatsTask
-   * @param writeId
-   * @param stmtId
-   * @return
-   * @throws HiveException
-   */
-  public void loadPartition(Path loadPath, String tableName,
-      Map<String, String> partSpec, LoadFileType loadFileType, boolean inheritTableSpecs,
-      boolean isSkewedStoreAsSubdir,  boolean isSrcLocal, boolean isAcid,
-      boolean hasFollowingStatsTask, Long writeId, int stmtId)
-          throws HiveException {
-    Table tbl = getTable(tableName);
-    loadPartition(loadPath, tbl, partSpec, loadFileType, inheritTableSpecs,
-        isSkewedStoreAsSubdir, isSrcLocal, isAcid, hasFollowingStatsTask, writeId, stmtId);
-  }
-
-  /**
    * Load a directory into a Hive Table Partition - Alters existing content of
    * the partition with the contents of loadPath. - If the partition does not
    * exist - one is created - files in loadPath are moved into Hive. But the
@@ -1666,16 +1641,18 @@ public class Hive {
    *          true if there is a following task which updates the stats, so, this method need not update.
    * @param writeId write ID allocated for the current load operation
    * @param stmtId statement ID of the current load statement
+   * @param isInsertOverwrite 
    * @return Partition object being loaded with data
    */
   public Partition loadPartition(Path loadPath, Table tbl, Map<String, String> partSpec,
       LoadFileType loadFileType, boolean inheritTableSpecs, boolean isSkewedStoreAsSubdir,
-      boolean isSrcLocal, boolean isAcidIUDoperation, boolean hasFollowingStatsTask, Long writeId, int stmtId)
-          throws HiveException {
+      boolean isSrcLocal, boolean isAcidIUDoperation, boolean hasFollowingStatsTask, Long writeId,
+      int stmtId, boolean isInsertOverwrite) throws HiveException {
     Path tblDataLocationPath =  tbl.getDataLocation();
     boolean isMmTableWrite = AcidUtils.isInsertOnlyTable(tbl.getParameters());
     assert tbl.getPath() != null : "null==getPath() for " + tbl.getTableName();
     boolean isFullAcidTable = AcidUtils.isFullAcidTable(tbl);
+    boolean isTxnTable = AcidUtils.isTransactionalTable(tbl);
     try {
       // Get the partition object if it already exists
       Partition oldPart = getPartition(tbl, partSpec, false);
@@ -1742,35 +1719,31 @@ public class Hive {
         }
       } else {
         // Either a non-MM query, or a load into MM table from an external source.
-        PathFilter filter = FileUtils.HIDDEN_FILES_PATH_FILTER;
         Path destPath = newPartPath;
         if (isMmTableWrite) {
-          // We will load into MM directory, and delete from the parent if needed.
-          // TODO: this looks invalid after ACID integration. What about base dirs?
-          destPath = new Path(destPath, AcidUtils.deltaSubdir(writeId, writeId, stmtId));
-          // TODO: loadFileType for MM table will no longer be REPLACE_ALL
-          filter = (loadFileType == LoadFileType.REPLACE_ALL)
-            ? new JavaUtils.IdPathFilter(writeId, stmtId, false, true) : filter;
+          assert !isAcidIUDoperation;
+          // We will load into MM directory, and hide previous directories if needed.
+          destPath = new Path(destPath, isInsertOverwrite
+              ? AcidUtils.baseDir(writeId) : AcidUtils.deltaSubdir(writeId, writeId, stmtId));
         }
-        else if(!isAcidIUDoperation && isFullAcidTable) {
+        if (!isAcidIUDoperation && isFullAcidTable) {
           destPath = fixFullAcidPathForLoadData(loadFileType, destPath, writeId, stmtId, tbl);
         }
         if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
           Utilities.FILE_OP_LOGGER.trace("moving " + loadPath + " to " + destPath);
         }
-        //todo: why is "&& !isAcidIUDoperation" needed here?
-        if (!isFullAcidTable && ((loadFileType == LoadFileType.REPLACE_ALL) || (oldPart == null && !isAcidIUDoperation))) {
+        // TODO: why is "&& !isAcidIUDoperation" needed here?
+        if (!isTxnTable && ((loadFileType == LoadFileType.REPLACE_ALL) || (oldPart == null && !isAcidIUDoperation))) {
           //for fullAcid tables we don't delete files for commands with OVERWRITE - we create a new
           // base_x.  (there is Insert Overwrite and Load Data Overwrite)
           boolean isAutoPurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge"));
-          // TODO: this should never run for MM tables anymore. Remove the flag, and maybe the filter?
-          replaceFiles(tbl.getPath(), loadPath, destPath, oldPartPath, getConf(),
-              isSrcLocal, isAutoPurge, newFiles, filter, isMmTableWrite, !tbl.isTemporary());
+          replaceFiles(tbl.getPath(), loadPath, destPath, oldPartPath, getConf(), isSrcLocal,
+              isAutoPurge, newFiles, FileUtils.HIDDEN_FILES_PATH_FILTER, !tbl.isTemporary());
         } else {
           FileSystem fs = tbl.getDataLocation().getFileSystem(conf);
           copyFiles(conf, loadPath, destPath, fs, isSrcLocal, isAcidIUDoperation,
-            (loadFileType == LoadFileType.OVERWRITE_EXISTING), newFiles, tbl.getNumBuckets() > 0,
-                  isFullAcidTable);
+              (loadFileType == LoadFileType.OVERWRITE_EXISTING), newFiles,
+              tbl.getNumBuckets() > 0, isFullAcidTable);
         }
       }
       perfLogger.PerfLogEnd("MoveTask", "FileMoves");
@@ -1814,7 +1787,7 @@ public class Hive {
         }
         // Note: we are creating a brand new the partition, so this is going to be valid for ACID.
         List<FileStatus> filesForStats = null;
-        if (isFullAcidTable || isMmTableWrite) {
+        if (isTxnTable) {
           filesForStats = AcidUtils.getAcidFilesForStats(
               newTPart.getTable(), newPartPath, conf, null);
         } else {
@@ -2149,8 +2122,8 @@ private void constructOneLBLocationMap(FileStatus fSta,
   public Map<Map<String, String>, Partition> loadDynamicPartitions(final Path loadPath,
       final String tableName, final Map<String, String> partSpec, final LoadFileType loadFileType,
       final int numDP, final int numLB, final boolean isAcid, final long writeId, final int stmtId,
-      final boolean hasFollowingStatsTask, final AcidUtils.Operation operation, boolean isInsertOverwrite)
-      throws HiveException {
+      final boolean hasFollowingStatsTask, final AcidUtils.Operation operation,
+      boolean isInsertOverwrite) throws HiveException {
 
     final Map<Map<String, String>, Partition> partitionsMap =
         Collections.synchronizedMap(new LinkedHashMap<Map<String, String>, Partition>());
@@ -2197,9 +2170,9 @@ private void constructOneLBLocationMap(FileStatus fSta,
               LOG.info("New loading path = " + partPath + " with partSpec " + fullPartSpec);
 
               // load the partition
-              Partition newPartition = loadPartition(partPath, tbl, fullPartSpec,
-                  loadFileType, true, numLB > 0,
-                  false, isAcid, hasFollowingStatsTask, writeId, stmtId);
+              Partition newPartition = loadPartition(partPath, tbl, fullPartSpec, loadFileType,
+                  true, numLB > 0, false, isAcid, hasFollowingStatsTask, writeId, stmtId,
+                  isInsertOverwrite);
               partitionsMap.put(fullPartSpec, newPartition);
 
               if (inPlaceEligible) {
@@ -2293,10 +2266,11 @@ private void constructOneLBLocationMap(FileStatus fSta,
    */
   public void loadTable(Path loadPath, String tableName, LoadFileType loadFileType, boolean isSrcLocal,
       boolean isSkewedStoreAsSubdir, boolean isAcidIUDoperation, boolean hasFollowingStatsTask,
-      Long writeId, int stmtId) throws HiveException {
+      Long writeId, int stmtId, boolean isInsertOverwrite) throws HiveException {
     List<Path> newFiles = null;
     Table tbl = getTable(tableName);
     assert tbl.getPath() != null : "null==getPath() for " + tbl.getTableName();
+    boolean isTxnTable = AcidUtils.isTransactionalTable(tbl);
     boolean isMmTable = AcidUtils.isInsertOnlyTable(tbl);
     boolean isFullAcidTable = AcidUtils.isFullAcidTable(tbl);
     if (conf.getBoolVar(ConfVars.FIRE_EVENTS_FOR_DML) && !tbl.isTemporary()) {
@@ -2319,34 +2293,28 @@ private void constructOneLBLocationMap(FileStatus fSta,
       // Either a non-MM query, or a load into MM table from an external source.
       Path tblPath = tbl.getPath();
       Path destPath = tblPath;
-      PathFilter filter = FileUtils.HIDDEN_FILES_PATH_FILTER;
       if (isMmTable) {
         assert !isAcidIUDoperation;
-        // We will load into MM directory, and delete from the parent if needed.
-        // TODO: this looks invalid after ACID integration. What about base dirs?
-        destPath = new Path(destPath, AcidUtils.deltaSubdir(writeId, writeId, stmtId));
-        // TODO: loadFileType for MM table will no longer be REPLACE_ALL
-        filter = loadFileType == LoadFileType.REPLACE_ALL
-            ? new JavaUtils.IdPathFilter(writeId, stmtId, false, true) : filter;
+        // We will load into MM directory, and hide previous directories if needed.
+        destPath = new Path(destPath, isInsertOverwrite
+            ? AcidUtils.baseDir(writeId) : AcidUtils.deltaSubdir(writeId, writeId, stmtId));
       }
-      else if(!isAcidIUDoperation && isFullAcidTable) {
+      if (!isAcidIUDoperation && isFullAcidTable) {
         destPath = fixFullAcidPathForLoadData(loadFileType, destPath, writeId, stmtId, tbl);
       }
       Utilities.FILE_OP_LOGGER.debug("moving " + loadPath + " to " + tblPath
           + " (replace = " + loadFileType + ")");
-      if (loadFileType == LoadFileType.REPLACE_ALL && !isFullAcidTable) {
+      if (loadFileType == LoadFileType.REPLACE_ALL && !isTxnTable) {
         //for fullAcid we don't want to delete any files even for OVERWRITE see HIVE-14988/HIVE-17361
-        //todo:  should probably do the same for MM IOW
         boolean isAutopurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge"));
-        // TODO: this should never run for MM tables anymore. Remove the flag, and maybe the filter?
-        replaceFiles(tblPath, loadPath, destPath, tblPath,
-            conf, isSrcLocal, isAutopurge, newFiles, filter, isMmTable?true:false, !tbl.isTemporary());
+        replaceFiles(tblPath, loadPath, destPath, tblPath, conf, isSrcLocal, isAutopurge,
+            newFiles, FileUtils.HIDDEN_FILES_PATH_FILTER, !tbl.isTemporary());
       } else {
         try {
           FileSystem fs = tbl.getDataLocation().getFileSystem(conf);
           copyFiles(conf, loadPath, destPath, fs, isSrcLocal, isAcidIUDoperation,
-            loadFileType == LoadFileType.OVERWRITE_EXISTING, newFiles,
-                  tbl.getNumBuckets() > 0 ? true : false, isFullAcidTable);
+              loadFileType == LoadFileType.OVERWRITE_EXISTING, newFiles,
+              tbl.getNumBuckets() > 0 ? true : false, isFullAcidTable);
         } catch (IOException e) {
           throw new HiveException("addFiles: filesystem error in check phase", e);
         }
@@ -4006,7 +3974,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
    */
   protected void replaceFiles(Path tablePath, Path srcf, Path destf, Path oldPath, HiveConf conf,
           boolean isSrcLocal, boolean purge, List<Path> newFiles, PathFilter deletePathFilter,
-          boolean isMmTableOverwrite, boolean isNeedRecycle) throws HiveException {
+          boolean isNeedRecycle) throws HiveException {
     try {
 
       FileSystem destFs = destf.getFileSystem(conf);
@@ -4025,9 +3993,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
       }
 
       if (oldPath != null) {
-        // Note: we assume lbLevels is 0 here. Same as old code for non-MM.
-        //       For MM tables, this can only be a LOAD command. Does LOAD even support LB?
-        deleteOldPathForReplace(destf, oldPath, conf, purge, deletePathFilter, isMmTableOverwrite, 0, isNeedRecycle);
+        deleteOldPathForReplace(destf, oldPath, conf, purge, deletePathFilter, isNeedRecycle);
       }
 
       // first call FileUtils.mkdir to make sure that destf directory exists, if not, it creates
@@ -4073,7 +4039,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
   }
 
   private void deleteOldPathForReplace(Path destPath, Path oldPath, HiveConf conf, boolean purge,
-      PathFilter pathFilter, boolean isMmTableOverwrite, int lbLevels, boolean isNeedRecycle) throws HiveException {
+      PathFilter pathFilter, boolean isNeedRecycle) throws HiveException {
     Utilities.FILE_OP_LOGGER.debug("Deleting old paths for replace in " + destPath
         + " and old path " + oldPath);
     boolean isOldPathUnderDestf = false;
@@ -4085,13 +4051,11 @@ private void constructOneLBLocationMap(FileStatus fSta,
       // But not sure why we changed not to delete the oldPath in HIVE-8750 if it is
       // not the destf or its subdir?
       isOldPathUnderDestf = isSubDir(oldPath, destPath, oldFs, destFs, false);
-      if (isOldPathUnderDestf || isMmTableOverwrite) {
-        if (lbLevels == 0 || !isMmTableOverwrite) {
-          cleanUpOneDirectoryForReplace(oldPath, oldFs, pathFilter, conf, purge, isNeedRecycle);
-        }
+      if (isOldPathUnderDestf) {
+        cleanUpOneDirectoryForReplace(oldPath, oldFs, pathFilter, conf, purge, isNeedRecycle);
       }
     } catch (IOException e) {
-      if (isOldPathUnderDestf || isMmTableOverwrite) {
+      if (isOldPathUnderDestf) {
         // if oldPath is a subdir of destf but it could not be cleaned
         throw new HiveException("Directory " + oldPath.toString()
             + " could not be cleaned up.", e);

http://git-wip-us.apache.org/repos/asf/hive/blob/25912f7b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
index e49089b..f9bdffe 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
@@ -319,7 +319,6 @@ public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
       ensureFileFormatsMatch(ts, files, fromURI);
     }
     inputs.add(toReadEntity(new Path(fromURI)));
-    Task<? extends Serializable> rTask = null;
 
     // create final load/move work
 
@@ -355,7 +354,8 @@ public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
 
     Long writeId = null;
     int stmtId = -1;
-    if (AcidUtils.isTransactionalTable(ts.tableHandle)) {
+    boolean isTxnTable = AcidUtils.isTransactionalTable(ts.tableHandle);
+    if (isTxnTable) {
       try {
         writeId = SessionState.get().getTxnMgr().getTableWriteId(ts.tableHandle.getDbName(),
                 ts.tableHandle.getTableName());
@@ -368,10 +368,11 @@ public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
     // Note: this sets LoadFileType incorrectly for ACID; is that relevant for load?
     //       See setLoadFileType and setIsAcidIow calls elsewhere for an example.
     LoadTableDesc loadTableWork = new LoadTableDesc(new Path(fromURI),
-      Utilities.getTableDesc(ts.tableHandle), partSpec,
-      isOverWrite ? LoadFileType.REPLACE_ALL : LoadFileType.KEEP_EXISTING, writeId);
+      Utilities.getTableDesc(ts.tableHandle), partSpec, isOverWrite
+        ? LoadFileType.REPLACE_ALL : LoadFileType.KEEP_EXISTING, writeId);
     loadTableWork.setStmtId(stmtId);
-    if (preservePartitionSpecs){
+    loadTableWork.setInsertOverwrite(isOverWrite);
+    if (preservePartitionSpecs) {
       // Note : preservePartitionSpecs=true implies inheritTableSpecs=false but
       // but preservePartitionSpecs=false(default) here is not sufficient enough
       // info to set inheritTableSpecs=true
@@ -382,13 +383,8 @@ public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
         new MoveWork(getInputs(), getOutputs(), loadTableWork, null, true,
             isLocal)
     );
-    if (rTask != null) {
-      rTask.addDependentTask(childTask);
-    } else {
-      rTask = childTask;
-    }
 
-    rootTasks.add(rTask);
+    rootTasks.add(childTask);
 
     // The user asked for stats to be collected.
     // Some stats like number of rows require a scan of the data

http://git-wip-us.apache.org/repos/asf/hive/blob/25912f7b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index b0dfc48..e108684 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -142,7 +142,7 @@ public class TestExecDriver extends TestCase {
         db.createTable(src, cols, null, TextInputFormat.class,
             HiveIgnoreKeyTextOutputFormat.class);
         db.loadTable(hadoopDataFile[i], src, LoadFileType.KEEP_EXISTING,
-           true, false, false, false, null, 0);
+           true, false, false, false, null, 0, false);
         i++;
       }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/25912f7b/ql/src/test/results/clientpositive/llap/mm_loaddata.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/mm_loaddata.q.out b/ql/src/test/results/clientpositive/llap/mm_loaddata.q.out
new file mode 100644
index 0000000..b849a88
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/mm_loaddata.q.out
@@ -0,0 +1,296 @@
+PREHOOK: query: drop table load0_mm
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table load0_mm
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table load0_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@load0_mm
+POSTHOOK: query: create table load0_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@load0_mm
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table load0_mm
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load0_mm
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table load0_mm
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load0_mm
+PREHOOK: query: select count(1) from load0_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load0_mm
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load0_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load0_mm
+#### A masked pattern was here ####
+500
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table load0_mm
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load0_mm
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table load0_mm
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load0_mm
+PREHOOK: query: select count(1) from load0_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load0_mm
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load0_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load0_mm
+#### A masked pattern was here ####
+1000
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' overwrite into table load0_mm
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load0_mm
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' overwrite into table load0_mm
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load0_mm
+PREHOOK: query: select count(1) from load0_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load0_mm
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load0_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load0_mm
+#### A masked pattern was here ####
+500
+PREHOOK: query: drop table load0_mm
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@load0_mm
+PREHOOK: Output: default@load0_mm
+POSTHOOK: query: drop table load0_mm
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@load0_mm
+POSTHOOK: Output: default@load0_mm
+PREHOOK: query: drop table intermediate2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table intermediate2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table intermediate2 (key string, value string) stored as textfile
+#### A masked pattern was here ####
+PREHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+PREHOOK: Output: database:default
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: create table intermediate2 (key string, value string) stored as textfile
+#### A masked pattern was here ####
+POSTHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: drop table load1_mm
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table load1_mm
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table load1_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@load1_mm
+POSTHOOK: query: create table load1_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@load1_mm
+#### A masked pattern was here ####
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load1_mm
+#### A masked pattern was here ####
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load1_mm
+PREHOOK: query: select count(1) from load1_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load1_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load1_mm
+#### A masked pattern was here ####
+1000
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+#### A masked pattern was here ####
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load1_mm
+PREHOOK: query: select count(1) from load1_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load1_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load1_mm
+#### A masked pattern was here ####
+1050
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+#### A masked pattern was here ####
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load1_mm
+PREHOOK: query: select count(1) from load1_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load1_mm
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load1_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load1_mm
+#### A masked pattern was here ####
+500
+PREHOOK: query: drop table load1_mm
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@load1_mm
+PREHOOK: Output: default@load1_mm
+POSTHOOK: query: drop table load1_mm
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@load1_mm
+POSTHOOK: Output: default@load1_mm
+PREHOOK: query: drop table load2_mm
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table load2_mm
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table load2_mm (key string, value string)
+  partitioned by (k int, l int) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@load2_mm
+POSTHOOK: query: create table load2_mm (key string, value string)
+  partitioned by (k int, l int) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@load2_mm
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+PREHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@intermediate2
+#### A masked pattern was here ####
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@load2_mm
+#### A masked pattern was here ####
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@load2_mm
+POSTHOOK: Output: default@load2_mm@k=5/l=5
+PREHOOK: query: select count(1) from load2_mm
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load2_mm
+PREHOOK: Input: default@load2_mm@k=5/l=5
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from load2_mm
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load2_mm
+POSTHOOK: Input: default@load2_mm@k=5/l=5
+#### A masked pattern was here ####
+1025
+PREHOOK: query: drop table load2_mm
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@load2_mm
+PREHOOK: Output: default@load2_mm
+POSTHOOK: query: drop table load2_mm
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@load2_mm
+POSTHOOK: Output: default@load2_mm
+PREHOOK: query: drop table intermediate2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@intermediate2
+PREHOOK: Output: default@intermediate2
+POSTHOOK: query: drop table intermediate2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@intermediate2
+POSTHOOK: Output: default@intermediate2

http://git-wip-us.apache.org/repos/asf/hive/blob/25912f7b/ql/src/test/results/clientpositive/mm_loaddata.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/mm_loaddata.q.out b/ql/src/test/results/clientpositive/mm_loaddata.q.out
deleted file mode 100644
index b849a88..0000000
--- a/ql/src/test/results/clientpositive/mm_loaddata.q.out
+++ /dev/null
@@ -1,296 +0,0 @@
-PREHOOK: query: drop table load0_mm
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table load0_mm
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table load0_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@load0_mm
-POSTHOOK: query: create table load0_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@load0_mm
-PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table load0_mm
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load0_mm
-POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table load0_mm
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load0_mm
-PREHOOK: query: select count(1) from load0_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load0_mm
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load0_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load0_mm
-#### A masked pattern was here ####
-500
-PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table load0_mm
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load0_mm
-POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table load0_mm
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load0_mm
-PREHOOK: query: select count(1) from load0_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load0_mm
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load0_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load0_mm
-#### A masked pattern was here ####
-1000
-PREHOOK: query: load data local inpath '../../data/files/kv2.txt' overwrite into table load0_mm
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load0_mm
-POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' overwrite into table load0_mm
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load0_mm
-PREHOOK: query: select count(1) from load0_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load0_mm
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load0_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load0_mm
-#### A masked pattern was here ####
-500
-PREHOOK: query: drop table load0_mm
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@load0_mm
-PREHOOK: Output: default@load0_mm
-POSTHOOK: query: drop table load0_mm
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@load0_mm
-POSTHOOK: Output: default@load0_mm
-PREHOOK: query: drop table intermediate2
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table intermediate2
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table intermediate2 (key string, value string) stored as textfile
-#### A masked pattern was here ####
-PREHOOK: type: CREATETABLE
-#### A masked pattern was here ####
-PREHOOK: Output: database:default
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: create table intermediate2 (key string, value string) stored as textfile
-#### A masked pattern was here ####
-POSTHOOK: type: CREATETABLE
-#### A masked pattern was here ####
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: drop table load1_mm
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table load1_mm
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table load1_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@load1_mm
-POSTHOOK: query: create table load1_mm (key string, value string) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@load1_mm
-#### A masked pattern was here ####
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load1_mm
-#### A masked pattern was here ####
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load1_mm
-PREHOOK: query: select count(1) from load1_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load1_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load1_mm
-#### A masked pattern was here ####
-1000
-PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-#### A masked pattern was here ####
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load1_mm
-PREHOOK: query: select count(1) from load1_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load1_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load1_mm
-#### A masked pattern was here ####
-1050
-PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-#### A masked pattern was here ####
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load1_mm
-PREHOOK: query: select count(1) from load1_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load1_mm
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load1_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load1_mm
-#### A masked pattern was here ####
-500
-PREHOOK: query: drop table load1_mm
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@load1_mm
-PREHOOK: Output: default@load1_mm
-POSTHOOK: query: drop table load1_mm
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@load1_mm
-POSTHOOK: Output: default@load1_mm
-PREHOOK: query: drop table load2_mm
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table load2_mm
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table load2_mm (key string, value string)
-  partitioned by (k int, l int) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@load2_mm
-POSTHOOK: query: create table load2_mm (key string, value string)
-  partitioned by (k int, l int) stored as textfile tblproperties("transactional"="true", "transactional_properties"="insert_only")
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@load2_mm
-PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-PREHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' into table intermediate2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@intermediate2
-#### A masked pattern was here ####
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@load2_mm
-#### A masked pattern was here ####
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@load2_mm
-POSTHOOK: Output: default@load2_mm@k=5/l=5
-PREHOOK: query: select count(1) from load2_mm
-PREHOOK: type: QUERY
-PREHOOK: Input: default@load2_mm
-PREHOOK: Input: default@load2_mm@k=5/l=5
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from load2_mm
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@load2_mm
-POSTHOOK: Input: default@load2_mm@k=5/l=5
-#### A masked pattern was here ####
-1025
-PREHOOK: query: drop table load2_mm
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@load2_mm
-PREHOOK: Output: default@load2_mm
-POSTHOOK: query: drop table load2_mm
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@load2_mm
-POSTHOOK: Output: default@load2_mm
-PREHOOK: query: drop table intermediate2
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@intermediate2
-PREHOOK: Output: default@intermediate2
-POSTHOOK: query: drop table intermediate2
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@intermediate2
-POSTHOOK: Output: default@intermediate2