You are viewing a plain text version of this content. The canonical link for it is available in the original HTML version of this archived message.
Posted to commits@hive.apache.org by dk...@apache.org on 2023/01/19 09:22:30 UTC
[hive] branch master updated: HIVE-26802: Create qtests running QB compaction queries for ACID, insert-only and clustered tables (Zoltan Ratkai, reviewed by Denys Kuzmenko, Laszlo Vegh, Sourabh Badhya)
This is an automated email from the ASF dual-hosted git repository.
dkuzmenko pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new d9fc3cfe14d HIVE-26802: Create qtests running QB compaction queries for ACID, insert-only and clustered tables (Zoltan Ratkai, reviewed by Denys Kuzmenko, Laszlo Vegh, Sourabh Badhya)
d9fc3cfe14d is described below
commit d9fc3cfe14db4475b155d960432e3d063aa5ccd9
Author: zratkai <11...@users.noreply.github.com>
AuthorDate: Thu Jan 19 10:22:18 2023 +0100
HIVE-26802: Create qtests running QB compaction queries for ACID, insert-only and clustered tables (Zoltan Ratkai, reviewed by Denys Kuzmenko, Laszlo Vegh, Sourabh Badhya)
Closes #3882
---
.../cli/TestMiniLlapLocalCompactorCliDriver.java | 84 ++++++++++++
.../test/resources/testconfiguration.properties | 8 ++
.../hadoop/hive/cli/control/AbstractCliConfig.java | 11 ++
.../apache/hadoop/hive/cli/control/CliConfigs.java | 37 +++++-
.../hadoop/hive/cli/control/CoreCliDriver.java | 1 +
.../org/apache/hadoop/hive/ql/QTestArguments.java | 24 +++-
.../java/org/apache/hadoop/hive/ql/QTestUtil.java | 6 +
.../clientpositive/compaction_query_based.q | 27 ++++
.../compaction_query_based_clustered.q | 34 +++++
.../compaction_query_based_clustered_minor.q | 34 +++++
.../compaction_query_based_insert_only.q | 25 ++++
.../compaction_query_based_insert_only_minor.q | 25 ++++
.../clientpositive/compaction_query_based_minor.q | 26 ++++
.../llap/compaction_query_based.q.out | 68 ++++++++++
.../llap/compaction_query_based_clustered.q.out | 148 +++++++++++++++++++++
.../compaction_query_based_clustered_minor.q.out | 148 +++++++++++++++++++++
.../llap/compaction_query_based_insert_only.q.out | 68 ++++++++++
.../compaction_query_based_insert_only_minor.q.out | 68 ++++++++++
.../llap/compaction_query_based_minor.q.out | 68 ++++++++++
19 files changed, 908 insertions(+), 2 deletions(-)
diff --git a/itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniLlapLocalCompactorCliDriver.java b/itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniLlapLocalCompactorCliDriver.java
new file mode 100644
index 00000000000..8b829093855
--- /dev/null
+++ b/itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniLlapLocalCompactorCliDriver.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.cli;
+
+import org.apache.hadoop.hive.cli.control.CliAdapter;
+import org.apache.hadoop.hive.cli.control.CliConfigs;
+import org.apache.hadoop.hive.cli.control.SplitSupport;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.txn.compactor.Worker;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestRule;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import java.io.File;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+@RunWith(Parameterized.class)
+public class TestMiniLlapLocalCompactorCliDriver {
+
+ static CliAdapter adapter = new CliConfigs.MiniLlapLocalCompactorCliConfig().getCliAdapter();
+
+ private static int N_SPLITS = 32;
+
+ private static final AtomicBoolean stop = new AtomicBoolean();
+ private static Worker worker;
+ @Parameters(name = "{0}")
+ public static List<Object[]> getParameters() throws Exception {
+ return SplitSupport.process(adapter.getParameters(), TestMiniLlapLocalCompactorCliDriver.class, N_SPLITS);
+ }
+
+ @ClassRule
+ public static TestRule cliClassRule = adapter.buildClassRule();
+
+ @Rule
+ public TestRule cliTestRule = adapter.buildTestRule();
+
+ @BeforeClass
+ public static void setup() throws Exception {
+ worker = new Worker();
+ worker.setConf(SessionState.get().getConf());
+ stop.set(false);
+ worker.init(stop);
+ worker.start();
+ }
+
+ @AfterClass
+ public static void tearDown(){
+ stop.set(true);
+ }
+ private String name;
+ private File qfile;
+
+ public TestMiniLlapLocalCompactorCliDriver(String name, File qfile) {
+ this.name = name;
+ this.qfile = qfile;
+ }
+
+ @Test
+ public void testCliDriver() throws Exception {
+ adapter.runTest(name, qfile);
+ }
+}
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index aa8e1addca7..46c824bccac 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -416,3 +416,11 @@ iceberg.llap.query.files=\
iceberg.llap.only.query.files=\
llap_iceberg_read_orc.q,\
llap_iceberg_read_parquet.q
+
+compaction.query.files=\
+ compaction_query_based.q,\
+ compaction_query_based_clustered.q,\
+ compaction_query_based_clustered_minor.q,\
+ compaction_query_based_insert_only.q,\
+ compaction_query_based_insert_only_minor.q,\
+ compaction_query_based_minor.q
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
index cc31ce9b47e..6b1680d2f23 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
@@ -26,6 +26,7 @@ import java.net.URL;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashSet;
+import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
@@ -33,6 +34,7 @@ import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.QTestSystemProperties;
import org.apache.hadoop.hive.ql.QTestMiniClusters.FsType;
import org.apache.hadoop.hive.ql.QTestMiniClusters.MiniClusterType;
@@ -69,6 +71,7 @@ public abstract class AbstractCliConfig {
// moved...this may change
private Set<String> includeQueryFileNames;
private Class<? extends CliAdapter> cliAdapter;
+ private Map<HiveConf.ConfVars, String> customConfigValueMap;
public AbstractCliConfig(Class<? extends CliAdapter> adapter) {
cliAdapter = adapter;
@@ -415,4 +418,12 @@ public abstract class AbstractCliConfig {
protected void setMetastoreType(String metastoreType) {
this.metastoreType = metastoreType;
}
+
+ protected void setCustomConfigValueMap(Map<HiveConf.ConfVars, String> customConfigValueMap) {
+ this.customConfigValueMap = customConfigValueMap;
+ }
+
+ public Map<HiveConf.ConfVars, String> getCustomConfigValueMap() {
+ return this.customConfigValueMap;
+ }
}
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
index 340899873e0..4026f3a980c 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
@@ -20,7 +20,10 @@ package org.apache.hadoop.hive.cli.control;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.QTestMiniClusters;
import org.apache.hadoop.hive.ql.QTestMiniClusters.MiniClusterType;
import org.apache.hadoop.hive.ql.parse.CoreParseNegative;
@@ -222,7 +225,8 @@ public class CliConfigs {
excludesFrom(testConfigProps, "hive.kafka.query.files");
excludesFrom(testConfigProps, "erasurecoding.only.query.files");
excludesFrom(testConfigProps, "beeline.positive.include");
-
+ excludesFrom(testConfigProps, "compaction.query.files");
+
setResultsDir("ql/src/test/results/clientpositive/llap");
setLogDir("itests/qtest/target/qfile-results/clientpositive");
@@ -236,7 +240,38 @@ public class CliConfigs {
}
}
}
+
+ public static class MiniLlapLocalCompactorCliConfig extends AbstractCliConfig {
+
+ public MiniLlapLocalCompactorCliConfig() {
+ super(CoreCliDriver.class);
+ try {
+ setQueryDir("ql/src/test/queries/clientpositive");
+
+ includesFrom(testConfigProps, "compaction.query.files");
+ setResultsDir("ql/src/test/results/clientpositive/llap");
+ setLogDir("itests/qtest/target/qfile-results/clientpositive");
+
+ setInitScript("q_test_init.sql");
+ setCleanupScript("q_test_cleanup.sql");
+
+ setHiveConfDir("data/conf/llap");
+ setClusterType(MiniClusterType.LLAP_LOCAL);
+ setCustomConfigValueMap(createConfVarsStringMap());
+ } catch (Exception e) {
+ throw new RuntimeException("can't construct cliconfig", e);
+ }
+ }
+ private static Map<HiveConf.ConfVars, String> createConfVarsStringMap() {
+ Map<HiveConf.ConfVars,String> customConfigValueMap = new HashMap<>();
+ customConfigValueMap.put(HiveConf.ConfVars.COMPACTOR_CRUD_QUERY_BASED, "true");
+ customConfigValueMap.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, "true");
+ customConfigValueMap.put(HiveConf.ConfVars.HIVE_TXN_MANAGER, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
+ customConfigValueMap.put(HiveConf.ConfVars.HIVE_COMPACTOR_GATHER_STATS, "false");
+ return customConfigValueMap;
+ }
+ }
public static class EncryptedHDFSCliConfig extends AbstractCliConfig {
public EncryptedHDFSCliConfig() {
super(CoreCliDriver.class);
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
index c7430e8575b..8f4e9ad1a62 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
@@ -71,6 +71,7 @@ public class CoreCliDriver extends CliAdapter {
.withCleanupScript(cleanupScript)
.withLlapIo(true)
.withFsType(cliConfig.getFsType())
+ .withCustomConfigValueMap(this.cliConfig.getCustomConfigValueMap())
.build());
}
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestArguments.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestArguments.java
index 2941efab143..2bdb6ae5a0a 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestArguments.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestArguments.java
@@ -18,9 +18,13 @@
package org.apache.hadoop.hive.ql;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.QTestMiniClusters.FsType;
import org.apache.hadoop.hive.ql.QTestMiniClusters.QTestSetup;
+import java.util.HashMap;
+import java.util.Map;
+
/**
* QTestArguments composite used as arguments holder for QTestUtil initialization.
*/
@@ -36,6 +40,8 @@ public final class QTestArguments {
private FsType fsType;
private QTestSetup qtestSetup;
+ private Map<HiveConf.ConfVars,String> customConfigValueMap;
+
private QTestArguments() {
}
@@ -111,6 +117,14 @@ public final class QTestArguments {
this.qtestSetup = qtestSetup;
}
+ private void setCustomConfigValueMap(Map<HiveConf.ConfVars,String> customConfigValueMap){
+ this.customConfigValueMap = customConfigValueMap;
+ }
+
+ public Map<HiveConf.ConfVars, String> getCustomConfs() {
+ return this.customConfigValueMap;
+ }
+
/**
* QTestArgumentsBuilder used for QTestArguments construction.
*/
@@ -126,7 +140,9 @@ public final class QTestArguments {
private FsType fsType;
private QTestSetup qtestSetup;
- private QTestArgumentsBuilder(){
+ private Map<HiveConf.ConfVars, String> customConfigValueMap;
+
+ private QTestArgumentsBuilder() {
}
public static QTestArgumentsBuilder instance() {
@@ -178,6 +194,11 @@ public final class QTestArguments {
return this;
}
+ public QTestArgumentsBuilder withCustomConfigValueMap(Map<HiveConf.ConfVars, String> customConfigValueMap) {
+ this. customConfigValueMap = customConfigValueMap;
+ return this;
+ }
+
public QTestArguments build() {
QTestArguments testArguments = new QTestArguments();
testArguments.setOutDir(outDir);
@@ -194,6 +215,7 @@ public final class QTestArguments {
testArguments.setQTestSetup(
qtestSetup != null ? qtestSetup : new QTestSetup());
+ testArguments.setCustomConfigValueMap(customConfigValueMap != null ? customConfigValueMap : new HashMap<>());
return testArguments;
}
}
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 120b74d8853..e9c86372bc9 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -36,6 +36,7 @@ import java.util.Deque;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
+import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Matcher;
@@ -214,6 +215,7 @@ public class QTestUtil {
System.setProperty("hive.query.max.length", "100Mb");
conf = new HiveConf(IDriver.class);
+ setCustomConfs(conf, testArgs.getCustomConfs());
setMetaStoreProperties();
final String scriptsDir = getScriptsDir(conf);
@@ -243,6 +245,10 @@ public class QTestUtil {
}
+ private void setCustomConfs(HiveConf conf, Map<ConfVars,String> customConfigValueMap) {
+ customConfigValueMap.entrySet().forEach(item-> conf.set(item.getKey().varname, item.getValue()));
+ }
+
private void logClassPath() {
String classpath = System.getProperty("java.class.path");
String[] classpathEntries = classpath.split(File.pathSeparator);
diff --git a/ql/src/test/queries/clientpositive/compaction_query_based.q b/ql/src/test/queries/clientpositive/compaction_query_based.q
new file mode 100644
index 00000000000..90d85466a19
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/compaction_query_based.q
@@ -0,0 +1,27 @@
+--! qt:replace:/createTime:(\d+)/#Masked#/
+--! qt:replace:/location:(\S+)/#Masked#/
+--! qt:replace:/lastAccessTime:(\d+)/#Masked#/
+--! qt:replace:/ownerType:(\S*)/#Masked#/
+--! qt:replace:/owner:(\S*)/#Masked#/
+--! qt:replace:/skewedColValueLocationMaps:(\S*)/#Masked#/
+--! qt:replace:/transient_lastDdlTime=(\d+)/#Masked#/
+--! qt:replace:/totalSize=(\d+)/#Masked#/
+--! qt:replace:/rawDataSize=(\d+)/#Masked#/
+--! qt:replace:/writeId:(\d+)/#Masked#/
+--! qt:replace:/bucketing_version=(\d+)/#Masked#/
+--! qt:replace:/id:(\d+)/#Masked#/
+
+
+drop table orc_table;
+
+create table orc_table (a int, b string) stored as orc TBLPROPERTIES('transactional'='true');
+
+insert into orc_table values('1', 'text1');
+insert into orc_table values('2', 'text2');
+insert into orc_table values('3', 'text3');
+
+alter table orc_table compact 'MAJOR' and wait;
+analyze table orc_table compute statistics;
+
+describe extended orc_table;
+
diff --git a/ql/src/test/queries/clientpositive/compaction_query_based_clustered.q b/ql/src/test/queries/clientpositive/compaction_query_based_clustered.q
new file mode 100644
index 00000000000..1430797c210
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/compaction_query_based_clustered.q
@@ -0,0 +1,34 @@
+--! qt:replace:/createTime:(\d+)/#Masked#/
+--! qt:replace:/location:(\S+)/#Masked#/
+--! qt:replace:/lastAccessTime:(\d+)/#Masked#/
+--! qt:replace:/ownerType:(\S*)/#Masked#/
+--! qt:replace:/owner:(\S*)/#Masked#/
+--! qt:replace:/skewedColValueLocationMaps:(\S*)/#Masked#/
+--! qt:replace:/transient_lastDdlTime=(\d+)/#Masked#/
+--! qt:replace:/totalSize=(\d+)/#Masked#/
+--! qt:replace:/rawDataSize=(\d+)/#Masked#/
+--! qt:replace:/writeId:(\d+)/#Masked#/
+--! qt:replace:/bucketing_version=(\d+)/#Masked#/
+--! qt:replace:/id:(\d+)/#Masked#/
+
+drop table orc_table;
+
+create table orc_table (a int, b string) clustered by (a) into 3 buckets stored as orc TBLPROPERTIES('transactional'='true');
+
+insert into orc_table values('1', 'text1');
+insert into orc_table values('2', 'text2');
+insert into orc_table values('3', 'text3');
+insert into orc_table values('4', 'text4');
+insert into orc_table values('5', 'text5');
+insert into orc_table values('6', 'text6');
+insert into orc_table values('7', 'text7');
+insert into orc_table values('8', 'text8');
+insert into orc_table values('9', 'text9');
+insert into orc_table values('10', 'text10');
+
+describe extended orc_table;
+alter table orc_table compact 'MAJOR' and wait;
+analyze table orc_table compute statistics;
+
+describe extended orc_table;
+
diff --git a/ql/src/test/queries/clientpositive/compaction_query_based_clustered_minor.q b/ql/src/test/queries/clientpositive/compaction_query_based_clustered_minor.q
new file mode 100644
index 00000000000..48a0de1e76d
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/compaction_query_based_clustered_minor.q
@@ -0,0 +1,34 @@
+--! qt:replace:/createTime:(\d+)/#Masked#/
+--! qt:replace:/location:(\S+)/#Masked#/
+--! qt:replace:/lastAccessTime:(\d+)/#Masked#/
+--! qt:replace:/ownerType:(\S*)/#Masked#/
+--! qt:replace:/owner:(\S*)/#Masked#/
+--! qt:replace:/skewedColValueLocationMaps:(\S*)/#Masked#/
+--! qt:replace:/transient_lastDdlTime=(\d+)/#Masked#/
+--! qt:replace:/totalSize=(\d+)/#Masked#/
+--! qt:replace:/rawDataSize=(\d+)/#Masked#/
+--! qt:replace:/writeId:(\d+)/#Masked#/
+--! qt:replace:/bucketing_version=(\d+)/#Masked#/
+--! qt:replace:/id:(\d+)/#Masked#/
+
+drop table orc_table;
+
+create table orc_table (a int, b string) clustered by (a) into 3 buckets stored as orc TBLPROPERTIES('transactional'='true');
+
+insert into orc_table values('1', 'text1');
+insert into orc_table values('2', 'text2');
+insert into orc_table values('3', 'text3');
+insert into orc_table values('4', 'text4');
+insert into orc_table values('5', 'text5');
+insert into orc_table values('6', 'text6');
+insert into orc_table values('7', 'text7');
+insert into orc_table values('8', 'text8');
+insert into orc_table values('9', 'text9');
+insert into orc_table values('10', 'text10');
+
+describe extended orc_table;
+alter table orc_table compact 'MINOR' and wait;
+analyze table orc_table compute statistics;
+
+describe extended orc_table;
+
diff --git a/ql/src/test/queries/clientpositive/compaction_query_based_insert_only.q b/ql/src/test/queries/clientpositive/compaction_query_based_insert_only.q
new file mode 100644
index 00000000000..06b9771ec1f
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/compaction_query_based_insert_only.q
@@ -0,0 +1,25 @@
+--! qt:replace:/createTime:(\d+)/#Masked#/
+--! qt:replace:/location:(\S+)/#Masked#/
+--! qt:replace:/lastAccessTime:(\d+)/#Masked#/
+--! qt:replace:/ownerType:(\S*)/#Masked#/
+--! qt:replace:/owner:(\S*)/#Masked#/
+--! qt:replace:/skewedColValueLocationMaps:(\S*)/#Masked#/
+--! qt:replace:/transient_lastDdlTime=(\d+)/#Masked#/
+--! qt:replace:/totalSize=(\d+)/#Masked#/
+--! qt:replace:/rawDataSize=(\d+)/#Masked#/
+--! qt:replace:/writeId:(\d+)/#Masked#/
+--! qt:replace:/bucketing_version=(\d+)/#Masked#/
+--! qt:replace:/id:(\d+)/#Masked#/
+
+drop table orc_table;
+
+create table orc_table (a int, b string) stored as orc TBLPROPERTIES('transactional'='true', 'transactional_properties'='insert_only');
+
+insert into orc_table values('1', 'text1');
+insert into orc_table values('2', 'text2');
+insert into orc_table values('3', 'text3');
+
+alter table orc_table compact 'MAJOR' and wait;
+analyze table orc_table compute statistics;
+
+describe extended orc_table;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientpositive/compaction_query_based_insert_only_minor.q b/ql/src/test/queries/clientpositive/compaction_query_based_insert_only_minor.q
new file mode 100644
index 00000000000..9c8c49abcb8
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/compaction_query_based_insert_only_minor.q
@@ -0,0 +1,25 @@
+--! qt:replace:/createTime:(\d+)/#Masked#/
+--! qt:replace:/location:(\S+)/#Masked#/
+--! qt:replace:/lastAccessTime:(\d+)/#Masked#/
+--! qt:replace:/ownerType:(\S*)/#Masked#/
+--! qt:replace:/owner:(\S*)/#Masked#/
+--! qt:replace:/skewedColValueLocationMaps:(\S*)/#Masked#/
+--! qt:replace:/transient_lastDdlTime=(\d+)/#Masked#/
+--! qt:replace:/totalSize=(\d+)/#Masked#/
+--! qt:replace:/rawDataSize=(\d+)/#Masked#/
+--! qt:replace:/writeId:(\d+)/#Masked#/
+--! qt:replace:/bucketing_version=(\d+)/#Masked#/
+--! qt:replace:/id:(\d+)/#Masked#/
+
+drop table orc_table;
+
+create table orc_table (a int, b string) stored as orc TBLPROPERTIES('transactional'='true', 'transactional_properties'='insert_only');
+
+insert into orc_table values('1', 'text1');
+insert into orc_table values('2', 'text2');
+insert into orc_table values('3', 'text3');
+
+alter table orc_table compact 'MINOR' and wait;
+analyze table orc_table compute statistics;
+
+describe extended orc_table;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientpositive/compaction_query_based_minor.q b/ql/src/test/queries/clientpositive/compaction_query_based_minor.q
new file mode 100644
index 00000000000..d4dd7976729
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/compaction_query_based_minor.q
@@ -0,0 +1,26 @@
+--! qt:replace:/createTime:(\d+)/#Masked#/
+--! qt:replace:/location:(\S+)/#Masked#/
+--! qt:replace:/lastAccessTime:(\d+)/#Masked#/
+--! qt:replace:/ownerType:(\S*)/#Masked#/
+--! qt:replace:/owner:(\S*)/#Masked#/
+--! qt:replace:/skewedColValueLocationMaps:(\S*)/#Masked#/
+--! qt:replace:/transient_lastDdlTime=(\d+)/#Masked#/
+--! qt:replace:/totalSize=(\d+)/#Masked#/
+--! qt:replace:/rawDataSize=(\d+)/#Masked#/
+--! qt:replace:/writeId:(\d+)/#Masked#/
+--! qt:replace:/bucketing_version=(\d+)/#Masked#/
+--! qt:replace:/id:(\d+)/#Masked#/
+
+drop table orc_table;
+
+create table orc_table (a int, b string) stored as orc TBLPROPERTIES('transactional'='true');
+
+insert into orc_table values('1', 'text1');
+insert into orc_table values('2', 'text2');
+insert into orc_table values('3', 'text3');
+
+alter table orc_table compact 'MINOR' and wait;
+analyze table orc_table compute statistics;
+
+describe extended orc_table;
+
diff --git a/ql/src/test/results/clientpositive/llap/compaction_query_based.q.out b/ql/src/test/results/clientpositive/llap/compaction_query_based.q.out
new file mode 100644
index 00000000000..ca65111bbb8
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/compaction_query_based.q.out
@@ -0,0 +1,68 @@
+PREHOOK: query: drop table orc_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table orc_table
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table orc_table (a int, b string) stored as orc TBLPROPERTIES('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: create table orc_table (a int, b string) stored as orc TBLPROPERTIES('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: insert into orc_table values('1', 'text1')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('1', 'text1')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('2', 'text2')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('2', 'text2')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('3', 'text3')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('3', 'text3')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: alter table orc_table compact 'MAJOR' and wait
+PREHOOK: type: ALTERTABLE_COMPACT
+PREHOOK: Input: default@orc_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: alter table orc_table compact 'MAJOR' and wait
+POSTHOOK: type: ALTERTABLE_COMPACT
+POSTHOOK: Input: default@orc_table
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: analyze table orc_table compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: analyze table orc_table compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: describe extended orc_table
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@orc_table
+POSTHOOK: query: describe extended orc_table
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@orc_table
+a int
+b string
+
+Detailed Table Information Table(tableName:orc_table, dbName:default, #Masked# #Masked#, #Masked#, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:string, comment:null)], #Masked# inputFormat:org.apache.hadoop.hive.ql.io.orc.OrcInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.ql.io.orc.OrcSerde, pa [...]
diff --git a/ql/src/test/results/clientpositive/llap/compaction_query_based_clustered.q.out b/ql/src/test/results/clientpositive/llap/compaction_query_based_clustered.q.out
new file mode 100644
index 00000000000..98c213c40f7
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/compaction_query_based_clustered.q.out
@@ -0,0 +1,148 @@
+PREHOOK: query: drop table orc_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table orc_table
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table orc_table (a int, b string) clustered by (a) into 3 buckets stored as orc TBLPROPERTIES('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: create table orc_table (a int, b string) clustered by (a) into 3 buckets stored as orc TBLPROPERTIES('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: insert into orc_table values('1', 'text1')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('1', 'text1')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('2', 'text2')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('2', 'text2')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('3', 'text3')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('3', 'text3')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('4', 'text4')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('4', 'text4')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('5', 'text5')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('5', 'text5')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('6', 'text6')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('6', 'text6')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('7', 'text7')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('7', 'text7')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('8', 'text8')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('8', 'text8')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('9', 'text9')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('9', 'text9')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('10', 'text10')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('10', 'text10')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: describe extended orc_table
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@orc_table
+POSTHOOK: query: describe extended orc_table
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@orc_table
+a int
+b string
+
+Detailed Table Information Table(tableName:orc_table, dbName:default, #Masked# #Masked#, #Masked#, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:string, comment:null)], #Masked# inputFormat:org.apache.hadoop.hive.ql.io.orc.OrcInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat, compressed:false, numBuckets:3, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.ql.io.orc.OrcSerde, par [...]
+PREHOOK: query: alter table orc_table compact 'MAJOR' and wait
+PREHOOK: type: ALTERTABLE_COMPACT
+PREHOOK: Input: default@orc_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: alter table orc_table compact 'MAJOR' and wait
+POSTHOOK: type: ALTERTABLE_COMPACT
+POSTHOOK: Input: default@orc_table
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: analyze table orc_table compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: analyze table orc_table compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: describe extended orc_table
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@orc_table
+POSTHOOK: query: describe extended orc_table
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@orc_table
+a int
+b string
+
+Detailed Table Information Table(tableName:orc_table, dbName:default, #Masked# #Masked#, #Masked#, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:string, comment:null)], #Masked# inputFormat:org.apache.hadoop.hive.ql.io.orc.OrcInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat, compressed:false, numBuckets:3, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.ql.io.orc.OrcSerde, par [...]
diff --git a/ql/src/test/results/clientpositive/llap/compaction_query_based_clustered_minor.q.out b/ql/src/test/results/clientpositive/llap/compaction_query_based_clustered_minor.q.out
new file mode 100644
index 00000000000..7731c4ef35c
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/compaction_query_based_clustered_minor.q.out
@@ -0,0 +1,148 @@
+PREHOOK: query: drop table orc_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table orc_table
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table orc_table (a int, b string) clustered by (a) into 3 buckets stored as orc TBLPROPERTIES('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: create table orc_table (a int, b string) clustered by (a) into 3 buckets stored as orc TBLPROPERTIES('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: insert into orc_table values('1', 'text1')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('1', 'text1')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('2', 'text2')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('2', 'text2')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('3', 'text3')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('3', 'text3')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('4', 'text4')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('4', 'text4')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('5', 'text5')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('5', 'text5')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('6', 'text6')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('6', 'text6')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('7', 'text7')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('7', 'text7')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('8', 'text8')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('8', 'text8')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('9', 'text9')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('9', 'text9')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('10', 'text10')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('10', 'text10')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: describe extended orc_table
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@orc_table
+POSTHOOK: query: describe extended orc_table
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@orc_table
+a int
+b string
+
+Detailed Table Information Table(tableName:orc_table, dbName:default, #Masked# #Masked#, #Masked#, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:string, comment:null)], #Masked# inputFormat:org.apache.hadoop.hive.ql.io.orc.OrcInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat, compressed:false, numBuckets:3, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.ql.io.orc.OrcSerde, par [...]
+PREHOOK: query: alter table orc_table compact 'MINOR' and wait
+PREHOOK: type: ALTERTABLE_COMPACT
+PREHOOK: Input: default@orc_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: alter table orc_table compact 'MINOR' and wait
+POSTHOOK: type: ALTERTABLE_COMPACT
+POSTHOOK: Input: default@orc_table
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: analyze table orc_table compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: analyze table orc_table compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: describe extended orc_table
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@orc_table
+POSTHOOK: query: describe extended orc_table
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@orc_table
+a int
+b string
+
+Detailed Table Information Table(tableName:orc_table, dbName:default, #Masked# #Masked#, #Masked#, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:string, comment:null)], #Masked# inputFormat:org.apache.hadoop.hive.ql.io.orc.OrcInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat, compressed:false, numBuckets:3, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.ql.io.orc.OrcSerde, par [...]
diff --git a/ql/src/test/results/clientpositive/llap/compaction_query_based_insert_only.q.out b/ql/src/test/results/clientpositive/llap/compaction_query_based_insert_only.q.out
new file mode 100644
index 00000000000..44eb8184399
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/compaction_query_based_insert_only.q.out
@@ -0,0 +1,68 @@
+PREHOOK: query: drop table orc_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table orc_table
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table orc_table (a int, b string) stored as orc TBLPROPERTIES('transactional'='true', 'transactional_properties'='insert_only')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: create table orc_table (a int, b string) stored as orc TBLPROPERTIES('transactional'='true', 'transactional_properties'='insert_only')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: insert into orc_table values('1', 'text1')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('1', 'text1')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('2', 'text2')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('2', 'text2')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('3', 'text3')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('3', 'text3')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: alter table orc_table compact 'MAJOR' and wait
+PREHOOK: type: ALTERTABLE_COMPACT
+PREHOOK: Input: default@orc_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: alter table orc_table compact 'MAJOR' and wait
+POSTHOOK: type: ALTERTABLE_COMPACT
+POSTHOOK: Input: default@orc_table
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: analyze table orc_table compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: analyze table orc_table compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: describe extended orc_table
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@orc_table
+POSTHOOK: query: describe extended orc_table
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@orc_table
+a int
+b string
+
+Detailed Table Information Table(tableName:orc_table, dbName:default, #Masked# #Masked#, #Masked#, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:string, comment:null)], #Masked# inputFormat:org.apache.hadoop.hive.ql.io.orc.OrcInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.ql.io.orc.OrcSerde, pa [...]
diff --git a/ql/src/test/results/clientpositive/llap/compaction_query_based_insert_only_minor.q.out b/ql/src/test/results/clientpositive/llap/compaction_query_based_insert_only_minor.q.out
new file mode 100644
index 00000000000..1c950804a21
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/compaction_query_based_insert_only_minor.q.out
@@ -0,0 +1,68 @@
+PREHOOK: query: drop table orc_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table orc_table
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table orc_table (a int, b string) stored as orc TBLPROPERTIES('transactional'='true', 'transactional_properties'='insert_only')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: create table orc_table (a int, b string) stored as orc TBLPROPERTIES('transactional'='true', 'transactional_properties'='insert_only')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: insert into orc_table values('1', 'text1')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('1', 'text1')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('2', 'text2')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('2', 'text2')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('3', 'text3')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('3', 'text3')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: alter table orc_table compact 'MINOR' and wait
+PREHOOK: type: ALTERTABLE_COMPACT
+PREHOOK: Input: default@orc_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: alter table orc_table compact 'MINOR' and wait
+POSTHOOK: type: ALTERTABLE_COMPACT
+POSTHOOK: Input: default@orc_table
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: analyze table orc_table compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: analyze table orc_table compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: describe extended orc_table
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@orc_table
+POSTHOOK: query: describe extended orc_table
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@orc_table
+a int
+b string
+
+Detailed Table Information Table(tableName:orc_table, dbName:default, #Masked# #Masked#, #Masked#, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:string, comment:null)], #Masked# inputFormat:org.apache.hadoop.hive.ql.io.orc.OrcInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.ql.io.orc.OrcSerde, pa [...]
diff --git a/ql/src/test/results/clientpositive/llap/compaction_query_based_minor.q.out b/ql/src/test/results/clientpositive/llap/compaction_query_based_minor.q.out
new file mode 100644
index 00000000000..d9599d83b63
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/compaction_query_based_minor.q.out
@@ -0,0 +1,68 @@
+PREHOOK: query: drop table orc_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table orc_table
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table orc_table (a int, b string) stored as orc TBLPROPERTIES('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: create table orc_table (a int, b string) stored as orc TBLPROPERTIES('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: insert into orc_table values('1', 'text1')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('1', 'text1')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('2', 'text2')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('2', 'text2')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: insert into orc_table values('3', 'text3')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: insert into orc_table values('3', 'text3')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@orc_table
+POSTHOOK: Lineage: orc_table.a SCRIPT []
+POSTHOOK: Lineage: orc_table.b SCRIPT []
+PREHOOK: query: alter table orc_table compact 'MINOR' and wait
+PREHOOK: type: ALTERTABLE_COMPACT
+PREHOOK: Input: default@orc_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: alter table orc_table compact 'MINOR' and wait
+POSTHOOK: type: ALTERTABLE_COMPACT
+POSTHOOK: Input: default@orc_table
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: analyze table orc_table compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table
+PREHOOK: Output: default@orc_table
+POSTHOOK: query: analyze table orc_table compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table
+POSTHOOK: Output: default@orc_table
+PREHOOK: query: describe extended orc_table
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@orc_table
+POSTHOOK: query: describe extended orc_table
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@orc_table
+a int
+b string
+
+Detailed Table Information Table(tableName:orc_table, dbName:default, #Masked# #Masked#, #Masked#, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:string, comment:null)], #Masked# inputFormat:org.apache.hadoop.hive.ql.io.orc.OrcInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.ql.io.orc.OrcSerde, pa [...]