Posted to commits@hive.apache.org by ay...@apache.org on 2023/04/24 13:05:54 UTC

[hive] branch master updated: HIVE-27177: Iceberg: Add alter table...Convert to Iceberg command. (#4155). (Ayush Saxena, reviewed by Denys Kuzmenko, Butao Zhang)

This is an automated email from the ASF dual-hosted git repository.

ayushsaxena pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 954bb49da61 HIVE-27177: Iceberg: Add alter table...Convert to Iceberg command. (#4155). (Ayush Saxena, reviewed by Denys Kuzmenko, Butao Zhang)
954bb49da61 is described below

commit 954bb49da611b13e689a6922538f54306004c676
Author: Ayush Saxena <ay...@apache.org>
AuthorDate: Mon Apr 24 18:35:45 2023 +0530

    HIVE-27177: Iceberg: Add alter table...Convert to Iceberg command. (#4155). (Ayush Saxena, reviewed by Denys Kuzmenko, Butao Zhang)
---
 .../iceberg/mr/hive/TestHiveIcebergMigration.java  | 53 +++++++++---
 .../negative/alter_acid_table_to_iceberg_failure.q |  2 +-
 .../alter_managed_table_to_iceberg_failure.q       |  2 +-
 .../positive/alter_multi_part_table_to_iceberg.q   |  9 +-
 .../queries/positive/alter_part_table_to_iceberg.q |  9 +-
 .../test/queries/positive/alter_table_to_iceberg.q |  9 +-
 .../test/queries/positive/llap_iceberg_read_orc.q  |  2 +-
 .../queries/positive/llap_iceberg_read_parquet.q   |  2 +-
 .../query_iceberg_metadata_of_partitioned_table.q  |  2 +-
 .../positive/truncate_partitioned_iceberg_table.q  |  2 +-
 .../alter_acid_table_to_iceberg_failure.q.out      |  5 +-
 .../alter_managed_table_to_iceberg_failure.q.out   |  5 +-
 .../alter_multi_part_table_to_iceberg.q.out        | 54 ++++++++----
 .../positive/alter_part_table_to_iceberg.q.out     | 54 ++++++++----
 .../results/positive/alter_table_to_iceberg.q.out  | 54 ++++++++----
 .../positive/llap/llap_iceberg_read_orc.q.out      |  9 +-
 .../positive/llap/llap_iceberg_read_parquet.q.out  |  9 +-
 ...ery_iceberg_metadata_of_partitioned_table.q.out |  9 +-
 .../truncate_partitioned_iceberg_table.q.out       |  9 +-
 .../hadoop/hive/ql/parse/AlterClauseParser.g       |  8 ++
 .../apache/hadoop/hive/ql/parse/HiveLexerParent.g  |  1 +
 .../org/apache/hadoop/hive/ql/parse/HiveParser.g   |  1 +
 .../table/convert/AlterTableConvertAnalyzer.java   | 65 +++++++++++++++
 .../ddl/table/convert/AlterTableConvertDesc.java   | 60 ++++++++++++++
 .../table/convert/AlterTableConvertOperation.java  | 95 ++++++++++++++++++++++
 .../hive/ql/parse/AlterTableConvertSpec.java       | 47 +++++++++++
 .../apache/hadoop/hive/ql/plan/HiveOperation.java  |  1 +
 .../authorization/plugin/HiveOperationType.java    |  1 +
 .../plugin/sqlstd/Operation2Privilege.java         |  2 +
 .../queries/clientnegative/alter_external_acid.q   |  2 +-
 .../acid_vectorization_original_tez.q              |  2 +-
 .../clientpositive/insert_only_to_acid_convert.q   |  2 +-
 .../test/queries/clientpositive/mm_conversions.q   |  4 +-
 .../clientnegative/alter_external_acid.q.out       |  5 +-
 .../llap/insert_only_to_acid_convert.q.out         |  9 +-
 .../clientpositive/llap/mm_conversions.q.out       | 24 ++++--
 .../tez/acid_vectorization_original_tez.q.out      |  9 +-
 37 files changed, 519 insertions(+), 119 deletions(-)
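
For readers skimming the diff: this patch introduces a dedicated DDL statement
for table-format migration, replacing the older idiom of triggering the
migration by setting the Iceberg storage handler through TBLPROPERTIES. A
minimal before/after sketch (table name illustrative):

    -- Before: migration piggybacked on ALTER TABLE ... SET TBLPROPERTIES
    ALTER TABLE tbl_orc SET TBLPROPERTIES
      ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');

    -- After: dedicated command; keywords are case-insensitive
    ALTER TABLE tbl_orc CONVERT TO ICEBERG;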

diff --git a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergMigration.java b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergMigration.java
index d8380892a70..526b788f638 100644
--- a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergMigration.java
+++ b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergMigration.java
@@ -20,6 +20,7 @@
 package org.apache.iceberg.mr.hive;
 
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 import java.util.Properties;
@@ -97,8 +98,7 @@ public class TestHiveIcebergMigration extends HiveIcebergStorageHandlerWithEngin
         testTables.locationForCreateTableSQL(identifier), testTables.propertiesForCreateTableSQL(ImmutableMap.of())));
     AssertHelpers.assertThrows("should throw exception", IllegalArgumentException.class,
         "Cannot convert hive table to iceberg that", () -> {
-          shell.executeStatement(String.format("ALTER TABLE %s SET TBLPROPERTIES " +
-              "('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')", identifier.name()));
+          shell.executeStatement(String.format("ALTER TABLE %s Convert to iceberg", identifier.name()));
         });
   }
 
@@ -141,6 +141,31 @@ public class TestHiveIcebergMigration extends HiveIcebergStorageHandlerWithEngin
     validateMigration(identifier.name());
   }
 
+  @Test
+  public void testMigrateHiveTableToIcebergWithTBLPROPERTIES() throws TException, InterruptedException {
+    String tableName = "tbl";
+    String createQuery = "CREATE EXTERNAL TABLE " + tableName + " (a int) STORED AS " + fileFormat.name() + " " +
+        testTables.locationForCreateTableSQL(TableIdentifier.of("default", tableName)) +
+        testTables.propertiesForCreateTableSQL(Collections.singletonMap("random.prop", "random"));
+    shell.executeStatement(createQuery);
+    shell.executeStatement("INSERT INTO " + tableName + " VALUES (1), (2), (3)");
+    Table hmsTable = validateMigration(tableName, "TBLPROPERTIES('external.table.purge'='true')");
+
+    // Check the new property gets set.
+    Assert.assertEquals("true", hmsTable.getParameters().get("external.table.purge"));
+    // Check the existing property stays as-is.
+    Assert.assertEquals("random", hmsTable.getParameters().get("random.prop"));
+
+    // Check the new property gets translated to its Iceberg equivalent and set.
+    org.apache.iceberg.Table icebergTable = testTables.loadTable(TableIdentifier.of("default", tableName));
+    Assert.assertEquals("true", icebergTable.properties().get(TableProperties.GC_ENABLED));
+
+    // Retry migration once the table is already in Iceberg format.
+    AssertHelpers.assertThrows("Should throw exception", IllegalArgumentException.class,
+        "Cannot convert table to ICEBERG, the table is already in that format", () -> {
+          shell.executeStatement("ALTER TABLE " + tableName + " CONVERT TO ICEBERG");
+        });
+  }
   @Test
   public void testMigrateHiveTableToIceberg() throws TException, InterruptedException {
     String tableName = "tbl";
@@ -244,8 +269,7 @@ public class TestHiveIcebergMigration extends HiveIcebergStorageHandlerWithEngin
       shell.executeStatement("INSERT INTO " + tableName + " VALUES (1), (2), (3)");
       AssertHelpers.assertThrows("Migrating a " + format + " table to Iceberg should have thrown an exception.",
           IllegalArgumentException.class, "Cannot convert hive table to iceberg with input format: ",
-          () -> shell.executeStatement("ALTER TABLE " + tableName + " SET TBLPROPERTIES " +
-              "('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')"));
+          () -> shell.executeStatement("ALTER TABLE " + tableName + " Convert to iceberg"));
       shell.executeStatement("DROP TABLE " + tableName);
     });
   }
@@ -262,14 +286,21 @@ public class TestHiveIcebergMigration extends HiveIcebergStorageHandlerWithEngin
     shell.executeStatement("INSERT INTO " + tableName + " VALUES (1), (2), (3)");
     AssertHelpers.assertThrows("Migrating a managed table to Iceberg should have thrown an exception.",
         IllegalArgumentException.class, "Converting non-external, temporary or transactional hive table to iceberg",
-        () -> shell.executeStatement("ALTER TABLE " + tableName + " SET TBLPROPERTIES " +
-            "('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')"));
+        () -> shell.executeStatement("ALTER TABLE " + tableName + " convert to iceberg"));
   }
 
-  private void validateMigration(String tableName) throws TException, InterruptedException {
+  private Table validateMigration(String tableName) throws TException, InterruptedException {
+    return validateMigration(tableName, null);
+  }
+
+  private Table validateMigration(String tableName, String tblProperties)
+      throws TException, InterruptedException {
     List<Object[]> originalResult = shell.executeStatement("SELECT * FROM " + tableName + " ORDER BY a");
-    shell.executeStatement("ALTER TABLE " + tableName + " SET TBLPROPERTIES " +
-        "('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')");
+    String stmt = "ALTER TABLE " + tableName + " CONVERT TO ICEBERG";
+    if (tblProperties != null) {
+      stmt = stmt + " " + tblProperties;
+    }
+    shell.executeStatement(stmt);
     List<Object[]> alterResult = shell.executeStatement("SELECT * FROM " + tableName + " ORDER BY a");
     Assert.assertEquals(originalResult.size(), alterResult.size());
     for (int i = 0; i < originalResult.size(); i++) {
@@ -282,6 +313,7 @@ public class TestHiveIcebergMigration extends HiveIcebergStorageHandlerWithEngin
     validateSd(hmsTable, "iceberg");
     validateTblProps(hmsTable, true);
     validatePartitions(tableName);
+    return hmsTable;
   }
 
   private void validatePartitions(String tableName) throws TException, InterruptedException {
@@ -298,8 +330,7 @@ public class TestHiveIcebergMigration extends HiveIcebergStorageHandlerWithEngin
           ArgumentMatchers.any(Properties.class), ArgumentMatchers.any(Configuration.class)))
           .thenThrow(new MetaException());
       try {
-        shell.executeStatement("ALTER TABLE " + tableName + " SET TBLPROPERTIES " +
-            "('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')");
+        shell.executeStatement("ALTER TABLE " + tableName + " CONVERT TO ICEBERG");
       } catch (IllegalArgumentException e) {
         Assert.assertTrue(e.getMessage().contains("Error occurred during hive table migration to iceberg."));
         Table hmsTable = shell.metastore().getTable("default", tableName);
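
For reference, the TBLPROPERTIES variant exercised by the new test above looks
like this in plain HiveQL; per the test's assertions, the extra property is set
on the HMS table, pre-existing properties such as 'random.prop' survive, and
the purge flag is translated to its Iceberg equivalent
(TableProperties.GC_ENABLED, i.e. 'gc.enabled'):

    ALTER TABLE tbl CONVERT TO ICEBERG TBLPROPERTIES ('external.table.purge'='true');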
diff --git a/iceberg/iceberg-handler/src/test/queries/negative/alter_acid_table_to_iceberg_failure.q b/iceberg/iceberg-handler/src/test/queries/negative/alter_acid_table_to_iceberg_failure.q
index f5d47e1bd97..389d4072cd2 100644
--- a/iceberg/iceberg-handler/src/test/queries/negative/alter_acid_table_to_iceberg_failure.q
+++ b/iceberg/iceberg-handler/src/test/queries/negative/alter_acid_table_to_iceberg_failure.q
@@ -3,4 +3,4 @@ set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
 
 drop table tbl_orc;
 create table tbl_orc (a int, b string) stored as orc tblproperties('transactional'='true');
-alter table tbl_orc set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
\ No newline at end of file
+alter table tbl_orc convert to iceberg;
\ No newline at end of file
diff --git a/iceberg/iceberg-handler/src/test/queries/negative/alter_managed_table_to_iceberg_failure.q b/iceberg/iceberg-handler/src/test/queries/negative/alter_managed_table_to_iceberg_failure.q
index 7eb681c2476..044a0d78a2f 100644
--- a/iceberg/iceberg-handler/src/test/queries/negative/alter_managed_table_to_iceberg_failure.q
+++ b/iceberg/iceberg-handler/src/test/queries/negative/alter_managed_table_to_iceberg_failure.q
@@ -1,3 +1,3 @@
 drop table if exists tbl_orc;
 create table tbl_orc(a int, b string) stored as orc;
-alter table tbl_orc set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
\ No newline at end of file
+alter table tbl_orc convert to iceberg;
\ No newline at end of file
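
Both negative tests above exercise the same guard: only external,
non-transactional tables are eligible for conversion. Condensed sketch of the
failure mode, per the .q.out files further down:

    create table tbl_orc (a int, b string) stored as orc tblproperties ('transactional'='true');
    alter table tbl_orc convert to iceberg;
    -- FAILED: ... Converting non-external, temporary or transactional hive table
    -- to iceberg table is not allowed.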
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/alter_multi_part_table_to_iceberg.q b/iceberg/iceberg-handler/src/test/queries/positive/alter_multi_part_table_to_iceberg.q
index 7a561c800f3..ee4ae8de38f 100644
--- a/iceberg/iceberg-handler/src/test/queries/positive/alter_multi_part_table_to_iceberg.q
+++ b/iceberg/iceberg-handler/src/test/queries/positive/alter_multi_part_table_to_iceberg.q
@@ -24,7 +24,8 @@ insert into table tbl_orc partition (b='four', c='Thursday') values (9);
 insert into table tbl_orc partition (b='four', c='Saturday') values (12), (13), (14);
 insert into table tbl_orc partition (b='four', c='Sunday') values (15);
 select * from tbl_orc order by a;
-alter table tbl_orc set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
+explain alter table tbl_orc convert to iceberg;
+alter table tbl_orc convert to iceberg;
 describe formatted tbl_orc;
 select * from tbl_orc order by a;
 drop table tbl_orc;
@@ -40,7 +41,8 @@ insert into table tbl_parquet partition (b='four', c='Thursday') values (9);
 insert into table tbl_parquet partition (b='four', c='Saturday') values (12), (13), (14);
 insert into table tbl_parquet partition (b='four', c='Sunday') values (15);
 select * from tbl_parquet order by a;
-alter table tbl_parquet set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
+explain alter table tbl_parquet convert to iceberg;
+alter table tbl_parquet convert to iceberg;
 describe formatted tbl_parquet;
 select * from tbl_parquet order by a;
 drop table tbl_parquet;
@@ -56,7 +58,8 @@ insert into table tbl_avro partition (b='four', c='Thursday') values (9);
 insert into table tbl_avro partition (b='four', c='Saturday') values (12), (13), (14);
 insert into table tbl_avro partition (b='four', c='Sunday') values (15);
 select * from tbl_avro order by a;
-alter table tbl_avro set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
+explain alter table tbl_avro convert to iceberg;
+alter table tbl_avro convert to iceberg;
 describe formatted tbl_avro;
 select * from tbl_avro order by a;
 drop table tbl_avro;
\ No newline at end of file
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/alter_part_table_to_iceberg.q b/iceberg/iceberg-handler/src/test/queries/positive/alter_part_table_to_iceberg.q
index b9bf9e62b33..74a8730eaa5 100644
--- a/iceberg/iceberg-handler/src/test/queries/positive/alter_part_table_to_iceberg.q
+++ b/iceberg/iceberg-handler/src/test/queries/positive/alter_part_table_to_iceberg.q
@@ -21,7 +21,8 @@ insert into table tbl_orc partition (b='two') values (4), (5);
 insert into table tbl_orc partition (b='three') values (6), (7), (8);
 insert into table tbl_orc partition (b='four') values (9);
 select * from tbl_orc order by a;
-alter table tbl_orc set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
+explain alter table tbl_orc convert to iceberg;
+alter table tbl_orc convert to iceberg;
 describe formatted tbl_orc;
 select * from tbl_orc order by a;
 drop table tbl_orc;
@@ -34,7 +35,8 @@ insert into table tbl_parquet partition (b='two') values (4), (5);
 insert into table tbl_parquet partition (b='three') values (6), (7), (8);
 insert into table tbl_parquet partition (b='four') values (9);
 select * from tbl_parquet order by a;
-alter table tbl_parquet set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
+explain alter table tbl_parquet convert to iceberg;
+alter table tbl_parquet convert to iceberg;
 describe formatted tbl_parquet;
 select * from tbl_parquet order by a;
 drop table tbl_parquet;
@@ -47,7 +49,8 @@ insert into table tbl_avro partition (b='two') values (4), (5);
 insert into table tbl_avro partition (b='three') values (6), (7), (8);
 insert into table tbl_avro partition (b='four') values (9);
 select * from tbl_avro order by a;
-alter table tbl_avro set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
+explain alter table tbl_avro convert to iceberg;
+alter table tbl_avro convert to iceberg;
 describe formatted tbl_avro;
 select * from tbl_avro order by a;
 drop table tbl_avro;
\ No newline at end of file
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/alter_table_to_iceberg.q b/iceberg/iceberg-handler/src/test/queries/positive/alter_table_to_iceberg.q
index 775351f647c..a9ed7d48808 100644
--- a/iceberg/iceberg-handler/src/test/queries/positive/alter_table_to_iceberg.q
+++ b/iceberg/iceberg-handler/src/test/queries/positive/alter_table_to_iceberg.q
@@ -18,7 +18,8 @@ create external table tbl_orc(a int, b string) stored as orc;
 describe formatted tbl_orc;
 insert into table tbl_orc values (1, 'one'), (2, 'two'), (3, 'three'), (4, 'four'), (5, 'five');
 select * from tbl_orc order by a;
-alter table tbl_orc set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
+explain alter table tbl_orc convert to iceberg;
+alter table tbl_orc convert to iceberg;
 describe formatted tbl_orc;
 select * from tbl_orc order by a;
 drop table tbl_orc;
@@ -28,7 +29,8 @@ create external table tbl_parquet(a int, b string) stored as parquet;
 describe formatted tbl_parquet;
 insert into table tbl_parquet values (1, 'one'), (2, 'two'), (3, 'three'), (4, 'four'), (5, 'five');
 select * from tbl_parquet order by a;
-alter table tbl_parquet set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
+explain alter table tbl_parquet convert to iceberg;
+alter table tbl_parquet convert to iceberg;
 describe formatted tbl_parquet;
 select * from tbl_parquet order by a;
 drop table tbl_parquet;
@@ -38,7 +40,8 @@ create external table tbl_avro(a int, b string) stored as avro;
 describe formatted tbl_avro;
 insert into table tbl_avro values (1, 'one'), (2, 'two'), (3, 'three'), (4, 'four'), (5, 'five');
 select * from tbl_avro order by a;
-alter table tbl_avro set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
+explain alter table tbl_avro convert to iceberg;
+alter table tbl_avro convert to iceberg;
 describe formatted tbl_avro;
 select * from tbl_avro order by a;
 drop table tbl_avro;
\ No newline at end of file
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/llap_iceberg_read_orc.q b/iceberg/iceberg-handler/src/test/queries/positive/llap_iceberg_read_orc.q
index a450bb68d80..1c6f4c7d671 100644
--- a/iceberg/iceberg-handler/src/test/queries/positive/llap_iceberg_read_orc.q
+++ b/iceberg/iceberg-handler/src/test/queries/positive/llap_iceberg_read_orc.q
@@ -139,7 +139,7 @@ SELECT i.name, i.description, SUM(o.quantity) FROM llap_items i JOIN llap_orders
 
 CREATE EXTERNAL TABLE mig_source (id int) partitioned by (region string) stored as ORC;
 INSERT INTO mig_source VALUES (1, 'EU'), (1, 'US'), (2, 'EU'), (3, 'EU'), (2, 'US');
-ALTER TABLE mig_source SET TBLPROPERTIES ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
+ALTER TABLE mig_source CONVERT TO ICEBERG;
 
 -- Should miss, but fill cache
 SELECT region, SUM(id) from mig_source GROUP BY region;
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/llap_iceberg_read_parquet.q b/iceberg/iceberg-handler/src/test/queries/positive/llap_iceberg_read_parquet.q
index d961dc74d6b..2f3e0d6b9c6 100644
--- a/iceberg/iceberg-handler/src/test/queries/positive/llap_iceberg_read_parquet.q
+++ b/iceberg/iceberg-handler/src/test/queries/positive/llap_iceberg_read_parquet.q
@@ -111,7 +111,7 @@ SELECT i.name, i.description, SUM(o.quantity) FROM llap_items_parquet i JOIN lla
 
 CREATE EXTERNAL TABLE mig_source_parquet (id int) partitioned by (region string) stored AS PARQUET;
 INSERT INTO mig_source_parquet VALUES (1, 'EU'), (1, 'US'), (2, 'EU'), (3, 'EU'), (2, 'US');
-ALTER TABLE mig_source_parquet SET TBLPROPERTIES ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
+ALTER TABLE mig_source_parquet convert to iceberg;
 
 -- Should miss, but fill cache
 SELECT region, SUM(id) from mig_source_parquet GROUP BY region;
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/query_iceberg_metadata_of_partitioned_table.q b/iceberg/iceberg-handler/src/test/queries/positive/query_iceberg_metadata_of_partitioned_table.q
index 4f6bbdf397e..f0c738ef77b 100644
--- a/iceberg/iceberg-handler/src/test/queries/positive/query_iceberg_metadata_of_partitioned_table.q
+++ b/iceberg/iceberg-handler/src/test/queries/positive/query_iceberg_metadata_of_partitioned_table.q
@@ -30,7 +30,7 @@ insert into table ice_meta_3 partition (b='three', c='Wednesday') values (6), (7
 insert into table ice_meta_3 partition (b='four', c='Thursday') values (9);
 insert into table ice_meta_3 partition (b='four', c='Saturday') values (12), (13), (14);
 insert into table ice_meta_3 partition (b='four', c='Sunday') values (15);
-alter table ice_meta_3 set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
+alter table ice_meta_3 convert to iceberg;
 select * from ice_meta_3;
 
 
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/truncate_partitioned_iceberg_table.q b/iceberg/iceberg-handler/src/test/queries/positive/truncate_partitioned_iceberg_table.q
index ad6b3773510..0b0c4af0a2c 100644
--- a/iceberg/iceberg-handler/src/test/queries/positive/truncate_partitioned_iceberg_table.q
+++ b/iceberg/iceberg-handler/src/test/queries/positive/truncate_partitioned_iceberg_table.q
@@ -23,7 +23,7 @@ insert into table test_truncate partition (b='one') values (1), (2), (3);
 insert into table test_truncate partition (b='two') values (4), (5);
 insert into table test_truncate partition (b='three') values (6), (7), (8);
 insert into table test_truncate partition (b='four') values (9);
-alter table test_truncate set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler');
+alter table test_truncate convert to iceberg;
 
 analyze table test_truncate compute statistics;
 describe formatted test_truncate;
diff --git a/iceberg/iceberg-handler/src/test/results/negative/alter_acid_table_to_iceberg_failure.q.out b/iceberg/iceberg-handler/src/test/results/negative/alter_acid_table_to_iceberg_failure.q.out
index 1262e0f75c0..9c6f16e0844 100644
--- a/iceberg/iceberg-handler/src/test/results/negative/alter_acid_table_to_iceberg_failure.q.out
+++ b/iceberg/iceberg-handler/src/test/results/negative/alter_acid_table_to_iceberg_failure.q.out
@@ -10,10 +10,9 @@ POSTHOOK: query: create table tbl_orc (a int, b string) stored as orc tblpropert
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@tbl_orc
-PREHOOK: query: alter table tbl_orc set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: alter table tbl_orc convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@tbl_orc
-PREHOOK: Output: default@tbl_orc
 FAILED: Execution Error, return code 40013 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. MetaException(message:Converting non-external, temporary or transactional hive table to iceberg table is not allowed.)
 #### A masked pattern was here ####
 
diff --git a/iceberg/iceberg-handler/src/test/results/negative/alter_managed_table_to_iceberg_failure.q.out b/iceberg/iceberg-handler/src/test/results/negative/alter_managed_table_to_iceberg_failure.q.out
index 82c80c3eff3..71a810adb67 100644
--- a/iceberg/iceberg-handler/src/test/results/negative/alter_managed_table_to_iceberg_failure.q.out
+++ b/iceberg/iceberg-handler/src/test/results/negative/alter_managed_table_to_iceberg_failure.q.out
@@ -10,10 +10,9 @@ POSTHOOK: query: create table tbl_orc(a int, b string) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@tbl_orc
-PREHOOK: query: alter table tbl_orc set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: alter table tbl_orc convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@tbl_orc
-PREHOOK: Output: default@tbl_orc
 FAILED: Execution Error, return code 40013 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. MetaException(message:Converting non-external, temporary or transactional hive table to iceberg table is not allowed.)
 #### A masked pattern was here ####
 
diff --git a/iceberg/iceberg-handler/src/test/results/positive/alter_multi_part_table_to_iceberg.q.out b/iceberg/iceberg-handler/src/test/results/positive/alter_multi_part_table_to_iceberg.q.out
index 5d876e2adc9..c9a90c91841 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/alter_multi_part_table_to_iceberg.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/alter_multi_part_table_to_iceberg.q.out
@@ -151,12 +151,20 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 13	four	Saturday
 14	four	Saturday
 15	four	Sunday
-PREHOOK: query: alter table tbl_orc set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: explain alter table tbl_orc convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@tbl_orc
-PREHOOK: Output: default@tbl_orc
-POSTHOOK: query: alter table tbl_orc set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: explain alter table tbl_orc convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
+POSTHOOK: Input: default@tbl_orc
+Stage-0
+  Convert operation{"table name:":"default.tbl_orc","spec:":"AlterTableConvertSpec{ConvertTo=iceberg, TBLProperties={}}"}
+
+PREHOOK: query: alter table tbl_orc convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
+PREHOOK: Input: default@tbl_orc
+POSTHOOK: query: alter table tbl_orc convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@tbl_orc
 POSTHOOK: Output: default@tbl_orc
 PREHOOK: query: describe formatted tbl_orc
@@ -405,12 +413,20 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 13	four	Saturday
 14	four	Saturday
 15	four	Sunday
-PREHOOK: query: alter table tbl_parquet set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: explain alter table tbl_parquet convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@tbl_parquet
-PREHOOK: Output: default@tbl_parquet
-POSTHOOK: query: alter table tbl_parquet set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: explain alter table tbl_parquet convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
+POSTHOOK: Input: default@tbl_parquet
+Stage-0
+  Convert operation{"table name:":"default.tbl_parquet","spec:":"AlterTableConvertSpec{ConvertTo=iceberg, TBLProperties={}}"}
+
+PREHOOK: query: alter table tbl_parquet convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
+PREHOOK: Input: default@tbl_parquet
+POSTHOOK: query: alter table tbl_parquet convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@tbl_parquet
 POSTHOOK: Output: default@tbl_parquet
 PREHOOK: query: describe formatted tbl_parquet
@@ -659,12 +675,20 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 13	four	Saturday
 14	four	Saturday
 15	four	Sunday
-PREHOOK: query: alter table tbl_avro set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: explain alter table tbl_avro convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@tbl_avro
-PREHOOK: Output: default@tbl_avro
-POSTHOOK: query: alter table tbl_avro set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: explain alter table tbl_avro convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
+POSTHOOK: Input: default@tbl_avro
+Stage-0
+  Convert operation{"table name:":"default.tbl_avro","spec:":"AlterTableConvertSpec{ConvertTo=iceberg, TBLProperties={}}"}
+
+PREHOOK: query: alter table tbl_avro convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
+PREHOOK: Input: default@tbl_avro
+POSTHOOK: query: alter table tbl_avro convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@tbl_avro
 POSTHOOK: Output: default@tbl_avro
 PREHOOK: query: describe formatted tbl_avro
diff --git a/iceberg/iceberg-handler/src/test/results/positive/alter_part_table_to_iceberg.q.out b/iceberg/iceberg-handler/src/test/results/positive/alter_part_table_to_iceberg.q.out
index 5c8b9ee857f..7a16fef5e80 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/alter_part_table_to_iceberg.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/alter_part_table_to_iceberg.q.out
@@ -111,12 +111,20 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 7	three
 8	three
 9	four
-PREHOOK: query: alter table tbl_orc set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: explain alter table tbl_orc convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@tbl_orc
-PREHOOK: Output: default@tbl_orc
-POSTHOOK: query: alter table tbl_orc set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: explain alter table tbl_orc convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
+POSTHOOK: Input: default@tbl_orc
+Stage-0
+  Convert operation{"table name:":"default.tbl_orc","spec:":"AlterTableConvertSpec{ConvertTo=iceberg, TBLProperties={}}"}
+
+PREHOOK: query: alter table tbl_orc convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
+PREHOOK: Input: default@tbl_orc
+POSTHOOK: query: alter table tbl_orc convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@tbl_orc
 POSTHOOK: Output: default@tbl_orc
 PREHOOK: query: describe formatted tbl_orc
@@ -314,12 +322,20 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 7	three
 8	three
 9	four
-PREHOOK: query: alter table tbl_parquet set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: explain alter table tbl_parquet convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@tbl_parquet
-PREHOOK: Output: default@tbl_parquet
-POSTHOOK: query: alter table tbl_parquet set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: explain alter table tbl_parquet convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
+POSTHOOK: Input: default@tbl_parquet
+Stage-0
+  Convert operation{"table name:":"default.tbl_parquet","spec:":"AlterTableConvertSpec{ConvertTo=iceberg, TBLProperties={}}"}
+
+PREHOOK: query: alter table tbl_parquet convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
+PREHOOK: Input: default@tbl_parquet
+POSTHOOK: query: alter table tbl_parquet convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@tbl_parquet
 POSTHOOK: Output: default@tbl_parquet
 PREHOOK: query: describe formatted tbl_parquet
@@ -517,12 +533,20 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 7	three
 8	three
 9	four
-PREHOOK: query: alter table tbl_avro set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: explain alter table tbl_avro convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@tbl_avro
-PREHOOK: Output: default@tbl_avro
-POSTHOOK: query: alter table tbl_avro set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: explain alter table tbl_avro convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
+POSTHOOK: Input: default@tbl_avro
+Stage-0
+  Convert operation{"table name:":"default.tbl_avro","spec:":"AlterTableConvertSpec{ConvertTo=iceberg, TBLProperties={}}"}
+
+PREHOOK: query: alter table tbl_avro convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
+PREHOOK: Input: default@tbl_avro
+POSTHOOK: query: alter table tbl_avro convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@tbl_avro
 POSTHOOK: Output: default@tbl_avro
 PREHOOK: query: describe formatted tbl_avro
diff --git a/iceberg/iceberg-handler/src/test/results/positive/alter_table_to_iceberg.q.out b/iceberg/iceberg-handler/src/test/results/positive/alter_table_to_iceberg.q.out
index 96882292e58..f6c92cc8824 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/alter_table_to_iceberg.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/alter_table_to_iceberg.q.out
@@ -69,12 +69,20 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 3	three
 4	four
 5	five
-PREHOOK: query: alter table tbl_orc set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: explain alter table tbl_orc convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@tbl_orc
-PREHOOK: Output: default@tbl_orc
-POSTHOOK: query: alter table tbl_orc set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: explain alter table tbl_orc convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
+POSTHOOK: Input: default@tbl_orc
+Stage-0
+  Convert operation{"table name:":"default.tbl_orc","spec:":"AlterTableConvertSpec{ConvertTo=iceberg, TBLProperties={}}"}
+
+PREHOOK: query: alter table tbl_orc convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
+PREHOOK: Input: default@tbl_orc
+POSTHOOK: query: alter table tbl_orc convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@tbl_orc
 POSTHOOK: Output: default@tbl_orc
 PREHOOK: query: describe formatted tbl_orc
@@ -223,12 +231,20 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 3	three
 4	four
 5	five
-PREHOOK: query: alter table tbl_parquet set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: explain alter table tbl_parquet convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@tbl_parquet
-PREHOOK: Output: default@tbl_parquet
-POSTHOOK: query: alter table tbl_parquet set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: explain alter table tbl_parquet convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
+POSTHOOK: Input: default@tbl_parquet
+Stage-0
+  Convert operation{"table name:":"default.tbl_parquet","spec:":"AlterTableConvertSpec{ConvertTo=iceberg, TBLProperties={}}"}
+
+PREHOOK: query: alter table tbl_parquet convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
+PREHOOK: Input: default@tbl_parquet
+POSTHOOK: query: alter table tbl_parquet convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@tbl_parquet
 POSTHOOK: Output: default@tbl_parquet
 PREHOOK: query: describe formatted tbl_parquet
@@ -377,12 +393,20 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 3	three
 4	four
 5	five
-PREHOOK: query: alter table tbl_avro set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: explain alter table tbl_avro convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@tbl_avro
-PREHOOK: Output: default@tbl_avro
-POSTHOOK: query: alter table tbl_avro set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: explain alter table tbl_avro convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
+POSTHOOK: Input: default@tbl_avro
+Stage-0
+  Convert operation{"table name:":"default.tbl_avro","spec:":"AlterTableConvertSpec{ConvertTo=iceberg, TBLProperties={}}"}
+
+PREHOOK: query: alter table tbl_avro convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
+PREHOOK: Input: default@tbl_avro
+POSTHOOK: query: alter table tbl_avro convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@tbl_avro
 POSTHOOK: Output: default@tbl_avro
 PREHOOK: query: describe formatted tbl_avro
diff --git a/iceberg/iceberg-handler/src/test/results/positive/llap/llap_iceberg_read_orc.q.out b/iceberg/iceberg-handler/src/test/results/positive/llap/llap_iceberg_read_orc.q.out
index 440ee72dd0c..c12562d3187 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/llap/llap_iceberg_read_orc.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/llap/llap_iceberg_read_orc.q.out
@@ -589,12 +589,11 @@ POSTHOOK: Output: default@mig_source@region=EU
 POSTHOOK: Output: default@mig_source@region=US
 POSTHOOK: Lineage: mig_source PARTITION(region=EU).id SCRIPT []
 POSTHOOK: Lineage: mig_source PARTITION(region=US).id SCRIPT []
-PREHOOK: query: ALTER TABLE mig_source SET TBLPROPERTIES ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: ALTER TABLE mig_source CONVERT TO ICEBERG
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@mig_source
-PREHOOK: Output: default@mig_source
-POSTHOOK: query: ALTER TABLE mig_source SET TBLPROPERTIES ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: ALTER TABLE mig_source CONVERT TO ICEBERG
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@mig_source
 POSTHOOK: Output: default@mig_source
 PREHOOK: query: SELECT region, SUM(id) from mig_source GROUP BY region
diff --git a/iceberg/iceberg-handler/src/test/results/positive/llap/llap_iceberg_read_parquet.q.out b/iceberg/iceberg-handler/src/test/results/positive/llap/llap_iceberg_read_parquet.q.out
index a7ae1a2c34d..7c75acd71d8 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/llap/llap_iceberg_read_parquet.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/llap/llap_iceberg_read_parquet.q.out
@@ -435,12 +435,11 @@ POSTHOOK: Output: default@mig_source_parquet@region=EU
 POSTHOOK: Output: default@mig_source_parquet@region=US
 POSTHOOK: Lineage: mig_source_parquet PARTITION(region=EU).id SCRIPT []
 POSTHOOK: Lineage: mig_source_parquet PARTITION(region=US).id SCRIPT []
-PREHOOK: query: ALTER TABLE mig_source_parquet SET TBLPROPERTIES ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: ALTER TABLE mig_source_parquet convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@mig_source_parquet
-PREHOOK: Output: default@mig_source_parquet
-POSTHOOK: query: ALTER TABLE mig_source_parquet SET TBLPROPERTIES ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: ALTER TABLE mig_source_parquet convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@mig_source_parquet
 POSTHOOK: Output: default@mig_source_parquet
 PREHOOK: query: SELECT region, SUM(id) from mig_source_parquet GROUP BY region
diff --git a/iceberg/iceberg-handler/src/test/results/positive/query_iceberg_metadata_of_partitioned_table.q.out b/iceberg/iceberg-handler/src/test/results/positive/query_iceberg_metadata_of_partitioned_table.q.out
index 48f37290945..c663ca5688e 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/query_iceberg_metadata_of_partitioned_table.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/query_iceberg_metadata_of_partitioned_table.q.out
@@ -141,12 +141,11 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 POSTHOOK: Output: default@ice_meta_3@b=four/c=Sunday
 POSTHOOK: Lineage: ice_meta_3 PARTITION(b=four,c=Sunday).a SCRIPT []
-PREHOOK: query: alter table ice_meta_3 set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: alter table ice_meta_3 convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@ice_meta_3
-PREHOOK: Output: default@ice_meta_3
-POSTHOOK: query: alter table ice_meta_3 set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: alter table ice_meta_3 convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@ice_meta_3
 POSTHOOK: Output: default@ice_meta_3
 PREHOOK: query: select * from ice_meta_3
diff --git a/iceberg/iceberg-handler/src/test/results/positive/truncate_partitioned_iceberg_table.q.out b/iceberg/iceberg-handler/src/test/results/positive/truncate_partitioned_iceberg_table.q.out
index ebcd4442a46..72837370162 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/truncate_partitioned_iceberg_table.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/truncate_partitioned_iceberg_table.q.out
@@ -54,12 +54,11 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 POSTHOOK: Output: default@test_truncate@b=four
 POSTHOOK: Lineage: test_truncate PARTITION(b=four).a SCRIPT []
-PREHOOK: query: alter table test_truncate set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: alter table test_truncate convert to iceberg
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@test_truncate
-PREHOOK: Output: default@test_truncate
-POSTHOOK: query: alter table test_truncate set tblproperties ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: alter table test_truncate convert to iceberg
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@test_truncate
 POSTHOOK: Output: default@test_truncate
 PREHOOK: query: analyze table test_truncate compute statistics
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
index befca385ac0..9b276bafc6b 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
@@ -74,6 +74,7 @@ alterTableStatementSuffix
     | alterStatementSuffixSetOwner
     | alterStatementSuffixSetPartSpec
     | alterStatementSuffixExecute
+    | alterStatementSuffixConvert
     ;
 
 alterTblPartitionStatementSuffix[boolean partition]
@@ -458,6 +459,13 @@ alterStatementSuffixSetPartSpec
     -> ^(TOK_ALTERTABLE_SETPARTSPEC $spec)
     ;
 
+alterStatementSuffixConvert
+@init { gParent.pushMsg("alter table convert to", state); }
+@after { gParent.popMsg(state); }
+    : KW_CONVERT KW_TO genericSpec=identifier tablePropertiesPrefixed?
+    -> ^(TOK_ALTERTABLE_CONVERT $genericSpec tablePropertiesPrefixed?)
+    ;
+
 alterStatementSuffixExecute
 @init { gParent.pushMsg("alter table execute", state); }
 @after { gParent.popMsg(state); }
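
The new alterStatementSuffixConvert rule accepts any identifier as the target
format plus an optional TBLPROPERTIES clause, so both statements below parse;
checking that the target is a supported format happens later, in
AlterTableConvertOperation (table name and properties illustrative):

    ALTER TABLE t CONVERT TO iceberg;
    ALTER TABLE t CONVERT TO acid TBLPROPERTIES ('k'='v');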
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g
index 35b054c08d3..b07ba782d86 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g
@@ -242,6 +242,7 @@ KW_BOTH: 'BOTH';
 KW_BINARY: 'BINARY';
 KW_CROSS: 'CROSS';
 KW_CONTINUE: 'CONTINUE';
+KW_CONVERT: 'CONVERT';
 KW_CURSOR: 'CURSOR';
 KW_TRIGGER: 'TRIGGER';
 KW_RECORDREADER: 'RECORDREADER';
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index cd3a74f1dac..8a974e278b8 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -219,6 +219,7 @@ TOK_ALTERTABLE_UPDATECOLUMNS;
 TOK_ALTERTABLE_OWNER;
 TOK_ALTERTABLE_SETPARTSPEC;
 TOK_ALTERTABLE_EXECUTE;
+TOK_ALTERTABLE_CONVERT;
 TOK_MSCK;
 TOK_SHOWDATABASES;
 TOK_SHOWDATACONNECTORS;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/convert/AlterTableConvertAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/convert/AlterTableConvertAnalyzer.java
new file mode 100644
index 00000000000..76770484a3c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/convert/AlterTableConvertAnalyzer.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.convert;
+
+import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AlterTableConvertSpec;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Analyzer for ALTER TABLE ... CONVERT commands.
+ */
+@DDLType(types = HiveParser.TOK_ALTERTABLE_CONVERT)
+public class AlterTableConvertAnalyzer extends AbstractAlterTableAnalyzer {
+
+  public AlterTableConvertAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command)
+      throws SemanticException {
+    Table table = getTable(tableName);
+    // CONVERT is validated like an ADDPROPS alteration, since it only adds table properties
+    validateAlterTableType(table, AlterTableType.ADDPROPS, false);
+    inputs.add(new ReadEntity(table));
+    ASTNode targetType = (ASTNode) command.getChild(0);
+    Map<String, String> properties = new HashMap<>();
+    if (command.getChildCount() == 2) {
+      properties = getProps((ASTNode) (command.getChild(1)).getChild(0));
+    }
+    AlterTableConvertSpec spec = new AlterTableConvertSpec(targetType.getText(), properties);
+    AlterTableConvertDesc desc = new AlterTableConvertDesc(tableName, spec);
+
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/convert/AlterTableConvertDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/convert/AlterTableConvertDesc.java
new file mode 100644
index 00000000000..6b6b92f40e0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/convert/AlterTableConvertDesc.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.convert;
+
+import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc;
+import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
+import org.apache.hadoop.hive.ql.parse.AlterTableConvertSpec;
+import org.apache.hadoop.hive.ql.parse.AlterTableExecuteSpec;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+import java.util.Map;
+
+/**
+ * DDL task description for ALTER TABLE ... CONVERT commands.
+ */
+@Explain(displayName = "Convert operation", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterTableConvertDesc extends AbstractAlterTableDesc {
+  private static final long serialVersionUID = 1L;
+
+  private final AlterTableConvertSpec convertSpec;
+
+  public AlterTableConvertDesc(TableName tableName, AlterTableConvertSpec convertSpec)
+      throws SemanticException {
+    super(AlterTableType.ADDPROPS, tableName, null, null, false, false, null);
+    this.convertSpec = convertSpec;
+  }
+
+  public AlterTableConvertSpec getConvertSpec() {
+    return convertSpec;
+  }
+
+  @Explain(displayName = "spec", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getExplainOutput() {
+    return convertSpec.toString();
+  }
+
+  @Override
+  public boolean mayNeedWriteId() {
+    return false;
+  }
+}
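
The @Explain annotations above are what produce the plan shown in the .q.out
files earlier, e.g.:

    EXPLAIN ALTER TABLE tbl_orc CONVERT TO ICEBERG;
    -- Stage-0
    --   Convert operation{"table name:":"default.tbl_orc","spec:":"AlterTableConvertSpec{ConvertTo=iceberg, TBLProperties={}}"}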
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/convert/AlterTableConvertOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/convert/AlterTableConvertOperation.java
new file mode 100644
index 00000000000..7a1e82a9fe3
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/convert/AlterTableConvertOperation.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.convert;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+import java.util.Map;
+
+import static org.apache.hadoop.hive.metastore.TransactionalValidationListener.DEFAULT_TRANSACTIONAL_PROPERTY;
+import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
+import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.TABLE_IS_TRANSACTIONAL;
+import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES;
+
+/**
+ * Operation that processes the ALTER TABLE ... CONVERT command.
+ */
+public class AlterTableConvertOperation extends AbstractAlterTableOperation<AlterTableConvertDesc> {
+
+  private enum ConversionFormats {
+    ICEBERG(ImmutableMap.of(META_TABLE_STORAGE, "org.apache.iceberg.mr.hive.HiveIcebergStorageHandler")),
+    ACID(ImmutableMap.of(TABLE_IS_TRANSACTIONAL, "true", TABLE_TRANSACTIONAL_PROPERTIES,
+        DEFAULT_TRANSACTIONAL_PROPERTY));
+
+    private final Map<String, String> properties;
+
+    ConversionFormats(Map<String, String> properties) {
+      this.properties = properties;
+    }
+
+
+    public Map<String, String> properties() {
+      return properties;
+    }
+  }
+
+  public AlterTableConvertOperation(DDLOperationContext context, AlterTableConvertDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  protected void doAlteration(Table table, Partition partition) throws HiveException {
+    // Resolve the requested target format.
+    String convertType = desc.getConvertSpec().getTargetType();
+    ConversionFormats format = ConversionFormats.valueOf(convertType.toUpperCase());
+
+    // Fail fast if the target properties are already set, i.e. the table is already converted.
+    validatePropertiesAlreadyExist(format, table.getParameters());
+
+    // Add the conversion related table properties.
+    table.getParameters().putAll(format.properties());
+
+    // Add any additional table properties, if specified with the convert command.
+    if (desc.getConvertSpec().getTblProperties() != null) {
+      table.getParameters().putAll(desc.getConvertSpec().getTblProperties());
+    }
+  }
+
+  private void validatePropertiesAlreadyExist(ConversionFormats targetFormat, Map<String, String> originalParameters)
+      throws SemanticException {
+    boolean needsMigration = false;
+    for (Map.Entry<String, String> entry : targetFormat.properties().entrySet()) {
+      String originalParam = originalParameters.get(entry.getKey());
+      if (originalParam == null || !originalParam.equalsIgnoreCase(entry.getValue())) {
+        needsMigration = true;
+        break;
+      }
+    }
+
+    if (!needsMigration) {
+      throw new SemanticException("Can not convert table to " + targetFormat + " ,Table is already of that format");
+    }
+  }
+}
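
The ConversionFormats enum above is the entire conversion table: each target
expands to a fixed set of table properties before the common alter-table
machinery takes over. Assuming the usual values of the metastore constants,
the two targets are roughly equivalent to:

    ALTER TABLE t CONVERT TO ICEBERG;
    -- ~ SET TBLPROPERTIES ('storage_handler'='org.apache.iceberg.mr.hive.HiveIcebergStorageHandler')

    ALTER TABLE t CONVERT TO ACID;
    -- ~ SET TBLPROPERTIES ('transactional'='true', 'transactional_properties'='default')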
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTableConvertSpec.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTableConvertSpec.java
new file mode 100644
index 00000000000..572f7282f75
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTableConvertSpec.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import com.google.common.base.MoreObjects;
+
+import java.util.Map;
+
+public class AlterTableConvertSpec {
+
+  private final String targetType;
+  private final Map<String, String> tblProperties;
+
+  public AlterTableConvertSpec(String targetType, Map<String, String> tblProperties) {
+    this.targetType = targetType;
+    this.tblProperties = tblProperties;
+  }
+
+  public String getTargetType() {
+    return targetType;
+  }
+
+  public Map<String, String> getTblProperties() {
+    return tblProperties;
+  }
+
+  public String toString() {
+    return MoreObjects.toStringHelper(this).add("ConvertTo", targetType)
+        .add("TBLProperties", tblProperties).toString();
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index ca77bdf81f3..8cb4c2ab735 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -77,6 +77,7 @@ public enum HiveOperation {
   ALTERTABLE_OWNER("ALTERTABLE_OWNER", HiveParser.TOK_ALTERTABLE_OWNER, null, null),
   ALTERTABLE_SETPARTSPEC("ALTERTABLE_SETPARTSPEC", HiveParser.TOK_ALTERTABLE_SETPARTSPEC, null, null),
   ALTERTABLE_EXECUTE("ALTERTABLE_EXECUTE", HiveParser.TOK_ALTERTABLE_EXECUTE, null, null),
+  ALTERTABLE_CONVERT("ALTERTABLE_CONVERT", HiveParser.TOK_ALTERTABLE_CONVERT, null, null),
   ALTERTABLE_SERIALIZER("ALTERTABLE_SERIALIZER", HiveParser.TOK_ALTERTABLE_SERIALIZER,
       new Privilege[]{Privilege.ALTER_METADATA}, null),
   ALTERPARTITION_SERIALIZER("ALTERPARTITION_SERIALIZER", HiveParser.TOK_ALTERPARTITION_SERIALIZER,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
index 4769be393d0..15b5c59057c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
@@ -60,6 +60,7 @@ public enum HiveOperationType {
   ALTERTABLE_OWNER,
   ALTERTABLE_SETPARTSPEC,
   ALTERTABLE_EXECUTE,
+  ALTERTABLE_CONVERT,
   ALTERTABLE_SERIALIZER,
   ALTERTABLE_PARTCOLTYPE,
   ALTERTABLE_DROPCONSTRAINT,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
index 4dd6426de19..4d47d10e2d8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
@@ -192,6 +192,8 @@ public class Operation2Privilege {
         PrivRequirement.newIOPrivRequirement(OWNER_PRIV_AR, OWNER_PRIV_AR));
     op2Priv.put(HiveOperationType.ALTERTABLE_EXECUTE,
         PrivRequirement.newIOPrivRequirement(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_CONVERT,
+        PrivRequirement.newIOPrivRequirement(OWNER_PRIV_AR, OWNER_PRIV_AR));
     op2Priv.put(HiveOperationType.ALTERTABLE_SERIALIZER,
         PrivRequirement.newIOPrivRequirement(OWNER_PRIV_AR, OWNER_PRIV_AR));
     op2Priv.put(HiveOperationType.ALTERTABLE_PARTCOLTYPE,
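
ALTERTABLE_CONVERT is registered with the same owner-only input/output privilege requirement as ALTERTABLE_EXECUTE and the neighboring table-altering operations, so under SQL standard authorization only the table owner may run the convert command.
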
diff --git a/ql/src/test/queries/clientnegative/alter_external_acid.q b/ql/src/test/queries/clientnegative/alter_external_acid.q
index 780727840c7..1ae28fc5437 100644
--- a/ql/src/test/queries/clientnegative/alter_external_acid.q
+++ b/ql/src/test/queries/clientnegative/alter_external_acid.q
@@ -4,6 +4,6 @@ set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
 
 create external table acid_external (a int, b varchar(128)) clustered by (b) into 2 buckets stored as orc;
 
-alter table acid_external set TBLPROPERTIES ('transactional'='true');
+alter table acid_external convert to Acid;
 
 drop table acid_external;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientpositive/acid_vectorization_original_tez.q b/ql/src/test/queries/clientpositive/acid_vectorization_original_tez.q
index c80b0dc1be3..8f5167226ae 100644
--- a/ql/src/test/queries/clientpositive/acid_vectorization_original_tez.q
+++ b/ql/src/test/queries/clientpositive/acid_vectorization_original_tez.q
@@ -72,7 +72,7 @@ dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/over10k_orc_bucketed_n0;
 select distinct 7 as seven, INPUT__FILE__NAME from over10k_orc_bucketed_n0;
 
 -- convert table to acid
-alter table over10k_orc_bucketed_n0 set TBLPROPERTIES ('transactional'='true');
+alter table over10k_orc_bucketed_n0 convert to acid;
 
 -- this should vectorize (and push predicate to storage: filterExpr in TableScan )
 --             Execution mode: vectorized (both Map and Reducer)
diff --git a/ql/src/test/queries/clientpositive/insert_only_to_acid_convert.q b/ql/src/test/queries/clientpositive/insert_only_to_acid_convert.q
index 61ee7e41a13..72b6230af06 100644
--- a/ql/src/test/queries/clientpositive/insert_only_to_acid_convert.q
+++ b/ql/src/test/queries/clientpositive/insert_only_to_acid_convert.q
@@ -8,7 +8,7 @@ set hive.enforce.bucketing=true;
 create table insert_only(col1 Int, col2 String) stored as orc  TBLPROPERTIES ('transactional'='true','transactional_properties'='insert_only');
 insert into insert_only values(1,'hi'),(2,'hello');
 describe formatted insert_only;
-ALTER TABLE insert_only SET TBLPROPERTIES ('transactional'='true','transactional_properties'='default');
+ALTER TABLE insert_only convert to acid TBLPROPERTIES ('transactional_properties'='default');
 describe formatted insert_only;
 insert into insert_only values(1,'hi'),(2,'hello');
 select * from insert_only;
diff --git a/ql/src/test/queries/clientpositive/mm_conversions.q b/ql/src/test/queries/clientpositive/mm_conversions.q
index 7ea838a8ce6..5816caf4783 100644
--- a/ql/src/test/queries/clientpositive/mm_conversions.q
+++ b/ql/src/test/queries/clientpositive/mm_conversions.q
@@ -21,7 +21,9 @@ drop table simple_to_mm;
 create table simple_to_mm(key int) stored as orc tblproperties("transactional"="false");
 insert into table simple_to_mm select key from intermediate;
 select * from simple_to_mm s1 order by key;
-alter table simple_to_mm set tblproperties("transactional"="true", "transactional_properties"="insert_only");
+
+explain alter table simple_to_mm convert to acid tblproperties ("transactional_properties"="insert_only");
+alter table simple_to_mm convert to acid tblproperties ("transactional_properties"="insert_only");
 export table simple_to_mm to 'ql/test/data/exports/export0';
 select * from simple_to_mm s2 order by key;
 create table import_converted0_mm(key int) stored as orc tblproperties("transactional"="false");
diff --git a/ql/src/test/results/clientnegative/alter_external_acid.q.out b/ql/src/test/results/clientnegative/alter_external_acid.q.out
index 84a7bc2a91e..6bb6492d9d8 100644
--- a/ql/src/test/results/clientnegative/alter_external_acid.q.out
+++ b/ql/src/test/results/clientnegative/alter_external_acid.q.out
@@ -6,8 +6,7 @@ POSTHOOK: query: create external table acid_external (a int, b varchar(128)) clu
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@acid_external
-PREHOOK: query: alter table acid_external set TBLPROPERTIES ('transactional'='true')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: alter table acid_external convert to Acid
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@acid_external
-PREHOOK: Output: default@acid_external
 FAILED: Execution Error, return code 40013 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. default.acid_external cannot be declared transactional because it's an external table
diff --git a/ql/src/test/results/clientpositive/llap/insert_only_to_acid_convert.q.out b/ql/src/test/results/clientpositive/llap/insert_only_to_acid_convert.q.out
index 079b22d84c2..596b65c3b3b 100644
--- a/ql/src/test/results/clientpositive/llap/insert_only_to_acid_convert.q.out
+++ b/ql/src/test/results/clientpositive/llap/insert_only_to_acid_convert.q.out
@@ -53,12 +53,11 @@ Bucket Columns:     	[]
 Sort Columns:       	[]                  	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
-PREHOOK: query: ALTER TABLE insert_only SET TBLPROPERTIES ('transactional'='true','transactional_properties'='default')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: ALTER TABLE insert_only convert to acid TBLPROPERTIES ('transactional_properties'='default')
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@insert_only
-PREHOOK: Output: default@insert_only
-POSTHOOK: query: ALTER TABLE insert_only SET TBLPROPERTIES ('transactional'='true','transactional_properties'='default')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: ALTER TABLE insert_only convert to acid TBLPROPERTIES ('transactional_properties'='default')
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@insert_only
 POSTHOOK: Output: default@insert_only
 PREHOOK: query: describe formatted insert_only
diff --git a/ql/src/test/results/clientpositive/llap/mm_conversions.q.out b/ql/src/test/results/clientpositive/llap/mm_conversions.q.out
index 618aa3ce29a..ad9161f1995 100644
--- a/ql/src/test/results/clientpositive/llap/mm_conversions.q.out
+++ b/ql/src/test/results/clientpositive/llap/mm_conversions.q.out
@@ -75,12 +75,26 @@ POSTHOOK: Input: default@simple_to_mm
 0
 98
 100
-PREHOOK: query: alter table simple_to_mm set tblproperties("transactional"="true", "transactional_properties"="insert_only")
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: explain alter table simple_to_mm convert to acid tblproperties ("transactional_properties"="insert_only")
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@simple_to_mm
-PREHOOK: Output: default@simple_to_mm
-POSTHOOK: query: alter table simple_to_mm set tblproperties("transactional"="true", "transactional_properties"="insert_only")
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: explain alter table simple_to_mm convert to acid tblproperties ("transactional_properties"="insert_only")
+POSTHOOK: type: ALTERTABLE_CONVERT
+POSTHOOK: Input: default@simple_to_mm
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Convert operation
+      table name: default.simple_to_mm
+      spec: AlterTableConvertSpec{ConvertTo=acid, TBLProperties={transactional_properties=insert_only}}
+
+PREHOOK: query: alter table simple_to_mm convert to acid tblproperties ("transactional_properties"="insert_only")
+PREHOOK: type: ALTERTABLE_CONVERT
+PREHOOK: Input: default@simple_to_mm
+POSTHOOK: query: alter table simple_to_mm convert to acid tblproperties ("transactional_properties"="insert_only")
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@simple_to_mm
 POSTHOOK: Output: default@simple_to_mm
 PREHOOK: query: export table simple_to_mm to 'ql/test/data/exports/export0'
diff --git a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
index 4fc991901e8..3c6ddb257a4 100644
--- a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
+++ b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
@@ -420,12 +420,11 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 7	hdfs://### HDFS PATH ###
 7	hdfs://### HDFS PATH ###
 7	hdfs://### HDFS PATH ###
-PREHOOK: query: alter table over10k_orc_bucketed_n0 set TBLPROPERTIES ('transactional'='true')
-PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: query: alter table over10k_orc_bucketed_n0 convert to acid
+PREHOOK: type: ALTERTABLE_CONVERT
 PREHOOK: Input: default@over10k_orc_bucketed_n0
-PREHOOK: Output: default@over10k_orc_bucketed_n0
-POSTHOOK: query: alter table over10k_orc_bucketed_n0 set TBLPROPERTIES ('transactional'='true')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: alter table over10k_orc_bucketed_n0 convert to acid
+POSTHOOK: type: ALTERTABLE_CONVERT
 POSTHOOK: Input: default@over10k_orc_bucketed_n0
 POSTHOOK: Output: default@over10k_orc_bucketed_n0
 PREHOOK: query: explain select t, si, i from over10k_orc_bucketed_n0 where b = 4294967363 and t < 100 order by t, si, i