Posted to commits@impala.apache.org by jo...@apache.org on 2022/04/12 17:40:25 UTC

[impala] branch master updated: IMPALA-11023: Raise error when delete file is found in an Iceberg table

This is an automated email from the ASF dual-hosted git repository.

joemcdonnell pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/impala.git


The following commit(s) were added to refs/heads/master by this push:
     new 9cd4823aa IMPALA-11023: Raise error when delete file is found in an Iceberg table
9cd4823aa is described below

commit 9cd4823aa9b940c6e57abbb0c116fb4582cce80a
Author: Tamas Mate <tm...@apache.org>
AuthorDate: Tue Apr 5 14:52:42 2022 +0200

    IMPALA-11023: Raise error when delete file is found in an Iceberg table
    
    Iceberg V2 DeleteFiles are skipped during scans, so the whole content of
    the DataFiles is returned. This commit adds an extra check that rejects
    scans of tables with delete files, avoiding unexpected results until
    merge-on-read is supported (a standalone sketch of the check follows the
    diffstat below). Metadata operations are still allowed on tables with
    delete files.
    
    Testing:
     - Added e2e test.
    
    Change-Id: I6e9cbf2424b27157883d551f73e728ab4ec6d21e
    Reviewed-on: http://gerrit.cloudera.org:8080/18383
    Reviewed-by: Zoltan Borok-Nagy <bo...@cloudera.com>
    Tested-by: Impala Public Jenkins <im...@cloudera.com>
---
 .../analysis/AlterTableSetTblProperties.java       |   2 +-
 .../org/apache/impala/catalog/FeIcebergTable.java  |   2 +-
 .../org/apache/impala/planner/IcebergScanNode.java |  11 ++-
 .../java/org/apache/impala/util/IcebergUtil.java   |  24 ++++--
 testdata/data/README                               |   6 ++
 ...78c51-b12a-4c5f-a66e-a8e9375daeba-00001.parquet | Bin 0 -> 662 bytes
 ...80302-527b-4911-8c6e-88d416adac57-00001.parquet | Bin 0 -> 1598 bytes
 .../0eadf173-0c84-4378-a9d0-5d7f47183978-m0.avro   | Bin 0 -> 3936 bytes
 .../8cbef400-daea-478a-858a-2baf2438f644-m0.avro   | Bin 0 -> 3619 bytes
 ...755-1-0eadf173-0c84-4378-a9d0-5d7f47183978.avro | Bin 0 -> 2318 bytes
 ...807-1-8cbef400-daea-478a-858a-2baf2438f644.avro | Bin 0 -> 2162 bytes
 .../metadata/v1.metadata.json                      |  66 +++++++++++++++
 .../metadata/v2.metadata.json                      |  93 +++++++++++++++++++++
 .../metadata/version-hint.text                     |   1 +
 .../functional/functional_schema_template.sql      |  15 ++++
 .../datasets/functional/schema_constraints.csv     |   1 +
 .../queries/QueryTest/iceberg-negative.test        |   5 ++
 17 files changed, 217 insertions(+), 9 deletions(-)
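
The core of this change is the delete-file check added to
IcebergUtil.getIcebergDataFiles and its caller in IcebergScanNode (see the
hunks below). As a standalone illustration, here is a minimal sketch of the
same pattern against the public Iceberg API; it is not Impala code, and the
class and method names (DeleteFileProbe, dataFilesOrFail) are hypothetical:

// Minimal sketch of the delete-file check added in this commit, assuming only
// the public Iceberg API (org.apache.iceberg.*). Illustrative names only.
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.iceberg.DataFile;
import org.apache.iceberg.FileScanTask;
import org.apache.iceberg.Table;
import org.apache.iceberg.io.CloseableIterable;

public class DeleteFileProbe {
  /**
   * Returns the data files of the table's current snapshot, failing fast if
   * any scan task carries a V2 delete file (positional or equality).
   */
  public static List<DataFile> dataFilesOrFail(Table table) {
    List<DataFile> dataFiles = new ArrayList<>();
    try (CloseableIterable<FileScanTask> tasks = table.newScan().planFiles()) {
      for (FileScanTask task : tasks) {
        // task.deletes() lists the delete files that apply to this data file;
        // a non-empty list means correct results require merge-on-read.
        if (!task.deletes().isEmpty()) {
          throw new UnsupportedOperationException(
              "Table " + table.name() + " contains delete files");
        }
        dataFiles.add(task.file());
      }
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
    return dataFiles;
  }
}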

diff --git a/fe/src/main/java/org/apache/impala/analysis/AlterTableSetTblProperties.java b/fe/src/main/java/org/apache/impala/analysis/AlterTableSetTblProperties.java
index e11126f5a..0acd29495 100644
--- a/fe/src/main/java/org/apache/impala/analysis/AlterTableSetTblProperties.java
+++ b/fe/src/main/java/org/apache/impala/analysis/AlterTableSetTblProperties.java
@@ -182,7 +182,7 @@ public class AlterTableSetTblProperties extends AlterTableSetStmt {
     try {
       FeIcebergTable iceTable = (FeIcebergTable)getTargetTable();
       List<DataFile> dataFiles = IcebergUtil.getIcebergDataFiles(iceTable,
-          new ArrayList<>(), /*timeTravelSpec=*/null);
+          new ArrayList<>(), /*timeTravelSpec=*/null).first;
       if (dataFiles.isEmpty()) return;
       DataFile firstFile = dataFiles.get(0);
       String errorMsg = "Attempt to set Iceberg data file format to %s, but found data " +
diff --git a/fe/src/main/java/org/apache/impala/catalog/FeIcebergTable.java b/fe/src/main/java/org/apache/impala/catalog/FeIcebergTable.java
index e28a6559a..eeeeca4eb 100644
--- a/fe/src/main/java/org/apache/impala/catalog/FeIcebergTable.java
+++ b/fe/src/main/java/org/apache/impala/catalog/FeIcebergTable.java
@@ -511,7 +511,7 @@ public interface FeIcebergTable extends FeFsTable {
       }
       Map<String, HdfsPartition.FileDescriptor> fileDescMap = new HashMap<>();
       List<DataFile> dataFileList = IcebergUtil.getIcebergDataFiles(table,
-          new ArrayList<>(), /*timeTravelSpecl=*/null);
+          new ArrayList<>(), /*timeTravelSpec=*/null).first;
       for (DataFile dataFile : dataFileList) {
           Path path = new Path(dataFile.path().toString());
           if (hdfsFileDescMap.containsKey(path.toUri().getPath())) {
diff --git a/fe/src/main/java/org/apache/impala/planner/IcebergScanNode.java b/fe/src/main/java/org/apache/impala/planner/IcebergScanNode.java
index ca5a670be..5e17b4d35 100644
--- a/fe/src/main/java/org/apache/impala/planner/IcebergScanNode.java
+++ b/fe/src/main/java/org/apache/impala/planner/IcebergScanNode.java
@@ -51,6 +51,7 @@ import org.apache.impala.catalog.Type;
 import org.apache.impala.catalog.HdfsPartition.FileDescriptor;
 import org.apache.impala.common.ImpalaException;
 import org.apache.impala.common.ImpalaRuntimeException;
+import org.apache.impala.common.Pair;
 import org.apache.impala.util.IcebergUtil;
 
 import com.google.common.base.Preconditions;
@@ -105,8 +106,16 @@ public class IcebergScanNode extends HdfsScanNode {
       throws ImpalaRuntimeException {
     List<DataFile> dataFileList;
     try {
-      dataFileList = IcebergUtil.getIcebergDataFiles(icebergTable_, icebergPredicates_,
+      Pair<List<DataFile>, Boolean> dataFileListAndDeletePair =
+          IcebergUtil.getIcebergDataFiles(icebergTable_, icebergPredicates_,
           timeTravelSpec_);
+      dataFileList = dataFileListAndDeletePair.first;
+      Boolean hasDeleteFile = dataFileListAndDeletePair.second;
+      if (hasDeleteFile) {
+        throw new TableLoadingException(String.format("Unsupported Iceberg V2 feature, "
+            + "table '%s' with snapshot id '%s' contains delete files.",
+            icebergTable_.getFullName(), icebergTable_.snapshotId()));
+      }
     } catch (TableLoadingException e) {
       throw new ImpalaRuntimeException(String.format(
           "Failed to load data files for Iceberg table: %s", icebergTable_.getFullName()),
diff --git a/fe/src/main/java/org/apache/impala/util/IcebergUtil.java b/fe/src/main/java/org/apache/impala/util/IcebergUtil.java
index 94888fc7e..8f4fca0df 100644
--- a/fe/src/main/java/org/apache/impala/util/IcebergUtil.java
+++ b/fe/src/main/java/org/apache/impala/util/IcebergUtil.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.util;
 
+import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.time.Instant;
 import java.time.LocalDateTime;
@@ -54,6 +55,7 @@ import org.apache.iceberg.FileFormat;
 import org.apache.iceberg.FileScanTask;
 import org.apache.iceberg.TableScan;
 import org.apache.iceberg.expressions.UnboundPredicate;
+import org.apache.iceberg.io.CloseableIterable;
 import org.apache.iceberg.PartitionField;
 import org.apache.iceberg.PartitionSpec;
 import org.apache.iceberg.Schema;
@@ -516,12 +518,14 @@ public class IcebergUtil {
   }
 
   /**
-   * Get iceberg data file by file system table location and iceberg predicates
+   * Returns a Pair: the first element is the list of DataFiles selected by the
+   * table's file system location and the given Iceberg predicates; the second
+   * element is whether any DeleteFile applies to those DataFiles.
    */
-  public static List<DataFile> getIcebergDataFiles(FeIcebergTable table,
+  public static Pair<List<DataFile>, Boolean> getIcebergDataFiles(FeIcebergTable table,
       List<UnboundPredicate> predicates, TimeTravelSpec timeTravelSpec)
         throws TableLoadingException {
-    if (table.snapshotId() == -1) return Collections.emptyList();
+    if (table.snapshotId() == -1) return new Pair<>(Collections.emptyList(), false);
 
     TableScan scan = createScanAsOf(table, timeTravelSpec);
     for (UnboundPredicate predicate : predicates) {
@@ -529,10 +533,18 @@ public class IcebergUtil {
     }
 
     List<DataFile> dataFileList = new ArrayList<>();
-    for (FileScanTask task : scan.planFiles()) {
-      dataFileList.add(task.file());
+    Boolean hasDeleteFile = false;
+    try (CloseableIterable<FileScanTask> fileScanTasks = scan.planFiles()) {
+      for (FileScanTask task : fileScanTasks) {
+        if (!task.deletes().isEmpty()) {
+          hasDeleteFile = true;
+        }
+        dataFileList.add(task.file());
+      }
+    } catch (IOException e) {
+      throw new TableLoadingException("Data file list collection failed.", e);
     }
-    return dataFileList;
+    return new Pair<>(dataFileList, hasDeleteFile);
   }
 
   private static TableScan createScanAsOf(FeIcebergTable table,
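
A hedged usage sketch of the new Pair return type, mirroring the callers
patched above (fragment only; the `table` variable is assumed to be an
FeIcebergTable in scope):

// Fragment: consuming the Pair returned by getIcebergDataFiles.
Pair<List<DataFile>, Boolean> filesAndDeletes = IcebergUtil.getIcebergDataFiles(
    table, new ArrayList<>(), /*timeTravelSpec=*/null);
List<DataFile> dataFiles = filesAndDeletes.first;  // planned data files
boolean hasDeleteFile = filesAndDeletes.second;    // any delete file seen?
if (hasDeleteFile) {
  // Fail fast: without merge-on-read support, the scan would return rows that
  // the delete files say should be removed.
}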
diff --git a/testdata/data/README b/testdata/data/README
index af49e9327..570c1e87a 100644
--- a/testdata/data/README
+++ b/testdata/data/README
@@ -718,3 +718,9 @@ The tables that have the following schema changes since table migration:
 
 iceberg_test/hadoop_catalog/ice/iceberg_uppercase_col:
 Generated by Impala, then modified the metadata.json file to contain uppercase characters.
+
+iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional:
+Generated by Spark 3.2 + Iceberg 0.13, then the JSON and Avro files were manually
+edited to make the table correspond to an Iceberg table in a HadoopCatalog instead
+of a HiveCatalog.
+The table has a positional delete file.
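
For context, a hedged sketch of how a table with a positional delete file can
be produced with Spark 3.2 + Iceberg 0.13; the catalog name, warehouse path,
and table name below are illustrative, not the exact commands used to generate
this test data:

import org.apache.spark.sql.SparkSession;

public class GenerateV2DeleteTable {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        .appName("iceberg-v2-delete-positional")
        // The Iceberg SQL extensions are required for DELETE FROM support.
        .config("spark.sql.extensions",
            "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions")
        // Register a Hadoop catalog named "ice" backed by a warehouse path.
        .config("spark.sql.catalog.ice", "org.apache.iceberg.spark.SparkCatalog")
        .config("spark.sql.catalog.ice.type", "hadoop")
        .config("spark.sql.catalog.ice.warehouse", "/tmp/ice-warehouse")
        .getOrCreate();

    spark.sql("CREATE TABLE ice.db.tbl (id BIGINT, data STRING) USING iceberg "
        + "TBLPROPERTIES ('format-version'='2', "
        + "'write.delete.mode'='merge-on-read')");
    spark.sql("INSERT INTO ice.db.tbl VALUES (1, 'a'), (2, 'b'), (3, 'c')");
    // With merge-on-read, this writes a positional delete file instead of
    // rewriting the data file.
    spark.sql("DELETE FROM ice.db.tbl WHERE id = 2");
    spark.stop();
  }
}

This sequence (an append of three rows, then an overwrite adding one positional
delete) is consistent with the snapshot summaries in the v1/v2 metadata.json
files below.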
diff --git a/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/data/00000-0-fb178c51-b12a-4c5f-a66e-a8e9375daeba-00001.parquet b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/data/00000-0-fb178c51-b12a-4c5f-a66e-a8e9375daeba-00001.parquet
new file mode 100644
index 000000000..d27d6b3f6
Binary files /dev/null and b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/data/00000-0-fb178c51-b12a-4c5f-a66e-a8e9375daeba-00001.parquet differ
diff --git a/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/data/00191-4-6e780302-527b-4911-8c6e-88d416adac57-00001.parquet b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/data/00191-4-6e780302-527b-4911-8c6e-88d416adac57-00001.parquet
new file mode 100644
index 000000000..ecd146b49
Binary files /dev/null and b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/data/00191-4-6e780302-527b-4911-8c6e-88d416adac57-00001.parquet differ
diff --git a/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/0eadf173-0c84-4378-a9d0-5d7f47183978-m0.avro b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/0eadf173-0c84-4378-a9d0-5d7f47183978-m0.avro
new file mode 100644
index 000000000..197685709
Binary files /dev/null and b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/0eadf173-0c84-4378-a9d0-5d7f47183978-m0.avro differ
diff --git a/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/8cbef400-daea-478a-858a-2baf2438f644-m0.avro b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/8cbef400-daea-478a-858a-2baf2438f644-m0.avro
new file mode 100644
index 000000000..de8de07de
Binary files /dev/null and b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/8cbef400-daea-478a-858a-2baf2438f644-m0.avro differ
diff --git a/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/snap-5725822353600261755-1-0eadf173-0c84-4378-a9d0-5d7f47183978.avro b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/snap-5725822353600261755-1-0eadf173-0c84-4378-a9d0-5d7f47183978.avro
new file mode 100644
index 000000000..f2dd2fb21
Binary files /dev/null and b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/snap-5725822353600261755-1-0eadf173-0c84-4378-a9d0-5d7f47183978.avro differ
diff --git a/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/snap-6816997371555012807-1-8cbef400-daea-478a-858a-2baf2438f644.avro b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/snap-6816997371555012807-1-8cbef400-daea-478a-858a-2baf2438f644.avro
new file mode 100644
index 000000000..b01d3ea28
Binary files /dev/null and b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/snap-6816997371555012807-1-8cbef400-daea-478a-858a-2baf2438f644.avro differ
diff --git a/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/v1.metadata.json b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/v1.metadata.json
new file mode 100644
index 000000000..6477049e9
--- /dev/null
+++ b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/v1.metadata.json
@@ -0,0 +1,66 @@
+{
+  "format-version" : 2,
+  "table-uuid" : "3deb545a-5a19-48f1-ad07-a4d80c677e3e",
+  "location" : "/test-warehouse/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional",
+  "last-sequence-number" : 1,
+  "last-updated-ms" : 1649071501670,
+  "last-column-id" : 2,
+  "current-schema-id" : 0,
+  "schemas" : [ {
+    "type" : "struct",
+    "schema-id" : 0,
+    "fields" : [ {
+      "id" : 1,
+      "name" : "id",
+      "required" : false,
+      "type" : "long"
+    }, {
+      "id" : 2,
+      "name" : "data",
+      "required" : false,
+      "type" : "string"
+    } ]
+  } ],
+  "default-spec-id" : 0,
+  "partition-specs" : [ {
+    "spec-id" : 0,
+    "fields" : [ ]
+  } ],
+  "last-partition-id" : 999,
+  "default-sort-order-id" : 0,
+  "sort-orders" : [ {
+    "order-id" : 0,
+    "fields" : [ ]
+  } ],
+  "properties" : {
+    "owner" : "tamasmate",
+    "write.delete.mode" : "merge-on-read"
+  },
+  "current-snapshot-id" : 6816997371555012807,
+  "snapshots" : [ {
+    "sequence-number" : 1,
+    "snapshot-id" : 6816997371555012807,
+    "timestamp-ms" : 1649071501670,
+    "summary" : {
+      "operation" : "append",
+      "spark.app.id" : "local-1649071493099",
+      "added-data-files" : "1",
+      "added-records" : "3",
+      "added-files-size" : "662",
+      "changed-partition-count" : "1",
+      "total-records" : "3",
+      "total-files-size" : "662",
+      "total-data-files" : "1",
+      "total-delete-files" : "0",
+      "total-position-deletes" : "0",
+      "total-equality-deletes" : "0"
+    },
+    "manifest-list" : "/test-warehouse/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/snap-6816997371555012807-1-8cbef400-daea-478a-858a-2baf2438f644.avro",
+    "schema-id" : 0
+  } ],
+  "snapshot-log" : [ {
+    "timestamp-ms" : 1649071501670,
+    "snapshot-id" : 6816997371555012807
+  } ],
+  "metadata-log" : [ ]
+}
diff --git a/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/v2.metadata.json b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/v2.metadata.json
new file mode 100644
index 000000000..1d7ab1861
--- /dev/null
+++ b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/v2.metadata.json
@@ -0,0 +1,93 @@
+{
+  "format-version" : 2,
+  "table-uuid" : "3deb545a-5a19-48f1-ad07-a4d80c677e3e",
+  "location" : "/test-warehouse/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional",
+  "last-sequence-number" : 2,
+  "last-updated-ms" : 1649071557501,
+  "last-column-id" : 2,
+  "current-schema-id" : 0,
+  "schemas" : [ {
+    "type" : "struct",
+    "schema-id" : 0,
+    "fields" : [ {
+      "id" : 1,
+      "name" : "id",
+      "required" : false,
+      "type" : "long"
+    }, {
+      "id" : 2,
+      "name" : "data",
+      "required" : false,
+      "type" : "string"
+    } ]
+  } ],
+  "default-spec-id" : 0,
+  "partition-specs" : [ {
+    "spec-id" : 0,
+    "fields" : [ ]
+  } ],
+  "last-partition-id" : 999,
+  "default-sort-order-id" : 0,
+  "sort-orders" : [ {
+    "order-id" : 0,
+    "fields" : [ ]
+  } ],
+  "properties" : {
+    "owner" : "tamasmate",
+    "write.delete.mode" : "merge-on-read"
+  },
+  "current-snapshot-id" : 5725822353600261755,
+  "snapshots" : [ {
+    "sequence-number" : 1,
+    "snapshot-id" : 6816997371555012807,
+    "timestamp-ms" : 1649071501670,
+    "summary" : {
+      "operation" : "append",
+      "spark.app.id" : "local-1649071493099",
+      "added-data-files" : "1",
+      "added-records" : "3",
+      "added-files-size" : "662",
+      "changed-partition-count" : "1",
+      "total-records" : "3",
+      "total-files-size" : "662",
+      "total-data-files" : "1",
+      "total-delete-files" : "0",
+      "total-position-deletes" : "0",
+      "total-equality-deletes" : "0"
+    },
+    "manifest-list" : "/test-warehouse/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/snap-6816997371555012807-1-8cbef400-daea-478a-858a-2baf2438f644.avro",
+    "schema-id" : 0
+  }, {
+    "sequence-number" : 2,
+    "snapshot-id" : 5725822353600261755,
+    "parent-snapshot-id" : 6816997371555012807,
+    "timestamp-ms" : 1649071557501,
+    "summary" : {
+      "operation" : "overwrite",
+      "spark.app.id" : "local-1649071493099",
+      "added-delete-files" : "1",
+      "added-files-size" : "1598",
+      "added-position-deletes" : "1",
+      "changed-partition-count" : "1",
+      "total-records" : "3",
+      "total-files-size" : "2260",
+      "total-data-files" : "1",
+      "total-delete-files" : "1",
+      "total-position-deletes" : "1",
+      "total-equality-deletes" : "0"
+    },
+    "manifest-list" : "/test-warehouse/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/snap-5725822353600261755-1-0eadf173-0c84-4378-a9d0-5d7f47183978.avro",
+    "schema-id" : 0
+  } ],
+  "snapshot-log" : [ {
+    "timestamp-ms" : 1649071501670,
+    "snapshot-id" : 6816997371555012807
+  }, {
+    "timestamp-ms" : 1649071557501,
+    "snapshot-id" : 5725822353600261755
+  } ],
+  "metadata-log" : [ {
+    "timestamp-ms" : 1649071501670,
+    "metadata-file" : "/test-warehouse/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/v1.metadata.json"
+  } ]
+}
diff --git a/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/version-hint.text b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/version-hint.text
new file mode 100644
index 000000000..d8263ee98
--- /dev/null
+++ b/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional/metadata/version-hint.text
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/testdata/datasets/functional/functional_schema_template.sql b/testdata/datasets/functional/functional_schema_template.sql
index a81f76da9..c39f6740f 100644
--- a/testdata/datasets/functional/functional_schema_template.sql
+++ b/testdata/datasets/functional/functional_schema_template.sql
@@ -3389,3 +3389,18 @@ CREATE VIEW {db_name}{db_suffix}.{table_name}
 AS SELECT id, int_array, int_array_array FROM {db_name}{db_suffix}.complextypestbl;
 ---- LOAD
 ====
+---- DATASET
+functional
+---- BASE_TABLE_NAME
+iceberg_v2_delete_positional
+---- CREATE
+CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}
+STORED AS ICEBERG
+TBLPROPERTIES('write.format.default'='parquet', 'iceberg.catalog'='hadoop.catalog',
+              'iceberg.catalog_location'='/test-warehouse/iceberg_test/hadoop_catalog',
+              'iceberg.table_identifier'='ice.iceberg_v2_delete_positional',
+              'format-version'='2', 'write.update.mode'='merge-on-read');
+---- DEPENDENT_LOAD
+`hadoop fs -mkdir -p /test-warehouse/iceberg_test/hadoop_catalog/ice && \
+hadoop fs -put -f ${IMPALA_HOME}/testdata/data/iceberg_test/hadoop_catalog/ice/iceberg_v2_delete_positional /test-warehouse/iceberg_test/hadoop_catalog/ice
+====
\ No newline at end of file
diff --git a/testdata/datasets/functional/schema_constraints.csv b/testdata/datasets/functional/schema_constraints.csv
index c462d4aed..2c99a888b 100644
--- a/testdata/datasets/functional/schema_constraints.csv
+++ b/testdata/datasets/functional/schema_constraints.csv
@@ -78,6 +78,7 @@ table_name:iceberg_alltypes_part_orc, constraint:restrict_to, table_format:parqu
 table_name:iceberg_legacy_partition_schema_evolution, constraint:restrict_to, table_format:parquet/none/none
 table_name:iceberg_legacy_partition_schema_evolution_orc, constraint:restrict_to, table_format:parquet/none/none
 table_name:iceberg_uppercase_col, constraint:restrict_to, table_format:parquet/none/none
+table_name:iceberg_v2_delete_positional, constraint:restrict_to, table_format:parquet/none/none
 
 # TODO: Support Avro. Data loading currently fails for Avro because complex types
 # cannot be converted to the corresponding Avro types yet.
diff --git a/testdata/workloads/functional-query/queries/QueryTest/iceberg-negative.test b/testdata/workloads/functional-query/queries/QueryTest/iceberg-negative.test
index 9d4aa9075..69b0d67d8 100644
--- a/testdata/workloads/functional-query/queries/QueryTest/iceberg-negative.test
+++ b/testdata/workloads/functional-query/queries/QueryTest/iceberg-negative.test
@@ -597,3 +597,8 @@ ALTER TABLE iceberg_alter_part SET PARTITION SPEC (HOUR(d));
 ---- CATCH
 ImpalaRuntimeException: Failed to ALTER table 'iceberg_alter_part': Cannot partition type date by hour
 ====
+---- QUERY
+select * from functional_parquet.iceberg_v2_delete_positional;
+---- CATCH
+row_regex:.*CAUSED BY: TableLoadingException: Unsupported Iceberg V2 feature, table .* contains delete files..*
+====