Posted to commits@iceberg.apache.org by ao...@apache.org on 2022/10/14 04:24:43 UTC

[iceberg] branch master updated: Core, Spark: Fix Kryo deserialization of SerializableTable (#5975)

This is an automated email from the ASF dual-hosted git repository.

aokolnychyi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/iceberg.git


The following commit(s) were added to refs/heads/master by this push:
     new 7c77d1dd93 Core, Spark: Fix Kryo deserialization of SerializableTable (#5975)
7c77d1dd93 is described below

commit 7c77d1dd931acb6eb017d3de3976d0d10f9e79b2
Author: Kristin Cowalcijk <mo...@yeah.net>
AuthorDate: Fri Oct 14 12:24:37 2022 +0800

    Core, Spark: Fix Kryo deserialization of SerializableTable (#5975)
---
 .../main/java/org/apache/iceberg/SerializableTable.java  |  2 +-
 .../apache/iceberg/hadoop/TestTableSerialization.java    | 16 ++++++++++++++++
 2 files changed, 17 insertions(+), 1 deletion(-)

diff --git a/core/src/main/java/org/apache/iceberg/SerializableTable.java b/core/src/main/java/org/apache/iceberg/SerializableTable.java
index 9b441113e7..4f5ddef0c6 100644
--- a/core/src/main/java/org/apache/iceberg/SerializableTable.java
+++ b/core/src/main/java/org/apache/iceberg/SerializableTable.java
@@ -82,7 +82,7 @@ public class SerializableTable implements Table, Serializable {
     this.io = fileIO(table);
     this.encryption = table.encryption();
     this.locationProvider = table.locationProvider();
-    this.refs = table.refs();
+    this.refs = SerializableMap.copyOf(table.refs());
   }
 
   /**
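
The change above is the core of the fix: instead of keeping whatever map implementation
table.refs() returns (which Kryo could fail to reconstruct on deserialization), the
SerializableTable constructor copies the refs into Iceberg's SerializableMap, presumably
the same treatment the other serialized maps in the class receive. As a rough,
hand-written sketch of that copy-on-construction idea (CopiedMap is a hypothetical
stand-in, not Iceberg's actual SerializableMap):

    import java.io.Serializable;
    import java.util.HashMap;
    import java.util.Map;

    // Copies the source entries into a plain HashMap so that any serializer,
    // including Kryo, can rebuild the field without knowing about the original,
    // possibly unmodifiable or lazily backed, map implementation.
    final class CopiedMap<K, V> implements Serializable {
      private final Map<K, V> copy;

      private CopiedMap(Map<K, V> source) {
        this.copy = new HashMap<>(source);
      }

      static <K, V> CopiedMap<K, V> copyOf(Map<K, V> source) {
        return new CopiedMap<>(source);
      }

      Map<K, V> asMap() {
        return copy;
      }
    }

The test changes below verify the fix end to end.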
diff --git a/core/src/test/java/org/apache/iceberg/hadoop/TestTableSerialization.java b/core/src/test/java/org/apache/iceberg/hadoop/TestTableSerialization.java
index 6fa0a901af..284b6c86e5 100644
--- a/core/src/test/java/org/apache/iceberg/hadoop/TestTableSerialization.java
+++ b/core/src/test/java/org/apache/iceberg/hadoop/TestTableSerialization.java
@@ -29,6 +29,7 @@ import java.util.Set;
 import org.apache.iceberg.FileScanTask;
 import org.apache.iceberg.HasTableOperations;
 import org.apache.iceberg.MetadataTableType;
+import org.apache.iceberg.SerializableTable;
 import org.apache.iceberg.Table;
 import org.apache.iceberg.TableProperties;
 import org.apache.iceberg.TestHelpers;
@@ -51,6 +52,18 @@ public class TestTableSerialization extends HadoopTableTestBase {
     table.updateSchema().addColumn("new_col", Types.IntegerType.get()).commit();
 
     TestHelpers.assertSerializedAndLoadedMetadata(table, TestHelpers.roundTripSerialize(table));
+    Table serializableTable = SerializableTable.copyOf(table);
+    TestHelpers.assertSerializedAndLoadedMetadata(
+        serializableTable, TestHelpers.KryoHelpers.roundTripSerialize(serializableTable));
+  }
+
+  @Test
+  public void testSerializableTableWithSnapshot() throws IOException, ClassNotFoundException {
+    table.newAppend().appendFile(FILE_A).commit();
+    TestHelpers.assertSerializedAndLoadedMetadata(table, TestHelpers.roundTripSerialize(table));
+    Table serializableTable = SerializableTable.copyOf(table);
+    TestHelpers.assertSerializedAndLoadedMetadata(
+        serializableTable, TestHelpers.KryoHelpers.roundTripSerialize(serializableTable));
   }
 
   @Test
@@ -75,6 +88,9 @@ public class TestTableSerialization extends HadoopTableTestBase {
       Table metadataTable = getMetaDataTable(table, type);
       TestHelpers.assertSerializedAndLoadedMetadata(
           metadataTable, TestHelpers.roundTripSerialize(metadataTable));
+      Table serializableTable = SerializableTable.copyOf(metadataTable);
+      TestHelpers.assertSerializedAndLoadedMetadata(
+          serializableTable, TestHelpers.KryoHelpers.roundTripSerialize(serializableTable));
     }
   }
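
The new tests exercise the fix by round-tripping a SerializableTable copy through Kryo
(via TestHelpers.KryoHelpers.roundTripSerialize) in addition to the existing Java
serialization round trip: in the existing table test, in a new test with a snapshot, and
for each metadata table type. As a rough illustration only (not Iceberg's actual test
helper, whose Kryo configuration may differ; KryoRoundTrip is a hypothetical name), a
minimal Kryo round-trip utility could look like this:

    import com.esotericsoftware.kryo.Kryo;
    import com.esotericsoftware.kryo.io.Input;
    import com.esotericsoftware.kryo.io.Output;
    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;

    final class KryoRoundTrip {
      @SuppressWarnings("unchecked")
      static <T> T roundTrip(T obj) {
        Kryo kryo = new Kryo();
        // Accept unregistered classes; a real helper may also register custom
        // serializers or configure an instantiator strategy for classes that
        // lack no-arg constructors.
        kryo.setRegistrationRequired(false);

        // Serialize the object together with its class name.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (Output output = new Output(bytes)) {
          kryo.writeClassAndObject(output, obj);
        }

        // Deserialize from the written bytes and return the rebuilt instance.
        try (Input input = new Input(new ByteArrayInputStream(bytes.toByteArray()))) {
          return (T) kryo.readClassAndObject(input);
        }
      }
    }

Asserting that the deserialized copy still reports the same metadata, as
assertSerializedAndLoadedMetadata does in the tests above, is what catches regressions
like the refs map failing to come back from Kryo.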