Posted to commits@spark.apache.org by do...@apache.org on 2020/03/08 18:43:14 UTC

[spark] branch master updated: [SPARK-31053][SQL] mark connector APIs as Evolving

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 1aa1847  [SPARK-31053][SQL] mark connector APIs as Evolving
1aa1847 is described below

commit 1aa184763aa49d70907669b2d8af5a713ee0d7fa
Author: Wenchen Fan <we...@databricks.com>
AuthorDate: Sun Mar 8 11:41:09 2020 -0700

    [SPARK-31053][SQL] mark connector APIs as Evolving
    
    ### What changes were proposed in this pull request?
    
    The newly added catalog APIs are marked as Experimental but other DS v2 APIs are marked as Evolving.
    
    This PR makes the annotations consistent by marking all connector APIs as Evolving.
    
    ### Why are the changes needed?
    
    For consistency.
    
    ### Does this PR introduce any user-facing change?
    
    no
    
    ### How was this patch tested?
    
    N/A
    
    Closes #27811 from cloud-fan/tag.
    
    Authored-by: Wenchen Fan <we...@databricks.com>
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
 .../java/org/apache/spark/sql/connector/catalog/CatalogExtension.java | 4 ++--
 .../java/org/apache/spark/sql/connector/catalog/CatalogPlugin.java    | 4 ++--
 .../spark/sql/connector/catalog/DelegatingCatalogExtension.java       | 4 ++--
 .../main/java/org/apache/spark/sql/connector/catalog/Identifier.java  | 4 ++--
 .../java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java   | 4 ++--
 .../java/org/apache/spark/sql/connector/catalog/NamespaceChange.java  | 4 ++--
 .../main/java/org/apache/spark/sql/connector/catalog/StagedTable.java | 4 ++--
 .../org/apache/spark/sql/connector/catalog/StagingTableCatalog.java   | 4 ++--
 .../java/org/apache/spark/sql/connector/catalog/SupportsDelete.java   | 4 ++--
 .../org/apache/spark/sql/connector/catalog/SupportsNamespaces.java    | 4 ++--
 .../java/org/apache/spark/sql/connector/catalog/SupportsRead.java     | 4 ++--
 .../java/org/apache/spark/sql/connector/catalog/SupportsWrite.java    | 4 ++--
 .../java/org/apache/spark/sql/connector/catalog/TableCapability.java  | 4 ++--
 .../java/org/apache/spark/sql/connector/catalog/TableCatalog.java     | 4 ++--
 .../main/java/org/apache/spark/sql/connector/catalog/TableChange.java | 4 ++--
 .../java/org/apache/spark/sql/connector/expressions/Expression.java   | 4 ++--
 .../java/org/apache/spark/sql/connector/expressions/Expressions.java  | 4 ++--
 .../main/java/org/apache/spark/sql/connector/expressions/Literal.java | 4 ++--
 .../org/apache/spark/sql/connector/expressions/NamedReference.java    | 4 ++--
 .../java/org/apache/spark/sql/connector/expressions/Transform.java    | 4 ++--
 .../apache/spark/sql/connector/write/SupportsDynamicOverwrite.java    | 3 +++
 .../java/org/apache/spark/sql/connector/write/SupportsOverwrite.java  | 2 ++
 .../java/org/apache/spark/sql/connector/write/SupportsTruncate.java   | 3 +++
 23 files changed, 48 insertions(+), 40 deletions(-)
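
The bulk of the diff below retags the catalog-side plugin interfaces (CatalogPlugin, TableCatalog, SupportsNamespaces, and so on). How such a plugin is wired into a session is unchanged by this commit; a minimal, hypothetical registration might look like the sketch below, where ExampleCatalog and the catalog name "my_catalog" are invented for illustration, and ExampleCatalog is assumed to implement TableCatalog and SupportsNamespaces:

    // Hypothetical driver program; only the spark.sql.catalog.* config key
    // and the SparkSession API come from Spark itself.
    import org.apache.spark.sql.SparkSession;

    public class EvolvingCatalogDemo {
      public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
            .appName("evolving-catalog-demo")
            .master("local[*]")
            // Register a custom CatalogPlugin implementation under the name "my_catalog".
            .config("spark.sql.catalog.my_catalog", "com.example.catalog.ExampleCatalog")
            .getOrCreate();

        // Routed to ExampleCatalog through the v2 catalog framework; this
        // requires the plugin to implement SupportsNamespaces.
        spark.sql("SHOW NAMESPACES IN my_catalog").show();

        spark.stop();
      }
    }
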

diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/CatalogExtension.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/CatalogExtension.java
index 61cb83c..155dca5 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/CatalogExtension.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/CatalogExtension.java
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.connector.catalog;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.util.CaseInsensitiveStringMap;
 
 /**
@@ -29,7 +29,7 @@ import org.apache.spark.sql.util.CaseInsensitiveStringMap;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface CatalogExtension extends TableCatalog, SupportsNamespaces {
 
   /**
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/CatalogPlugin.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/CatalogPlugin.java
index 2958538..8ca4f56 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/CatalogPlugin.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/CatalogPlugin.java
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.connector.catalog;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.internal.SQLConf;
 import org.apache.spark.sql.util.CaseInsensitiveStringMap;
 
@@ -41,7 +41,7 @@ import org.apache.spark.sql.util.CaseInsensitiveStringMap;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface CatalogPlugin {
   /**
    * Called to initialize configuration.
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/DelegatingCatalogExtension.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/DelegatingCatalogExtension.java
index 5a51959..d07d299 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/DelegatingCatalogExtension.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/DelegatingCatalogExtension.java
@@ -19,7 +19,7 @@ package org.apache.spark.sql.connector.catalog;
 
 import java.util.Map;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException;
 import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException;
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException;
@@ -36,7 +36,7 @@ import org.apache.spark.sql.util.CaseInsensitiveStringMap;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public abstract class DelegatingCatalogExtension implements CatalogExtension {
 
   private CatalogPlugin delegate;
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/Identifier.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/Identifier.java
index 7b3a081..88c51d6c 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/Identifier.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/Identifier.java
@@ -17,14 +17,14 @@
 
 package org.apache.spark.sql.connector.catalog;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 
 /**
  * Identifies an object in a catalog.
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface Identifier {
 
   static Identifier of(String[] namespace, String name) {
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java
index 30596d9..b6c84cd 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java
@@ -24,12 +24,12 @@ import java.util.stream.Stream;
 
 import com.google.common.base.Preconditions;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 
 /**
  *  An {@link Identifier} implementation.
  */
-@Experimental
+@Evolving
 class IdentifierImpl implements Identifier {
 
   private String[] namespace;
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/NamespaceChange.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/NamespaceChange.java
index f4eee84..dae29c1 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/NamespaceChange.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/NamespaceChange.java
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.connector.catalog;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 
 /**
  * NamespaceChange subclasses represent requested changes to a namespace. These are passed to
@@ -33,7 +33,7 @@ import org.apache.spark.annotation.Experimental;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface NamespaceChange {
   /**
    * Create a NamespaceChange for setting a namespace property.
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/StagedTable.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/StagedTable.java
index c328f83..60b250a 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/StagedTable.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/StagedTable.java
@@ -19,7 +19,7 @@ package org.apache.spark.sql.connector.catalog;
 
 import java.util.Map;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.connector.expressions.Transform;
 import org.apache.spark.sql.connector.write.LogicalWriteInfo;
 import org.apache.spark.sql.types.StructType;
@@ -39,7 +39,7 @@ import org.apache.spark.sql.types.StructType;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface StagedTable extends Table {
 
   /**
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/StagingTableCatalog.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/StagingTableCatalog.java
index 9d4f6ea..35455a0 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/StagingTableCatalog.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/StagingTableCatalog.java
@@ -19,7 +19,7 @@ package org.apache.spark.sql.connector.catalog;
 
 import java.util.Map;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.connector.expressions.Transform;
 import org.apache.spark.sql.connector.write.LogicalWriteInfo;
 import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException;
@@ -51,7 +51,7 @@ import org.apache.spark.sql.types.StructType;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface StagingTableCatalog extends TableCatalog {
 
   /**
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsDelete.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsDelete.java
index 9246188..106f328 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsDelete.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsDelete.java
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.connector.catalog;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.sources.Filter;
 
 /**
@@ -26,7 +26,7 @@ import org.apache.spark.sql.sources.Filter;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface SupportsDelete {
   /**
    * Delete data from a data source table that matches filter expressions.
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsNamespaces.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsNamespaces.java
index bd7e49e..f70746b 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsNamespaces.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsNamespaces.java
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.connector.catalog;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException;
 import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException;
 
@@ -38,7 +38,7 @@ import java.util.Map;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface SupportsNamespaces extends CatalogPlugin {
 
   /**
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsRead.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsRead.java
index a4cb66b..e0ed977 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsRead.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsRead.java
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.connector.catalog;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.connector.read.Scan;
 import org.apache.spark.sql.connector.read.ScanBuilder;
 import org.apache.spark.sql.util.CaseInsensitiveStringMap;
@@ -29,7 +29,7 @@ import org.apache.spark.sql.util.CaseInsensitiveStringMap;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface SupportsRead extends Table {
 
   /**
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsWrite.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsWrite.java
index 19499d1..00aa180 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsWrite.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/SupportsWrite.java
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.connector.catalog;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.connector.write.BatchWrite;
 import org.apache.spark.sql.connector.write.LogicalWriteInfo;
 import org.apache.spark.sql.connector.write.WriteBuilder;
@@ -29,7 +29,7 @@ import org.apache.spark.sql.connector.write.WriteBuilder;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface SupportsWrite extends Table {
 
   /**
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableCapability.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableCapability.java
index 6a64bf8..68161d7 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableCapability.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableCapability.java
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.connector.catalog;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 
 /**
  * Capabilities that can be provided by a {@link Table} implementation.
@@ -28,7 +28,7 @@ import org.apache.spark.annotation.Experimental;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public enum TableCapability {
   /**
    * Signals that the table supports reads in batch execution mode.
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableCatalog.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableCatalog.java
index 357cd4b..1809b9c 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableCatalog.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableCatalog.java
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.connector.catalog;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.connector.expressions.Transform;
 import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException;
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException;
@@ -37,7 +37,7 @@ import java.util.Map;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface TableCatalog extends CatalogPlugin {
 
   /**
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableChange.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableChange.java
index 81e988a..3ed185a 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableChange.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableChange.java
@@ -21,7 +21,7 @@ import java.util.Arrays;
 import java.util.Objects;
 import javax.annotation.Nullable;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.types.DataType;
 
 /**
@@ -39,7 +39,7 @@ import org.apache.spark.sql.types.DataType;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface TableChange {
 
   /**
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Expression.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Expression.java
index a5b8e22..6540c91 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Expression.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Expression.java
@@ -17,14 +17,14 @@
 
 package org.apache.spark.sql.connector.expressions;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 
 /**
  * Base class of the public logical expression API.
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface Expression {
   /**
    * Format the expression as a human readable SQL-like string.
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Expressions.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Expressions.java
index 28fb71e..791dc96 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Expressions.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Expressions.java
@@ -19,9 +19,9 @@ package org.apache.spark.sql.connector.expressions;
 
 import java.util.Arrays;
 
+import org.apache.spark.annotation.Evolving;
 import scala.collection.JavaConverters;
 
-import org.apache.spark.annotation.Experimental;
 import org.apache.spark.sql.types.DataType;
 
 /**
@@ -29,7 +29,7 @@ import org.apache.spark.sql.types.DataType;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public class Expressions {
   private Expressions() {
   }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Literal.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Literal.java
index 6474eb4..df9e58f 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Literal.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Literal.java
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.connector.expressions;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.types.DataType;
 
 /**
@@ -29,7 +29,7 @@ import org.apache.spark.sql.types.DataType;
  * @param <T> the JVM type of a value held by the literal
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface Literal<T> extends Expression {
   /**
    * Returns the literal value.
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/NamedReference.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/NamedReference.java
index 6dd46cc..167432f 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/NamedReference.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/NamedReference.java
@@ -17,14 +17,14 @@
 
 package org.apache.spark.sql.connector.expressions;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 
 /**
  * Represents a field or column reference in the public logical expression API.
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface NamedReference extends Expression {
   /**
    * Returns the referenced field name as an array of String parts.
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Transform.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Transform.java
index ad78f00..2972058 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Transform.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Transform.java
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.connector.expressions;
 
-import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.Evolving;
 
 /**
  * Represents a transform function in the public logical expression API.
@@ -27,7 +27,7 @@ import org.apache.spark.annotation.Experimental;
  *
  * @since 3.0.0
  */
-@Experimental
+@Evolving
 public interface Transform extends Expression {
   /**
    * Returns the transform function name.
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsDynamicOverwrite.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsDynamicOverwrite.java
index bda0eed..422cd71 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsDynamicOverwrite.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsDynamicOverwrite.java
@@ -17,6 +17,8 @@
 
 package org.apache.spark.sql.connector.write;
 
+import org.apache.spark.annotation.Evolving;
+
 /**
  * Write builder trait for tables that support dynamic partition overwrite.
  * <p>
@@ -29,6 +31,7 @@ package org.apache.spark.sql.connector.write;
  *
  * @since 3.0.0
  */
+@Evolving
 public interface SupportsDynamicOverwrite extends WriteBuilder {
   /**
    * Configures a write to dynamically replace partitions with data committed in the write.
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsOverwrite.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsOverwrite.java
index ffa106c..b4e6025 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsOverwrite.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsOverwrite.java
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.connector.write;
 
+import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.sources.AlwaysTrue$;
 import org.apache.spark.sql.sources.Filter;
 
@@ -28,6 +29,7 @@ import org.apache.spark.sql.sources.Filter;
  *
  * @since 3.0.0
  */
+@Evolving
 public interface SupportsOverwrite extends WriteBuilder, SupportsTruncate {
   /**
    * Configures a write to replace data matching the filters with data committed in the write.
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsTruncate.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsTruncate.java
index b89d409..0763f65 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsTruncate.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsTruncate.java
@@ -17,6 +17,8 @@
 
 package org.apache.spark.sql.connector.write;
 
+import org.apache.spark.annotation.Evolving;
+
 /**
  * Write builder trait for tables that support truncation.
  * <p>
@@ -24,6 +26,7 @@ package org.apache.spark.sql.connector.write;
  *
  * @since 3.0.0
  */
+@Evolving
 public interface SupportsTruncate extends WriteBuilder {
   /**
    * Configures a write to replace all existing data with data committed in the write.
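
For downstream connector authors, here is a minimal, hypothetical sketch of a WriteBuilder that opts into the now-Evolving overwrite/truncate traits touched at the end of this patch. The class and package names are invented for illustration; only the Spark interfaces and imports come from the files above.

    // Hypothetical connector-side builder; not part of this commit.
    package com.example.sink;

    import org.apache.spark.sql.connector.write.BatchWrite;
    import org.apache.spark.sql.connector.write.SupportsOverwrite;
    import org.apache.spark.sql.connector.write.WriteBuilder;
    import org.apache.spark.sql.sources.Filter;

    public class ExampleWriteBuilder implements SupportsOverwrite {

      // Filters describing which existing rows must be replaced on commit;
      // an empty array means a plain append.
      private Filter[] overwriteFilters = new Filter[0];

      @Override
      public WriteBuilder overwrite(Filter[] filters) {
        // Called when Spark plans an overwrite-by-filter write. Because
        // SupportsOverwrite also extends SupportsTruncate, its default
        // truncate() delegates here with an always-true filter.
        this.overwriteFilters = filters;
        return this;
      }

      @Override
      public BatchWrite buildForBatch() {
        // Placeholder: a real connector would return a BatchWrite that drops
        // rows matching overwriteFilters and appends the new data on commit.
        throw new UnsupportedOperationException("sketch only");
      }
    }

Since these interfaces are tagged Evolving rather than Experimental, implementations like the sketch above may still need adjustments as the connector API evolves between Spark feature releases.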


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org