You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2021/01/26 08:13:38 UTC

[spark] branch master updated: [SPARK-34235][SS] Make spark.sql.hive as a private package

This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 0a1a029  [SPARK-34235][SS] Make spark.sql.hive as a private package
0a1a029 is described below

commit 0a1a029622eb49e7943f87cfae6942d09bc121a6
Author: Yuanjian Li <yu...@databricks.com>
AuthorDate: Tue Jan 26 17:13:11 2021 +0900

    [SPARK-34235][SS] Make spark.sql.hive as a private package
    
    ### What changes were proposed in this pull request?
    Follow the comment https://github.com/apache/spark/pull/31271#discussion_r562598983:
    
    - Remove the API tag `Unstable` for `HiveSessionStateBuilder`
    - Add documentation for the spark.sql.hive package to emphasize that it is a private package
    
    ### Why are the changes needed?
    Follow the conventions expected of a private package.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Doc change only.
    
    Closes #31321 from xuanyuanking/SPARK-34185-follow.
    
    Authored-by: Yuanjian Li <yu...@databricks.com>
    Signed-off-by: HyukjinKwon <gu...@apache.org>
---
 project/SparkBuild.scala                                               | 2 +-
 .../main/scala/org/apache/spark/sql/hive/HiveSessionStateBuilder.scala | 2 --
 sql/hive/src/main/scala/org/apache/spark/sql/hive/package.scala        | 3 +++
 3 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 5fedfec..21754ae 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -910,7 +910,7 @@ object Unidoc {
       .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/catalyst")))
       .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/execution")))
       .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/internal")))
-      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/hive/test")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/hive")))
       .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/catalog/v2/utils")))
       .map(_.filterNot(_.getCanonicalPath.contains("org/apache/hive")))
       .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/v2/avro")))
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionStateBuilder.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionStateBuilder.scala
index 454eef3..3a6b4d4 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionStateBuilder.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionStateBuilder.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.hive
 
 import java.net.URI
 
-import org.apache.spark.annotation.Unstable
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.analysis.{Analyzer, ResolveSessionCatalog}
 import org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener
@@ -38,7 +37,6 @@ import org.apache.spark.sql.internal.{BaseSessionStateBuilder, SessionResourceLo
 /**
  * Builder that produces a Hive-aware `SessionState`.
  */
-@Unstable
 class HiveSessionStateBuilder(
     session: SparkSession,
     parentState: Option[SessionState],
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/package.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/package.scala
index 14276c9..f89a8a5 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/package.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/package.scala
@@ -23,5 +23,8 @@ package org.apache.spark.sql
  *  - Using HiveQL to express queries.
  *  - Reading metadata from the Hive Metastore using HiveSerDes.
  *  - Hive UDFs, UDAs, UDTs
+ *
+ * Note that this is a private package. All classes in this package are considered an internal API
+ * to Spark and are subject to change between minor releases.
  */
 package object hive


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org