Posted to commits@spark.apache.org by ma...@apache.org on 2022/07/01 16:21:02 UTC

[spark] branch master updated: [SPARK-39629][SQL] Support v2 SHOW FUNCTIONS

This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f661ecce64c [SPARK-39629][SQL] Support v2 SHOW FUNCTIONS
f661ecce64c is described below

commit f661ecce64c3356553cda2feb038dfea5cf18d14
Author: Max Gekk <ma...@gmail.com>
AuthorDate: Fri Jul 1 19:20:40 2022 +0300

    [SPARK-39629][SQL] Support v2 SHOW FUNCTIONS
    
    ### What changes were proposed in this pull request?
    In this PR, I propose to implement the v2 `SHOW FUNCTIONS` command as `ShowFunctionsExec`, and to move the v1 tests to `ShowFunctionsSuiteBase`, the base trait shared by the v1 and v2 test suites.
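    
    For illustration, a minimal sketch of the behavior this enables, assuming a v2 catalog `fun_cat` backed by the test-only `InMemoryCatalog` (the catalog name and the runtime `spark.conf.set` setup are illustrative; the test suites register the catalog via `SparkConf` instead):
    ```
    // Register a v2 catalog (test-only InMemoryCatalog; hypothetical setup).
    spark.conf.set(
      "spark.sql.catalog.fun_cat",
      "org.apache.spark.sql.connector.catalog.InMemoryCatalog")
    spark.sql("CREATE NAMESPACE fun_cat.ns")
    // Previously this failed with "Catalog fun_cat does not support functions";
    // now it is planned as ShowFunctionsExec and lists functions by scope.
    spark.sql("SHOW USER FUNCTIONS IN fun_cat.ns").show()
    spark.sql("SHOW SYSTEM FUNCTIONS IN fun_cat.ns").show()
    ```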
    
    ### Why are the changes needed?
    To reach feature parity with datasource V1.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes. `SHOW FUNCTIONS` now runs against v2 catalogs instead of failing with an error like `Catalog ... does not support functions`.
    
    ### How was this patch tested?
    By running the unified tests for the v2 implementation:
    ```
    $ build/sbt -Phive-2.3 -Phive-thriftserver "test:testOnly *ShowFunctionsSuite"
    $ build/sbt "test:testOnly *ShowTempFunctionsSuite"
    ```
    
    Closes #37036 from MaxGekk/show-functions-v2-2.
    
    Authored-by: Max Gekk <ma...@gmail.com>
    Signed-off-by: Max Gekk <ma...@gmail.com>
---
 .../sql/catalyst/catalog/SessionCatalog.scala      |   7 ++
 .../catalyst/analysis/ResolveSessionCatalog.scala  |   9 +-
 .../datasources/v2/DataSourceV2Strategy.scala      |   9 ++
 .../datasources/v2/ShowFunctionsExec.scala         |  67 +++++++++++
 .../execution/command/ShowFunctionsSuiteBase.scala | 122 ++++++++++++++++++++-
 .../execution/command/v1/ShowFunctionsSuite.scala  |  56 +---------
 .../execution/command/v2/CommandSuiteBase.scala    |  18 +--
 .../execution/command/v2/ShowFunctionsSuite.scala  |  29 +++--
 .../execution/command/ShowFunctionsSuite.scala     |  36 ------
 9 files changed, 227 insertions(+), 126 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 8128babacc2..16d89c9b2e4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -1805,6 +1805,13 @@ class SessionCatalog(
     }.distinct
   }
 
+  /**
+   * List all temporary functions.
+   */
+  def listTemporaryFunctions(): Seq[FunctionIdentifier] = {
+    (functionRegistry.listFunction() ++ tableFunctionRegistry.listFunction())
+      .filter(isTemporaryFunction)
+  }
 
   // -----------------
   // | Other methods |
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
index e629b7129b2..3e39863f5bb 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
@@ -400,13 +400,8 @@ class ResolveSessionCatalog(val catalogManager: CatalogManager)
         throw QueryCompilationErrors.missingCatalogAbilityError(catalog, "functions")
       }
 
-    case ShowFunctions(ns: ResolvedNamespace, userScope, systemScope, pattern, output) =>
-      ns match {
-        case DatabaseInSessionCatalog(db) =>
-          ShowFunctionsCommand(db, pattern, userScope, systemScope, output)
-        case _ =>
-          throw QueryCompilationErrors.missingCatalogAbilityError(ns.catalog, "functions")
-      }
+    case ShowFunctions(DatabaseInSessionCatalog(db), userScope, systemScope, pattern, output) =>
+      ShowFunctionsCommand(db, pattern, userScope, systemScope, output)
 
     case DropFunction(ResolvedPersistentFunc(catalog, identifier, _), ifExists) =>
       if (isSessionCatalog(catalog)) {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
index c0fa3e2ba65..b678effbea9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
@@ -485,6 +485,15 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with Predicat
           s"DropIndex is not supported in this table ${table.name}.")
       }
 
+    case ShowFunctions(ResolvedNamespace(catalog, ns), userScope, systemScope, pattern, output) =>
+      ShowFunctionsExec(
+        output,
+        catalog.asFunctionCatalog,
+        ns,
+        userScope,
+        systemScope,
+        pattern) :: Nil
+
     case _ => Nil
   }
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowFunctionsExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowFunctionsExec.scala
new file mode 100644
index 00000000000..5ca0b01d42f
--- /dev/null
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowFunctionsExec.scala
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.datasources.v2
+
+import scala.collection.mutable.ArrayBuffer
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TableFunctionRegistry}
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.util.StringUtils
+import org.apache.spark.sql.connector.catalog.FunctionCatalog
+import org.apache.spark.sql.execution.LeafExecNode
+
+/**
+ * Physical plan node for showing functions.
+ */
+case class ShowFunctionsExec(
+    output: Seq[Attribute],
+    catalog: FunctionCatalog,
+    namespace: Seq[String],
+    userScope: Boolean,
+    systemScope: Boolean,
+    pattern: Option[String]) extends V2CommandExec with LeafExecNode {
+
+  override protected def run(): Seq[InternalRow] = {
+    val rows = new ArrayBuffer[InternalRow]()
+    val systemFunctions = if (systemScope) {
+      // All built-in functions
+      (FunctionRegistry.functionSet ++ TableFunctionRegistry.functionSet).map(_.unquotedString) ++
+      // Hard-code "<>", "!=", "between", "case", and "||" for now,
+      // as there are no corresponding functions in the registries.
+      // These operators are system functions and are shown
+      // only when systemScope=true.
+      FunctionRegistry.builtinOperators.keys.toSeq
+    } else Seq.empty
+    val userFunctions = if (userScope) {
+      // List all temporary functions in the session catalog
+      session.sessionState.catalog.listTemporaryFunctions().map(_.unquotedString) ++
+      // List all functions registered in the given namespace of the catalog
+      catalog.listFunctions(namespace.toArray).map(_.name()).toSeq
+    } else Seq.empty
+    val allFunctions = StringUtils.filterPattern(
+      userFunctions ++ systemFunctions,
+      pattern.getOrElse("*")).distinct.sorted
+
+    allFunctions.foreach { fn =>
+      rows += toCatalystRow(fn)
+    }
+
+    rows.toSeq
+  }
+}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowFunctionsSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowFunctionsSuiteBase.scala
index 130914d35d9..415bc813792 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowFunctionsSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowFunctionsSuiteBase.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.command
 
 import java.util.Locale
 
-import org.apache.spark.sql.QueryTest
+import org.apache.spark.sql.{QueryTest, Row}
 import org.apache.spark.util.Utils
 
 /**
@@ -38,9 +38,11 @@ import org.apache.spark.util.Utils
 trait ShowFunctionsSuiteBase extends QueryTest with DDLCommandTestUtils {
   override val command = "SHOW FUNCTIONS"
 
+  protected def funCatalog: String = catalog
   protected def createFunction(name: String): Unit = {}
   protected def dropFunction(name: String): Unit = {}
   protected def showFun(ns: String, name: String): String = s"$ns.$name".toLowerCase(Locale.ROOT)
+  protected def isTempFunctions(): Boolean = false
 
   /**
    * Drops function `funName` after calling `f`.
@@ -51,7 +53,7 @@ trait ShowFunctionsSuiteBase extends QueryTest with DDLCommandTestUtils {
     }
   }
 
-  protected def withNamespaceAndFuns(ns: String, funNames: Seq[String], cat: String = catalog)
+  protected def withNamespaceAndFuns(ns: String, funNames: Seq[String], cat: String = funCatalog)
       (f: (String, Seq[String]) => Unit): Unit = {
     val nsCat = s"$cat.$ns"
     withNamespace(nsCat) {
@@ -63,10 +65,124 @@ trait ShowFunctionsSuiteBase extends QueryTest with DDLCommandTestUtils {
     }
   }
 
-  protected def withNamespaceAndFun(ns: String, funName: String, cat: String = catalog)
+  protected def withNamespaceAndFun(ns: String, funName: String, cat: String = funCatalog)
       (f: (String, String) => Unit): Unit = {
     withNamespaceAndFuns(ns, Seq(funName), cat) { case (ns, Seq(name)) =>
       f(ns, name)
     }
   }
+
+  test("show a function") {
+    withNamespaceAndFun("ns", "iiilog") { (ns, f) =>
+      val totalFuns = sql(s"SHOW FUNCTIONS IN $ns").count()
+      createFunction(f)
+      assert(sql(s"SHOW FUNCTIONS IN $ns").count() - totalFuns === 1)
+      assert(!sql(s"SHOW FUNCTIONS IN $ns").filter("contains(function, 'iiilog')").isEmpty)
+    }
+  }
+
+  test("show a function in the USER name space") {
+    withNamespaceAndFun("ns", "logiii") { (ns, f) =>
+      assert(sql(s"SHOW USER FUNCTIONS IN $ns").count() === 0)
+      createFunction(f)
+      QueryTest.checkAnswer(
+        sql(s"SHOW USER FUNCTIONS IN $ns"),
+        Row(showFun("ns", "logiii")) :: Nil)
+    }
+  }
+
+  test("show a temporary function as an USER function") {
+    withNamespaceAndFun("ns", "poggi") { (ns, f0) =>
+      createFunction(f0)
+      val f1 = "temp_test_fun"
+      withUserDefinedFunction(f1 -> true) {
+        spark.udf.register(f1, (arg1: Int, arg2: String) => arg2 + arg1)
+        QueryTest.checkAnswer(
+          sql(s"SHOW USER FUNCTIONS IN $ns"),
+          Row(showFun("ns", "poggi")) :: Row(f1) :: Nil)
+        QueryTest.checkAnswer(
+          sql(s"SHOW ALL FUNCTIONS IN $ns").filter(s"function='$f1'"),
+          Row(f1) :: Nil)
+        QueryTest.checkAnswer(
+          sql(s"SHOW SYSTEM FUNCTIONS IN $ns").filter(s"function='$f1'"),
+          Nil)
+      }
+    }
+  }
+
+  test("show functions in the SYSTEM name space") {
+    withNamespaceAndFun("ns", "date_addi") { (ns, f) =>
+      val systemFuns = sql(s"SHOW SYSTEM FUNCTIONS IN $ns")
+      assert(systemFuns.count() > 0)
+      createFunction(f)
+      assert(sql(s"SHOW SYSTEM FUNCTIONS IN $ns").count() === systemFuns.count())
+      // Built-in operators
+      assert(!systemFuns.filter("function='case'").isEmpty)
+      // Built-in functions
+      assert(!systemFuns.filter("function='substring'").isEmpty)
+    }
+  }
+
+  test("show functions among both user and system defined functions") {
+    withNamespaceAndFun("ns", "current_datei") { (ns, f) =>
+      val allFuns = sql(s"SHOW ALL FUNCTIONS IN $ns").collect()
+      assert(allFuns.nonEmpty)
+      createFunction(f)
+      QueryTest.checkAnswer(
+        sql(s"SHOW ALL FUNCTIONS IN $ns"),
+        allFuns :+ Row(showFun("ns", "current_datei")))
+    }
+  }
+
+  test("show functions matched to the wildcard pattern") {
+    val testFuns = Seq("crc32i", "crc16j", "date1900", "Date1")
+    withNamespaceAndFuns("ns", testFuns) { (ns, funs) =>
+      assert(sql(s"SHOW USER FUNCTIONS IN $ns").isEmpty)
+      funs.foreach(createFunction)
+      QueryTest.checkAnswer(
+        sql(s"SHOW USER FUNCTIONS IN $ns LIKE '*'"),
+        testFuns.map(testFun => Row(showFun("ns", testFun))))
+      QueryTest.checkAnswer(
+        sql(s"SHOW USER FUNCTIONS IN $ns LIKE '*rc*'"),
+        Seq("crc32i", "crc16j").map(testFun => Row(showFun("ns", testFun))))
+    }
+  }
+
+  test("show a function by its string name") {
+    assume(!isTempFunctions())
+    val testFuns = Seq("crc32i", "crc16j")
+    withNamespaceAndFuns("ns", testFuns) { (ns, funs) =>
+      assert(sql(s"SHOW USER FUNCTIONS IN $ns").isEmpty)
+      funs.foreach(createFunction)
+      QueryTest.checkAnswer(
+        sql(s"SHOW USER FUNCTIONS IN $ns 'crc32i'"),
+        Row(showFun("ns", "crc32i")) :: Nil)
+    }
+  }
+
+  test("show functions matched to the '|' pattern") {
+    assume(!isTempFunctions())
+    val testFuns = Seq("crc32i", "crc16j", "date1900", "Date1")
+    withNamespaceAndFuns("ns", testFuns) { (ns, funs) =>
+      assert(sql(s"SHOW USER FUNCTIONS IN $ns").isEmpty)
+      funs.foreach(createFunction)
+      QueryTest.checkAnswer(
+        sql(s"SHOW USER FUNCTIONS IN $ns LIKE 'crc32i|date1900'"),
+        Seq("crc32i", "date1900").map(testFun => Row(showFun("ns", testFun))))
+      QueryTest.checkAnswer(
+        sql(s"SHOW USER FUNCTIONS IN $ns LIKE 'crc32i|date*'"),
+        Seq("crc32i", "date1900", "Date1").map(testFun => Row(showFun("ns", testFun))))
+    }
+  }
+
+  test("show a function by its id") {
+    assume(!isTempFunctions())
+    withNamespaceAndFun("ns", "crc32i") { (ns, fun) =>
+      assert(sql(s"SHOW USER FUNCTIONS IN $ns").isEmpty)
+      createFunction(fun)
+      QueryTest.checkAnswer(
+        sql(s"SHOW USER FUNCTIONS $fun"),
+        Row(showFun("ns", "crc32i")) :: Nil)
+    }
+  }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowFunctionsSuite.scala
index f14b550688b..f7ea4e75875 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowFunctionsSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command.v1
 
 import java.util.Locale
 
-import org.apache.spark.sql.Row
 import org.apache.spark.sql.execution.command
 
 /**
@@ -33,65 +32,14 @@ import org.apache.spark.sql.execution.command
  *     `org.apache.spark.sql.hive.execution.command.ShowFunctionsSuite`
  */
 trait ShowFunctionsSuiteBase extends command.ShowFunctionsSuiteBase
-  with command.TestsV1AndV2Commands {
-
-  test("show a function") {
-    withNamespaceAndFun("ns", "iiilog") { (ns, f) =>
-      val totalFuns = sql(s"SHOW FUNCTIONS IN $ns").count()
-      createFunction(f)
-      assert(sql(s"SHOW FUNCTIONS IN $ns").count() - totalFuns === 1)
-      assert(!sql(s"SHOW FUNCTIONS IN $ns").filter("contains(function, 'iiilog')").isEmpty)
-    }
-  }
-
-  test("show a function in the USER name space") {
-    withNamespaceAndFun("ns", "logiii") { (ns, f) =>
-      assert(sql(s"SHOW USER FUNCTIONS IN $ns").count() === 0)
-      createFunction(f)
-      checkAnswer(sql(s"SHOW USER FUNCTIONS IN $ns"), Row(showFun("ns", "logiii")))
-    }
-  }
-
-  test("show functions in the SYSTEM name space") {
-    withNamespaceAndFun("ns", "date_addi") { (ns, f) =>
-      val systemFuns = sql(s"SHOW SYSTEM FUNCTIONS IN $ns").count()
-      assert(systemFuns > 0)
-      createFunction(f)
-      assert(sql(s"SHOW SYSTEM FUNCTIONS IN $ns").count() === systemFuns)
-    }
-  }
-
-  test("show functions among both user and system defined functions") {
-    withNamespaceAndFun("ns", "current_datei") { (ns, f) =>
-      val allFuns = sql(s"SHOW ALL FUNCTIONS IN $ns").collect()
-      assert(allFuns.nonEmpty)
-      createFunction(f)
-      checkAnswer(
-        sql(s"SHOW ALL FUNCTIONS IN $ns"),
-        allFuns :+ Row(showFun("ns", "current_datei")))
-    }
-  }
-
-  test("show functions matched to the wildcard pattern") {
-    val testFuns = Seq("crc32i", "crc16j", "date1900", "Date1")
-    withNamespaceAndFuns("ns", testFuns) { (ns, funs) =>
-      assert(sql(s"SHOW USER FUNCTIONS IN $ns").isEmpty)
-      funs.foreach(createFunction)
-      checkAnswer(
-        sql(s"SHOW USER FUNCTIONS IN $ns LIKE '*'"),
-        testFuns.map(testFun => Row(showFun("ns", testFun))))
-      checkAnswer(
-        sql(s"SHOW USER FUNCTIONS IN $ns LIKE '*rc*'"),
-        Seq("crc32i", "crc16j").map(testFun => Row(showFun("ns", testFun))))
-    }
-  }
-}
+  with command.TestsV1AndV2Commands
 
 /**
  * The class contains tests for the `SHOW FUNCTIONS` command to check temporary functions.
  */
 class ShowTempFunctionsSuite extends ShowFunctionsSuiteBase with CommandSuiteBase {
   override def commandVersion: String = super[ShowFunctionsSuiteBase].commandVersion
+  override protected def isTempFunctions(): Boolean = true
 
   override protected def createFunction(name: String): Unit = {
     spark.udf.register(name, (arg1: Int, arg2: String) => arg2 + arg1)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/CommandSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/CommandSuiteBase.scala
index ac38a589ffd..15d56050c23 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/CommandSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/CommandSuiteBase.scala
@@ -21,9 +21,7 @@ import org.apache.spark.SparkConf
 import org.apache.spark.sql.catalyst.analysis.ResolvePartitionSpec
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.connector.catalog.{CatalogV2Implicits, Identifier, InMemoryCatalog, InMemoryPartitionTable, InMemoryPartitionTableCatalog, InMemoryTableCatalog}
-import org.apache.spark.sql.connector.catalog.functions.UnboundFunction
 import org.apache.spark.sql.test.SharedSparkSession
-import org.apache.spark.util.Utils
 
 /**
  * The trait contains settings and utility functions. It can be mixed to the test suites for
@@ -35,14 +33,13 @@ trait CommandSuiteBase extends SharedSparkSession {
   def catalogVersion: String = "V2" // The catalog version is added to test names
   def commandVersion: String = "V2" // The command version is added to test names
   def catalog: String = "test_catalog" // The default V2 catalog for testing
-  def funCatalog: String = s"fun_$catalog"
   def defaultUsing: String = "USING _" // The clause is used in creating v2 tables under testing
 
   // V2 catalogs created and used especially for testing
   override def sparkConf: SparkConf = super.sparkConf
     .set(s"spark.sql.catalog.$catalog", classOf[InMemoryPartitionTableCatalog].getName)
     .set(s"spark.sql.catalog.non_part_$catalog", classOf[InMemoryTableCatalog].getName)
-    .set(s"spark.sql.catalog.$funCatalog", classOf[InMemoryCatalog].getName)
+    .set(s"spark.sql.catalog.fun_$catalog", classOf[InMemoryCatalog].getName)
 
   def checkLocation(
       t: String,
@@ -65,17 +62,4 @@ trait CommandSuiteBase extends SharedSparkSession {
     assert(partMetadata.containsKey("location"))
     assert(partMetadata.get("location") === expected)
   }
-
-
-  def withFun(ident: Identifier, fn: UnboundFunction)(f: => Unit): Unit = {
-    val cat = spark.sessionState
-      .catalogManager
-      .catalog(funCatalog)
-      .asInstanceOf[InMemoryCatalog]
-
-    cat.createFunction(ident, fn)
-    Utils.tryWithSafeFinally(f) {
-      cat.dropFunction(ident)
-    }
-  }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowFunctionsSuite.scala
index 88c16a5552a..b3f791abdad 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowFunctionsSuite.scala
@@ -20,21 +20,32 @@ package org.apache.spark.sql.execution.command.v2
 import test.org.apache.spark.sql.connector.catalog.functions.JavaStrLen
 import test.org.apache.spark.sql.connector.catalog.functions.JavaStrLen.JavaStrLenNoImpl
 
-import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.connector.catalog.Identifier
+import org.apache.spark.sql.connector.catalog.{Identifier, InMemoryCatalog}
+import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.MultipartIdentifierHelper
 import org.apache.spark.sql.execution.command
 
 /**
  * The class contains tests for the `SHOW FUNCTIONS` command to check V2 table catalogs.
  */
 class ShowFunctionsSuite extends command.ShowFunctionsSuiteBase with CommandSuiteBase {
+  override protected def funCatalog: String = s"fun_$catalog"
+  override protected def showFun(ns: String, name: String): String = name
 
-  test("only support session catalog") {
-    withFun(Identifier.of(Array.empty, "abc"), new JavaStrLen(new JavaStrLenNoImpl)) {
-      val e = intercept[AnalysisException] {
-        sql(s"SHOW FUNCTIONS LIKE $funCatalog.abc")
-      }
-      assert(e.getMessage === s"Catalog $funCatalog does not support functions")
-    }
+  private def getFunCatalog(): InMemoryCatalog = {
+    spark.sessionState.catalogManager.catalog(funCatalog).asInstanceOf[InMemoryCatalog]
+  }
+
+  private def funNameToId(name: String): Identifier = {
+    val parts = name.split('.')
+    assert(parts.head == funCatalog, s"${parts.head} is the wrong catalog. Expected: $funCatalog.")
+    new MultipartIdentifierHelper(parts.tail).asIdentifier
+  }
+
+  override protected def createFunction(name: String): Unit = {
+    getFunCatalog().createFunction(funNameToId(name), new JavaStrLen(new JavaStrLenNoImpl))
+  }
+
+  override protected def dropFunction(name: String): Unit = {
+    getFunCatalog().dropFunction(funNameToId(name))
   }
 }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowFunctionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowFunctionsSuite.scala
index 830949dbcf7..d471669f25c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowFunctionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowFunctionsSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.hive.execution.command
 
 import java.util.Locale
 
-import org.apache.spark.sql.Row
 import org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
 import org.apache.spark.sql.execution.command.v1
 import org.apache.spark.sql.hive.execution.UDFToListInt
@@ -38,39 +37,4 @@ class ShowFunctionsSuite extends v1.ShowFunctionsSuiteBase with CommandSuiteBase
   override protected def dropFunction(name: String): Unit = {
     sql(s"DROP FUNCTION IF EXISTS $name")
   }
-
-  test("show a function by its string name") {
-    val testFuns = Seq("crc32i", "crc16j")
-    withNamespaceAndFuns("ns", testFuns) { (ns, funs) =>
-      assert(sql(s"SHOW USER FUNCTIONS IN $ns").isEmpty)
-      funs.foreach(createFunction)
-      checkAnswer(
-        sql(s"SHOW USER FUNCTIONS IN $ns 'crc32i'"),
-        Row(showFun("ns", "crc32i")))
-    }
-  }
-
-  test("show functions matched to the '|' pattern") {
-    val testFuns = Seq("crc32i", "crc16j", "date1900", "Date1")
-    withNamespaceAndFuns("ns", testFuns) { (ns, funs) =>
-      assert(sql(s"SHOW USER FUNCTIONS IN $ns").isEmpty)
-      funs.foreach(createFunction)
-      checkAnswer(
-        sql(s"SHOW USER FUNCTIONS IN $ns LIKE 'crc32i|date1900'"),
-        Seq("crc32i", "date1900").map(testFun => Row(showFun("ns", testFun))))
-      checkAnswer(
-        sql(s"SHOW USER FUNCTIONS IN $ns LIKE 'crc32i|date*'"),
-        Seq("crc32i", "date1900", "Date1").map(testFun => Row(showFun("ns", testFun))))
-    }
-  }
-
-  test("show a function by its id") {
-    withNamespaceAndFun("ns", "crc32i") { (ns, fun) =>
-      assert(sql(s"SHOW USER FUNCTIONS IN $ns").isEmpty)
-      createFunction(fun)
-      checkAnswer(
-        sql(s"SHOW USER FUNCTIONS $fun"),
-        Row(showFun("ns", "crc32i")))
-    }
-  }
 }

