Posted to commits@spark.apache.org by rx...@apache.org on 2016/04/26 01:21:00 UTC

spark git commit: [SPARK-14888][SQL] UnresolvedFunction should use FunctionIdentifier

Repository: spark
Updated Branches:
  refs/heads/master 34336b625 -> f36c9c837


[SPARK-14888][SQL] UnresolvedFunction should use FunctionIdentifier

## What changes were proposed in this pull request?
This patch changes UnresolvedFunction and UnresolvedGenerator to use a FunctionIdentifier rather than a plain String for the function name. It also changes SessionCatalog.lookupFunction to accept a FunctionIdentifier instead of a String.
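
For illustration, a qualified call such as db1.myFunc(a) is now represented by an identifier that keeps the optional database separate from the function name (a minimal sketch using the classes touched in this patch; the names are made up):

    import org.apache.spark.sql.catalyst.FunctionIdentifier
    import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, UnresolvedFunction}

    // Before this patch the name was a plain String ("db1.myFunc"), which
    // downstream code had to re-split. Now database and name travel separately.
    val fn = UnresolvedFunction(
      FunctionIdentifier("myFunc", Some("db1")),
      Seq(UnresolvedAttribute("a")),
      isDistinct = false)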

## How was this patch tested?
Updated related unit tests.

Author: Reynold Xin <rx...@databricks.com>

Closes #12659 from rxin/SPARK-14888.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f36c9c83
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f36c9c83
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f36c9c83

Branch: refs/heads/master
Commit: f36c9c83798877256efa1447a6b9be5aa47a7e87
Parents: 34336b6
Author: Reynold Xin <rx...@databricks.com>
Authored: Mon Apr 25 16:20:57 2016 -0700
Committer: Reynold Xin <rx...@databricks.com>
Committed: Mon Apr 25 16:20:57 2016 -0700

----------------------------------------------------------------------
 .../spark/sql/catalyst/analysis/Analyzer.scala  |  4 +-
 .../catalyst/analysis/FunctionRegistry.scala    |  2 +-
 .../sql/catalyst/analysis/unresolved.scala      | 22 ++++--
 .../sql/catalyst/catalog/SessionCatalog.scala   | 76 ++++++++++----------
 .../apache/spark/sql/catalyst/identifiers.scala | 14 +++-
 .../spark/sql/catalyst/parser/AstBuilder.scala  | 19 +++--
 .../catalyst/catalog/SessionCatalogSuite.scala  | 18 ++---
 .../catalyst/parser/ExpressionParserSuite.scala |  4 +-
 .../sql/catalyst/parser/PlanParserSuite.scala   |  6 +-
 .../spark/sql/execution/SparkSqlParser.scala    | 25 +++----
 .../spark/sql/internal/SessionState.scala       |  3 +-
 .../hive/execution/HiveCompatibilitySuite.scala |  2 +-
 .../spark/sql/hive/HiveSessionCatalog.scala     | 14 ++--
 13 files changed, 117 insertions(+), 92 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f36c9c83/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 50957e8..e37d976 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -838,9 +838,9 @@ class Analyzer(
                     s"its class is ${other.getClass.getCanonicalName}, which is not a generator.")
               }
             }
-          case u @ UnresolvedFunction(name, children, isDistinct) =>
+          case u @ UnresolvedFunction(funcId, children, isDistinct) =>
             withPosition(u) {
-              catalog.lookupFunction(name, children) match {
+              catalog.lookupFunction(funcId, children) match {
                 // DISTINCT is not meaningful for a Max or a Min.
                 case max: Max if isDistinct =>
                   AggregateExpression(max, Complete, isDistinct = false)

http://git-wip-us.apache.org/repos/asf/spark/blob/f36c9c83/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index a444300..dd35770 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -387,7 +387,7 @@ object FunctionRegistry {
   }
 
   /** See usage above. */
-  def expression[T <: Expression](name: String)
+  private def expression[T <: Expression](name: String)
       (implicit tag: ClassTag[T]): (String, (ExpressionInfo, FunctionBuilder)) = {
 
     // See if we can find a constructor that accepts Seq[Expression]

http://git-wip-us.apache.org/repos/asf/spark/blob/f36c9c83/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
index e83008e..f82b63a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
@@ -18,7 +18,8 @@
 package org.apache.spark.sql.catalyst.analysis
 
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.{errors, InternalRow, TableIdentifier}
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, InternalRow, TableIdentifier}
+import org.apache.spark.sql.catalyst.errors.TreeNodeException
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, CodegenFallback, ExprCode}
 import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan}
@@ -30,8 +31,8 @@ import org.apache.spark.sql.types.{DataType, StructType}
  * Thrown when an invalid attempt is made to access a property of a tree that has yet to be fully
  * resolved.
  */
-class UnresolvedException[TreeType <: TreeNode[_]](tree: TreeType, function: String) extends
-  errors.TreeNodeException(tree, s"Invalid call to $function on unresolved object", null)
+class UnresolvedException[TreeType <: TreeNode[_]](tree: TreeType, function: String)
+  extends TreeNodeException(tree, s"Invalid call to $function on unresolved object", null)
 
 /**
  * Holds the name of a relation that has yet to be looked up in a catalog.
@@ -138,7 +139,8 @@ object UnresolvedAttribute {
  * the [[org.apache.spark.sql.catalyst.plans.logical.Generate]] operator.
  * The analyzer will resolve this generator.
  */
-case class UnresolvedGenerator(name: String, children: Seq[Expression]) extends Generator {
+case class UnresolvedGenerator(name: FunctionIdentifier, children: Seq[Expression])
+  extends Generator {
 
   override def elementTypes: Seq[(DataType, Boolean, String)] =
     throw new UnresolvedException(this, "elementTypes")
@@ -147,7 +149,7 @@ case class UnresolvedGenerator(name: String, children: Seq[Expression]) extends
   override def nullable: Boolean = throw new UnresolvedException(this, "nullable")
   override lazy val resolved = false
 
-  override def prettyName: String = name
+  override def prettyName: String = name.unquotedString
   override def toString: String = s"'$name(${children.mkString(", ")})"
 
   override def eval(input: InternalRow = null): TraversableOnce[InternalRow] =
@@ -161,7 +163,7 @@ case class UnresolvedGenerator(name: String, children: Seq[Expression]) extends
 }
 
 case class UnresolvedFunction(
-    name: String,
+    name: FunctionIdentifier,
     children: Seq[Expression],
     isDistinct: Boolean)
   extends Expression with Unevaluable {
@@ -171,10 +173,16 @@ case class UnresolvedFunction(
   override def nullable: Boolean = throw new UnresolvedException(this, "nullable")
   override lazy val resolved = false
 
-  override def prettyName: String = name
+  override def prettyName: String = name.unquotedString
   override def toString: String = s"'$name(${children.mkString(", ")})"
 }
 
+object UnresolvedFunction {
+  def apply(name: String, children: Seq[Expression], isDistinct: Boolean): UnresolvedFunction = {
+    UnresolvedFunction(FunctionIdentifier(name, None), children, isDistinct)
+  }
+}
+
 /**
  * Represents all of the input attributes to a given relational operator, for example in
  * "SELECT * FROM ...". A [[Star]] gets automatically expanded during analysis.
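
A companion apply overload (added above) keeps existing call sites that pass a bare String compiling; both forms below build the same node, with the String wrapped in an unqualified FunctionIdentifier (a small sketch):

    import org.apache.spark.sql.catalyst.FunctionIdentifier
    import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, UnresolvedFunction}

    val args = Seq(UnresolvedAttribute("a"))
    // String overload: delegates to FunctionIdentifier(name, None).
    val f1 = UnresolvedFunction("upper", args, isDistinct = false)
    // Equivalent explicit form.
    val f2 = UnresolvedFunction(FunctionIdentifier("upper", None), args, isDistinct = false)
    assert(f1 == f2)  // case-class equality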

http://git-wip-us.apache.org/repos/asf/spark/blob/f36c9c83/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 67b1752..b36a76a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -59,13 +59,14 @@ class SessionCatalog(
     this(externalCatalog, new SimpleFunctionRegistry, new SimpleCatalystConf(true))
   }
 
-  protected[this] val tempTables = new mutable.HashMap[String, LogicalPlan]
+  /** List of temporary tables, mapping from table name to their logical plan. */
+  protected val tempTables = new mutable.HashMap[String, LogicalPlan]
 
   // Note: we track current database here because certain operations do not explicitly
   // specify the database (e.g. DROP TABLE my_table). In these cases we must first
   // check whether the temporary table or function exists, then, if not, operate on
   // the corresponding item in the current database.
-  protected[this] var currentDb = {
+  protected var currentDb = {
     val defaultName = "default"
     val defaultDbDefinition = CatalogDatabase(defaultName, "default database", "", Map())
     // Initialize default database if it doesn't already exist
@@ -118,7 +119,7 @@ class SessionCatalog(
 
   def setCurrentDatabase(db: String): Unit = {
     if (!databaseExists(db)) {
-      throw new AnalysisException(s"cannot set current database to non-existent '$db'")
+      throw new AnalysisException(s"Database '$db' does not exist.")
     }
     currentDb = db
   }
@@ -593,9 +594,6 @@ class SessionCatalog(
   /**
    * Drop a temporary function.
    */
-  // TODO: The reason that we distinguish dropFunction and dropTempFunction is that
-  // Hive has DROP FUNCTION and DROP TEMPORARY FUNCTION. We may want to consolidate
-  // dropFunction and dropTempFunction.
   def dropTempFunction(name: String, ignoreIfNotExists: Boolean): Unit = {
     if (!functionRegistry.dropFunction(name) && !ignoreIfNotExists) {
       throw new AnalysisException(
@@ -622,40 +620,44 @@ class SessionCatalog(
    * based on the function class and put the builder into the FunctionRegistry.
    * The name of this function in the FunctionRegistry will be `databaseName.functionName`.
    */
-  def lookupFunction(name: String, children: Seq[Expression]): Expression = {
-    // TODO: Right now, the name can be qualified or not qualified.
-    // It will be better to get a FunctionIdentifier.
-    // TODO: Right now, we assume that name is not qualified!
-    val qualifiedName = FunctionIdentifier(name, Some(currentDb)).unquotedString
-    if (functionRegistry.functionExists(name)) {
+  def lookupFunction(name: FunctionIdentifier, children: Seq[Expression]): Expression = {
+    // Note: the implementation of this function is a little bit convoluted.
+    // We probably shouldn't use a single FunctionRegistry to register all three kinds of functions
+    // (built-in, temp, and external).
+    if (name.database.isEmpty && functionRegistry.functionExists(name.funcName)) {
       // This function has been already loaded into the function registry.
-      functionRegistry.lookupFunction(name, children)
-    } else if (functionRegistry.functionExists(qualifiedName)) {
+      return functionRegistry.lookupFunction(name.funcName, children)
+    }
+
+    // If the name itself is not qualified, add the current database to it.
+    val qualifiedName = if (name.database.isEmpty) name.copy(database = Some(currentDb)) else name
+
+    if (functionRegistry.functionExists(qualifiedName.unquotedString)) {
       // This function has been already loaded into the function registry.
       // Unlike the above block, we find this function by using the qualified name.
-      functionRegistry.lookupFunction(qualifiedName, children)
-    } else {
-      // The function has not been loaded to the function registry, which means
-      // that the function is a permanent function (if it actually has been registered
-      // in the metastore). We need to first put the function in the FunctionRegistry.
-      val catalogFunction = try {
-        externalCatalog.getFunction(currentDb, name)
-      } catch {
-        case e: AnalysisException => failFunctionLookup(name)
-        case e: NoSuchFunctionException => failFunctionLookup(name)
-      }
-      loadFunctionResources(catalogFunction.resources)
-      // Please note that qualifiedName is provided by the user. However,
-      // catalogFunction.identifier.unquotedString is returned by the underlying
-      // catalog. So, it is possible that qualifiedName is not exactly the same as
-      // catalogFunction.identifier.unquotedString (difference is on case-sensitivity).
-      // At here, we preserve the input from the user.
-      val info = new ExpressionInfo(catalogFunction.className, qualifiedName)
-      val builder = makeFunctionBuilder(qualifiedName, catalogFunction.className)
-      createTempFunction(qualifiedName, info, builder, ignoreIfExists = false)
-      // Now, we need to create the Expression.
-      functionRegistry.lookupFunction(qualifiedName, children)
+      return functionRegistry.lookupFunction(qualifiedName.unquotedString, children)
+    }
+
+    // The function has not been loaded to the function registry, which means
+    // that the function is a permanent function (if it actually has been registered
+    // in the metastore). We need to first put the function in the FunctionRegistry.
+    val catalogFunction = try {
+      externalCatalog.getFunction(currentDb, name.funcName)
+    } catch {
+      case e: AnalysisException => failFunctionLookup(name.funcName)
+      case e: NoSuchFunctionException => failFunctionLookup(name.funcName)
     }
+    loadFunctionResources(catalogFunction.resources)
+    // Please note that qualifiedName is provided by the user. However,
+    // catalogFunction.identifier.unquotedString is returned by the underlying
+    // catalog. So, it is possible that qualifiedName is not exactly the same as
+    // catalogFunction.identifier.unquotedString (difference is on case-sensitivity).
+    // At here, we preserve the input from the user.
+    val info = new ExpressionInfo(catalogFunction.className, qualifiedName.unquotedString)
+    val builder = makeFunctionBuilder(qualifiedName.unquotedString, catalogFunction.className)
+    createTempFunction(qualifiedName.unquotedString, info, builder, ignoreIfExists = false)
+    // Now, we need to create the Expression.
+    return functionRegistry.lookupFunction(qualifiedName.unquotedString, children)
   }
 
   /**
@@ -671,8 +673,6 @@ class SessionCatalog(
       externalCatalog.listFunctions(db, pattern).map { f => FunctionIdentifier(f, Some(db)) }
     val loadedFunctions = StringUtils.filterPattern(functionRegistry.listFunction(), pattern)
       .map { f => FunctionIdentifier(f) }
-    // TODO: Actually, there will be dbFunctions that have been loaded into the FunctionRegistry.
-    // So, the returned list may have two entries for the same function.
     dbFunctions ++ loadedFunctions
   }
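
Modeled on the updated SessionCatalogSuite further below, a temp function registered under a bare name resolves through the first branch above, the unqualified registry hit (a sketch; the class name string is a placeholder):

    import org.apache.spark.sql.catalyst.FunctionIdentifier
    import org.apache.spark.sql.catalyst.catalog.{InMemoryCatalog, SessionCatalog}
    import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionInfo, Literal}

    val catalog = new SessionCatalog(new InMemoryCatalog)  // in-memory, as in the tests
    val info = new ExpressionInfo("placeholderClass", "first")
    val builder = (args: Seq[Expression]) => args.head
    catalog.createTempFunction("first", info, builder, ignoreIfExists = false)

    // Unqualified identifier: found directly in the FunctionRegistry.
    catalog.lookupFunction(FunctionIdentifier("first"), Seq(Literal(1), Literal(2)))
    // => Literal(1)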
 

http://git-wip-us.apache.org/repos/asf/spark/blob/f36c9c83/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/identifiers.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/identifiers.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/identifiers.scala
index aae7595..7d05845 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/identifiers.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/identifiers.scala
@@ -26,9 +26,17 @@ package org.apache.spark.sql.catalyst
  */
 sealed trait IdentifierWithDatabase {
   val identifier: String
+
   def database: Option[String]
-  def quotedString: String = database.map(db => s"`$db`.`$identifier`").getOrElse(s"`$identifier`")
-  def unquotedString: String = database.map(db => s"$db.$identifier").getOrElse(identifier)
+
+  def quotedString: String = {
+    if (database.isDefined) s"`${database.get}`.`$identifier`" else s"`$identifier`"
+  }
+
+  def unquotedString: String = {
+    if (database.isDefined) s"${database.get}.$identifier" else identifier
+  }
+
   override def toString: String = quotedString
 }
 
@@ -63,6 +71,8 @@ case class FunctionIdentifier(funcName: String, database: Option[String])
   override val identifier: String = funcName
 
   def this(funcName: String) = this(funcName, None)
+
+  override def toString: String = unquotedString
 }
 
 object FunctionIdentifier {
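
The refactored string forms, together with the new toString override, behave as follows (illustrative):

    import org.apache.spark.sql.catalyst.FunctionIdentifier

    val qualified = FunctionIdentifier("myFunc", Some("db1"))
    qualified.unquotedString  // "db1.myFunc"
    qualified.quotedString    // "`db1`.`myFunc`"
    qualified.toString        // "db1.myFunc" -- now unquoted, unlike TableIdentifier

    val bare = FunctionIdentifier("myFunc", None)
    bare.toString             // "myFunc"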

http://git-wip-us.apache.org/repos/asf/spark/blob/f36c9c83/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 1c06762..3d7b888 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -25,7 +25,7 @@ import org.antlr.v4.runtime.{ParserRuleContext, Token}
 import org.antlr.v4.runtime.tree.{ParseTree, RuleNode, TerminalNode}
 
 import org.apache.spark.internal.Logging
-import org.apache.spark.sql.catalyst.{InternalRow, TableIdentifier}
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, InternalRow, TableIdentifier}
 import org.apache.spark.sql.catalyst.analysis._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
@@ -554,7 +554,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
       case "json_tuple" =>
         JsonTuple(expressions)
       case name =>
-        UnresolvedGenerator(name, expressions)
+        UnresolvedGenerator(visitFunctionName(ctx.qualifiedName), expressions)
     }
 
     Generate(
@@ -1033,12 +1033,12 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
     val isDistinct = Option(ctx.setQuantifier()).exists(_.DISTINCT != null)
     val arguments = ctx.expression().asScala.map(expression) match {
       case Seq(UnresolvedStar(None)) if name.toLowerCase == "count" && !isDistinct =>
-        // Transform COUNT(*) into COUNT(1). Move this to analysis?
+        // Transform COUNT(*) into COUNT(1).
         Seq(Literal(1))
       case expressions =>
         expressions
     }
-    val function = UnresolvedFunction(name, arguments, isDistinct)
+    val function = UnresolvedFunction(visitFunctionName(ctx.qualifiedName), arguments, isDistinct)
 
     // Check if the function is evaluated in a windowed context.
     ctx.windowSpec match {
@@ -1051,6 +1051,17 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
   }
 
   /**
+   * Create a function database (optional) and name pair.
+   */
+  protected def visitFunctionName(ctx: QualifiedNameContext): FunctionIdentifier = {
+    ctx.identifier().asScala.map(_.getText) match {
+      case Seq(db, fn) => FunctionIdentifier(fn, Option(db))
+      case Seq(fn) => FunctionIdentifier(fn, None)
+      case other => throw new ParseException(s"Unsupported function name '${ctx.getText}'", ctx)
+    }
+  }
+
+  /**
    * Create a reference to a window frame, i.e. [[WindowSpecReference]].
    */
   override def visitWindowRef(ctx: WindowRefContext): WindowSpecReference = withOrigin(ctx) {
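
With the helper in place, a qualified function call round-trips through the parser into an UnresolvedFunction that carries the identifier, as the updated ExpressionParserSuite below verifies (sketch):

    import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

    // Parses to UnresolvedFunction(FunctionIdentifier("bar", Some("foo")), Nil, false).
    CatalystSqlParser.parseExpression("foo.bar()")

    // A three-part name hits visitFunctionName's catch-all case:
    // CatalystSqlParser.parseExpression("a.b.c()")  // ParseException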

http://git-wip-us.apache.org/repos/asf/spark/blob/f36c9c83/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index 27205c4..1933be5 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -705,11 +705,11 @@ class SessionCatalogSuite extends SparkFunSuite {
     catalog.createTempFunction("temp1", info1, tempFunc1, ignoreIfExists = false)
     catalog.createTempFunction("temp2", info2, tempFunc2, ignoreIfExists = false)
     val arguments = Seq(Literal(1), Literal(2), Literal(3))
-    assert(catalog.lookupFunction("temp1", arguments) === Literal(1))
-    assert(catalog.lookupFunction("temp2", arguments) === Literal(3))
+    assert(catalog.lookupFunction(FunctionIdentifier("temp1"), arguments) === Literal(1))
+    assert(catalog.lookupFunction(FunctionIdentifier("temp2"), arguments) === Literal(3))
     // Temporary function does not exist.
     intercept[AnalysisException] {
-      catalog.lookupFunction("temp3", arguments)
+      catalog.lookupFunction(FunctionIdentifier("temp3"), arguments)
     }
     val tempFunc3 = (e: Seq[Expression]) => Literal(e.size)
     val info3 = new ExpressionInfo("tempFunc3", "temp1")
@@ -719,7 +719,8 @@ class SessionCatalogSuite extends SparkFunSuite {
     }
     // Temporary function is overridden
     catalog.createTempFunction("temp1", info3, tempFunc3, ignoreIfExists = true)
-    assert(catalog.lookupFunction("temp1", arguments) === Literal(arguments.length))
+    assert(
+      catalog.lookupFunction(FunctionIdentifier("temp1"), arguments) === Literal(arguments.length))
   }
 
   test("drop function") {
@@ -755,10 +756,10 @@ class SessionCatalogSuite extends SparkFunSuite {
     val tempFunc = (e: Seq[Expression]) => e.head
     catalog.createTempFunction("func1", info, tempFunc, ignoreIfExists = false)
     val arguments = Seq(Literal(1), Literal(2), Literal(3))
-    assert(catalog.lookupFunction("func1", arguments) === Literal(1))
+    assert(catalog.lookupFunction(FunctionIdentifier("func1"), arguments) === Literal(1))
     catalog.dropTempFunction("func1", ignoreIfNotExists = false)
     intercept[AnalysisException] {
-      catalog.lookupFunction("func1", arguments)
+      catalog.lookupFunction(FunctionIdentifier("func1"), arguments)
     }
     intercept[AnalysisException] {
       catalog.dropTempFunction("func1", ignoreIfNotExists = false)
@@ -792,10 +793,11 @@ class SessionCatalogSuite extends SparkFunSuite {
     val info1 = new ExpressionInfo("tempFunc1", "func1")
     val tempFunc1 = (e: Seq[Expression]) => e.head
     catalog.createTempFunction("func1", info1, tempFunc1, ignoreIfExists = false)
-    assert(catalog.lookupFunction("func1", Seq(Literal(1), Literal(2), Literal(3))) == Literal(1))
+    assert(catalog.lookupFunction(
+      FunctionIdentifier("func1"), Seq(Literal(1), Literal(2), Literal(3))) == Literal(1))
     catalog.dropTempFunction("func1", ignoreIfNotExists = false)
     intercept[AnalysisException] {
-      catalog.lookupFunction("func1", Seq(Literal(1), Literal(2), Literal(3)))
+      catalog.lookupFunction(FunctionIdentifier("func1"), Seq(Literal(1), Literal(2), Literal(3)))
     }
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/f36c9c83/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
index f0ddc92..5af3ea9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
@@ -18,6 +18,7 @@ package org.apache.spark.sql.catalyst.parser
 
 import java.sql.{Date, Timestamp}
 
+import org.apache.spark.sql.catalyst.FunctionIdentifier
 import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, _}
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.PlanTest
@@ -199,7 +200,8 @@ class ExpressionParserSuite extends PlanTest {
 
   test("function expressions") {
     assertEqual("foo()", 'foo.function())
-    assertEqual("foo.bar()", Symbol("foo.bar").function())
+    assertEqual("foo.bar()",
+      UnresolvedFunction(FunctionIdentifier("bar", Some("foo")), Seq.empty, isDistinct = false))
     assertEqual("foo(*)", 'foo.function(star()))
     assertEqual("count(*)", 'count.function(1))
     assertEqual("foo(a, b)", 'foo.function('a, 'b))

http://git-wip-us.apache.org/repos/asf/spark/blob/f36c9c83/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
index 56c91a0..5e896a3 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
@@ -17,11 +17,13 @@
 package org.apache.spark.sql.catalyst.parser
 
 import org.apache.spark.sql.Row
+import org.apache.spark.sql.catalyst.FunctionIdentifier
 import org.apache.spark.sql.catalyst.analysis.UnresolvedGenerator
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.logical._
-import org.apache.spark.sql.types.{BooleanType, IntegerType}
+import org.apache.spark.sql.types.IntegerType
+
 
 class PlanParserSuite extends PlanTest {
   import CatalystSqlParser._
@@ -300,7 +302,7 @@ class PlanParserSuite extends PlanTest {
     // Unresolved generator.
     val expected = table("t")
       .generate(
-        UnresolvedGenerator("posexplode", Seq('x)),
+        UnresolvedGenerator(FunctionIdentifier("posexplode"), Seq('x)),
         join = true,
         outer = false,
         Some("posexpl"),

http://git-wip-us.apache.org/repos/asf/spark/blob/f36c9c83/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index f22ed43..a1862f5 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -393,10 +393,10 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
     }
 
     // Extract database, name & alias.
-    val (database, function) = visitFunctionName(ctx.qualifiedName)
+    val functionIdentifier = visitFunctionName(ctx.qualifiedName)
     CreateFunction(
-      database,
-      function,
+      functionIdentifier.database,
+      functionIdentifier.funcName,
       string(ctx.className),
       resources,
       ctx.TEMPORARY != null)
@@ -411,19 +411,12 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
    * }}}
    */
   override def visitDropFunction(ctx: DropFunctionContext): LogicalPlan = withOrigin(ctx) {
-    val (database, function) = visitFunctionName(ctx.qualifiedName)
-    DropFunction(database, function, ctx.EXISTS != null, ctx.TEMPORARY != null)
-  }
-
-  /**
-   * Create a function database (optional) and name pair.
-   */
-  private def visitFunctionName(ctx: QualifiedNameContext): (Option[String], String) = {
-    ctx.identifier().asScala.map(_.getText) match {
-      case Seq(db, fn) => (Option(db), fn)
-      case Seq(fn) => (None, fn)
-      case other => throw new ParseException(s"Unsupported function name '${ctx.getText}'", ctx)
-    }
+    val functionIdentifier = visitFunctionName(ctx.qualifiedName)
+    DropFunction(
+      functionIdentifier.database,
+      functionIdentifier.funcName,
+      ctx.EXISTS != null,
+      ctx.TEMPORARY != null)
   }
 
   /**
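
Both DDL visitors now unpack the shared identifier instead of threading an ad-hoc (Option[String], String) pair; e.g. for DROP FUNCTION IF EXISTS db1.myFunc (hypothetical name), visitFunctionName yields:

    import org.apache.spark.sql.catalyst.FunctionIdentifier

    val fi = FunctionIdentifier("myFunc", Some("db1"))
    fi.database  // Some("db1"), forwarded to DropFunction
    fi.funcName  // "myFunc"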

http://git-wip-us.apache.org/repos/asf/spark/blob/f36c9c83/sql/core/src/main/scala/org/apache/spark/sql/internal/SessionState.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/SessionState.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/SessionState.scala
index 04ad920..f683bbb 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/SessionState.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/SessionState.scala
@@ -78,8 +78,7 @@ private[sql] class SessionState(ctx: SQLContext) {
   /**
    * Internal catalog for managing table and database states.
    */
-  lazy val catalog =
-    new SessionCatalog(
+  lazy val catalog = new SessionCatalog(
       ctx.externalCatalog,
       functionResourceLoader,
       functionRegistry,

http://git-wip-us.apache.org/repos/asf/spark/blob/f36c9c83/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
index 3e97edb..01b24d2 100644
--- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
+++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
@@ -74,7 +74,7 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
       TestHive.setConf(HiveUtils.CONVERT_METASTORE_ORC, originalConvertMetastoreOrc)
       TestHive.sessionState.functionRegistry.restore()
 
-      // For debugging dump some statistics about how much time was spent in various optimizer rules.
+      // For debugging dump some statistics about how much time was spent in various optimizer rules
       logWarning(RuleExecutor.dumpTimeSpent())
     } finally {
       super.afterAll()

http://git-wip-us.apache.org/repos/asf/spark/blob/f36c9c83/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index 3e71882..9e52707 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -27,18 +27,16 @@ import org.apache.hadoop.hive.ql.exec.{FunctionRegistry => HiveFunctionRegistry}
 import org.apache.hadoop.hive.ql.udf.generic.{AbstractGenericUDAFResolver, GenericUDF, GenericUDTF}
 
 import org.apache.spark.sql.{AnalysisException, SQLContext}
-import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
 import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder
 import org.apache.spark.sql.catalyst.catalog.{FunctionResourceLoader, SessionCatalog}
 import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionInfo}
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
 import org.apache.spark.sql.catalyst.rules.Rule
-import org.apache.spark.sql.execution.datasources.BucketSpec
 import org.apache.spark.sql.hive.HiveShim.HiveFunctionWrapper
 import org.apache.spark.sql.hive.client.HiveClient
 import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.types.StructType
 import org.apache.spark.util.Utils
 
 
@@ -176,7 +174,7 @@ private[sql] class HiveSessionCatalog(
   // regexp, sentences, stack, std, str_to_map, windowingtablefunction, xpath, xpath_boolean,
   // xpath_double, xpath_float, xpath_int, xpath_long, xpath_number,
   // xpath_short, and xpath_string.
-  override def lookupFunction(name: String, children: Seq[Expression]): Expression = {
+  override def lookupFunction(name: FunctionIdentifier, children: Seq[Expression]): Expression = {
     // TODO: Once lookupFunction accepts a FunctionIdentifier, we should refactor this method to
     // if (super.functionExists(name)) {
     //   super.lookupFunction(name, children)
@@ -187,7 +185,7 @@ private[sql] class HiveSessionCatalog(
     Try(super.lookupFunction(name, children)) match {
       case Success(expr) => expr
       case Failure(error) =>
-        if (functionRegistry.functionExists(name)) {
+        if (functionRegistry.functionExists(name.unquotedString)) {
           // If the function actually exists in functionRegistry, it means that there is an
           // error when we create the Expression using the given children.
           // We need to throw the original exception.
@@ -196,7 +194,7 @@ private[sql] class HiveSessionCatalog(
           // This function is not in functionRegistry, let's try to load it as a Hive's
           // built-in function.
           // Hive is case insensitive.
-          val functionName = name.toLowerCase
+          val functionName = name.unquotedString.toLowerCase
           // TODO: This may not really work for current_user because current_user is not evaluated
           // with session info.
           // We do not need to use executionHive at here because we only load
@@ -204,12 +202,12 @@ private[sql] class HiveSessionCatalog(
           val functionInfo = {
             try {
               Option(HiveFunctionRegistry.getFunctionInfo(functionName)).getOrElse(
-                failFunctionLookup(name))
+                failFunctionLookup(name.unquotedString))
             } catch {
               // If HiveFunctionRegistry.getFunctionInfo throws an exception,
               // we are failing to load a Hive builtin function, which means that
               // the given function is not a Hive builtin function.
-              case NonFatal(e) => failFunctionLookup(name)
+              case NonFatal(e) => failFunctionLookup(name.unquotedString)
             }
           }
           val className = functionInfo.getFunctionClass.getName
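
Because Hive's built-in registry is keyed by plain lower-case strings, the override converts the identifier back with unquotedString before consulting it (illustrative; histogram_numeric is one of the Hive builtins that takes this path):

    import org.apache.spark.sql.catalyst.FunctionIdentifier

    // The key used against Hive's registry for an unqualified identifier:
    FunctionIdentifier("Histogram_Numeric", None).unquotedString.toLowerCase
    // => "histogram_numeric"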

