You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by rx...@apache.org on 2015/07/08 09:10:29 UTC
spark git commit: [SPARK-8883][SQL]Remove the OverrideFunctionRegistry
Repository: spark
Updated Branches:
refs/heads/master 08192a1b8 -> 351a36d0c
[SPARK-8883][SQL]Remove the OverrideFunctionRegistry
Remove the `OverrideFunctionRegistry` from Spark SQL, as the subclasses of `FunctionRegistry` have their own way to delegate to the correct underlying `FunctionRegistry`.
Author: Cheng Hao <ha...@intel.com>
Closes #7260 from chenghao-intel/override and squashes the following commits:
164d093 [Cheng Hao] enable the function registry
2ca8459 [Cheng Hao] remove the OverrideFunctionRegistry
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/351a36d0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/351a36d0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/351a36d0
Branch: refs/heads/master
Commit: 351a36d0c54d2f995df956ffb0a4236e12f89aad
Parents: 08192a1
Author: Cheng Hao <ha...@intel.com>
Authored: Wed Jul 8 00:10:24 2015 -0700
Committer: Reynold Xin <rx...@databricks.com>
Committed: Wed Jul 8 00:10:24 2015 -0700
----------------------------------------------------------------------
.../spark/sql/catalyst/analysis/FunctionRegistry.scala | 13 -------------
.../main/scala/org/apache/spark/sql/SQLContext.scala | 3 +--
.../scala/org/apache/spark/sql/hive/HiveContext.scala | 2 +-
.../scala/org/apache/spark/sql/hive/hiveUDFs.scala | 2 +-
4 files changed, 3 insertions(+), 17 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/351a36d0/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index fef2763..5c25181 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -35,19 +35,6 @@ trait FunctionRegistry {
def lookupFunction(name: String, children: Seq[Expression]): Expression
}
-class OverrideFunctionRegistry(underlying: FunctionRegistry) extends FunctionRegistry {
-
- private val functionBuilders = StringKeyHashMap[FunctionBuilder](caseSensitive = false)
-
- override def registerFunction(name: String, builder: FunctionBuilder): Unit = {
- functionBuilders.put(name, builder)
- }
-
- override def lookupFunction(name: String, children: Seq[Expression]): Expression = {
- functionBuilders.get(name).map(_(children)).getOrElse(underlying.lookupFunction(name, children))
- }
-}
-
class SimpleFunctionRegistry extends FunctionRegistry {
private val functionBuilders = StringKeyHashMap[FunctionBuilder](caseSensitive = false)
http://git-wip-us.apache.org/repos/asf/spark/blob/351a36d0/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index e81371e..079f31a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -139,8 +139,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
// TODO how to handle the temp function per user session?
@transient
- protected[sql] lazy val functionRegistry: FunctionRegistry =
- new OverrideFunctionRegistry(FunctionRegistry.builtin)
+ protected[sql] lazy val functionRegistry: FunctionRegistry = FunctionRegistry.builtin
@transient
protected[sql] lazy val analyzer: Analyzer =
http://git-wip-us.apache.org/repos/asf/spark/blob/351a36d0/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index b91242a..439d8ca 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -371,7 +371,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
// Note that HiveUDFs will be overridden by functions registered in this context.
@transient
override protected[sql] lazy val functionRegistry: FunctionRegistry =
- new OverrideFunctionRegistry(new HiveFunctionRegistry(FunctionRegistry.builtin))
+ new HiveFunctionRegistry(FunctionRegistry.builtin)
/* An analyzer that uses the Hive metastore. */
@transient
http://git-wip-us.apache.org/repos/asf/spark/blob/351a36d0/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
index 1deef6b..0bc8adb 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
@@ -77,7 +77,7 @@ private[hive] class HiveFunctionRegistry(underlying: analysis.FunctionRegistry)
}
override def registerFunction(name: String, builder: FunctionBuilder): Unit =
- throw new UnsupportedOperationException
+ underlying.registerFunction(name, builder)
}
private[hive] case class HiveSimpleUDF(funcWrapper: HiveFunctionWrapper, children: Seq[Expression])
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org