You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by we...@apache.org on 2017/11/13 11:41:46 UTC
spark git commit: [SPARK-22442][SQL][BRANCH-2.2][FOLLOWUP]
ScalaReflection should produce correct field names for special characters
Repository: spark
Updated Branches:
refs/heads/branch-2.2 f73637798 -> 2f6dece03
[SPARK-22442][SQL][BRANCH-2.2][FOLLOWUP] ScalaReflection should produce correct field names for special characters
## What changes were proposed in this pull request?
`val TermName: TermNameExtractor` is new in Scala 2.11. For Scala 2.10, we should use the deprecated `newTermName` instead.
## How was this patch tested?
Built locally with Scala 2.10.
Author: Liang-Chi Hsieh <vi...@gmail.com>
Closes #19736 from viirya/SPARK-22442-2.2-followup.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2f6dece0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2f6dece0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2f6dece0
Branch: refs/heads/branch-2.2
Commit: 2f6dece033f0e93c3969d94acbc3ad7d56c78b92
Parents: f736377
Author: Liang-Chi Hsieh <vi...@gmail.com>
Authored: Mon Nov 13 12:41:42 2017 +0100
Committer: Wenchen Fan <we...@databricks.com>
Committed: Mon Nov 13 12:41:42 2017 +0100
----------------------------------------------------------------------
.../apache/spark/sql/catalyst/expressions/objects/objects.scala | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/2f6dece0/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
index 0b45dfe..c523766 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
@@ -27,7 +27,7 @@ import org.apache.spark.{SparkConf, SparkEnv}
import org.apache.spark.serializer._
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.ScalaReflection.universe.TermName
+import org.apache.spark.sql.catalyst.ScalaReflection.universe.newTermName
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
@@ -190,7 +190,7 @@ case class Invoke(
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported.")
- private lazy val encodedFunctionName = TermName(functionName).encodedName.toString
+ private lazy val encodedFunctionName = newTermName(functionName).encodedName.toString
@transient lazy val method = targetObject.dataType match {
case ObjectType(cls) =>
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org