Posted to commits@spark.apache.org by ma...@apache.org on 2016/01/04 21:34:09 UTC

spark git commit: [SPARK-12512][SQL] support column name with dot in withColumn()

Repository: spark
Updated Branches:
  refs/heads/master 43706bf8b -> 573ac55d7


[SPARK-12512][SQL] support column name with dot in withColumn()

Author: Xiu Guo <xg...@gmail.com>

Closes #10500 from xguo27/SPARK-12512.
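
For context, a minimal usage sketch of the behavior this change enables, mirroring the test added in DataFrameSuite below. It assumes an active SQLContext with its implicits imported; column and value names are illustrative only:

  import org.apache.spark.sql.functions.lit
  import sqlContext.implicits._   // assumes an existing sqlContext

  val df = Seq("a" -> "b").toDF("col.a", "col.b")

  // withColumn() now resolves an existing column whose name contains a dot
  // and replaces it, instead of failing to match and appending a duplicate.
  df.withColumn("col.a", lit("c")).show()   // rows: [c, b]

  // A name that matches no existing column is still appended as a new column.
  df.withColumn("col.c", lit("c")).show()   // rows: [a, b, c]

The fix works by matching against the analyzed plan's output attributes rather than raw schema names, so dotted names are compared as plain strings instead of being parsed as nested field references.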


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/573ac55d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/573ac55d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/573ac55d

Branch: refs/heads/master
Commit: 573ac55d7469ea2ea7a5979b4d3eea99c98f6560
Parents: 43706bf
Author: Xiu Guo <xg...@gmail.com>
Authored: Mon Jan 4 12:34:04 2016 -0800
Committer: Michael Armbrust <mi...@databricks.com>
Committed: Mon Jan 4 12:34:04 2016 -0800

----------------------------------------------------------------------
 .../scala/org/apache/spark/sql/DataFrame.scala  | 32 ++++++++++++--------
 .../org/apache/spark/sql/DataFrameSuite.scala   |  7 +++++
 2 files changed, 27 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/573ac55d/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
index 965eaa9..0763aa4 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
@@ -1171,13 +1171,17 @@ class DataFrame private[sql](
    */
   def withColumn(colName: String, col: Column): DataFrame = {
     val resolver = sqlContext.analyzer.resolver
-    val replaced = schema.exists(f => resolver(f.name, colName))
-    if (replaced) {
-      val colNames = schema.map { field =>
-        val name = field.name
-        if (resolver(name, colName)) col.as(colName) else Column(name)
+    val output = queryExecution.analyzed.output
+    val shouldReplace = output.exists(f => resolver(f.name, colName))
+    if (shouldReplace) {
+      val columns = output.map { field =>
+        if (resolver(field.name, colName)) {
+          col.as(colName)
+        } else {
+          Column(field)
+        }
       }
-      select(colNames : _*)
+      select(columns : _*)
     } else {
       select(Column("*"), col.as(colName))
     }
@@ -1188,13 +1192,17 @@ class DataFrame private[sql](
    */
   private[spark] def withColumn(colName: String, col: Column, metadata: Metadata): DataFrame = {
     val resolver = sqlContext.analyzer.resolver
-    val replaced = schema.exists(f => resolver(f.name, colName))
-    if (replaced) {
-      val colNames = schema.map { field =>
-        val name = field.name
-        if (resolver(name, colName)) col.as(colName, metadata) else Column(name)
+    val output = queryExecution.analyzed.output
+    val shouldReplace = output.exists(f => resolver(f.name, colName))
+    if (shouldReplace) {
+      val columns = output.map { field =>
+        if (resolver(field.name, colName)) {
+          col.as(colName, metadata)
+        } else {
+          Column(field)
+        }
       }
-      select(colNames : _*)
+      select(columns : _*)
     } else {
       select(Column("*"), col.as(colName, metadata))
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/573ac55d/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index ad478b0..ab02b32 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -1221,4 +1221,11 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
           " _2: bigint ... 2 more fields> ... 2 more fields> ... 2 more fields]")
 
   }
+
+  test("SPARK-12512: support `.` in column name for withColumn()") {
+    val df = Seq("a" -> "b").toDF("col.a", "col.b")
+    checkAnswer(df.select(df("*")), Row("a", "b"))
+    checkAnswer(df.withColumn("col.a", lit("c")), Row("c", "b"))
+    checkAnswer(df.withColumn("col.c", lit("c")), Row("a", "b", "c"))
+  }
 }

