You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by we...@apache.org on 2016/12/06 02:23:48 UTC
spark git commit: [SPARK-18720][SQL][MINOR] Code Refactoring of
withColumn
Repository: spark
Updated Branches:
refs/heads/master bb57bfe97 -> 2398fde45
[SPARK-18720][SQL][MINOR] Code Refactoring of withColumn
### What changes were proposed in this pull request?
Our existing private withColumn overload for adding metadata can simply delegate to the existing public withColumn API.
### How was this patch tested?
The existing test cases already cover this refactoring.
Author: gatorsmile <ga...@gmail.com>
Closes #16152 from gatorsmile/withColumnRefactoring.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2398fde4
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2398fde4
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2398fde4
Branch: refs/heads/master
Commit: 2398fde450139473b912cadb364e2ec5675b8355
Parents: bb57bfe
Author: gatorsmile <ga...@gmail.com>
Authored: Tue Dec 6 10:23:42 2016 +0800
Committer: Wenchen Fan <we...@databricks.com>
Committed: Tue Dec 6 10:23:42 2016 +0800
----------------------------------------------------------------------
.../main/scala/org/apache/spark/sql/Dataset.scala | 16 +---------------
1 file changed, 1 insertion(+), 15 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/2398fde4/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index 133f633..29397b1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -1871,21 +1871,7 @@ class Dataset[T] private[sql](
* Returns a new Dataset by adding a column with metadata.
*/
private[spark] def withColumn(colName: String, col: Column, metadata: Metadata): DataFrame = {
- val resolver = sparkSession.sessionState.analyzer.resolver
- val output = queryExecution.analyzed.output
- val shouldReplace = output.exists(f => resolver(f.name, colName))
- if (shouldReplace) {
- val columns = output.map { field =>
- if (resolver(field.name, colName)) {
- col.as(colName, metadata)
- } else {
- Column(field)
- }
- }
- select(columns : _*)
- } else {
- select(Column("*"), col.as(colName, metadata))
- }
+ withColumn(colName, col.as(colName, metadata))
}
/**
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org