From 2398fde450139473b912cadb364e2ec5675b8355 Mon Sep 17 00:00:00 2001
From: gatorsmile
Date: Tue, 6 Dec 2016 10:23:42 +0800
Subject: [SPARK-18720][SQL][MINOR] Code Refactoring of withColumn

### What changes were proposed in this pull request?
Our existing withColumn for adding metadata can simply use the existing public withColumn API.

### How was this patch tested?
The existing test cases cover it.

Author: gatorsmile

Closes #16152 from gatorsmile/withColumnRefactoring.
---
 .../src/main/scala/org/apache/spark/sql/Dataset.scala | 16 +---------------
 1 file changed, 1 insertion(+), 15 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index 133f633212..29397b1340 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -1871,21 +1871,7 @@ class Dataset[T] private[sql](
    * Returns a new Dataset by adding a column with metadata.
    */
   private[spark] def withColumn(colName: String, col: Column, metadata: Metadata): DataFrame = {
-    val resolver = sparkSession.sessionState.analyzer.resolver
-    val output = queryExecution.analyzed.output
-    val shouldReplace = output.exists(f => resolver(f.name, colName))
-    if (shouldReplace) {
-      val columns = output.map { field =>
-        if (resolver(field.name, colName)) {
-          col.as(colName, metadata)
-        } else {
-          Column(field)
-        }
-      }
-      select(columns : _*)
-    } else {
-      select(Column("*"), col.as(colName, metadata))
-    }
+    withColumn(colName, col.as(colName, metadata))
   }
 
   /**
-- 
cgit v1.2.3
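
Editor's note: a minimal, self-contained sketch (not part of the patch) of the behavior the refactoring relies on: the public `withColumn(colName, col)` already replaces a column when the name exists and appends one otherwise, so the metadata-aware variant only needs to attach the metadata via `Column.as(name, metadata)` before delegating. The DataFrame, column names, and metadata key below are illustrative assumptions.

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.types.MetadataBuilder

object WithColumnMetadataSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("withColumn-metadata-sketch")
      .getOrCreate()

    // Illustrative data: two columns, "id" and "label".
    val df = spark.createDataFrame(Seq((1, "a"), (2, "b"))).toDF("id", "label")
    val meta = new MetadataBuilder().putString("comment", "example metadata").build()

    // "label" already exists, so the public withColumn replaces it; the metadata
    // carried by col("label").as("label", meta) lands on the replaced column.
    val replaced = df.withColumn("label", col("label").as("label", meta))
    println(replaced.schema("label").metadata)

    // "tag" does not exist, so the same call path appends a new column instead.
    val appended = df.withColumn("tag", col("label").as("tag", meta))
    println(appended.schema("tag").metadata)

    spark.stop()
  }
}
```

Because the public `withColumn` already performs the resolver-based replace-vs-add decision, the hand-rolled `select` logic removed by this patch was redundant; the one-line delegation preserves both code paths.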