You are viewing a plain text version of this content. The canonical link for it is here.
Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2019/04/16 02:34:28 UTC

[GitHub] [spark] beliefer commented on a change in pull request #24372: [SPARK-27462][SQL] Enhance insert into hive table that could choose some columns in target table flexibly.

beliefer commented on a change in pull request #24372: [SPARK-27462][SQL] Enhance insert into hive table that could choose some columns in target table flexibly.
URL: https://github.com/apache/spark/pull/24372#discussion_r275610496
 
 

 ##########
 File path: sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
 ##########
 @@ -209,6 +209,31 @@ class PlanParserSuite extends AnalysisTest {
           table("u"), Map.empty, plan2, false, ifPartitionNotExists = false)))
   }
 
+  test("insert into and choose inserted columns") {
+    val sql = "select a2, b2 from t"
+    val plan = table("t").select('a2, 'b2)
+    def insert(
+        insertCols: Option[Seq[String]],
+        partition: Map[String, Option[String]],
+        overwrite: Boolean = false,
+        ifPartitionNotExists: Boolean = false): LogicalPlan =
+      InsertIntoTable(table("s"), insertCols, partition, plan, overwrite, ifPartitionNotExists)
+
+    // Single inserts
+    assertEqual(s"insert into s(a1, b1) $sql",
 
 Review comment:
   > Please add more tests. Example:
   > 
   > ```sql
   > insert into s(b1) select b2 from t;
   > insert into s(b1, a1) select b2, a2 from t;
   > insert into s(A1, B1) select a2, b2 from t;
   > insert into s(B1) select a2, b2 from t;
   > ...
   > ```
   
  OK, I have supplemented some unit tests.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org