Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2018/12/20 07:48:43 UTC

[GitHub] cloud-fan commented on a change in pull request #23353: [SPARK-26402][SQL] Canonicalization on GetStructField

cloud-fan commented on a change in pull request #23353: [SPARK-26402][SQL] Canonicalization on GetStructField 
URL: https://github.com/apache/spark/pull/23353#discussion_r243179452
 
 

 ##########
 File path: sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CanonicalizeSuite.scala
 ##########
 @@ -50,4 +59,44 @@ class CanonicalizeSuite extends SparkFunSuite {
     assert(range.where(arrays1).sameResult(range.where(arrays2)))
     assert(!range.where(arrays1).sameResult(range.where(arrays3)))
   }
+
+  test("SPARK-26402: GetStructField with different optional names are semantically equal") {
+    val expId = NamedExpression.newExprId
+    val qualifier = Seq.empty[String]
+    val structType = StructType(
+      StructField("a", StructType(StructField("b", IntegerType, false) :: Nil), false) :: Nil)
+
+    val fieldB1 = GetStructField(
+      AttributeReference("data1", structType, false)(expId, qualifier),
+      0, Some("b1"))
+    val fieldB2 = GetStructField(
+      AttributeReference("data2", structType, false)(expId, qualifier),
+      0, Some("b2"))
+    assert(fieldB1.semanticEquals(fieldB2))
+
+    val fieldA1 = GetStructField(
+      GetStructField(
+        AttributeReference("data1", structType, false)(expId, qualifier),
+        0, Some("a1")),
+      0, Some("b1"))
+    val fieldA2 = GetStructField(
+      GetStructField(
+        AttributeReference("data2", structType, false)(expId, qualifier),
+        0, Some("a2")),
+      0, Some("b2"))
+    assert(fieldA1.semanticEquals(fieldA2))
+
+    // End-to-end test case
+    val testRelation = LocalRelation('a.int)
 
 Review comment:
   This is not a real end-to-end test...
   
   How about adding the following test to SQLQuerySuite?
   ```
   sql("create table t (s struct<i: Int>) using json")
   sql("select s.I from t group by s.i")
   ```
   Currently it fails with
   ```
   org.apache.spark.sql.AnalysisException: expression 'default.t.`s`' is neither present in the group by, nor is it an aggregate function
   ```
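
   For context, a minimal sketch of how that suggestion could be wrapped as a test in SQLQuerySuite (the test name is illustrative, and it assumes the usual QueryTest/SQLTestUtils helpers such as `withTable` and `checkAnswer` are in scope):
   ```
   test("SPARK-26402: group by a struct field referenced with different letter case") {
     withTable("t") {
       sql("create table t (s struct<i: Int>) using json")
       // The table is empty, so once GetStructField canonicalization ignores the
       // optional field name, the query should analyze cleanly and return no rows.
       checkAnswer(sql("select s.I from t group by s.i"), Nil)
     }
   }
   ```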
