Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2021/09/16 04:49:38 UTC

[GitHub] [spark] cloud-fan commented on a change in pull request #34001: [SPARK-36760][SQL] Add internal utils to convert between v1 and v2 filters

cloud-fan commented on a change in pull request #34001:
URL: https://github.com/apache/spark/pull/34001#discussion_r709766470



##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala
##########
@@ -261,4 +267,116 @@ object DataSourceUtils extends PredicateHelper {
       dataFilters.flatMap(extractPredicatesWithinOutputSet(_, partitionSet))
     (ExpressionSet(partitionFilters ++ extraPartitionFilter).toSeq, dataFilters)
   }
+
+  def convertV1FilterToV2(v1Filter: sources.Filter): V2Filter = {
+    v1Filter match {
+      case _: sources.AlwaysFalse =>
+        new V2AlwaysFalse
+      case _: sources.AlwaysTrue =>
+        new V2AlwaysTrue
+      case e: sources.EqualNullSafe =>
+        new V2EqualNullSafe(FieldReference(e.attribute), getLiteralValue(e.value))
+      case equal: sources.EqualTo =>
+        new V2EqualTo(FieldReference(equal.attribute), getLiteralValue(equal.value))
+      case g: sources.GreaterThan =>
+        new V2GreaterThan(FieldReference(g.attribute), getLiteralValue(g.value))
+      case ge: sources.GreaterThanOrEqual =>
+        new V2GreaterThanOrEqual(FieldReference(ge.attribute), getLiteralValue(ge.value))
+      case in: sources.In =>
+        new V2In(FieldReference(
+          in.attribute), in.values.map(value => getLiteralValue(value)))
+      case notNull: sources.IsNotNull =>
+        new V2IsNotNull(FieldReference(notNull.attribute))
+      case isNull: sources.IsNull =>
+        new V2IsNull(FieldReference(isNull.attribute))
+      case l: sources.LessThan =>
+        new V2LessThan(FieldReference(l.attribute), getLiteralValue(l.value))
+      case le: sources.LessThanOrEqual =>
+        new V2LessThanOrEqual(FieldReference(le.attribute), getLiteralValue(le.value))
+      case contains: sources.StringContains =>
+        new V2StringContains(
+          FieldReference(contains.attribute), UTF8String.fromString(contains.value))
+      case ends: sources.StringEndsWith =>
+        new V2StringEndsWith(FieldReference(ends.attribute), UTF8String.fromString(ends.value))
+      case starts: sources.StringStartsWith =>
+        new V2StringStartsWith(
+          FieldReference(starts.attribute), UTF8String.fromString(starts.value))
+      case and: sources.And =>
+        new V2And(convertV1FilterToV2(and.left), convertV1FilterToV2(and.right))
+      case or: sources.Or =>
+        new V2Or(convertV1FilterToV2(or.left), convertV1FilterToV2(or.right))
+      case not: sources.Not =>
+        new V2Not(convertV1FilterToV2(not.child))
+      case _ => throw new IllegalStateException("Invalid v1Filter: " + v1Filter)
+    }
+  }
+
+  def getLiteralValue(value: Any): LiteralValue[_] = value match {
+    case _: JavaBigDecimal =>

Review comment:
       According to `DataSourceStrategy.translateLeafNodeFilter`, the value for a decimal type can only be a `Decimal`, not a `java.math.BigDecimal`.
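
       For illustration only, here is a minimal sketch of how a decimal value could be wrapped into a v2 literal if, per the comment above, it is assumed to arrive as `org.apache.spark.sql.types.Decimal` rather than `java.math.BigDecimal`. `decimalToV2Literal` is a hypothetical helper, not code from this PR:

       ```scala
       import org.apache.spark.sql.connector.expressions.LiteralValue
       import org.apache.spark.sql.types.{Decimal, DecimalType}

       object DecimalFilterValueSketch {
         // Hypothetical helper, not part of this PR: wraps a Catalyst Decimal in a v2
         // LiteralValue, reporting the value's own precision and scale as its data type.
         def decimalToV2Literal(d: Decimal): LiteralValue[Decimal] =
           LiteralValue(d, DecimalType(d.precision, d.scale))
       }

       // e.g. decimalToV2Literal(Decimal(BigDecimal("12.34"))) == LiteralValue(12.34, DecimalType(4, 2))
       ```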




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


