Posted to dev@flink.apache.org by "Caizhi Weng (Jira)" <ji...@apache.org> on 2021/03/29 12:30:00 UTC

[jira] [Created] (FLINK-22016) PushFilterIntoLegacyTableSourceScanRule fails to deal with NULLs

Caizhi Weng created FLINK-22016:
-----------------------------------

             Summary: PushFilterIntoLegacyTableSourceScanRule fails to deal with NULLs
                 Key: FLINK-22016
                 URL: https://issues.apache.org/jira/browse/FLINK-22016
             Project: Flink
          Issue Type: Bug
          Components: Table SQL / Planner
    Affects Versions: 1.13.0
            Reporter: Caizhi Weng
             Fix For: 1.13.0


Add the following test case to {{PushFilterIntoLegacyTableSourceScanRuleTest}} to reproduce this bug:

{code:scala}
@Test
def myTest(): Unit = {
  val schema = TableSchema
    .builder()
    .field("a", DataTypes.STRING)
    .field("b", DataTypes.STRING)
    .build()

  val data = List(Row.of("foo", "bar"))
  TestLegacyFilterableTableSource.createTemporaryTable(
    util.tableEnv,
    schema,
    "MTable",
    isBounded = true,
    data,
    List("a", "b"))

  util.verifyRelPlan(
    """
      |WITH MView AS (SELECT CASE
      |  WHEN a = b THEN a
      |  ELSE CAST(NULL AS STRING)
      |  END AS a
      |  FROM MTable)
      |SELECT a FROM MView WHERE a IS NOT NULL
      |""".stripMargin)
}
{code}

The exception stack trace is:
{code}
org.apache.flink.table.api.ValidationException: Data type 'STRING NOT NULL' does not support null values.

	at org.apache.flink.table.expressions.ValueLiteralExpression.validateValueDataType(ValueLiteralExpression.java:272)
	at org.apache.flink.table.expressions.ValueLiteralExpression.<init>(ValueLiteralExpression.java:79)
	at org.apache.flink.table.expressions.ApiExpressionUtils.valueLiteral(ApiExpressionUtils.java:251)
	at org.apache.flink.table.planner.plan.utils.RexNodeToExpressionConverter.visitLiteral(RexNodeExtractor.scala:451)
	at org.apache.flink.table.planner.plan.utils.RexNodeToExpressionConverter.visitLiteral(RexNodeExtractor.scala:359)
	at org.apache.calcite.rex.RexLiteral.accept(RexLiteral.java:1173)
	at org.apache.flink.table.planner.plan.utils.RexNodeToExpressionConverter$$anonfun$8.apply(RexNodeExtractor.scala:459)
	at org.apache.flink.table.planner.plan.utils.RexNodeToExpressionConverter$$anonfun$8.apply(RexNodeExtractor.scala:459)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.Iterator$class.foreach(Iterator.scala:891)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
	at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
	at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.flink.table.planner.plan.utils.RexNodeToExpressionConverter.visitCall(RexNodeExtractor.scala:458)
	at org.apache.flink.table.planner.plan.utils.RexNodeToExpressionConverter.visitCall(RexNodeExtractor.scala:359)
	at org.apache.calcite.rex.RexCall.accept(RexCall.java:174)
	at org.apache.flink.table.planner.plan.utils.RexNodeToExpressionConverter$$anonfun$8.apply(RexNodeExtractor.scala:459)
	at org.apache.flink.table.planner.plan.utils.RexNodeToExpressionConverter$$anonfun$8.apply(RexNodeExtractor.scala:459)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.Iterator$class.foreach(Iterator.scala:891)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
	at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
	at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.flink.table.planner.plan.utils.RexNodeToExpressionConverter.visitCall(RexNodeExtractor.scala:458)
	at org.apache.flink.table.planner.plan.utils.RexNodeToExpressionConverter.visitCall(RexNodeExtractor.scala:359)
	at org.apache.calcite.rex.RexCall.accept(RexCall.java:174)
	at org.apache.flink.table.planner.plan.utils.RexNodeExtractor$$anonfun$extractConjunctiveConditions$1.apply(RexNodeExtractor.scala:136)
	at org.apache.flink.table.planner.plan.utils.RexNodeExtractor$$anonfun$extractConjunctiveConditions$1.apply(RexNodeExtractor.scala:135)
	at scala.collection.Iterator$class.foreach(Iterator.scala:891)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
	at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
	at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
	at org.apache.flink.table.planner.plan.utils.RexNodeExtractor$.extractConjunctiveConditions(RexNodeExtractor.scala:135)
	at org.apache.flink.table.planner.plan.utils.RexNodeExtractor$.extractConjunctiveConditions(RexNodeExtractor.scala:101)
	at org.apache.flink.table.planner.plan.rules.logical.PushFilterIntoLegacyTableSourceScanRule.pushFilterIntoScan(PushFilterIntoLegacyTableSourceScanRule.scala:90)
	at org.apache.flink.table.planner.plan.rules.logical.PushFilterIntoLegacyTableSourceScanRule.onMatch(PushFilterIntoLegacyTableSourceScanRule.scala:77)
	at org.apache.calcite.plan.AbstractRelOptPlanner.fireRule(AbstractRelOptPlanner.java:333)
	at org.apache.calcite.plan.hep.HepPlanner.applyRule(HepPlanner.java:542)
	at org.apache.calcite.plan.hep.HepPlanner.applyRules(HepPlanner.java:407)
	at org.apache.calcite.plan.hep.HepPlanner.executeInstruction(HepPlanner.java:271)
	at org.apache.calcite.plan.hep.HepInstruction$RuleCollection.execute(HepInstruction.java:74)
	at org.apache.calcite.plan.hep.HepPlanner.executeProgram(HepPlanner.java:202)
	at org.apache.calcite.plan.hep.HepPlanner.findBestExp(HepPlanner.java:189)
	at org.apache.flink.table.planner.plan.optimize.program.FlinkHepProgram.optimize(FlinkHepProgram.scala:69)
	at org.apache.flink.table.planner.plan.optimize.program.FlinkHepRuleSetProgram.optimize(FlinkHepRuleSetProgram.scala:87)
	at org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram$$anonfun$optimize$1.apply(FlinkChainedProgram.scala:62)
	at org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram$$anonfun$optimize$1.apply(FlinkChainedProgram.scala:58)
	at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
	at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
	at scala.collection.Iterator$class.foreach(Iterator.scala:891)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
	at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
	at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
	at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157)
	at scala.collection.AbstractTraversable.foldLeft(Traversable.scala:104)
	at org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram.optimize(FlinkChainedProgram.scala:57)
	at org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer.optimizeTree(BatchCommonSubGraphBasedOptimizer.scala:87)
	at org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer.org$apache$flink$table$planner$plan$optimize$BatchCommonSubGraphBasedOptimizer$$optimizeBlock(BatchCommonSubGraphBasedOptimizer.scala:58)
	at org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer$$anonfun$doOptimize$1.apply(BatchCommonSubGraphBasedOptimizer.scala:46)
	at org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer$$anonfun$doOptimize$1.apply(BatchCommonSubGraphBasedOptimizer.scala:46)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.apache.flink.table.planner.plan.optimize.BatchCommonSubGraphBasedOptimizer.doOptimize(BatchCommonSubGraphBasedOptimizer.scala:46)
	at org.apache.flink.table.planner.plan.optimize.CommonSubGraphBasedOptimizer.optimize(CommonSubGraphBasedOptimizer.scala:81)
	at org.apache.flink.table.planner.delegation.PlannerBase.optimize(PlannerBase.scala:304)
	at org.apache.flink.table.planner.utils.TableTestUtilBase.assertPlanEquals(TableTestBase.scala:890)
	at org.apache.flink.table.planner.utils.TableTestUtilBase.doVerifyPlan(TableTestBase.scala:781)
	at org.apache.flink.table.planner.utils.TableTestUtilBase.verifyRelPlan(TableTestBase.scala:401)
{code}

This bug seems to be related to commit 957c49d56c80416ae712ae79cdd2784bb2387c80 by [~dwysakowicz], which adds a {{notNull}} call to the data type of the literal produced by {{RexNodeExtractor#visitLiteral}}.
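
For reference, here is a minimal sketch (not part of the original report, and based on the assumption above) of the failing validation in isolation: the converter ends up handing the NULL value from {{CAST(NULL AS STRING)}} to {{ApiExpressionUtils.valueLiteral}} together with a data type that has been forced to NOT NULL, which is exactly the check that throws in the stack trace above.

{code:scala}
// Sketch only (assumption, not the reporter's code): reproduce the validation
// failure outside the planner. A null value combined with a NOT NULL data type
// is rejected by ValueLiteralExpression with
// "Data type 'STRING NOT NULL' does not support null values."
import org.apache.flink.table.api.DataTypes
import org.apache.flink.table.expressions.ApiExpressionUtils

object NullLiteralSketch {
  def main(args: Array[String]): Unit = {
    // Throws org.apache.flink.table.api.ValidationException, matching the
    // message seen in PushFilterIntoLegacyTableSourceScanRule above.
    ApiExpressionUtils.valueLiteral(null, DataTypes.STRING().notNull())
  }
}
{code}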



--
This message was sent by Atlassian Jira
(v8.3.4#803005)