Posted to issues@flink.apache.org by GitBox <gi...@apache.org> on 2022/03/16 11:00:21 UTC

[GitHub] [flink] twalthr commented on a change in pull request #19045: [FLINK-26194][table-api] Deprecate unused options from TableConfig

twalthr commented on a change in pull request #19045:
URL: https://github.com/apache/flink/pull/19045#discussion_r827862246



##########
File path: flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/agg/batch/HashAggCodeGenHelper.scala
##########
@@ -493,7 +493,7 @@ object HashAggCodeGenHelper {
 
         if (filterArg >= 0) {
           var filterTerm = s"$inputTerm.getBoolean($filterArg)"
-          if (ctx.nullCheck) {
+          if (inputType.isNullable) {

Review comment:
      We need to be careful with these `isNullable` checks. I agree with Francesco, but we should avoid introducing bugs for little to no performance gain. This check seems wrong to me: we should check the type of the `filterArg` field, not the input type.
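
A minimal sketch (not part of the PR) of what checking the filter argument's own field type could look like, assuming `inputType` is an `org.apache.flink.table.types.logical.RowType`; the helper name below is invented for illustration:

```scala
import org.apache.flink.table.types.logical.RowType

// Hypothetical helper: the null check should be driven by the type of the
// field referenced by filterArg, not by the nullability of the whole input row.
def filterFieldNeedsNullCheck(inputType: RowType, filterArg: Int): Boolean =
  filterArg >= 0 && inputType.getTypeAt(filterArg).isNullable
```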

##########
File path: flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/CodeGenUtils.scala
##########
@@ -477,40 +477,27 @@ object CodeGenUtils {
         case Some(writer) =>
           // use writer to set field
           val writeField = binaryWriterWriteField(ctx, indexTerm, fieldTerm, writer, fieldType)
-          if (ctx.nullCheck) {
-            s"""
-               |${fieldExpr.code}
-               |if (${fieldExpr.nullTerm}) {
-               |  ${binaryWriterWriteNull(indexTerm, writer, fieldType)};
-               |} else {
-               |  $writeField;
-               |}
-             """.stripMargin
-          } else {
-            s"""
-               |${fieldExpr.code}
-               |$writeField;
-             """.stripMargin
-          }
+          s"""
+             |${fieldExpr.code}
+             |if (${fieldExpr.nullTerm}) {
+             |  ${binaryWriterWriteNull(indexTerm, writer, fieldType)};
+             |} else {
+             |  $writeField;
+             |}
+           """.stripMargin
 
         case None =>
           // directly set field to BinaryRowData, this depends on all the fields are fixed length
           val writeField = binaryRowFieldSetAccess(indexTerm, rowTerm, fieldType, fieldTerm)
-          if (ctx.nullCheck) {
-            s"""
-               |${fieldExpr.code}
-               |if (${fieldExpr.nullTerm}) {
-               |  ${binaryRowSetNull(indexTerm, rowTerm, fieldType)};
-               |} else {
-               |  $writeField;
-               |}
-             """.stripMargin
-          } else {
-            s"""
-               |${fieldExpr.code}
-               |$writeField;
-             """.stripMargin
-          }
+
+          s"""

Review comment:
       Isn't this unused code? It is not assigned to a variable?
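
For context, a small standalone Scala illustration (not Flink code) of the pitfall the question points at: inside a block, only the last expression becomes the block's value, so a generated string that is neither the last expression nor bound to a `val` is silently discarded (the compiler only emits a "pure expression does nothing" warning):

```scala
// Standalone illustration: the first interpolated string below is evaluated
// and thrown away; only the final expression is returned from the block.
def generate(writeField: String): String = {
  s"""
     |// this string is discarded because it is not the last expression
   """.stripMargin

  s"""
     |$writeField;
   """.stripMargin // only this value is returned
}
```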

##########
File path: flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/ScalarOperatorGens.scala
##########
@@ -381,7 +381,7 @@ object ScalarOperatorGens {
       val resultTypeTerm = primitiveTypeTermForType(new BooleanType())
       val defaultValue = primitiveDefaultValue(new BooleanType())
 
-      val operatorCode = if (ctx.nullCheck) {
+      val operatorCode = if (resultType.isNullable) {

Review comment:
      This should be based on the nullability of `castedNeedle`, not of the result type.
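
A hypothetical sketch (names invented, not the planner code) of the point being made: the generated null branch guards the read of the needle operand, so the operand's nullability is what should gate it, not the boolean result type:

```scala
import org.apache.flink.table.types.logical.LogicalType

// Hypothetical code generator fragment: whether a null branch is emitted should
// depend on the needle operand's type, not on the result type of the IN operator.
def genInOperator(
    castedNeedleType: LogicalType,
    needleTerm: String,
    needleNullTerm: String): String =
  if (castedNeedleType.isNullable) {
    s"""
       |if ($needleNullTerm) {
       |  // the needle is NULL: the IN result is NULL / unknown
       |} else {
       |  result = haystackSet.contains($needleTerm);
       |}
     """.stripMargin
  } else {
    s"result = haystackSet.contains($needleTerm);"
  }
```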

##########
File path: flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
##########
@@ -312,12 +318,21 @@ private void validateTimeZone(String zone) {
         }
     }
 
-    /** Returns the NULL check. If enabled, all fields need to be checked for NULL first. */
+    /**
+     * Returns the NULL check. If enabled, all fields need to be checked for NULL first.
+     *
+     * @deprecated This option is not used and will be removed in next releases.

Review comment:
       `This option is not used anymore`

##########
File path: flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/ScalarOperatorGens.scala
##########
@@ -1117,7 +1066,7 @@ object ScalarOperatorGens {
     val resultTypeTerm = primitiveTypeTermForType(access.resultType)
     val defaultValue = primitiveDefaultValue(access.resultType)
 
-    val resultCode = if (ctx.nullCheck) {
+    val resultCode = if (access.resultType.isNullable) {

Review comment:
       also wrong

##########
File path: flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/ScalarOperatorGens.scala
##########
@@ -945,12 +913,8 @@ object ScalarOperatorGens {
     val rule = CastRuleProvider.resolve(operand.resultType, targetType)
     rule match {
       case codeGeneratorCastRule: CodeGeneratorCastRule[_, _] =>
-        // Make sure to force nullability checks in case ctx.nullCheck is enabled
-        val inputType = if (ctx.nullCheck) {
-          operand.resultType.copy(true)
-        } else {
-          operand.resultType
-        }
+        // Make sure to force nullability checks in case type is nullable

Review comment:
       why this?




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscribe@flink.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org