Posted to dev@drill.apache.org by GitBox <gi...@apache.org> on 2019/01/12 07:48:00 UTC

[GitHub] sohami closed pull request #1607: DRILL-6959: Fix loss of precision when casting time and timestamp literals in filter condition

sohami closed pull request #1607: DRILL-6959: Fix loss of precision when casting time and timestamp literals in filter condition
URL: https://github.com/apache/drill/pull/1607

This is a PR merged from a forked repository. Because GitHub hides the
original diff once a forked ("foreign") pull request is merged, the diff is
reproduced below for the sake of provenance:
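
For readers skimming the diff below: the core of the DrillConstExecutor change
is that folded VARCHAR, TIME and TIMESTAMP literals now get a Calcite type
built with the precision of the original call, where the old code used a
default-precision type and so lost sub-second precision (DRILL-6959). A
minimal Java sketch of the pattern follows, offered only as an illustration;
the class and method names are hypothetical and not part of the PR, while the
Calcite calls mirror the TIMESTAMP case in the diff.

    import org.apache.calcite.rel.type.RelDataType;
    import org.apache.calcite.rel.type.RelDataTypeFactory;
    import org.apache.calcite.rex.RexBuilder;
    import org.apache.calcite.rex.RexNode;
    import org.apache.calcite.sql.type.SqlTypeName;
    import org.apache.calcite.util.TimestampString;

    class PrecisionPreservingLiteral {
      // Builds a TIMESTAMP literal whose type carries the precision and
      // nullability of the original expression, so a value such as
      // '2018-01-01 12:12:12.123' keeps its milliseconds when the constant
      // expression is folded.
      static RexNode timestampLiteral(RexBuilder rexBuilder, RexNode originalCall,
                                      TimestampString value) {
        RelDataTypeFactory typeFactory = rexBuilder.getTypeFactory();
        RelDataType type = typeFactory.createSqlType(
            SqlTypeName.TIMESTAMP, originalCall.getType().getPrecision());
        RelDataType typeWithNullability = typeFactory.createTypeWithNullability(
            type, originalCall.getType().isNullable());
        // Same RexBuilder call the PR uses to construct the folded literal.
        return rexBuilder.makeLiteral(value, typeWithNullability, false);
      }
    }

The new tests at the end of the diff exercise exactly this: a filter such as
cast('2018-01-01 12:12:12.123' as timestamp(3)) is asserted to match the
millisecond-precision value rather than a truncated literal.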

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java
index 8991a7d50bb..dd5b62e8908 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java
@@ -18,7 +18,6 @@
 package org.apache.drill.exec.planner.logical;
 
 import org.apache.calcite.rel.type.RelDataType;
-import org.apache.drill.shaded.guava.com.google.common.base.Function;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
 import io.netty.buffer.DrillBuf;
 import org.apache.calcite.rex.RexExecutor;
@@ -84,6 +83,7 @@
 import java.math.BigInteger;
 import java.util.Calendar;
 import java.util.List;
+import java.util.function.Function;
 
 public class DrillConstExecutor implements RexExecutor {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillConstExecutor.class);
@@ -115,8 +115,8 @@
       TypeProtos.MinorType.UINT8)
       .build();
 
-  FunctionImplementationRegistry funcImplReg;
-  UdfUtilities udfUtilities;
+  private final FunctionImplementationRegistry funcImplReg;
+  private final UdfUtilities udfUtilities;
 
   public DrillConstExecutor(FunctionImplementationRegistry funcImplReg, UdfUtilities udfUtilities, PlannerSettings plannerSettings) {
     this.funcImplReg = funcImplReg;
@@ -125,12 +125,13 @@ public DrillConstExecutor(FunctionImplementationRegistry funcImplReg, UdfUtiliti
   }
 
   @Override
-  public void reduce(final RexBuilder rexBuilder, List<RexNode> constExps, final List<RexNode> reducedValues) {
-    for (final RexNode newCall : constExps) {
+  @SuppressWarnings("deprecation")
+  public void reduce(RexBuilder rexBuilder, List<RexNode> constExps, List<RexNode> reducedValues) {
+    for (RexNode newCall : constExps) {
       LogicalExpression logEx = DrillOptiq.toDrill(new DrillParseContext(plannerSettings), (RelNode) null /* input rel */, newCall);
 
       ErrorCollectorImpl errors = new ErrorCollectorImpl();
-      final LogicalExpression materializedExpr = ExpressionTreeMaterializer.materialize(logEx, null, errors, funcImplReg);
+      LogicalExpression materializedExpr = ExpressionTreeMaterializer.materialize(logEx, null, errors, funcImplReg);
       if (errors.getErrorCount() != 0) {
         String message = String.format(
             "Failure while materializing expression in constant expression evaluator [%s].  Errors: %s",
@@ -149,7 +150,7 @@ public void reduce(final RexBuilder rexBuilder, List<RexNode> constExps, final L
       }
 
       ValueHolder output = InterpreterEvaluator.evaluateConstantExpr(udfUtilities, materializedExpr);
-      final RelDataTypeFactory typeFactory = rexBuilder.getTypeFactory();
+      RelDataTypeFactory typeFactory = rexBuilder.getTypeFactory();
 
       if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL && TypeHelper.isNull(output)) {
         SqlTypeName sqlTypeName = TypeInferenceUtils.getCalciteTypeFromDrillType(materializedExpr.getMajorType().getMinorType());
@@ -166,199 +167,199 @@ public void reduce(final RexBuilder rexBuilder, List<RexNode> constExps, final L
         continue;
       }
 
-      Function<ValueHolder, RexNode> literator = new Function<ValueHolder, RexNode>() {
-        @Override
-        public RexNode apply(ValueHolder output) {
-          switch(materializedExpr.getMajorType().getMinorType()) {
-            case INT: {
-              int value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                ((NullableIntHolder) output).value : ((IntHolder) output).value;
-              return rexBuilder.makeLiteral(new BigDecimal(value),
+      Function<ValueHolder, RexNode> literator = valueHolder -> {
+        switch (materializedExpr.getMajorType().getMinorType()) {
+          case INT: {
+            int value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
+                ((NullableIntHolder) valueHolder).value : ((IntHolder) valueHolder).value;
+            return rexBuilder.makeLiteral(new BigDecimal(value),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.INTEGER, newCall.getType().isNullable()), false);
-            }
-            case BIGINT: {
-              long value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                ((NullableBigIntHolder) output).value : ((BigIntHolder) output).value;
-              return rexBuilder.makeLiteral(new BigDecimal(value),
+          }
+          case BIGINT: {
+            long value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
+                ((NullableBigIntHolder) valueHolder).value : ((BigIntHolder) valueHolder).value;
+            return rexBuilder.makeLiteral(new BigDecimal(value),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.BIGINT, newCall.getType().isNullable()), false);
-            }
-            case FLOAT4: {
-              float value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                ((NullableFloat4Holder) output).value : ((Float4Holder) output).value;
-              return rexBuilder.makeLiteral(new BigDecimal(value),
+          }
+          case FLOAT4: {
+            float value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
+                ((NullableFloat4Holder) valueHolder).value : ((Float4Holder) valueHolder).value;
+            return rexBuilder.makeLiteral(new BigDecimal(value),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.FLOAT, newCall.getType().isNullable()), false);
-            }
-            case FLOAT8: {
-              double value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                ((NullableFloat8Holder) output).value : ((Float8Holder) output).value;
-              return rexBuilder.makeLiteral(new BigDecimal(value),
+          }
+          case FLOAT8: {
+            double value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
+                ((NullableFloat8Holder) valueHolder).value : ((Float8Holder) valueHolder).value;
+            return rexBuilder.makeLiteral(new BigDecimal(value),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DOUBLE, newCall.getType().isNullable()), false);
-            }
-            case VARCHAR: {
-              String value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                StringFunctionHelpers.getStringFromVarCharHolder((NullableVarCharHolder)output) :
-                StringFunctionHelpers.getStringFromVarCharHolder((VarCharHolder)output);
-              return rexBuilder.makeLiteral(value,
-                TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.VARCHAR, newCall.getType().isNullable()), false);
-            }
-            case BIT: {
-              boolean value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                ((NullableBitHolder) output).value == 1 : ((BitHolder) output).value == 1;
-              return rexBuilder.makeLiteral(value,
+          }
+          case VARCHAR: {
+            String value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
+                StringFunctionHelpers.getStringFromVarCharHolder((NullableVarCharHolder) valueHolder) :
+                StringFunctionHelpers.getStringFromVarCharHolder((VarCharHolder) valueHolder);
+            RelDataType type = typeFactory.createSqlType(SqlTypeName.VARCHAR, newCall.getType().getPrecision());
+            RelDataType typeWithNullability = typeFactory.createTypeWithNullability(type, newCall.getType().isNullable());
+            return rexBuilder.makeLiteral(value, typeWithNullability, false);
+          }
+          case BIT: {
+            boolean value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
+                ((NullableBitHolder) valueHolder).value == 1 : ((BitHolder) valueHolder).value == 1;
+            return rexBuilder.makeLiteral(value,
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.BOOLEAN, newCall.getType().isNullable()), false);
-            }
-            case DATE: {
-              Calendar value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                new DateTime(((NullableDateHolder) output).value, DateTimeZone.UTC).toCalendar(null) :
-                new DateTime(((DateHolder) output).value, DateTimeZone.UTC).toCalendar(null);
-              return rexBuilder.makeLiteral(DateString.fromCalendarFields(value),
+          }
+          case DATE: {
+            Calendar value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
+                new DateTime(((NullableDateHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null) :
+                new DateTime(((DateHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null);
+            return rexBuilder.makeLiteral(DateString.fromCalendarFields(value),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DATE, newCall.getType().isNullable()), false);
+          }
+          case DECIMAL9: {
+            long value;
+            int scale;
+            if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
+              NullableDecimal9Holder decimal9Out = (NullableDecimal9Holder) valueHolder;
+              value = decimal9Out.value;
+              scale = decimal9Out.scale;
+            } else {
+              Decimal9Holder decimal9Out = (Decimal9Holder) valueHolder;
+              value = decimal9Out.value;
+              scale = decimal9Out.scale;
             }
-            case DECIMAL9: {
-              long value;
-              int scale;
-              if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
-                NullableDecimal9Holder decimal9Out = (NullableDecimal9Holder)output;
-                value = decimal9Out.value;
-                scale = decimal9Out.scale;
-              } else {
-                Decimal9Holder decimal9Out = (Decimal9Holder)output;
-                value = decimal9Out.value;
-                scale = decimal9Out.scale;
-              }
-              return rexBuilder.makeLiteral(
+            return rexBuilder.makeLiteral(
                 new BigDecimal(BigInteger.valueOf(value), scale),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DECIMAL, newCall.getType().isNullable()),
                 false);
+          }
+          case DECIMAL18: {
+            long value;
+            int scale;
+            if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
+              NullableDecimal18Holder decimal18Out = (NullableDecimal18Holder) valueHolder;
+              value = decimal18Out.value;
+              scale = decimal18Out.scale;
+            } else {
+              Decimal18Holder decimal18Out = (Decimal18Holder) valueHolder;
+              value = decimal18Out.value;
+              scale = decimal18Out.scale;
             }
-            case DECIMAL18: {
-              long value;
-              int scale;
-              if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
-                NullableDecimal18Holder decimal18Out = (NullableDecimal18Holder)output;
-                value = decimal18Out.value;
-                scale = decimal18Out.scale;
-              } else {
-                Decimal18Holder decimal18Out = (Decimal18Holder)output;
-                value = decimal18Out.value;
-                scale = decimal18Out.scale;
-              }
-              return rexBuilder.makeLiteral(
+            return rexBuilder.makeLiteral(
                 new BigDecimal(BigInteger.valueOf(value), scale),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DECIMAL, newCall.getType().isNullable()),
                 false);
+          }
+          case VARDECIMAL: {
+            DrillBuf buffer;
+            int start;
+            int end;
+            int scale;
+            int precision;
+            if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
+              NullableVarDecimalHolder varDecimalHolder = (NullableVarDecimalHolder) valueHolder;
+              buffer = varDecimalHolder.buffer;
+              start = varDecimalHolder.start;
+              end = varDecimalHolder.end;
+              scale = varDecimalHolder.scale;
+              precision = varDecimalHolder.precision;
+            } else {
+              VarDecimalHolder varDecimalHolder = (VarDecimalHolder) valueHolder;
+              buffer = varDecimalHolder.buffer;
+              start = varDecimalHolder.start;
+              end = varDecimalHolder.end;
+              scale = varDecimalHolder.scale;
+              precision = varDecimalHolder.precision;
             }
-            case VARDECIMAL: {
-              DrillBuf buffer;
-              int start;
-              int end;
-              int scale;
-              int precision;
-              if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
-                NullableVarDecimalHolder varDecimalHolder = (NullableVarDecimalHolder) output;
-                buffer = varDecimalHolder.buffer;
-                start = varDecimalHolder.start;
-                end = varDecimalHolder.end;
-                scale = varDecimalHolder.scale;
-                precision = varDecimalHolder.precision;
-              } else {
-                VarDecimalHolder varDecimalHolder = (VarDecimalHolder) output;
-                buffer = varDecimalHolder.buffer;
-                start = varDecimalHolder.start;
-                end = varDecimalHolder.end;
-                scale = varDecimalHolder.scale;
-                precision = varDecimalHolder.precision;
-              }
-              return rexBuilder.makeLiteral(
-                  org.apache.drill.exec.util.DecimalUtility.getBigDecimalFromDrillBuf(buffer, start, end - start, scale),
-                  typeFactory.createSqlType(SqlTypeName.DECIMAL, precision, scale),
-                  false);
+            return rexBuilder.makeLiteral(
+                org.apache.drill.exec.util.DecimalUtility.getBigDecimalFromDrillBuf(buffer, start, end - start, scale),
+                typeFactory.createSqlType(SqlTypeName.DECIMAL, precision, scale),
+                false);
+          }
+          case DECIMAL28SPARSE: {
+            DrillBuf buffer;
+            int start;
+            int scale;
+            if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
+              NullableDecimal28SparseHolder decimal28Out = (NullableDecimal28SparseHolder) valueHolder;
+              buffer = decimal28Out.buffer;
+              start = decimal28Out.start;
+              scale = decimal28Out.scale;
+            } else {
+              Decimal28SparseHolder decimal28Out = (Decimal28SparseHolder) valueHolder;
+              buffer = decimal28Out.buffer;
+              start = decimal28Out.start;
+              scale = decimal28Out.scale;
             }
-            case DECIMAL28SPARSE: {
-              DrillBuf buffer;
-              int start;
-              int scale;
-              if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
-                NullableDecimal28SparseHolder decimal28Out = (NullableDecimal28SparseHolder)output;
-                buffer = decimal28Out.buffer;
-                start = decimal28Out.start;
-                scale = decimal28Out.scale;
-              } else {
-                Decimal28SparseHolder decimal28Out = (Decimal28SparseHolder)output;
-                buffer = decimal28Out.buffer;
-                start = decimal28Out.start;
-                scale = decimal28Out.scale;
-              }
-              return rexBuilder.makeLiteral(
+            return rexBuilder.makeLiteral(
                 org.apache.drill.exec.util.DecimalUtility.getBigDecimalFromSparse(buffer, start * 20, 5, scale),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DECIMAL, newCall.getType().isNullable()), false);
+          }
+          case DECIMAL38SPARSE: {
+            DrillBuf buffer;
+            int start;
+            int scale;
+            if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
+              NullableDecimal38SparseHolder decimal38Out = (NullableDecimal38SparseHolder) valueHolder;
+              buffer = decimal38Out.buffer;
+              start = decimal38Out.start;
+              scale = decimal38Out.scale;
+            } else {
+              Decimal38SparseHolder decimal38Out = (Decimal38SparseHolder) valueHolder;
+              buffer = decimal38Out.buffer;
+              start = decimal38Out.start;
+              scale = decimal38Out.scale;
             }
-            case DECIMAL38SPARSE: {
-              DrillBuf buffer;
-              int start;
-              int scale;
-              if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
-                NullableDecimal38SparseHolder decimal38Out = (NullableDecimal38SparseHolder)output;
-                buffer = decimal38Out.buffer;
-                start = decimal38Out.start;
-                scale = decimal38Out.scale;
-              } else {
-                Decimal38SparseHolder decimal38Out = (Decimal38SparseHolder)output;
-                buffer = decimal38Out.buffer;
-                start = decimal38Out.start;
-                scale = decimal38Out.scale;
-              }
-              return rexBuilder.makeLiteral(org.apache.drill.exec.util.DecimalUtility.getBigDecimalFromSparse(buffer, start * 24, 6, scale),
+            return rexBuilder.makeLiteral(org.apache.drill.exec.util.DecimalUtility.getBigDecimalFromSparse(buffer, start * 24, 6, scale),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DECIMAL, newCall.getType().isNullable()),
                 false);
-            }
-            case TIME: {
-              Calendar value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                new DateTime(((NullableTimeHolder) output).value, DateTimeZone.UTC).toCalendar(null) :
-                new DateTime(((TimeHolder) output).value, DateTimeZone.UTC).toCalendar(null);
-              return rexBuilder.makeLiteral(TimeString.fromCalendarFields(value),
-                TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.TIME, newCall.getType().isNullable()), false);
-            }
-            case TIMESTAMP: {
-              Calendar value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                new DateTime(((NullableTimeStampHolder) output).value, DateTimeZone.UTC).toCalendar(null) :
-                new DateTime(((TimeStampHolder) output).value, DateTimeZone.UTC).toCalendar(null);
-              return rexBuilder.makeLiteral(TimestampString.fromCalendarFields(value),
-                TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.TIMESTAMP, newCall.getType().isNullable()), false);
-            }
-            case INTERVALYEAR: {
-              BigDecimal value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                new BigDecimal(((NullableIntervalYearHolder) output).value) :
-                new BigDecimal(((IntervalYearHolder) output).value);
-              return rexBuilder.makeLiteral(value,
+          }
+          case TIME: {
+            Calendar value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
+                new DateTime(((NullableTimeHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null) :
+                new DateTime(((TimeHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null);
+            RelDataType type = typeFactory.createSqlType(SqlTypeName.TIME, newCall.getType().getPrecision());
+            RelDataType typeWithNullability = typeFactory.createTypeWithNullability(type, newCall.getType().isNullable());
+            return rexBuilder.makeLiteral(TimeString.fromCalendarFields(value), typeWithNullability, false);
+          }
+          case TIMESTAMP: {
+            Calendar value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
+                new DateTime(((NullableTimeStampHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null) :
+                new DateTime(((TimeStampHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null);
+            RelDataType type = typeFactory.createSqlType(SqlTypeName.TIMESTAMP, newCall.getType().getPrecision());
+            RelDataType typeWithNullability = typeFactory.createTypeWithNullability(type, newCall.getType().isNullable());
+            return rexBuilder.makeLiteral(TimestampString.fromCalendarFields(value), typeWithNullability, false);
+          }
+          case INTERVALYEAR: {
+            BigDecimal value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
+                new BigDecimal(((NullableIntervalYearHolder) valueHolder).value) :
+                new BigDecimal(((IntervalYearHolder) valueHolder).value);
+            return rexBuilder.makeLiteral(value,
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.INTERVAL_YEAR_MONTH, newCall.getType().isNullable()), false);
+          }
+          case INTERVALDAY: {
+            int days;
+            int milliseconds;
+            if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
+              NullableIntervalDayHolder intervalDayOut = (NullableIntervalDayHolder) valueHolder;
+              days = intervalDayOut.days;
+              milliseconds = intervalDayOut.milliseconds;
+            } else {
+              IntervalDayHolder intervalDayOut = (IntervalDayHolder) valueHolder;
+              days = intervalDayOut.days;
+              milliseconds = intervalDayOut.milliseconds;
             }
-            case INTERVALDAY: {
-              int days;
-              int milliseconds;
-              if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
-                NullableIntervalDayHolder intervalDayOut = (NullableIntervalDayHolder) output;
-                days = intervalDayOut.days;
-                milliseconds = intervalDayOut.milliseconds;
-              } else {
-                IntervalDayHolder intervalDayOut = (IntervalDayHolder) output;
-                days = intervalDayOut.days;
-                milliseconds = intervalDayOut.milliseconds;
-              }
-              return rexBuilder.makeLiteral(
-                  new BigDecimal(days * (long) DateUtilities.daysToStandardMillis + milliseconds),
-                  TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.INTERVAL_DAY,
-                      newCall.getType().isNullable()), false);
-            }
-            // The list of known unsupported types is used to trigger this behavior of re-using the input expression
-            // before the expression is even attempted to be evaluated, this is just here as a last precaution a
-            // as new types may be added in the future.
-            default:
-              logger.debug("Constant expression not folded due to return type {}, complete expression: {}",
+            return rexBuilder.makeLiteral(
+                new BigDecimal(days * (long) DateUtilities.daysToStandardMillis + milliseconds),
+                TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.INTERVAL_DAY,
+                    newCall.getType().isNullable()), false);
+          }
+          // The list of known unsupported types is used to trigger this behavior of re-using the input expression
+          // before the expression is even attempted to be evaluated, this is just here as a last precaution a
+          // as new types may be added in the future.
+          default:
+            logger.debug("Constant expression not folded due to return type {}, complete expression: {}",
                 materializedExpr.getMajorType(),
                 ExpressionStringBuilder.toString(materializedExpr));
-              return newCall;
-          }
+            return newCall;
         }
       };
 
@@ -366,5 +367,3 @@ public RexNode apply(ValueHolder output) {
     }
   }
 }
-
-
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
index 37b260c6e61..0d884b9319c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
@@ -19,6 +19,8 @@
 
 import java.math.BigDecimal;
 import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -32,8 +34,10 @@
 import org.apache.drill.exec.record.RecordBatchLoader;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.vector.IntervalYearVector;
-import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.ClusterTest;
 import org.junit.Assert;
+import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -42,15 +46,21 @@
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 import org.apache.drill.shaded.guava.com.google.common.collect.Maps;
 
+import static org.apache.drill.exec.ExecTest.mockUtcDateTimeZone;
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.hasItem;
 
 @Category({UnlikelyTest.class, SqlFunctionTest.class})
-public class TestCastFunctions extends BaseTestQuery {
+public class TestCastFunctions extends ClusterTest {
 
   @Rule
   public ExpectedException thrown = ExpectedException.none();
 
+  @BeforeClass
+  public static void setUp() throws Exception {
+    startCluster(ClusterFixture.builder(dirTestWatcher));
+  }
+
   @Test
   public void testVarbinaryToDate() throws Exception {
     testBuilder()
@@ -123,7 +133,7 @@ public void testCastFloatToInt() throws Exception {
 
     for (float value : values.keySet()) {
       try {
-        test("create table dfs.tmp.table_with_float as\n" +
+        run("create table dfs.tmp.table_with_float as\n" +
               "(select cast(%1$s as float) c1 from (values(1)))", value);
 
         testBuilder()
@@ -133,7 +143,7 @@ public void testCastFloatToInt() throws Exception {
           .baselineValues(values.get(value))
           .go();
       } finally {
-        test("drop table if exists dfs.tmp.table_with_float");
+        run("drop table if exists dfs.tmp.table_with_float");
       }
     }
   }
@@ -152,7 +162,7 @@ public void testCastIntToFloatAndDouble() throws Exception {
 
     for (int value : values) {
       try {
-        test("create table dfs.tmp.table_with_int as\n" +
+        run("create table dfs.tmp.table_with_int as\n" +
               "(select cast(%1$s as int) c1 from (values(1)))", value);
 
         testBuilder()
@@ -164,7 +174,7 @@ public void testCastIntToFloatAndDouble() throws Exception {
           .baselineValues((float) value, (double) value)
           .go();
       } finally {
-        test("drop table if exists dfs.tmp.table_with_int");
+        run("drop table if exists dfs.tmp.table_with_int");
       }
     }
   }
@@ -190,7 +200,7 @@ public void testCastFloatToBigInt() throws Exception {
 
     for (float value : values.keySet()) {
       try {
-        test("create table dfs.tmp.table_with_float as\n" +
+        run("create table dfs.tmp.table_with_float as\n" +
               "(select cast(%1$s as float) c1 from (values(1)))", value);
 
         testBuilder()
@@ -200,7 +210,7 @@ public void testCastFloatToBigInt() throws Exception {
           .baselineValues(values.get(value))
           .go();
       } finally {
-        test("drop table if exists dfs.tmp.table_with_float");
+        run("drop table if exists dfs.tmp.table_with_float");
       }
     }
   }
@@ -221,7 +231,7 @@ public void testCastBigIntToFloatAndDouble() throws Exception {
 
     for (long value : values) {
       try {
-        test("create table dfs.tmp.table_with_bigint as\n" +
+        run("create table dfs.tmp.table_with_bigint as\n" +
               "(select cast(%1$s as bigInt) c1 from (values(1)))", value);
 
         testBuilder()
@@ -233,7 +243,7 @@ public void testCastBigIntToFloatAndDouble() throws Exception {
           .baselineValues((float) value, (double) value)
           .go();
       } finally {
-        test("drop table if exists dfs.tmp.table_with_bigint");
+        run("drop table if exists dfs.tmp.table_with_bigint");
       }
     }
   }
@@ -259,7 +269,7 @@ public void testCastDoubleToInt() throws Exception {
 
     for (double value : values.keySet()) {
       try {
-        test("create table dfs.tmp.table_with_double as\n" +
+        run("create table dfs.tmp.table_with_double as\n" +
               "(select cast(%1$s as double) c1 from (values(1)))", value);
 
         testBuilder()
@@ -269,7 +279,7 @@ public void testCastDoubleToInt() throws Exception {
           .baselineValues(values.get(value))
           .go();
       } finally {
-        test("drop table if exists dfs.tmp.table_with_double");
+        run("drop table if exists dfs.tmp.table_with_double");
       }
     }
   }
@@ -295,7 +305,7 @@ public void testCastDoubleToBigInt() throws Exception {
     values.put(Double.MIN_VALUE, 0L);
     for (double value : values.keySet()) {
       try {
-        test("create table dfs.tmp.table_with_double as\n" +
+        run("create table dfs.tmp.table_with_double as\n" +
               "(select cast(%1$s as double) c1 from (values(1)))", value);
 
         testBuilder()
@@ -305,7 +315,7 @@ public void testCastDoubleToBigInt() throws Exception {
           .baselineValues(values.get(value))
           .go();
       } finally {
-        test("drop table if exists dfs.tmp.table_with_double");
+        run("drop table if exists dfs.tmp.table_with_double");
       }
     }
   }
@@ -323,7 +333,7 @@ public void testCastIntAndBigInt() throws Exception {
 
     for (int value : values) {
       try {
-        test("create table dfs.tmp.table_with_int as\n" +
+        run("create table dfs.tmp.table_with_int as\n" +
               "(select cast(%1$s as int) c1, cast(%1$s as bigInt) c2 from (values(1)))", value);
 
         testBuilder()
@@ -335,7 +345,7 @@ public void testCastIntAndBigInt() throws Exception {
           .baselineValues((long) value, value)
           .go();
       } finally {
-        test("drop table if exists dfs.tmp.table_with_int");
+        run("drop table if exists dfs.tmp.table_with_int");
       }
     }
   }
@@ -358,7 +368,7 @@ public void testCastFloatAndDouble() throws Exception {
 
     for (double value : values) {
       try {
-        test("create table dfs.tmp.table_with_float as\n" +
+        run("create table dfs.tmp.table_with_float as\n" +
               "(select cast(%1$s as float) c1,\n" +
                       "cast(%1$s as double) c2\n" +
               "from (values(1)))", value);
@@ -372,7 +382,7 @@ public void testCastFloatAndDouble() throws Exception {
           .baselineValues((double) ((float) (value)), (float) value)
           .go();
       } finally {
-        test("drop table if exists dfs.tmp.table_with_float");
+        run("drop table if exists dfs.tmp.table_with_float");
       }
     }
   }
@@ -380,7 +390,7 @@ public void testCastFloatAndDouble() throws Exception {
   @Test
   public void testCastIntAndBigIntToDecimal() throws Exception {
       try {
-        test("alter session set planner.enable_decimal_data_type = true");
+        client.alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
 
         testBuilder()
           .physicalPlanFromFile("decimal/cast_int_decimal.json")
@@ -406,15 +416,15 @@ public void testCastIntAndBigIntToDecimal() throws Exception {
                           new BigDecimal(123456789))
           .go();
       } finally {
-        test("drop table if exists dfs.tmp.table_with_int");
-        test("alter session reset planner.enable_decimal_data_type");
+        run("drop table if exists dfs.tmp.table_with_int");
+        client.resetSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
       }
   }
 
   @Test
   public void testCastDecimalToIntAndBigInt() throws Exception {
     try {
-      test("alter session set planner.enable_decimal_data_type = true");
+      client.alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
 
       testBuilder()
         .physicalPlanFromFile("decimal/cast_decimal_int.json")
@@ -434,15 +444,15 @@ public void testCastDecimalToIntAndBigInt() throws Exception {
         .baselineValues(123456789, 123456789, 123456789L, 123456789L)
         .go();
     } finally {
-      test("drop table if exists dfs.tmp.table_with_int");
-      test("alter session reset planner.enable_decimal_data_type");
+      run("drop table if exists dfs.tmp.table_with_int");
+      client.resetSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
     }
   }
 
   @Test
   public void testCastDecimalToFloatAndDouble() throws Exception {
     try {
-      test("alter session set planner.enable_decimal_data_type = true");
+      client.alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
 
       testBuilder()
         .physicalPlanFromFile("decimal/cast_decimal_float.json")
@@ -456,15 +466,15 @@ public void testCastDecimalToFloatAndDouble() throws Exception {
         .baselineValues(-1.0001f, -2.0301f, -1.0001, -2.0301)
         .go();
     } finally {
-      test("drop table if exists dfs.tmp.table_with_int");
-      test("alter session reset planner.enable_decimal_data_type");
+      run("drop table if exists dfs.tmp.table_with_int");
+      client.resetSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
     }
   }
 
   @Test
   public void testCastDecimalToVarDecimal() throws Exception {
     try {
-      setSessionOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
+      client.alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
 
       testBuilder()
         .physicalPlanFromFile("decimal/cast_decimal_vardecimal.json")
@@ -486,15 +496,15 @@ public void testCastDecimalToVarDecimal() throws Exception {
             new BigDecimal("12"), new BigDecimal("123456789123456789"))
         .go();
     } finally {
-      test("drop table if exists dfs.tmp.table_with_int");
-      resetSessionOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
+      run("drop table if exists dfs.tmp.table_with_int");
+      client.resetSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
     }
   }
 
   @Test
   public void testCastVarDecimalToDecimal() throws Exception {
     try {
-      setSessionOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
+      client.alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
 
       testBuilder()
         .physicalPlanFromFile("decimal/cast_vardecimal_decimal.json")
@@ -516,15 +526,15 @@ public void testCastVarDecimalToDecimal() throws Exception {
           new BigDecimal("12"), new BigDecimal("123456789123456789"))
         .go();
     } finally {
-      test("drop table if exists dfs.tmp.table_with_int");
-      resetSessionOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
+      run("drop table if exists dfs.tmp.table_with_int");
+      client.resetSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
     }
   }
 
   @Test // DRILL-4970
   public void testCastNegativeFloatToInt() throws Exception {
     try {
-      test("create table dfs.tmp.table_with_float as\n" +
+      run("create table dfs.tmp.table_with_float as\n" +
               "(select cast(-255.0 as double) as double_col,\n" +
                       "cast(-255.0 as float) as float_col\n" +
               "from (values(1)))");
@@ -551,16 +561,16 @@ public void testCastNegativeFloatToInt() throws Exception {
         }
       }
     } finally {
-      test("drop table if exists dfs.tmp.table_with_float");
+      run("drop table if exists dfs.tmp.table_with_float");
     }
   }
 
   @Test // DRILL-4970
   public void testCastNegativeDecimalToVarChar() throws Exception {
     try {
-      test("alter session set planner.enable_decimal_data_type = true");
+      client.alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
 
-      test("create table dfs.tmp.table_with_decimal as" +
+      run("create table dfs.tmp.table_with_decimal as" +
               "(select cast(cast(manager_id as double) * (-1) as decimal(9, 0)) as decimal9_col,\n" +
                       "cast(cast(manager_id as double) * (-1) as decimal(18, 0)) as decimal18_col\n" +
               "from cp.`parquet/fixedlenDecimal.parquet` limit 1)");
@@ -581,8 +591,8 @@ public void testCastNegativeDecimalToVarChar() throws Exception {
           .go();
       }
     } finally {
-      test("drop table if exists dfs.tmp.table_with_decimal");
-      test("alter session reset planner.enable_decimal_data_type");
+      run("drop table if exists dfs.tmp.table_with_decimal");
+      client.resetSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
     }
   }
 
@@ -606,7 +616,7 @@ public void testCastDecimalZeroPrecision() throws Exception {
     thrown.expect(UserRemoteException.class);
     thrown.expectMessage(containsString("VALIDATION ERROR: Expected precision greater than 0, but was 0"));
 
-    test(query);
+    run(query);
   }
 
   @Test
@@ -616,7 +626,7 @@ public void testCastDecimalGreaterScaleThanPrecision() throws Exception {
     thrown.expect(UserRemoteException.class);
     thrown.expectMessage(containsString("VALIDATION ERROR: Expected scale less than or equal to precision, but was scale 5 and precision 3"));
 
-    test(query);
+    run(query);
   }
 
   @Test
@@ -626,7 +636,7 @@ public void testCastIntDecimalOverflow() throws Exception {
     thrown.expect(UserRemoteException.class);
     thrown.expectMessage(containsString("VALIDATION ERROR: Value 123456 overflows specified precision 4 with scale 0"));
 
-    test(query);
+    run(query);
   }
 
   @Test
@@ -636,7 +646,7 @@ public void testCastBigIntDecimalOverflow() throws Exception {
     thrown.expect(UserRemoteException.class);
     thrown.expectMessage(containsString("VALIDATION ERROR: Value 123456 overflows specified precision 4 with scale 0"));
 
-    test(query);
+    run(query);
   }
 
   @Test
@@ -646,7 +656,7 @@ public void testCastFloatDecimalOverflow() throws Exception {
     thrown.expect(UserRemoteException.class);
     thrown.expectMessage(containsString("VALIDATION ERROR: Value 123456.123 overflows specified precision 4 with scale 0"));
 
-    test(query);
+    run(query);
   }
 
   @Test
@@ -656,7 +666,7 @@ public void testCastDoubleDecimalOverflow() throws Exception {
     thrown.expect(UserRemoteException.class);
     thrown.expectMessage(containsString("VALIDATION ERROR: Value 123456.123 overflows specified precision 4 with scale 0"));
 
-    test(query);
+    run(query);
   }
 
   @Test
@@ -666,14 +676,14 @@ public void testCastVarCharDecimalOverflow() throws Exception {
     thrown.expect(UserRemoteException.class);
     thrown.expectMessage(containsString("VALIDATION ERROR: Value 123456.123 overflows specified precision 4 with scale 0"));
 
-    test(query);
+    run(query);
   }
 
   @Test // DRILL-6783
   public void testCastVarCharIntervalYear() throws Exception {
     String query = "select cast('P31M' as interval month) as i from cp.`employee.json` limit 10";
-    List<QueryDataBatch> result = testSqlWithResults(query);
-    RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());
+    List<QueryDataBatch> result = queryBuilder().sql(query).results();
+    RecordBatchLoader loader = new RecordBatchLoader(cluster.drillbit().getContext().getAllocator());
 
     QueryDataBatch b = result.get(0);
     loader.load(b.getHeader().getDef(), b.getData());
@@ -696,4 +706,47 @@ public void testCastVarCharIntervalYear() throws Exception {
     b.release();
     loader.clear();
   }
+
+  @Test // DRILL-6959
+  public void testCastTimestampLiteralInFilter() throws Exception {
+    try {
+      run("create table dfs.tmp.test_timestamp_filter as\n" +
+          "(select timestamp '2018-01-01 12:12:12.123' as c1)");
+
+      String query =
+          "select * from dfs.tmp.test_timestamp_filter\n" +
+              "where c1 = cast('2018-01-01 12:12:12.123' as timestamp(3))";
+
+      testBuilder()
+          .sqlQuery(query)
+          .unOrdered()
+          .baselineColumns("c1")
+          .baselineValues(LocalDateTime.of(2018, 1, 1,
+              12, 12, 12, 123_000_000))
+          .go();
+    } finally {
+      run("drop table if exists dfs.tmp.test_timestamp_filter");
+    }
+  }
+
+  @Test // DRILL-6959
+  public void testCastTimeLiteralInFilter() throws Exception {
+    try {
+      run("create table dfs.tmp.test_time_filter as\n" +
+          "(select time '12:12:12.123' as c1)");
+
+      String query =
+        "select * from dfs.tmp.test_time_filter\n" +
+            "where c1 = cast('12:12:12.123' as time(3))";
+
+      testBuilder()
+        .sqlQuery(query)
+        .unOrdered()
+        .baselineColumns("c1")
+        .baselineValues(LocalTime.of(12, 12, 12, 123_000_000))
+        .go();
+    } finally {
+      run("drop table if exists dfs.tmp.test_time_filter");
+    }
+  }
 }

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services