Posted to commits@drill.apache.org by vi...@apache.org on 2019/01/18 18:22:50 UTC

[drill] branch master updated (172dc7c -> 0f05f53)

This is an automated email from the ASF dual-hosted git repository.

vitalii pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git.


    from 172dc7c  DRILL-6903: SchemaBuilder code improvements
     new 4355e97  DRILL-6971: Display query state in query result page
     new da7cb4e  DRILL-6971: Labelled Query State with color coding
     new 95d91f4  DRILL-6969: Fix inconsistency of reading MaprDB JSON tables using hive plugin when native reader is enabled
     new de863af  DRILL-6944: UnsupportedOperationException thrown for view over MapR-DB binary table
     new a566b0a  DRILL-6942: Provide ability to sort list of profiles on Drill Web UI
     new 0f05f53  DRILL-6967: Fix TIMESTAMPDIFF function for QUARTER qualifier

The 6 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../store/mapr/db/MapRDBFormatPluginConfig.java    |   8 +
 .../mapr/db/json/CompareFunctionsProcessor.java    |  57 +++++-
 .../store/mapr/db/json/JsonConditionBuilder.java   |   7 +-
 .../store/mapr/db/json/MaprDBJsonRecordReader.java |  75 +++++--
 .../drill/maprdb/tests/json/TestScanRanges.java    |   4 +-
 ...ertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java |  11 +-
 .../exec/store/hive/schema/DrillHiveViewTable.java |   6 +-
 .../TimestampDiff.java                             |   3 +-
 .../java/org/apache/drill/exec/ExecConstants.java  |   6 +
 .../java/org/apache/drill/exec/dotdrill/View.java  | 222 ++++++++++++++-------
 .../AbstractDisposableUserClientConnection.java    |   7 +
 .../exec/server/options/SystemOptionManager.java   |   1 +
 .../drill/exec/server/rest/QueryResources.java     |   6 +
 .../drill/exec/server/rest/QueryWrapper.java       |   2 +
 .../exec/server/rest/profile/ProfileResources.java |   8 +-
 .../java-exec/src/main/resources/drill-module.conf |   2 +
 exec/java-exec/src/main/resources/rest/options.ftl |  13 +-
 .../src/main/resources/rest/profile/list.ftl       |  14 +-
 .../src/main/resources/rest/profile/profile.ftl    |  14 +-
 .../src/main/resources/rest/query/result.ftl       |  17 +-
 .../rest/static/css/drill-dataTables.sortable.css  |  11 +
 .../fn/impl/TestTimestampAddDiffFunctions.java     |  80 ++++----
 .../apache/drill/exec/sql/TestBaseViewSupport.java |   4 +-
 .../org/apache/drill/exec/sql/TestViewSupport.java |  61 +++++-
 .../test/resources/avro/map_string_to_long.avro    | Bin 0 -> 207 bytes
 .../resources/view/vw_before_drill_6944.view.drill |  10 +
 .../drill/exec/expr/fn/impl/DateUtility.java       |   3 +
 27 files changed, 473 insertions(+), 179 deletions(-)
 create mode 100644 exec/java-exec/src/main/resources/rest/static/css/drill-dataTables.sortable.css
 create mode 100644 exec/java-exec/src/test/resources/avro/map_string_to_long.avro
 create mode 100644 exec/java-exec/src/test/resources/view/vw_before_drill_6944.view.drill


[drill] 04/06: DRILL-6944: UnsupportedOperationException thrown for view over MapR-DB binary table

Posted by vi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

vitalii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit de863afcd17447c3f2bb91a7ddd9f1c273a633a4
Author: Igor Guzenko <ih...@gmail.com>
AuthorDate: Fri Jan 4 15:47:32 2019 +0200

    DRILL-6944: UnsupportedOperationException thrown for view over MapR-DB binary table
    
    1. Added persistence of MAP key and value types in Drill views (affects the .view.drill file) to avoid cast problems in the future.
    2. Preserved backward compatibility with older view files by treating untyped maps as ANY.
    
    closes #1602
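    
    For illustration, a hedged Java sketch of the type resolution this change
    enables. It condenses what View.getRowType() now does; the factory
    variable (a Calcite RelDataTypeFactory) and the VARCHAR(65535)/BIGINT
    pairing (taken from the commit's Avro test table) are assumptions, not
    code from the commit:
    
        import org.apache.calcite.rel.type.RelDataType;
        import org.apache.calcite.rel.type.RelDataTypeFactory;
        import org.apache.calcite.sql.type.SqlTypeName;
    
        // New view files persist keyType and valueType, so the exact MAP
        // type can be rebuilt when the view is read back.
        RelDataType key = factory.createSqlType(SqlTypeName.VARCHAR, 65535);
        RelDataType value = factory.createSqlType(SqlTypeName.BIGINT);
        RelDataType mapType = factory.createMapType(key, value);
    
        // Pre-DRILL-6944 view files carry no key/value info, so the MAP
        // column falls back to ANY instead of producing an unserializable
        // raw MAP cast.
        RelDataType legacy = factory.createSqlType(SqlTypeName.ANY);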
---
 .../exec/store/hive/schema/DrillHiveViewTable.java |   6 +-
 .../java/org/apache/drill/exec/dotdrill/View.java  | 222 ++++++++++++++-------
 .../apache/drill/exec/sql/TestBaseViewSupport.java |   4 +-
 .../org/apache/drill/exec/sql/TestViewSupport.java |  61 +++++-
 .../test/resources/avro/map_string_to_long.avro    | Bin 0 -> 207 bytes
 .../resources/view/vw_before_drill_6944.view.drill |  10 +
 6 files changed, 225 insertions(+), 78 deletions(-)

diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/DrillHiveViewTable.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/DrillHiveViewTable.java
index 5a9e92d..aeeb47c 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/DrillHiveViewTable.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/DrillHiveViewTable.java
@@ -84,7 +84,7 @@ public class DrillHiveViewTable extends DrillViewTable {
    * @return - View object for further usage
    */
   private static View createView(List<String> schemaPath, HiveTableWithColumnCache hiveView) {
-    List<View.FieldType> viewFields = getViewFieldTypes(hiveView);
+    List<View.Field> viewFields = getViewFieldTypes(hiveView);
     String viewName = hiveView.getTableName();
     String viewSql = hiveView.getViewExpandedText();
     return new View(viewName, viewSql, viewFields, schemaPath);
@@ -97,10 +97,10 @@ public class DrillHiveViewTable extends DrillViewTable {
    * @param hiveTable - hive view metadata
    * @return - list of fields for construction of View
    */
-  private static List<View.FieldType> getViewFieldTypes(HiveTableWithColumnCache hiveTable) {
+  private static List<View.Field> getViewFieldTypes(HiveTableWithColumnCache hiveTable) {
     return Stream.of(hiveTable.getColumnListsCache().getTableSchemaColumns(), hiveTable.getPartitionKeys())
         .flatMap(Collection::stream)
-        .map(hiveField -> new View.FieldType(hiveField.getName(), DATA_TYPE_CONVERTER.convertToNullableRelDataType(hiveField)))
+        .map(hiveField -> new View.Field(hiveField.getName(), DATA_TYPE_CONVERTER.convertToNullableRelDataType(hiveField)))
         .collect(toList());
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/dotdrill/View.java b/exec/java-exec/src/main/java/org/apache/drill/exec/dotdrill/View.java
index 43f7bdb..91900de 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/dotdrill/View.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/dotdrill/View.java
@@ -17,11 +17,13 @@
  */
 package org.apache.drill.exec.dotdrill;
 
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.stream.Collectors;
 
 import org.apache.calcite.avatica.util.TimeUnit;
+import org.apache.calcite.rel.type.DynamicRecordType;
 import org.apache.calcite.sql.SqlIntervalQualifier;
 import org.apache.calcite.sql.parser.SqlParserPos;
 import org.apache.calcite.sql.type.SqlTypeFamily;
@@ -39,31 +41,33 @@ import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.JsonInclude.Include;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
-import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
+
 
 @JsonTypeName("view")
 public class View {
 
   private final String name;
   private String sql;
-  private List<FieldType> fields;
+  private List<Field> fields;
 
   /* Current schema when view is created (not the schema to which view belongs to) */
   private List<String> workspaceSchemaPath;
 
   @JsonInclude(Include.NON_NULL)
-  public static class FieldType {
+  public static class Field {
 
     private final String name;
     private final SqlTypeName type;
-    private final Integer precision;
-    private final Integer scale;
-    private SqlIntervalQualifier intervalQualifier;
     private final Boolean isNullable;
+    private Integer precision;
+    private Integer scale;
+    private SqlIntervalQualifier intervalQualifier;
+    private Field keyType;
+    private Field valueType;
 
 
     @JsonCreator
-    public FieldType(
+    public Field(
         @JsonProperty("name")                       String name,
         @JsonProperty("type")                       SqlTypeName type,
         @JsonProperty("precision")                  Integer precision,
@@ -71,7 +75,9 @@ public class View {
         @JsonProperty("startUnit")                  TimeUnit startUnit,
         @JsonProperty("endUnit")                    TimeUnit endUnit,
         @JsonProperty("fractionalSecondPrecision")  Integer fractionalSecondPrecision,
-        @JsonProperty("isNullable")                 Boolean isNullable) {
+        @JsonProperty("isNullable")                 Boolean isNullable,
+        @JsonProperty("keyType") Field keyType,
+        @JsonProperty("valueType") Field valueType) {
       // Fix for views which were created on Calcite 1.4.
       // After Calcite upgrade star "*" was changed on dynamic star "**" (SchemaPath.DYNAMIC_STAR)
       // and type of star was changed to SqlTypeName.DYNAMIC_STAR
@@ -88,50 +94,59 @@ public class View {
       // Property "isNullable" is not part of the initial view definition and
       // was added in DRILL-2342.  If the default value is null, consider it as
       // "true".  It is safe to default to "nullable" than "required" type.
-      this.isNullable = isNullable == null ? true : isNullable;
+      this.isNullable = isNullable == null || isNullable;
+      this.keyType = keyType;
+      this.valueType = valueType;
     }
 
-    public FieldType(String name, RelDataType dataType) {
+    public Field(String name, RelDataType dataType) {
       this.name = name;
       this.type = dataType.getSqlTypeName();
-
-      Integer p = null;
-      Integer s = null;
-      Integer fractionalSecondPrecision = null;
-
+      this.isNullable = dataType.isNullable();
+      this.intervalQualifier = dataType.getIntervalQualifier();
       switch (dataType.getSqlTypeName()) {
-      case CHAR:
-      case BINARY:
-      case VARBINARY:
-      case VARCHAR:
-        p = dataType.getPrecision();
-        break;
-      case DECIMAL:
-        p = dataType.getPrecision();
-        s = dataType.getScale();
-        break;
-      case INTERVAL_YEAR:
-      case INTERVAL_YEAR_MONTH:
-      case INTERVAL_MONTH:
-      case INTERVAL_DAY:
-      case INTERVAL_DAY_HOUR:
-      case INTERVAL_DAY_MINUTE:
-      case INTERVAL_DAY_SECOND:
-      case INTERVAL_HOUR:
-      case INTERVAL_HOUR_MINUTE:
-      case INTERVAL_HOUR_SECOND:
-      case INTERVAL_MINUTE:
-      case INTERVAL_MINUTE_SECOND:
-      case INTERVAL_SECOND:
-        p = dataType.getIntervalQualifier().getStartPrecisionPreservingDefault();
-      default:
-        break;
+        case CHAR:
+        case BINARY:
+        case VARBINARY:
+        case VARCHAR:
+          this.precision = dataType.getPrecision();
+          break;
+        case DECIMAL:
+          this.precision = dataType.getPrecision();
+          this.scale = dataType.getScale();
+          break;
+        case INTERVAL_YEAR:
+        case INTERVAL_YEAR_MONTH:
+        case INTERVAL_MONTH:
+        case INTERVAL_DAY:
+        case INTERVAL_DAY_HOUR:
+        case INTERVAL_DAY_MINUTE:
+        case INTERVAL_DAY_SECOND:
+        case INTERVAL_HOUR:
+        case INTERVAL_HOUR_MINUTE:
+        case INTERVAL_HOUR_SECOND:
+        case INTERVAL_MINUTE:
+        case INTERVAL_MINUTE_SECOND:
+        case INTERVAL_SECOND:
+          this.precision = dataType.getIntervalQualifier().getStartPrecisionPreservingDefault();
+          break;
+        case MAP:
+          keyType = new Field(dataType.getKeyType());
+          valueType = new Field(dataType.getValueType());
+          break;
       }
+    }
 
-      this.precision = p;
-      this.scale = s;
-      this.intervalQualifier = dataType.getIntervalQualifier();
-      this.isNullable = dataType.isNullable();
+    /**
+     * Overloaded constructor for creating fields
+     * that carry only a dataType and don't represent
+     * named columns, such as the key and value types
+     * of MAP columns.
+     *
+     * @param dataType field type
+     */
+    public Field(RelDataType dataType) {
+      this(null, dataType);
     }
 
     /**
@@ -207,6 +222,52 @@ public class View {
       return isNullable;
     }
 
+    /**
+     * Gets key type for fields whose type is
+     * {@link org.apache.calcite.sql.type.SqlTypeName#MAP}
+     *
+     * @return key type of map
+     */
+    public Field getKeyType() {
+      return keyType;
+    }
+
+    /**
+     * Gets value type for fields whose type is
+     * {@link org.apache.calcite.sql.type.SqlTypeName#MAP}
+     *
+     * @return value type of map
+     */
+    public Field getValueType() {
+      return valueType;
+    }
+
+
+    /**
+     * Checks whether this field type is an interval
+     * by comparing its type family with the known
+     * INTERVAL_YEAR_MONTH and INTERVAL_DAY_TIME families.
+     *
+     * @return whether the current type belongs to either
+     *         known interval family
+     */
+    @JsonIgnore
+    boolean isInterval() {
+      SqlTypeFamily family = type.getFamily();
+      return family == SqlTypeFamily.INTERVAL_YEAR_MONTH || family == SqlTypeFamily.INTERVAL_DAY_TIME;
+    }
+
+    /**
+     * Checks whether key and value types were
+     * persisted for a MAP field.
+     *
+     * @return true if both map key and value types are present
+     */
+    @JsonIgnore
+    boolean isMapTypesPresent() {
+      return keyType != null && valueType != null;
+    }
+
   }
 
 
@@ -214,7 +275,7 @@ public class View {
     this(name,
         sql,
         rowType.getFieldList().stream()
-            .map(f -> new FieldType(f.getName(), f.getType()))
+            .map(f -> new Field(f.getName(), f.getType()))
             .collect(Collectors.toList()),
         workspaceSchemaPath);
   }
@@ -222,7 +283,7 @@ public class View {
   @JsonCreator
   public View(@JsonProperty("name") String name,
               @JsonProperty("sql") String sql,
-              @JsonProperty("fields") List<FieldType> fields,
+              @JsonProperty("fields") List<Field> fields,
               @JsonProperty("workspaceSchemaPath") List<String> workspaceSchemaPath) {
     this.name = name;
     this.sql = sql;
@@ -235,6 +296,16 @@ public class View {
             .collect(Collectors.toList());
   }
 
+
+  /**
+   * If view fields are present, attempts to gather them
+   * into a struct type; otherwise returns an extension of {@link DynamicRecordType}.
+   *
+   * @param factory factory for creating rel data types
+   * @return struct type that describes the names and types of all
+   *         view fields, or an extension of {@link DynamicRecordType}
+   *         when view fields are empty
+   */
   public RelDataType getRowType(RelDataTypeFactory factory) {
 
     // if there are no fields defined, this is a dynamic view.
@@ -242,30 +313,45 @@ public class View {
       return new RelDataTypeDrillImpl(new RelDataTypeHolder(), factory);
     }
 
-    List<RelDataType> types = Lists.newArrayList();
-    List<String> names = Lists.newArrayList();
-
-    for (FieldType field : fields) {
+    List<RelDataType> types = new ArrayList<>(fields.size());
+    List<String> names = new ArrayList<>(fields.size());
+    for (Field field : fields) {
       names.add(field.getName());
-      RelDataType type;
-      if (   SqlTypeFamily.INTERVAL_YEAR_MONTH == field.getType().getFamily()
-          || SqlTypeFamily.INTERVAL_DAY_TIME   == field.getType().getFamily() ) {
-       type = factory.createSqlIntervalType( field.getIntervalQualifier() );
-      } else if (field.getPrecision() == null && field.getScale() == null) {
-        type = factory.createSqlType(field.getType());
-      } else if (field.getPrecision() != null && field.getScale() == null) {
-        type = factory.createSqlType(field.getType(), field.getPrecision());
-      } else {
-        type = factory.createSqlType(field.getType(), field.getPrecision(), field.getScale());
-      }
+      types.add(getType(field, factory));
+    }
+    return factory.createStructType(types, names);
+  }
 
-      if (field.getIsNullable()) {
-        types.add(factory.createTypeWithNullability(type, true));
+  private RelDataType getType(Field field, RelDataTypeFactory factory) {
+    RelDataType type;
+    final SqlTypeName typeName = field.getType();
+    final Integer precision = field.getPrecision();
+    final Integer scale = field.getScale();
+
+    if (field.isInterval()) {
+      type = factory.createSqlIntervalType(field.getIntervalQualifier());
+    } else if (precision != null) {
+      type = scale != null
+          ? factory.createSqlType(typeName, precision, scale)
+          : factory.createSqlType(typeName, precision);
+    } else if (typeName == SqlTypeName.MAP) {
+      if (field.isMapTypesPresent()) {
+        type = factory.createMapType(getType(field.getKeyType(), factory), getType(field.getValueType(), factory));
       } else {
-        types.add(type);
+         /*
+            For older views that don't have info about map key and value types,
+            the chosen type is ANY, because use of the raw MAP type causes creation
+            of a MAP cast expression that can't be serialized by ExpressionStringBuilder's
+            visitCastExpression(CastExpression e, StringBuilder sb) method.
+            See DRILL-6944 for more details.
+         */
+        type = factory.createSqlType(SqlTypeName.ANY);
       }
+    } else {
+      type = factory.createSqlType(field.getType());
     }
-    return factory.createStructType(types, names);
+
+    return field.getIsNullable() ? factory.createTypeWithNullability(type, true) : type;
   }
 
   @JsonIgnore
@@ -275,7 +361,7 @@ public class View {
 
   @JsonIgnore
   public boolean hasStar() {
-    for (FieldType field : fields) {
+    for (Field field : fields) {
       if (StarColumnHelper.isNonPrefixedStarColumn(field.getName())) {
         return true;
       }
@@ -295,7 +381,7 @@ public class View {
     return name;
   }
 
-  public List<FieldType> getFields() {
+  public List<Field> getFields() {
     return fields;
   }
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestBaseViewSupport.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestBaseViewSupport.java
index adb2538..e9f7aab 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestBaseViewSupport.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestBaseViewSupport.java
@@ -17,8 +17,8 @@
  */
 package org.apache.drill.exec.sql;
 
+import org.apache.drill.PlanTestBase;
 import org.apache.drill.shaded.guava.com.google.common.base.Strings;
-import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.test.TestBuilder;
 
 import java.util.List;
@@ -28,7 +28,7 @@ import java.util.concurrent.atomic.AtomicInteger;
  * Base class for view tests. It has utility methods which can be used when writing tests for views on tables
  * in different storage engines such as Hive, HBase etc.
  */
-public class TestBaseViewSupport extends BaseTestQuery {
+public class TestBaseViewSupport extends PlanTestBase {
   private static AtomicInteger viewSeqNum = new AtomicInteger(0);
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java
index a0773bc..a2eefcc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java
@@ -17,19 +17,19 @@
  */
 package org.apache.drill.exec.sql;
 
-import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
+import java.io.File;
+import java.nio.file.Paths;
+import java.util.List;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.drill.categories.SqlTest;
 import org.apache.drill.categories.UnlikelyTest;
+import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import java.io.File;
-import java.nio.file.Paths;
-import java.util.List;
-
 import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_TMP_SCHEMA;
 import static org.apache.drill.exec.util.StoragePluginTestUtils.TMP_SCHEMA;
 
@@ -39,6 +39,7 @@ public class TestViewSupport extends TestBaseViewSupport {
   @BeforeClass
   public static void setupTestFiles() {
     dirTestWatcher.copyResourceToRoot(Paths.get("nation"));
+    dirTestWatcher.copyResourceToRoot(Paths.get("avro", "map_string_to_long.avro"));
   }
 
   @Test
@@ -800,4 +801,54 @@ public class TestViewSupport extends TestBaseViewSupport {
       test("DROP VIEW IF EXISTS `%s`.`%s`", DFS_TMP_SCHEMA, viewName);
     }
   }
+
+  @Test // DRILL-6944
+  public void testSelectMapColumnOfNewlyCreatedView() throws Exception {
+    try {
+      test("CREATE VIEW dfs.tmp.`mapf_view` AS SELECT `mapf` FROM dfs.`avro/map_string_to_long.avro`");
+      test("SELECT * FROM dfs.tmp.`mapf_view`");
+      testBuilder()
+          .sqlQuery("SELECT `mapf`['ki'] as ki FROM dfs.tmp.`mapf_view`")
+          .unOrdered()
+          .baselineColumns("ki")
+          .baselineValues(1L)
+          .go();
+    } finally {
+      test("DROP VIEW IF EXISTS dfs.tmp.`mapf_view`");
+    }
+  }
+
+  @Test // DRILL-6944
+  public void testMapTypeFullyQualifiedInNewlyCreatedView() throws Exception {
+    try {
+      test("CREATE VIEW dfs.tmp.`mapf_view` AS SELECT `mapf` FROM dfs.`avro/map_string_to_long.avro`");
+      testPlanWithAttributesMatchingPatterns("SELECT * FROM dfs.tmp.`mapf_view`", new String[]{
+          "Screen : rowType = RecordType\\(\\(VARCHAR\\(65535\\), BIGINT\\) MAP mapf\\)",
+          "Project\\(mapf=\\[\\$0\\]\\) : rowType = RecordType\\(\\(VARCHAR\\(65535\\), BIGINT\\) MAP mapf\\)",
+          "Scan.*avro/map_string_to_long.avro.*rowType = RecordType\\(\\(VARCHAR\\(65535\\), BIGINT\\) MAP mapf\\)"
+      }, null);
+    } finally {
+      test("DROP VIEW IF EXISTS dfs.tmp.`mapf_view`");
+    }
+  }
+
+  @Test // DRILL-6944
+  public void testMapColumnOfOlderViewWithUntypedMap() throws Exception {
+    test("SELECT * FROM cp.`view/vw_before_drill_6944.view.drill`");
+    testBuilder()
+        .sqlQuery("SELECT `mapf`['ki'] as ki FROM cp.`view/vw_before_drill_6944.view.drill`")
+        .unOrdered()
+        .baselineColumns("ki")
+        .baselineValues(1L)
+        .go();
+  }
+
+  @Test // DRILL-6944
+  public void testMapTypeTreatedAsAnyInOlderViewWithUntypedMap() throws Exception {
+    testPlanWithAttributesMatchingPatterns("SELECT * FROM cp.`view/vw_before_drill_6944.view.drill`", new String[]{
+        "Screen : rowType = RecordType\\(ANY mapf\\)",
+        "Project.mapf=.CAST\\(\\$0\\):ANY NOT NULL.*"
+    }, null);
+  }
+
 }
diff --git a/exec/java-exec/src/test/resources/avro/map_string_to_long.avro b/exec/java-exec/src/test/resources/avro/map_string_to_long.avro
new file mode 100644
index 0000000..8d58a6b
Binary files /dev/null and b/exec/java-exec/src/test/resources/avro/map_string_to_long.avro differ
diff --git a/exec/java-exec/src/test/resources/view/vw_before_drill_6944.view.drill b/exec/java-exec/src/test/resources/view/vw_before_drill_6944.view.drill
new file mode 100644
index 0000000..de10426
--- /dev/null
+++ b/exec/java-exec/src/test/resources/view/vw_before_drill_6944.view.drill
@@ -0,0 +1,10 @@
+{
+  "name" : "vw_before_drill_6944",
+  "sql" : "SELECT `mapf`\nFROM dfs.`avro/map_string_to_long.avro`",
+  "fields" : [ {
+    "name" : "mapf",
+    "type" : "MAP",
+    "isNullable" : false
+  } ],
+  "workspaceSchemaPath" : [ ]
+}
\ No newline at end of file


[drill] 01/06: DRILL-6971: Display query state in query result page

Posted by vi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

vitalii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 4355e979e81ef7353b6e48b19283e0765e9a14bf
Author: Sorabh Hamirwasia <so...@apache.org>
AuthorDate: Fri Jan 11 16:42:21 2019 -0800

    DRILL-6971: Display query state in query result page
---
 .../drill/exec/rpc/AbstractDisposableUserClientConnection.java     | 7 +++++++
 .../java/org/apache/drill/exec/server/rest/QueryResources.java     | 6 ++++++
 .../main/java/org/apache/drill/exec/server/rest/QueryWrapper.java  | 2 ++
 exec/java-exec/src/main/resources/rest/query/result.ftl            | 3 +++
 4 files changed, 18 insertions(+)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/AbstractDisposableUserClientConnection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/AbstractDisposableUserClientConnection.java
index 3d5b297..1b8a347 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/AbstractDisposableUserClientConnection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/AbstractDisposableUserClientConnection.java
@@ -45,6 +45,8 @@ public abstract class AbstractDisposableUserClientConnection implements UserClie
 
   protected volatile UserException exception;
 
+  protected String queryState;
+
   /**
    * Wait until the query has completed or timeout is passed.
    *
@@ -73,6 +75,7 @@ public abstract class AbstractDisposableUserClientConnection implements UserClie
 
     // Release the wait latch if the query is terminated.
     final QueryState state = result.getQueryState();
+    queryState = state.toString();
     final QueryId queryId = result.getQueryId();
 
     if (logger.isDebugEnabled()) {
@@ -104,4 +107,8 @@ public abstract class AbstractDisposableUserClientConnection implements UserClie
   public DrillPBError getError() {
     return error;
   }
+
+  public String getQueryState() {
+    return queryState;
+  }
 }
\ No newline at end of file
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/QueryResources.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/QueryResources.java
index 61af7c7..e62d33d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/QueryResources.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/QueryResources.java
@@ -134,6 +134,7 @@ public class QueryResources {
     private final List<List<String>> rows;
     private final String queryId;
     private final String rowsPerPageValues;
+    private final String queryState;
 
     public TabularResult(QueryResult result, String rowsPerPageValuesAsStr) {
       rowsPerPageValues = rowsPerPageValuesAsStr;
@@ -149,6 +150,7 @@ public class QueryResources {
 
       this.columns = ImmutableList.copyOf(result.columns);
       this.rows = rows;
+      this.queryState = result.queryState;
     }
 
     public boolean isEmpty() {
@@ -171,6 +173,10 @@ public class QueryResources {
     public String getRowsPerPageValues() {
       return rowsPerPageValues;
     }
+
+    public String getQueryState() {
+      return queryState;
+    }
   }
 
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/QueryWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/QueryWrapper.java
index 4eb1656..9501164 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/QueryWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/QueryWrapper.java
@@ -128,12 +128,14 @@ public class QueryWrapper {
     public final Collection<String> columns;
     public final List<Map<String, String>> rows;
     public final List<String> metadata;
+    public final String queryState;
 
     //DRILL-6847:  Modified the constructor so that the method has access to all the properties in webUserConnection
     public QueryResult(QueryId queryId, WebUserConnection webUserConnection, List<Map<String, String>> rows) {
         this.queryId = QueryIdHelper.getQueryId(queryId);
         this.columns = webUserConnection.columns;
         this.metadata = webUserConnection.metadata;
+        this.queryState = webUserConnection.getQueryState();
         this.rows = rows;
       }
 
diff --git a/exec/java-exec/src/main/resources/rest/query/result.ftl b/exec/java-exec/src/main/resources/rest/query/result.ftl
index 9f16c1f..13e675d 100644
--- a/exec/java-exec/src/main/resources/rest/query/result.ftl
+++ b/exec/java-exec/src/main/resources/rest/query/result.ftl
@@ -34,6 +34,9 @@
     <td align='left'>
       <button type="button"  title="Open in new window" onclick="popOutProfile('${model.getQueryId()}');" class="btn btn-default btn-sm">
       <b>Query Profile:</b> ${model.getQueryId()} <span class="glyphicon glyphicon-new-window"/></button>
+     </td>
+     <td>
+         <span class="input-group-addon" style="font-size:95%"><b>Query State:</b> ${model.getQueryState()}</span>
      </td><td align="right" width="100%">
        <div class="input-group">
          <span class="input-group-addon" style="font-size:95%">Delimiter </span>


[drill] 02/06: DRILL-6971: Labelled Query State with color coding

Posted by vi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

vitalii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit da7cb4e2fc67393f0ef26b0a03954ad139fdf7f9
Author: Kunal Khatua <kk...@maprtech.com>
AuthorDate: Mon Jan 14 11:53:15 2019 -0800

    DRILL-6971: Labelled Query State with color coding
    
    closes #1611
---
 .../java-exec/src/main/resources/rest/query/result.ftl | 18 ++++++++++++++----
 1 file changed, 14 insertions(+), 4 deletions(-)

diff --git a/exec/java-exec/src/main/resources/rest/query/result.ftl b/exec/java-exec/src/main/resources/rest/query/result.ftl
index 13e675d..d9f653f 100644
--- a/exec/java-exec/src/main/resources/rest/query/result.ftl
+++ b/exec/java-exec/src/main/resources/rest/query/result.ftl
@@ -33,11 +33,21 @@
   <table><tr>
     <td align='left'>
       <button type="button"  title="Open in new window" onclick="popOutProfile('${model.getQueryId()}');" class="btn btn-default btn-sm">
-      <b>Query Profile:</b> ${model.getQueryId()} <span class="glyphicon glyphicon-new-window"/></button>
+      <b>Query Profile:</b> ${model.getQueryId()} <#switch model.getQueryState()>
+        <#case "COMPLETED">
+          <span class="label label-success">
+          <#break>
+        <#case "CANCELED">
+          <span class="label label-warning">
+          <#break>
+        <#case "FAILED">
+          <span class="label label-danger">
+          <#break>
+        <#default>
+          <span class="label label-default">
+      </#switch>${model.getQueryState()}</span>&nbsp;&nbsp;&nbsp;<span class="glyphicon glyphicon-new-window"/></button>
      </td>
-     <td>
-         <span class="input-group-addon" style="font-size:95%"><b>Query State:</b> ${model.getQueryState()}</span>
-     </td><td align="right" width="100%">
+     <td align="right" width="100%">
        <div class="input-group">
          <span class="input-group-addon" style="font-size:95%">Delimiter </span>
          <input id="delimitBy" type="text" class="form-control input-sm" name="delimitBy" title="Specify delimiter" placeholder="Required" maxlength="2" size="2" value=",">


[drill] 03/06: DRILL-6969: Fix inconsistency of reading MaprDB JSON tables using hive plugin when native reader is enabled

Posted by vi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

vitalii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 95d91f40c5991720b6f9d1cd2147edf58c8b136f
Author: Volodymyr Vysotskyi <vv...@gmail.com>
AuthorDate: Thu Jan 10 20:47:08 2019 +0200

    DRILL-6969: Fix inconsistency of reading MaprDB JSON tables using hive plugin when native reader is enabled
    
    closes #1610
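    
    A hedged sketch of the offset arithmetic this commit introduces (the
    variable names here are assumptions, not identifiers from the commit):
    the record reader shifts stored UTC millis into the local zone, and the
    filter pushdown shifts filter timestamps back to UTC, so scan results
    and pushed-down predicates stay consistent.
    
        import java.time.OffsetDateTime;
    
        // Same offset computation as DateUtility.TIMEZONE_OFFSET_MILLIS.
        long offsetMillis = OffsetDateTime.now().getOffset().getTotalSeconds() * 1000L;
        long readerValue = storedUtcMillis + offsetMillis; // what the reader emits
        long pushedDown  = filterMillis - offsetMillis;    // what the scan filter compares
    
    The behavior is gated by the new option
    store.hive.maprdb_json.read_timestamp_with_timezone_offset, which
    defaults to false.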
---
 .../store/mapr/db/MapRDBFormatPluginConfig.java    |  8 +++
 .../mapr/db/json/CompareFunctionsProcessor.java    | 57 ++++++++++++----
 .../store/mapr/db/json/JsonConditionBuilder.java   |  7 +-
 .../store/mapr/db/json/MaprDBJsonRecordReader.java | 75 ++++++++++++++++++----
 .../drill/maprdb/tests/json/TestScanRanges.java    |  4 +-
 ...ertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java | 11 +++-
 .../java/org/apache/drill/exec/ExecConstants.java  |  5 ++
 .../exec/server/options/SystemOptionManager.java   |  1 +
 .../java-exec/src/main/resources/drill-module.conf |  1 +
 .../drill/exec/expr/fn/impl/DateUtility.java       |  3 +
 10 files changed, 143 insertions(+), 29 deletions(-)

diff --git a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBFormatPluginConfig.java b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBFormatPluginConfig.java
index 07943f6..c17696c 100644
--- a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBFormatPluginConfig.java
+++ b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBFormatPluginConfig.java
@@ -32,6 +32,7 @@ public class MapRDBFormatPluginConfig extends TableFormatPluginConfig {
   public boolean ignoreSchemaChange = false;
   public boolean readAllNumbersAsDouble = false;
   public boolean disableCountOptimization = false;
+  public boolean readTimestampWithZoneOffset = false;
   /* This flag is a switch to do special handling in case of
    * no columns in the query exists in the maprdb table. This flag
    * can get deprecated once it is observed that this special handling
@@ -48,6 +49,7 @@ public class MapRDBFormatPluginConfig extends TableFormatPluginConfig {
     result = 31 * result + (readAllNumbersAsDouble ? 1231 : 1237);
     result = 31 * result + (disableCountOptimization ? 1231 : 1237);
     result = 31 * result + (nonExistentFieldSupport ? 1231 : 1237);
+    result = 31 * result + (readTimestampWithZoneOffset ? 1231 : 1237);
     return result;
   }
 
@@ -68,6 +70,8 @@ public class MapRDBFormatPluginConfig extends TableFormatPluginConfig {
       return false;
     } else if (!index.equals(other.index)) {
       return false;
+    } else if (readTimestampWithZoneOffset != other.readTimestampWithZoneOffset) {
+      return false;
     }
     return true;
   }
@@ -76,6 +80,10 @@ public class MapRDBFormatPluginConfig extends TableFormatPluginConfig {
     return readAllNumbersAsDouble;
   }
 
+  public boolean isReadTimestampWithZoneOffset() {
+    return readTimestampWithZoneOffset;
+  }
+
   public boolean isAllTextMode() {
     return allTextMode;
   }
diff --git a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/CompareFunctionsProcessor.java b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/CompareFunctionsProcessor.java
index a4cb0bd..d9db7bc 100644
--- a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/CompareFunctionsProcessor.java
+++ b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/CompareFunctionsProcessor.java
@@ -35,6 +35,7 @@ import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
 import org.apache.drill.common.expression.ValueExpressions.VarDecimalExpression;
 import org.apache.drill.common.expression.visitors.AbstractExprVisitor;
+import org.apache.drill.exec.expr.fn.impl.DateUtility;
 import org.joda.time.LocalTime;
 import org.ojai.Value;
 import org.ojai.types.ODate;
@@ -42,7 +43,6 @@ import org.ojai.types.OTime;
 
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableMap;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableSet;
-import com.mapr.db.rowcol.KeyValueBuilder;
 import com.mapr.db.util.SqlHelper;
 
 import org.ojai.types.OTimestamp;
@@ -51,8 +51,8 @@ class CompareFunctionsProcessor extends AbstractExprVisitor<Boolean, LogicalExpr
 
   private String functionName;
   private Boolean success;
-  private Value value;
-  private SchemaPath path;
+  protected Value value;
+  protected SchemaPath path;
 
   public CompareFunctionsProcessor(String functionName) {
     this.functionName = functionName;
@@ -69,13 +69,45 @@ class CompareFunctionsProcessor extends AbstractExprVisitor<Boolean, LogicalExpr
     return false;
   }
 
+  /**
+   * Converts the specified function call so that it can be pushed into the MapR-DB JSON scan.
+   *
+   * @param call the function call to be pushed
+   * @return CompareFunctionsProcessor instance which contains the converted function call
+   */
   public static CompareFunctionsProcessor process(FunctionCall call) {
+    return processWithEvaluator(call, new CompareFunctionsProcessor(call.getName()));
+  }
+
+  /**
+   * Converts the specified function call so that it can be pushed into the MapR-DB JSON scan.
+   * When a timestamp value is used, it is converted to the UTC timezone
+   * before being converted to an {@link OTimestamp} instance.
+   *
+   * @param call the function call to be pushed
+   * @return CompareFunctionsProcessor instance which contains the converted function call
+   */
+  public static CompareFunctionsProcessor processWithTimeZoneOffset(FunctionCall call) {
+    CompareFunctionsProcessor processor = new CompareFunctionsProcessor(call.getName()) {
+      @Override
+      protected boolean visitTimestampExpr(SchemaPath path, TimeStampExpression valueArg) {
+        // converts timestamp value from local time zone to UTC since the record reader
+        // reads the timestamp in local timezone if the readTimestampWithZoneOffset flag is enabled
+        long timeStamp = valueArg.getTimeStamp() - DateUtility.TIMEZONE_OFFSET_MILLIS;
+        this.value = KeyValueBuilder.initFrom(new OTimestamp(timeStamp));
+        this.path = path;
+        return true;
+      }
+    };
+    return processWithEvaluator(call, processor);
+  }
+
+  private static CompareFunctionsProcessor processWithEvaluator(FunctionCall call, CompareFunctionsProcessor evaluator) {
     String functionName = call.getName();
     LogicalExpression nameArg = call.args.get(0);
-    LogicalExpression valueArg = call.args.size() >= 2? call.args.get(1) : null;
-    CompareFunctionsProcessor evaluator = new CompareFunctionsProcessor(functionName);
+    LogicalExpression valueArg = call.args.size() >= 2 ? call.args.get(1) : null;
 
-    //if (valueArg != null) {
+    if (valueArg != null) {
       if (VALUE_EXPRESSION_CLASSES.contains(nameArg.getClass())) {
         LogicalExpression swapArg = valueArg;
         valueArg = nameArg;
@@ -83,7 +115,7 @@ class CompareFunctionsProcessor extends AbstractExprVisitor<Boolean, LogicalExpr
         evaluator.functionName = COMPARE_FUNCTIONS_TRANSPOSE_MAP.get(functionName);
       }
       evaluator.success = nameArg.accept(evaluator, valueArg);
-    //}
+    }
 
     return evaluator;
   }
@@ -187,14 +219,17 @@ class CompareFunctionsProcessor extends AbstractExprVisitor<Boolean, LogicalExpr
     }
 
     if (valueArg instanceof TimeStampExpression) {
-      this.value = KeyValueBuilder.initFrom(new OTimestamp(((TimeStampExpression)valueArg).getTimeStamp()));
-      this.path = path;
-      return true;
+      return visitTimestampExpr(path, (TimeStampExpression) valueArg);
     }
-
     return false;
   }
 
+  protected boolean visitTimestampExpr(SchemaPath path, TimeStampExpression valueArg) {
+    this.value = KeyValueBuilder.initFrom(new OTimestamp(valueArg.getTimeStamp()));
+    this.path = path;
+    return true;
+  }
+
   private static final ImmutableSet<Class<? extends LogicalExpression>> VALUE_EXPRESSION_CLASSES;
   static {
     ImmutableSet.Builder<Class<? extends LogicalExpression>> builder = ImmutableSet.builder();
diff --git a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/JsonConditionBuilder.java b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/JsonConditionBuilder.java
index 92d40f7..252bc53 100644
--- a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/JsonConditionBuilder.java
+++ b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/JsonConditionBuilder.java
@@ -74,7 +74,12 @@ public class JsonConditionBuilder extends AbstractExprVisitor<JsonScanSpec, Void
     ImmutableList<LogicalExpression> args = call.args;
 
     if (CompareFunctionsProcessor.isCompareFunction(functionName)) {
-      CompareFunctionsProcessor processor = CompareFunctionsProcessor.process(call);
+      CompareFunctionsProcessor processor;
+      if (groupScan.getFormatPlugin().getConfig().isReadTimestampWithZoneOffset()) {
+        processor = CompareFunctionsProcessor.processWithTimeZoneOffset(call);
+      } else {
+        processor = CompareFunctionsProcessor.process(call);
+      }
       if (processor.isSuccess()) {
         nodeScanSpec = createJsonScanSpec(call, processor);
       }
diff --git a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/MaprDBJsonRecordReader.java b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/MaprDBJsonRecordReader.java
index f13d64d..081d8fd 100644
--- a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/MaprDBJsonRecordReader.java
+++ b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/MaprDBJsonRecordReader.java
@@ -33,6 +33,7 @@ import org.apache.drill.common.expression.PathSegment;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.expr.fn.impl.DateUtility;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.ops.OperatorStats;
@@ -43,6 +44,7 @@ import org.apache.drill.exec.store.mapr.db.MapRDBSubScanSpec;
 import org.apache.drill.exec.util.EncodedSchemaPathSet;
 import org.apache.drill.exec.vector.BaseValueVector;
 import org.apache.drill.exec.vector.complex.fn.JsonReaderUtils;
+import org.apache.drill.exec.vector.complex.impl.MapOrListWriterImpl;
 import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter;
 import org.apache.hadoop.fs.Path;
 import org.ojai.Document;
@@ -60,8 +62,9 @@ import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableSet;
 import org.apache.drill.shaded.guava.com.google.common.collect.Iterables;
 import org.apache.drill.shaded.guava.com.google.common.collect.Sets;
-import org.apache.drill.shaded.guava.com.google.common.base.Predicate;
 
+import java.time.Instant;
+import java.time.OffsetDateTime;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Iterator;
@@ -112,6 +115,7 @@ public class MaprDBJsonRecordReader extends AbstractRecordReader {
 
   private final boolean unionEnabled;
   private final boolean readNumbersAsDouble;
+  private final boolean readTimestampWithZoneOffset;
   private boolean disablePushdown;
   private final boolean allTextMode;
   private final boolean ignoreSchemaChange;
@@ -156,6 +160,7 @@ public class MaprDBJsonRecordReader extends AbstractRecordReader {
     setColumns(projectedColumns);
     unionEnabled = context.getOptions().getOption(ExecConstants.ENABLE_UNION_TYPE);
     readNumbersAsDouble = formatPlugin.getConfig().isReadAllNumbersAsDouble();
+    readTimestampWithZoneOffset = formatPlugin.getConfig().isReadTimestampWithZoneOffset();
     allTextMode = formatPlugin.getConfig().isAllTextMode();
     ignoreSchemaChange = formatPlugin.getConfig().isIgnoreSchemaChange();
     disablePushdown = !formatPlugin.getConfig().isEnablePushdown();
@@ -284,16 +289,53 @@ public class MaprDBJsonRecordReader extends AbstractRecordReader {
       throw new ExecutionSetupException(ex);
     }
   }
-  /*
-   * Setup the valueWriter and documentWriters based on config options
+
+  /**
+   * Sets up the valueWriter and documentWriters based on config options.
    */
   private void setupWriter() {
     if (allTextMode) {
-      valueWriter = new AllTextValueWriter(buffer);
+      if (readTimestampWithZoneOffset) {
+        valueWriter = new AllTextValueWriter(buffer) {
+          /**
+           * Applies local time zone offset to timestamp value read using specified {@code reader}.
+           *
+           * @param writer    writer to store string representation of timestamp value
+           * @param fieldName name of the field
+           * @param reader    document reader
+           */
+          @Override
+          protected void writeTimeStamp(MapOrListWriterImpl writer, String fieldName, DocumentReader reader) {
+            String formattedTimestamp = Instant.ofEpochMilli(reader.getTimestampLong())
+                .atOffset(OffsetDateTime.now().getOffset()).format(DateUtility.UTC_FORMATTER);
+            writeString(writer, fieldName, formattedTimestamp);
+          }
+        };
+      } else {
+        valueWriter = new AllTextValueWriter(buffer);
+      }
     } else if (readNumbersAsDouble) {
-      valueWriter = new NumbersAsDoubleValueWriter(buffer);
+      if (readTimestampWithZoneOffset) {
+        valueWriter = new NumbersAsDoubleValueWriter(buffer) {
+          @Override
+          protected void writeTimeStamp(MapOrListWriterImpl writer, String fieldName, DocumentReader reader) {
+            writeTimestampWithLocalZoneOffset(writer, fieldName, reader);
+          }
+        };
+      } else {
+        valueWriter = new NumbersAsDoubleValueWriter(buffer);
+      }
     } else {
-      valueWriter = new OjaiValueWriter(buffer);
+      if (readTimestampWithZoneOffset) {
+        valueWriter = new OjaiValueWriter(buffer) {
+          @Override
+          protected void writeTimeStamp(MapOrListWriterImpl writer, String fieldName, DocumentReader reader) {
+            writeTimestampWithLocalZoneOffset(writer, fieldName, reader);
+          }
+        };
+      } else {
+        valueWriter = new OjaiValueWriter(buffer);
+      }
     }
 
     if (projectWholeDocument) {
@@ -307,6 +349,18 @@ public class MaprDBJsonRecordReader extends AbstractRecordReader {
     }
   }
 
+  /**
+   * Applies local time zone offset to timestamp value read using specified {@code reader}.
+   *
+   * @param writer    writer to store timestamp value
+   * @param fieldName name of the field
+   * @param reader    document reader
+   */
+  private void writeTimestampWithLocalZoneOffset(MapOrListWriterImpl writer, String fieldName, DocumentReader reader) {
+    long timestamp = reader.getTimestampLong() + DateUtility.TIMEZONE_OFFSET_MILLIS;
+    writer.timeStamp(fieldName).writeTimeStamp(timestamp);
+  }
+
   @Override
   public int next() {
     Stopwatch watch = Stopwatch.createUnstarted();
@@ -387,7 +441,7 @@ public class MaprDBJsonRecordReader extends AbstractRecordReader {
     }
 
     if (nonExistentColumnsProjection && recordCount > 0) {
-      JsonReaderUtils.ensureAtLeastOneField(vectorWriter, getColumns(), allTextMode, Collections.EMPTY_LIST);
+      JsonReaderUtils.ensureAtLeastOneField(vectorWriter, getColumns(), allTextMode, Collections.emptyList());
     }
     vectorWriter.setValueCount(recordCount);
     if (maxRecordsToRead > 0) {
@@ -463,12 +517,7 @@ public class MaprDBJsonRecordReader extends AbstractRecordReader {
   }
 
   public static boolean includesIdField(Collection<FieldPath> projected) {
-    return Iterables.tryFind(projected, new Predicate<FieldPath>() {
-      @Override
-      public boolean apply(FieldPath path) {
-        return Preconditions.checkNotNull(path).equals(ID_FIELD);
-      }
-    }).isPresent();
+    return Iterables.tryFind(projected, path -> Preconditions.checkNotNull(path).equals(ID_FIELD)).isPresent();
   }
 
   @Override
diff --git a/contrib/format-maprdb/src/test/java/com/mapr/drill/maprdb/tests/json/TestScanRanges.java b/contrib/format-maprdb/src/test/java/com/mapr/drill/maprdb/tests/json/TestScanRanges.java
index e521c64..efb70aa 100644
--- a/contrib/format-maprdb/src/test/java/com/mapr/drill/maprdb/tests/json/TestScanRanges.java
+++ b/contrib/format-maprdb/src/test/java/com/mapr/drill/maprdb/tests/json/TestScanRanges.java
@@ -82,8 +82,8 @@ public class TestScanRanges extends BaseJsonTest {
       table.flush();
       DBTests.waitForRowCount(table.getPath(), TOTAL_ROW_COUNT);
 
-      setSessionOption("planner.width.max_per_node", "5");
-   }
+      setSessionOption("planner.width.max_per_node", 5);
+    }
   }
 
   @AfterClass
diff --git a/contrib/storage-hive/core/scrMapr/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java b/contrib/storage-hive/core/scrMapr/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java
index b8c2675..7aca59d 100644
--- a/contrib/storage-hive/core/scrMapr/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java
+++ b/contrib/storage-hive/core/scrMapr/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java
@@ -23,8 +23,11 @@ import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.rel.type.RelDataTypeField;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.planner.logical.DrillScanRel;
 import org.apache.drill.exec.planner.logical.RelOptHelper;
+import org.apache.drill.exec.planner.physical.PlannerSettings;
+import org.apache.drill.exec.planner.physical.PrelUtil;
 import org.apache.drill.exec.store.StoragePluginOptimizerRule;
 import org.apache.drill.exec.store.hive.HiveMetadataProvider;
 import org.apache.drill.exec.store.hive.HiveReadEntry;
@@ -75,6 +78,7 @@ public class ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan extends StoragePlugi
   public void onMatch(RelOptRuleCall call) {
     try {
       DrillScanRel hiveScanRel = call.rel(0);
+      PlannerSettings settings = PrelUtil.getPlannerSettings(call.getPlanner());
 
       HiveScan hiveScan = (HiveScan) hiveScanRel.getGroupScan();
       HiveReadEntry hiveReadEntry = hiveScan.getHiveReadEntry();
@@ -90,7 +94,7 @@ public class ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan extends StoragePlugi
             "partitions");
       }
 
-      DrillScanRel nativeScanRel = createNativeScanRel(hiveScanRel);
+      DrillScanRel nativeScanRel = createNativeScanRel(hiveScanRel, settings);
       call.transformTo(nativeScanRel);
 
       /*
@@ -110,7 +114,7 @@ public class ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan extends StoragePlugi
   /**
    * Helper method which creates a DrillScanRel with native Drill HiveScan.
    */
-  private DrillScanRel createNativeScanRel(final DrillScanRel hiveScanRel) {
+  private DrillScanRel createNativeScanRel(DrillScanRel hiveScanRel, PlannerSettings settings) {
     RelDataTypeFactory typeFactory = hiveScanRel.getCluster().getTypeFactory();
     HiveScan hiveScan = (HiveScan) hiveScanRel.getGroupScan();
     Map<String, String> parameters = hiveScan.getHiveReadEntry().getHiveTableWrapper().getParameters();
@@ -129,6 +133,9 @@ public class ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan extends StoragePlugi
             hiveScanCols
         );
 
+    nativeMapRDBScan.getFormatPlugin().getConfig().readTimestampWithZoneOffset =
+        settings.getOptions().getBoolean(ExecConstants.HIVE_READ_MAPRDB_JSON_TIMESTAMP_WITH_TIMEZONE_OFFSET);
+
     List<String> nativeScanColNames = hiveScanRel.getRowType().getFieldList().stream()
         .map(field -> replaceOverriddenColumnId(parameters, field.getName()))
         .collect(Collectors.toList());
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index 811c479..6ce4822 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -515,6 +515,11 @@ public final class ExecConstants {
       new BooleanValidator(HIVE_OPTIMIZE_MAPRDB_JSON_SCAN_WITH_NATIVE_READER,
           new OptionDescription("Enables Drill to use the Drill native reader (instead of the Hive Serde interface) to optimize reads of MapR Database JSON tables from Hive. Default is false. (Drill 1.14+)"));
 
+  public static final String HIVE_READ_MAPRDB_JSON_TIMESTAMP_WITH_TIMEZONE_OFFSET = "store.hive.maprdb_json.read_timestamp_with_timezone_offset";
+  public static final OptionValidator HIVE_READ_MAPRDB_JSON_TIMESTAMP_WITH_TIMEZONE_OFFSET_VALIDATOR =
+      new BooleanValidator(HIVE_READ_MAPRDB_JSON_TIMESTAMP_WITH_TIMEZONE_OFFSET,
+          new OptionDescription("Enables Drill to read timestamp values with the timezone offset when the Hive plugin is used and the Drill native MapR-DB JSON reader is enabled. (Drill 1.16+)"));
+
   public static final String HIVE_CONF_PROPERTIES = "store.hive.conf.properties";
   public static final OptionValidator HIVE_CONF_PROPERTIES_VALIDATOR = new StringValidator(HIVE_CONF_PROPERTIES,
       new OptionDescription("Enables the user to specify Hive properties at the session level. Do not set the property values in quotes. Separate the property name and value by =. Separate each property with a new line (\\n). Example: set `store.hive.conf.properties` = 'hive.mapred.supports.subdirectories=true\\nmapred.input.dir.recursive=true'. (Drill 1.14+)"));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
index c97220c..d5463ef 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
@@ -200,6 +200,7 @@ public class SystemOptionManager extends BaseOptionManager implements AutoClosea
       new OptionDefinition(ExecConstants.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS_VALIDATOR),
       new OptionDefinition(ExecConstants.HIVE_OPTIMIZE_PARQUET_SCAN_WITH_NATIVE_READER_VALIDATOR),
       new OptionDefinition(ExecConstants.HIVE_OPTIMIZE_MAPRDB_JSON_SCAN_WITH_NATIVE_READER_VALIDATOR),
+      new OptionDefinition(ExecConstants.HIVE_READ_MAPRDB_JSON_TIMESTAMP_WITH_TIMEZONE_OFFSET_VALIDATOR),
       new OptionDefinition(ExecConstants.HIVE_CONF_PROPERTIES_VALIDATOR),
       new OptionDefinition(ExecConstants.SLICE_TARGET_OPTION),
       new OptionDefinition(ExecConstants.AFFINITY_FACTOR),
diff --git a/exec/java-exec/src/main/resources/drill-module.conf b/exec/java-exec/src/main/resources/drill-module.conf
index 4a5f075..018dc91 100644
--- a/exec/java-exec/src/main/resources/drill-module.conf
+++ b/exec/java-exec/src/main/resources/drill-module.conf
@@ -596,6 +596,7 @@ drill.exec.options: {
     store.hive.optimize_scan_with_native_readers: false,
     store.hive.parquet.optimize_scan_with_native_reader: false,
     store.hive.maprdb_json.optimize_scan_with_native_reader: false,
+    store.hive.maprdb_json.read_timestamp_with_timezone_offset: false,
     # Properties values should NOT be set in double-quotes or any other quotes.
     # Property name and value should be separated by =.
     # Properties should be separated by new line (\n).
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/expr/fn/impl/DateUtility.java b/exec/vector/src/main/java/org/apache/drill/exec/expr/fn/impl/DateUtility.java
index 21a4352..1f7238a 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/expr/fn/impl/DateUtility.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/expr/fn/impl/DateUtility.java
@@ -635,6 +635,9 @@ public class DateUtility {
   public static final DateTimeFormatter isoFormatTimeStamp= buildFormatter("yyyy-MM-dd'T'HH:mm:ss.SSSXX");
   public static final DateTimeFormatter isoFormatTime     = buildFormatter("HH:mm:ss.SSSXX");
 
+  public static final DateTimeFormatter UTC_FORMATTER = buildFormatter("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
+  public static final long TIMEZONE_OFFSET_MILLIS = OffsetDateTime.now().getOffset().getTotalSeconds() * 1000;
+
   public static DateTimeFormatter dateTimeTZFormat = null;
   public static DateTimeFormatter timeFormat = null;
 


[drill] 06/06: DRILL-6967: Fix TIMESTAMPDIFF function for QUARTER qualifier

Posted by vi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

vitalii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 0f05f53e0e34ccc0606a561f44d82b794be32b83
Author: Volodymyr Vysotskyi <vv...@gmail.com>
AuthorDate: Fri Jan 11 16:25:02 2019 +0200

    DRILL-6967: Fix TIMESTAMPDIFF function for QUARTER qualifier
    
    closes #1609
---
 .../TimestampDiff.java                             |  3 +-
 .../fn/impl/TestTimestampAddDiffFunctions.java     | 80 ++++++++++++----------
 2 files changed, 45 insertions(+), 38 deletions(-)

diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/TimestampDiff.java b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/TimestampDiff.java
index 54232e2..b139efc 100644
--- a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/TimestampDiff.java
+++ b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/TimestampDiff.java
@@ -95,7 +95,8 @@ public class ${className} {
         <#if unit == "Month">
       out.value = months;
         <#elseif unit == "Quarter">
-      out.value = months / 4;
+      // A quarter has 3 months
+      out.value = months / 3;
         <#elseif unit == "Year">
       out.value = months / org.apache.drill.exec.vector.DateUtilities.yearsToMonths;
         </#if>
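
To see why the divisor matters, here is a minimal standalone sketch (plain JDK,
not the generated Drill code) reproducing the corrected arithmetic; quarters is
a hypothetical helper, with Period.toTotalMonths() standing in for the month
delta the template computes:

    import java.time.LocalDate;
    import java.time.Period;

    public class QuarterDiffSketch {
      static long quarters(LocalDate from, LocalDate to) {
        long months = Period.between(from, to).toTotalMonths();
        return months / 3; // a quarter is 3 months; dividing by 4 undercounted
      }

      public static void main(String[] args) {
        // Matches the new test baselines below: 8 quarters in two years,
        // 196 quarters from the epoch to 2019-01-11.
        System.out.println(quarters(LocalDate.of(1996, 3, 9), LocalDate.of(1998, 3, 9)));  // 8
        System.out.println(quarters(LocalDate.of(1970, 1, 1), LocalDate.of(2019, 1, 11))); // 196
      }
    }
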
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestTimestampAddDiffFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestTimestampAddDiffFunctions.java
index 4b3e93b..f2d5af6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestTimestampAddDiffFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestTimestampAddDiffFunctions.java
@@ -73,17 +73,12 @@ public class TestTimestampAddDiffFunctions extends ClusterTest {
         String dateTimeLiteral = typeResultPair.getValue();
         String type = typeResultPair.getKey();
 
-        client.queryBuilder()
-            .sql("SELECT TIMESTAMPADD(%s, 0, CAST('%s' AS %s)) col1",
-                qualifier, dateTimeLiteral, type)
-            .run();
+        run("SELECT TIMESTAMPADD(%s, 0, CAST('%s' AS %s)) col1", qualifier, dateTimeLiteral, type);
 
         // TIMESTAMPDIFF with args of different types
         for (Map.Entry<String, String> secondArg : dateTypes.entrySet()) {
-          client.queryBuilder()
-              .sql("SELECT TIMESTAMPDIFF(%s, CAST('%s' AS %s), CAST('%s' AS %s)) col1",
-                  qualifier, dateTimeLiteral, type, secondArg.getValue(), secondArg.getKey())
-              .run();
+          run("SELECT TIMESTAMPDIFF(%s, CAST('%s' AS %s), CAST('%s' AS %s)) col1",
+              qualifier, dateTimeLiteral, type, secondArg.getValue(), secondArg.getKey());
         }
       }
     }
@@ -92,31 +87,25 @@ public class TestTimestampAddDiffFunctions extends ClusterTest {
   @Test // DRILL-3610
   public void testTimestampAddDiffTypeInference() throws Exception {
     for (String qualifier : QUALIFIERS) {
-      client.queryBuilder()
-          .sql(
-            "SELECT TIMESTAMPADD(%1$s, 0, `date`) col1," +
-                    "TIMESTAMPADD(%1$s, 0, `time`) timeReq," +
-                    "TIMESTAMPADD(%1$s, 0, `timestamp`) timestampReq," +
-                    "TIMESTAMPADD(%1$s, 0, t.time_map.`date`) dateOpt," +
-                    "TIMESTAMPADD(%1$s, 0, t.time_map.`time`) timeOpt," +
-                    "TIMESTAMPADD(%1$s, 0, t.time_map.`timestamp`) timestampOpt\n" +
-            "FROM cp.`datetime.parquet` t", qualifier)
-          .run();
-
-      client.queryBuilder()
-          .sql(
-            "SELECT TIMESTAMPDIFF(%1$s, `date`, `date`) col1," +
-                    "TIMESTAMPDIFF(%1$s, `time`, `time`) timeReq," +
-                    "TIMESTAMPDIFF(%1$s, `timestamp`, `timestamp`) timestampReq," +
-                    "TIMESTAMPDIFF(%1$s, `timestamp`, t.time_map.`date`) timestampReqTimestampOpt," +
-                    "TIMESTAMPDIFF(%1$s, `timestamp`, t.time_map.`timestamp`) timestampReqTimestampOpt," +
-                    "TIMESTAMPDIFF(%1$s, `date`, `time`) timeDate," +
-                    "TIMESTAMPDIFF(%1$s, `time`, `date`) Datetime," +
-                    "TIMESTAMPDIFF(%1$s, t.time_map.`date`, t.time_map.`date`) dateOpt," +
-                    "TIMESTAMPDIFF(%1$s, t.time_map.`time`, t.time_map.`time`) timeOpt," +
-                    "TIMESTAMPDIFF(%1$s, t.time_map.`timestamp`, t.time_map.`timestamp`) timestampOpt\n" +
-            "FROM cp.`datetime.parquet` t", qualifier)
-          .run();
+      run("SELECT TIMESTAMPADD(%1$s, 0, `date`) col1," +
+                "TIMESTAMPADD(%1$s, 0, `time`) timeReq," +
+                "TIMESTAMPADD(%1$s, 0, `timestamp`) timestampReq," +
+                "TIMESTAMPADD(%1$s, 0, t.time_map.`date`) dateOpt," +
+                "TIMESTAMPADD(%1$s, 0, t.time_map.`time`) timeOpt," +
+                "TIMESTAMPADD(%1$s, 0, t.time_map.`timestamp`) timestampOpt\n" +
+          "FROM cp.`datetime.parquet` t", qualifier);
+
+      run("SELECT TIMESTAMPDIFF(%1$s, `date`, `date`) col1," +
+                "TIMESTAMPDIFF(%1$s, `time`, `time`) timeReq," +
+                "TIMESTAMPDIFF(%1$s, `timestamp`, `timestamp`) timestampReq," +
+                "TIMESTAMPDIFF(%1$s, `timestamp`, t.time_map.`date`) timestampReqTimestampOpt," +
+                "TIMESTAMPDIFF(%1$s, `timestamp`, t.time_map.`timestamp`) timestampReqTimestampOpt," +
+                "TIMESTAMPDIFF(%1$s, `date`, `time`) timeDate," +
+                "TIMESTAMPDIFF(%1$s, `time`, `date`) Datetime," +
+                "TIMESTAMPDIFF(%1$s, t.time_map.`date`, t.time_map.`date`) dateOpt," +
+                "TIMESTAMPDIFF(%1$s, t.time_map.`time`, t.time_map.`time`) timeOpt," +
+                "TIMESTAMPDIFF(%1$s, t.time_map.`timestamp`, t.time_map.`timestamp`) timestampOpt\n" +
+          "FROM cp.`datetime.parquet` t", qualifier);
     }
   }
 
@@ -131,7 +120,7 @@ public class TestTimestampAddDiffFunctions extends ClusterTest {
               "TIMESTAMPADD(YEAR, 1, t.time_map.`timestamp`) timestampOpt\n" +
         "FROM cp.`datetime.parquet` t";
 
-    client.testBuilder()
+    testBuilder()
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns("dateReq", "timeReq", "timestampReq", "dateOpt", "timeOpt", "timestampOpt")
@@ -152,7 +141,7 @@ public class TestTimestampAddDiffFunctions extends ClusterTest {
             "TIMESTAMPDIFF(YEAR, TIMESTAMP '2020-03-24 17:40:52.123', t.time_map.`timestamp`) timestampOpt\n" +
         "FROM cp.`datetime.parquet` t";
 
-    client.testBuilder()
+    testBuilder()
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns("dateReq", "timeReq", "timestampReq", "dateOpt", "timeOpt", "timestampOpt")
@@ -173,7 +162,7 @@ public class TestTimestampAddDiffFunctions extends ClusterTest {
             "(SELECT CASE WHEN FALSE THEN TIME '12:00:03.600' ELSE null END AS a," +
             "CASE WHEN FALSE THEN 2 ELSE null END AS b)";
 
-    client.testBuilder()
+    testBuilder()
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns("col1", "col2", "col3", "col4", "col5", "col6")
@@ -192,11 +181,28 @@ public class TestTimestampAddDiffFunctions extends ClusterTest {
               "TIMESTAMPDIFF(DAY, DATE '2012-01-01', DATE '2013-01-01') col6," +
               "TIMESTAMPDIFF(DAY, DATE '2013-01-01', DATE '2014-01-01') col7";
 
-    client.testBuilder()
+    testBuilder()
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns("col1", "col2", "col3", "col4", "col5", "col6", "col7")
         .baselineValues(0L, 0L, 0L, 1L, -1L, 366L, 365L)
         .go();
   }
+
+  @Test // DRILL-6967
+  public void testTimestampDiffQuarter() throws Exception {
+    String query =
+        "SELECT TIMESTAMPDIFF(SQL_TSI_QUARTER, date '1996-03-09', date '1998-03-09') AS col1," +
+                "TIMESTAMPDIFF(QUARTER, date '2019-01-01', date '2019-01-17') AS col2," +
+                "TIMESTAMPDIFF(SQL_TSI_QUARTER, date '2019-01-01', date '2019-03-31') AS col3," +
+                "TIMESTAMPDIFF(QUARTER, date '2019-01-01', date '2019-04-01') AS col4," +
+                "TIMESTAMPDIFF(SQL_TSI_QUARTER, date '1970-01-01', date '2019-01-11') AS col5";
+
+    testBuilder()
+        .sqlQuery(query)
+        .unOrdered()
+        .baselineColumns("col1", "col2", "col3", "col4", "col5")
+        .baselineValues(8L, 0L, 0L, 1L, 196L)
+        .go();
+  }
 }


[drill] 05/06: DRILL-6942: Provide ability to sort list of profiles on Drill Web UI

Posted by vi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

vitalii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit a566b0af0eb24a9ee6ed91bd4ad31a17e529e8d9
Author: Kunal Khatua <kk...@maprtech.com>
AuthorDate: Fri Jan 4 15:14:58 2019 -0800

    DRILL-6942: Provide ability to sort list of profiles on Drill Web UI
    
    This provides an option to order the list of query profiles by any of the displayed fields, including total duration, so that a user can easily identify long-running queries.
    In addition, the number of profiles listed per page, for both the completed and the running query lists, has been made configurable with the parameter `drill.exec.http.profiles_per_page` (default: 10, 25, 50, 100).
    
    closes #1594
---
 .../src/main/java/org/apache/drill/exec/ExecConstants.java |  1 +
 .../drill/exec/server/rest/profile/ProfileResources.java   |  8 +++++++-
 exec/java-exec/src/main/resources/drill-module.conf        |  1 +
 exec/java-exec/src/main/resources/rest/options.ftl         | 13 +------------
 exec/java-exec/src/main/resources/rest/profile/list.ftl    | 14 ++++++++------
 exec/java-exec/src/main/resources/rest/profile/profile.ftl | 14 +-------------
 .../rest/static/css/drill-dataTables.sortable.css          | 11 +++++++++++
 7 files changed, 30 insertions(+), 32 deletions(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index 6ce4822..bab94a1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -203,6 +203,7 @@ public final class ExecConstants {
   public static final String HAZELCAST_SUBNETS = "drill.exec.cache.hazel.subnets";
   public static final String HTTP_ENABLE = "drill.exec.http.enabled";
   public static final String HTTP_MAX_PROFILES = "drill.exec.http.max_profiles";
+  public static final String HTTP_PROFILES_PER_PAGE = "drill.exec.http.profiles_per_page";
   public static final String HTTP_PORT = "drill.exec.http.port";
   public static final String HTTP_PORT_HUNT = "drill.exec.http.porthunt";
   public static final String HTTP_JETTY_SERVER_ACCEPTORS = "drill.exec.http.jetty.server.acceptors";
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java
index af2b790..e88b57c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java
@@ -57,7 +57,7 @@ import org.apache.drill.exec.store.sys.PersistentStoreProvider;
 import org.apache.drill.exec.work.WorkManager;
 import org.apache.drill.exec.work.foreman.Foreman;
 import org.glassfish.jersey.server.mvc.Viewable;
-
+import org.apache.drill.shaded.guava.com.google.common.base.Joiner;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 
 @Path("/")
@@ -219,6 +219,12 @@ public class ProfileResources {
       return work.getContext().getConfig().getInt(ExecConstants.HTTP_MAX_PROFILES);
     }
 
+    public String getQueriesPerPage() {
+      List<Integer> queriesPerPageOptions = work.getContext().getConfig().getIntList(ExecConstants.HTTP_PROFILES_PER_PAGE);
+      Collections.sort(queriesPerPageOptions);
+      return Joiner.on(",").join(queriesPerPageOptions);
+    }
+
     public List<String> getErrors() { return errors; }
   }
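
As a rough illustration of what getQueriesPerPage() hands the FreeMarker
template, the standalone sketch below parses a profiles_per_page list with
Typesafe Config (the library behind Drill's HOCON files) and joins it the same
way, using JDK streams in place of Guava's Joiner; the class name is
hypothetical:

    import com.typesafe.config.Config;
    import com.typesafe.config.ConfigFactory;
    import java.util.List;
    import java.util.stream.Collectors;

    public class PageLengthSketch {
      public static void main(String[] args) {
        Config config = ConfigFactory.parseString(
            "drill.exec.http.profiles_per_page: [50, 10, 100, 25]");
        List<Integer> sizes = config.getIntList("drill.exec.http.profiles_per_page");
        String menu = sizes.stream()
            .sorted()
            .map(String::valueOf)
            .collect(Collectors.joining(","));
        // Spliced into DataTables' lengthMenu as [[10,25,50,100, -1], [..., "All"]]
        System.out.println(menu); // 10,25,50,100
      }
    }
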
 
diff --git a/exec/java-exec/src/main/resources/drill-module.conf b/exec/java-exec/src/main/resources/drill-module.conf
index 018dc91..6b9fe6f 100644
--- a/exec/java-exec/src/main/resources/drill-module.conf
+++ b/exec/java-exec/src/main/resources/drill-module.conf
@@ -142,6 +142,7 @@ drill.exec: {
       }
     }
     max_profiles: 100,
+    profiles_per_page: [10, 25, 50, 100],
     profile.warning: {
       progress.threshold: 300,
       time.skew: {
diff --git a/exec/java-exec/src/main/resources/rest/options.ftl b/exec/java-exec/src/main/resources/rest/options.ftl
index 1fce03c..e1c904d 100644
--- a/exec/java-exec/src/main/resources/rest/options.ftl
+++ b/exec/java-exec/src/main/resources/rest/options.ftl
@@ -64,18 +64,7 @@
     <link href="/static/css/dataTables.colVis-1.1.0.min.css" rel="stylesheet">
     <link href="/static/css/dataTables.jqueryui.css" rel="stylesheet">
     <link href="/static/css/jquery-ui-1.10.3.min.css" rel="stylesheet">
-<style>
-/* DataTables Sorting: inherited via sortable class */
-table.sortable thead .sorting,.sorting_asc,.sorting_desc {
-  background-repeat: no-repeat;
-  background-position: center right;
-  cursor: pointer;
-}
-/* Sorting Symbols */
-table.sortable thead .sorting { background-image: url("/static/img/black-unsorted.gif"); }
-table.sortable thead .sorting_asc { background-image: url("/static/img/black-asc.gif"); }
-table.sortable thead .sorting_desc { background-image: url("/static/img/black-desc.gif"); }
-</style>
+    <link href="/static/css/drill-dataTables.sortable.css" rel="stylesheet">
 </#macro>
 
 <#macro page_body>
diff --git a/exec/java-exec/src/main/resources/rest/profile/list.ftl b/exec/java-exec/src/main/resources/rest/profile/list.ftl
index cc9b63c..1afeb7d 100644
--- a/exec/java-exec/src/main/resources/rest/profile/list.ftl
+++ b/exec/java-exec/src/main/resources/rest/profile/list.ftl
@@ -21,16 +21,18 @@
 <#macro page_head>
 
 <script src="/static/js/jquery.dataTables-1.10.16.min.js"></script>
+<link href="/static/css/drill-dataTables.sortable.css" rel="stylesheet">
 <script>
     $(document).ready(function() {
       $.each(["running","completed"], function(i, key) {
         $("#profileList_"+key).DataTable( {
-          //Preserve order
-          "ordering": false,
+          //Permit sorting-by-column
+          "ordering": true,
+          "order": [[0, "desc"]],
           "searching": true,
           "paging": true,
           "pagingType": "full_numbers",
-          "lengthMenu": [[10, 25, 50, -1], [10, 25, 50, "All"]],
+          "lengthMenu": [[${model.getQueriesPerPage()}, -1], [${model.getQueriesPerPage()}, "All"]],
           "lengthChange": true,
           "info": true,
           //Ref: https://legacy.datatables.net/ref#sDom
@@ -193,7 +195,7 @@
 
 <#macro list_queries queries stateList>
     <div class="table-responsive">
-        <table id="profileList_${stateList}" class="table table-hover dataTable" role="grid">
+        <table id="profileList_${stateList}" class="table table-hover sortable dataTable" role="grid">
             <thead>
             <tr role="row">
                 <#if stateList == "running" >
@@ -213,7 +215,7 @@
                 <#if stateList == "running" >
                 <td><input type="checkbox" name="cancelQ" value="${query.getQueryId()}"/></td>
                 </#if>
-                <td>${query.getTime()}</td>
+                <td data-order='${query.getStartTime()}'>${query.getTime()}</td>
                 <td>${query.getUser()}</td>
                 <td>
                     <a href="/profiles/${query.getQueryId()}">
@@ -221,7 +223,7 @@
                     </a>
                 </td>
                 <td>${query.getState()}</td>
-                <td>${query.getDuration()}</td>
+                <td data-order='${query.getEndTime() - query.getStartTime()}'>${query.getDuration()}</td>
                 <td>${query.getForeman()}</td>
             </tr>
             </#list>
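
The data-order attributes added above are the sort keys: DataTables orders rows
by these raw numeric values while still displaying the human-readable strings.
A minimal sketch of the two keys (hypothetical values, not Drill API):

    public class SortKeySketch {
      public static void main(String[] args) {
        long startTime = 1547831270123L; // query.getStartTime(), epoch millis
        long endTime   = 1547831275623L; // query.getEndTime()
        // The start-time column sorts on the raw epoch value...
        System.out.println("data-order=" + startTime);
        // ...and the duration column on the millisecond difference.
        System.out.println("data-order=" + (endTime - startTime)); // 5500
      }
    }
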
diff --git a/exec/java-exec/src/main/resources/rest/profile/profile.ftl b/exec/java-exec/src/main/resources/rest/profile/profile.ftl
index e432e20..6d5690a 100644
--- a/exec/java-exec/src/main/resources/rest/profile/profile.ftl
+++ b/exec/java-exec/src/main/resources/rest/profile/profile.ftl
@@ -33,6 +33,7 @@
 <script src="/static/js/ace-code-editor/theme-sqlserver.js" type="text/javascript" charset="utf-8"></script>
 <script src="/static/js/ace-code-editor/snippets/sql.js" type="text/javascript" charset="utf-8"></script>
 <script src="/static/js/ace-code-editor/mode-snippets.js" type="text/javascript" charset="utf-8"></script>
+<link href="/static/css/drill-dataTables.sortable.css" rel="stylesheet">
 
 <script>
     var globalconfig = {
@@ -97,19 +98,6 @@
     };
 
 </script>
-<style>
-/* DataTables Sorting: inherited via sortable class */
-table.sortable thead .sorting,.sorting_asc,.sorting_desc {
-  background-repeat: no-repeat;
-  background-position: center right;
-  cursor: pointer;
-}
-/* Sorting Symbols */
-table.sortable thead .sorting { background-image: url("/static/img/black-unsorted.gif"); }
-table.sortable thead .sorting_asc { background-image: url("/static/img/black-asc.gif"); }
-table.sortable thead .sorting_desc { background-image: url("/static/img/black-desc.gif"); }
-</style>
-
 </#macro>
 
 <#macro page_body>
diff --git a/exec/java-exec/src/main/resources/rest/static/css/drill-dataTables.sortable.css b/exec/java-exec/src/main/resources/rest/static/css/drill-dataTables.sortable.css
new file mode 100644
index 0000000..47f4260
--- /dev/null
+++ b/exec/java-exec/src/main/resources/rest/static/css/drill-dataTables.sortable.css
@@ -0,0 +1,11 @@
+/* DataTables Sorting: inherited via sortable class */
+table.sortable thead .sorting,.sorting_asc,.sorting_desc {
+  background-repeat: no-repeat;
+  background-position: center right;
+  cursor: pointer;
+}
+/* Sorting Symbols */
+table.sortable thead .sorting { background-image: url("/static/img/black-unsorted.gif"); }
+table.sortable thead .sorting_asc { background-image: url("/static/img/black-asc.gif"); }
+table.sortable thead .sorting_desc { background-image: url("/static/img/black-desc.gif"); }
+