Posted to commits@drill.apache.org by am...@apache.org on 2016/06/23 18:10:41 UTC

[1/7] drill git commit: DRILL-2385: Count on complex objects failed with missing function implementation - added MapHolder, ListHolder; - added testCountComplexObjects() unit test.

Repository: drill
Updated Branches:
  refs/heads/1.7.0 [created] 552129fa6


DRILL-2385: Count on complex objects failed with missing function implementation - added MapHolder, ListHolder; - added testCountComplexObjects() unit test.


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/c5216a69
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/c5216a69
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/c5216a69

Branch: refs/heads/1.7.0
Commit: c5216a691c10fd4e90eda68c93a06aab7d036ec5
Parents: bd1d9c2
Author: Vitalii Diravka <vi...@gmail.com>
Authored: Thu Jun 16 16:45:20 2016 +0000
Committer: Aman Sinha <as...@maprtech.com>
Committed: Wed Jun 22 15:07:48 2016 -0700

----------------------------------------------------------------------
 .../src/main/codegen/data/CountAggrTypes.tdd    | 21 +++++++++++
 .../exec/resolver/ResolverTypePrecedence.java   |  2 ++
 .../drill/exec/resolver/TypeCastRules.java      |  2 ++
 .../exec/fn/impl/TestAggregateFunctions.java    | 34 ++++++++++++++++++
 .../test/resources/complex/json/complex.json    | 30 ++++++++++++++++
 .../resources/complex/json/repeated_list.json   |  2 +-
 .../complex/json/repeated_list_map.json         |  2 +-
 .../drill/exec/expr/holders/ListHolder.java     | 37 ++++++++++++++++++++
 .../drill/exec/expr/holders/MapHolder.java      | 32 +++++++++++++++++
 .../exec/expr/holders/RepeatedListHolder.java   | 20 +++++++++--
 .../exec/expr/holders/RepeatedMapHolder.java    | 25 +++++++++++--
 11 files changed, 201 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/c5216a69/exec/java-exec/src/main/codegen/data/CountAggrTypes.tdd
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/data/CountAggrTypes.tdd b/exec/java-exec/src/main/codegen/data/CountAggrTypes.tdd
index 53e25f7..aec73d1 100644
--- a/exec/java-exec/src/main/codegen/data/CountAggrTypes.tdd
+++ b/exec/java-exec/src/main/codegen/data/CountAggrTypes.tdd
@@ -2,38 +2,59 @@
   countFunctionsInput: [
     "Bit",
     "NullableBit",
+    "RepeatedBit",
     "Int",
     "NullableInt",
+    "RepeatedInt",
     "BigInt",
     "NullableBigInt",
+    "RepeatedBigInt",
     "Float4",
     "NullableFloat4",
+    "RepeatedFloat4",
     "Float8",
     "NullableFloat8",
+    "RepeatedFloat8",
     "Date",
     "NullableDate",
+    "RepeatedDate",
     "TimeStamp",
     "NullableTimeStamp",
+    "RepeatedTimeStamp",
     "Time",
     "NullableTime",
+    "RepeatedTime",
     "IntervalDay",
     "NullableIntervalDay",
+    "RepeatedIntervalDay",
     "IntervalYear",
     "NullableIntervalYear",
+    "RepeatedIntervalYear",
     "Interval",
     "NullableInterval",
+    "RepeatedInterval",
     "VarChar",
     "NullableVarChar",
+    "RepeatedVarChar",
     "VarBinary",
     "NullableVarBinary",
+    "RepeatedVarBinary",
     "Decimal9",
     "NullableDecimal9",
+    "RepeatedDecimal9",
     "Decimal18",
     "NullableDecimal18",
+    "RepeatedDecimal18",
     "Decimal28Sparse",
     "NullableDecimal28Sparse",
+    "RepeatedDecimal28Sparse",
     "Decimal38Sparse",
     "NullableDecimal38Sparse",
+    "RepeatedDecimal38Sparse",
+    "List",
+    "RepeatedList",
+    "Map",
+    "RepeatedMap"
   ]
 }
 
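
The .tdd file above is FreeMarker data for Drill's code generator: each entry
in countFunctionsInput produces one COUNT implementation whose @Param is the
matching holder class, so the four new entries only work because this commit
also adds the holders further below. A rough sketch of what the template emits
for the new "Map" entry (not the literal generated source; the class is nested
inside the generated count-functions class, and names follow Drill's
aggregate-function conventions):

    import org.apache.drill.exec.expr.DrillAggFunc;
    import org.apache.drill.exec.expr.annotations.FunctionTemplate;
    import org.apache.drill.exec.expr.annotations.Output;
    import org.apache.drill.exec.expr.annotations.Param;
    import org.apache.drill.exec.expr.annotations.Workspace;
    import org.apache.drill.exec.expr.holders.BigIntHolder;
    import org.apache.drill.exec.expr.holders.MapHolder;

    @FunctionTemplate(name = "count", scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
    public static class MapCountFunction implements DrillAggFunc {
      @Param MapHolder in;            // the holder added by this commit
      @Workspace BigIntHolder value;  // running count
      @Output BigIntHolder out;

      public void setup() { value = new BigIntHolder(); value.value = 0; }
      public void add() { value.value++; }  // COUNT only needs presence, not content
      public void output() { out.value = value.value; }
      public void reset() { value.value = 0; }
    }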

http://git-wip-us.apache.org/repos/asf/drill/blob/c5216a69/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java
index 8c602b3..a28c95a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/ResolverTypePrecedence.java
@@ -77,6 +77,8 @@ public class ResolverTypePrecedence {
     precedenceMap.put(MinorType.INTERVALDAY, i+= 2);
     precedenceMap.put(MinorType.INTERVALYEAR, i+= 2);
     precedenceMap.put(MinorType.INTERVAL, i+= 2);
+    precedenceMap.put(MinorType.MAP, i += 2);
+    precedenceMap.put(MinorType.LIST, i += 2);
     precedenceMap.put(MinorType.UNION, i += 2);
 
     MAX_IMPLICIT_CAST_COST = i;

http://git-wip-us.apache.org/repos/asf/drill/blob/c5216a69/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java
index ae42937..8bb6c2a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/resolver/TypeCastRules.java
@@ -754,6 +754,8 @@ public class TypeCastRules {
     rule.add(MinorType.FIXEDBINARY);
     rules.put(MinorType.VARBINARY, rule);
 
+    rules.put(MinorType.MAP, Sets.newHashSet(MinorType.MAP));
+    rules.put(MinorType.LIST, Sets.newHashSet(MinorType.LIST));
     rules.put(MinorType.UNION, Sets.newHashSet(MinorType.UNION));
   }
 
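
Together the two changes above make MAP and LIST visible to function
resolution: the precedence entries give each type an implicit-cast cost slot
(just below UNION), and the cast rules state that each may bind only to a
parameter of exactly its own type. A minimal self-contained illustration of
what the added rule entries mean (the table mirrors the static one above;
Drill's real lookup lives in TypeCastRules):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Set;

    import com.google.common.collect.Sets;
    import org.apache.drill.common.types.TypeProtos.MinorType;

    class CastRuleSketch {
      static final Map<MinorType, Set<MinorType>> rules = new HashMap<>();
      static {
        rules.put(MinorType.MAP, Sets.newHashSet(MinorType.MAP));    // MAP binds only to MAP
        rules.put(MinorType.LIST, Sets.newHashSet(MinorType.LIST));  // LIST binds only to LIST
      }

      static boolean isCastable(MinorType from, MinorType to) {
        Set<MinorType> allowed = rules.get(from);
        return allowed != null && allowed.contains(to);  // MAP->MAP true, MAP->LIST false
      }
    }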

http://git-wip-us.apache.org/repos/asf/drill/blob/c5216a69/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java
index 0e558a7..d99eb00 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.fn.impl;
 
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.drill.BaseTestQuery;
 import org.apache.drill.PlanTestBase;
@@ -28,6 +29,7 @@ import org.junit.Ignore;
 import org.junit.Test;
 
 import java.util.List;
+import java.util.Map;
 
 public class TestAggregateFunctions extends BaseTestQuery {
 
@@ -525,4 +527,36 @@ public class TestAggregateFunctions extends BaseTestQuery {
     PlanTestBase.testPlanMatchingPatterns(sql, expectedPlan, excludedPatterns);
   }
 
+  @Test // DRILL-2385: count on complex objects failed with missing function implementation
+  public void testCountComplexObjects() throws Exception {
+    final String query = "select count(t.%s) %s from cp.`complex/json/complex.json` t";
+    Map<String, String> objectsMap = Maps.newHashMap();
+    objectsMap.put("COUNT_BIG_INT_REPEATED", "sia");
+    objectsMap.put("COUNT_FLOAT_REPEATED", "sfa");
+    // TODO: can be uncommented after fixing DRILL-4664
+    // objectsMap.put("COUNT_MAP_REPEATED", "soa");
+    // objectsMap.put("COUNT_MAP_REQUIRED", "oooi");
+    objectsMap.put("COUNT_LIST_REPEATED", "odd");
+    objectsMap.put("COUNT_LIST_OPTIONAL", "sia");
+
+    for (String object: objectsMap.keySet()) {
+      String optionSetting = "";
+      if (object.equals("COUNT_LIST_OPTIONAL")) {
+        // if the `exec.enable_union_type` option is true, the BIGINT<REPEATED> column is converted to a LIST<OPTIONAL> one
+        optionSetting = "alter session set `exec.enable_union_type`=true";
+      }
+      try {
+        testBuilder()
+            .sqlQuery(query, objectsMap.get(object), object)
+            .optionSettingQueriesForTestQuery(optionSetting)
+            .unOrdered()
+            .baselineColumns(object)
+            .baselineValues(3L)
+            .go();
+      } finally {
+        test("ALTER SESSION RESET `exec.enable_union_type`");
+      }
+    }
+  }
+
 }
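
For reference, one concrete expansion of the parameterized query in the test
above ("sia" is a repeated BIGINT column present in each of the three records
of complex.json, so the expected count is 3):

    String sql = "select count(t.sia) COUNT_BIG_INT_REPEATED"
        + " from cp.`complex/json/complex.json` t";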

http://git-wip-us.apache.org/repos/asf/drill/blob/c5216a69/exec/java-exec/src/test/resources/complex/json/complex.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/complex/json/complex.json b/exec/java-exec/src/test/resources/complex/json/complex.json
new file mode 100644
index 0000000..5e52f63
--- /dev/null
+++ b/exec/java-exec/src/test/resources/complex/json/complex.json
@@ -0,0 +1,30 @@
+{"sia":[1, 11, 101, 1001],
+ "sfa":[0.0, 1.01, 10.222, 10.0006789],
+ "soa":[{"in":1},{"in":1,"fl":1.12345}, {"in":1, "fl":10.12345, "nul":null}, {"in":1, "fl":10.6789, "nul":null, "bool":true, "str":"here is a string at row 1"}],
+ "oooi":{"oa":{"oab":{"oabc":1}}},
+ "odd": [
+    [[1],[],[3]],
+    [],
+    [[5]]
+ ]
+}
+{"sia":[2, 12, 102, 1002],
+ "sfa":[0.0, 2.01, 20.222, 20.0006789],
+ "soa":[{"in":2},{"in":2,"fl":2.12345}, {"in":2, "fl":20.12345, "nul":"not null"}, {"in":2, "fl":20.6789, "nul":null, "bool":false, "str":"here is a string at row 2"}],
+ "oooi":{"oa":{"oab":{"oabc":2}}},
+ "odd": [
+    [[1],[],[3]],
+    [],
+    [[5]]
+ ]
+}
+{"sia":[3, 13, 103, 1003],
+ "sfa":[0.0, 3.01, 30.222, 30.0006789],
+ "soa":[{"in":3},{"in":3,"fl":3.12345}, {"in":3, "fl":30.12345, "nul":"not null"}, {"in":3, "fl":30.6789, "nul":"not null", "bool":true, "str":"here is a string at row 3"}],
+ "oooi":{"oa":{"oab":{"oabc":3}}},
+ "odd": [
+    [[1],[],[3]],
+    [],
+    [[5]]
+ ]
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/c5216a69/exec/java-exec/src/test/resources/complex/json/repeated_list.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/complex/json/repeated_list.json b/exec/java-exec/src/test/resources/complex/json/repeated_list.json
index 696d069..b3418f2 100644
--- a/exec/java-exec/src/test/resources/complex/json/repeated_list.json
+++ b/exec/java-exec/src/test/resources/complex/json/repeated_list.json
@@ -4,4 +4,4 @@
     [],
     [[5]]
   ]
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/c5216a69/exec/java-exec/src/test/resources/complex/json/repeated_list_map.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/complex/json/repeated_list_map.json b/exec/java-exec/src/test/resources/complex/json/repeated_list_map.json
index ef6c0ee..cad3fe8 100644
--- a/exec/java-exec/src/test/resources/complex/json/repeated_list_map.json
+++ b/exec/java-exec/src/test/resources/complex/json/repeated_list_map.json
@@ -4,4 +4,4 @@
     [],
     [{"val": [7]}]
   ]
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/c5216a69/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/ListHolder.java
----------------------------------------------------------------------
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/ListHolder.java b/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/ListHolder.java
new file mode 100644
index 0000000..ebdbae9
--- /dev/null
+++ b/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/ListHolder.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.holders;
+
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+
+public class ListHolder implements ValueHolder {
+    public static final TypeProtos.MajorType TYPE = Types.optional(TypeProtos.MinorType.LIST);
+    public FieldReader reader;
+    public int isSet;
+
+    public TypeProtos.MajorType getType() {
+        return TYPE;
+    }
+
+    public boolean isSet() {
+        return isSet == 1;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/c5216a69/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/MapHolder.java
----------------------------------------------------------------------
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/MapHolder.java b/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/MapHolder.java
new file mode 100644
index 0000000..8a38bd4
--- /dev/null
+++ b/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/MapHolder.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.holders;
+
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+
+public class MapHolder implements ValueHolder {
+    public static final TypeProtos.MajorType TYPE = Types.required(TypeProtos.MinorType.MAP);
+    public FieldReader reader;
+
+    public TypeProtos.MajorType getType() {
+        return TYPE;
+    }
+
+}
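
Both new holders carry a FieldReader rather than a primitive value, so a
function body can navigate into the complex object when it needs more than
presence. A hedged illustration (reader navigation per Drill's complex
readers; the "oa"/"oab"/"oabc" names come from the oooi column in
complex.json above, and JSON numbers arrive as BIGINT by default):

    import org.apache.drill.exec.expr.holders.MapHolder;
    import org.apache.drill.exec.vector.complex.reader.FieldReader;

    class HolderSketch {
      static Object leafValue(MapHolder holder) {
        FieldReader map = holder.reader;
        // Walks oooi.oa.oab.oabc; yields 1L for the first record.
        return map.reader("oa").reader("oab").reader("oabc").readObject();
      }
    }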

http://git-wip-us.apache.org/repos/asf/drill/blob/c5216a69/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/RepeatedListHolder.java
----------------------------------------------------------------------
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/RepeatedListHolder.java b/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/RepeatedListHolder.java
index 09746da..dc857de 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/RepeatedListHolder.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/RepeatedListHolder.java
@@ -17,7 +17,23 @@
  */
 package org.apache.drill.exec.expr.holders;
 
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.vector.complex.ListVector;
+
 public final class RepeatedListHolder implements ValueHolder{
-  public int start;
-  public int end;
+
+    public static final TypeProtos.MajorType TYPE = Types.repeated(TypeProtos.MinorType.LIST);
+
+    public TypeProtos.MajorType getType() {return TYPE;}
+
+    /** The first index (inclusive) into the Vector. **/
+    public int start;
+
+    /** The last index (exclusive) into the Vector. **/
+    public int end;
+
+    /** The Vector holding the actual values. **/
+    public ListVector vector;
+
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/c5216a69/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/RepeatedMapHolder.java
----------------------------------------------------------------------
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/RepeatedMapHolder.java b/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/RepeatedMapHolder.java
index 247f75e..3db9020 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/RepeatedMapHolder.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/expr/holders/RepeatedMapHolder.java
@@ -17,7 +17,28 @@
  */
 package org.apache.drill.exec.expr.holders;
 
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.vector.complex.MapVector;
+
+import java.util.LinkedHashSet;
+
 public final class RepeatedMapHolder implements ValueHolder{
-  public int start;
-  public int end;
+
+    public static final TypeProtos.MajorType TYPE = Types.repeated(TypeProtos.MinorType.MAP);
+
+//    public final LinkedHashSet<ValueHolder> children = null;
+
+    public TypeProtos.MajorType getType() {return TYPE;}
+
+    /** The first index (inclusive) into the Vector. **/
+    public int start;
+
+    /** The last index (exclusive) into the Vector. **/
+    public int end;
+
+    /** The Vector holding the actual values. **/
+    public MapVector vector;
+
 }


[3/7] drill git commit: DRILL-4701: Fix log name and missing lines in logs on Web UI

Posted by am...@apache.org.
DRILL-4701: Fix log name and missing lines in logs on Web UI


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/7a8bb16c
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/7a8bb16c
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/7a8bb16c

Branch: refs/heads/1.7.0
Commit: 7a8bb16c14da605f947c9956cdbbab6aac01e1e8
Parents: 1ddd0ce
Author: Arina Ielchiieva <ar...@gmail.com>
Authored: Wed Jun 1 11:16:31 2016 +0000
Committer: Aman Sinha <as...@maprtech.com>
Committed: Wed Jun 22 15:15:39 2016 -0700

----------------------------------------------------------------------
 .../apache/drill/exec/server/rest/LogsResources.java | 15 ++++++++-------
 exec/java-exec/src/main/resources/rest/logs/log.ftl  |  6 +++---
 2 files changed, 11 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/7a8bb16c/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/LogsResources.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/LogsResources.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/LogsResources.java
index 8a89d41..16d213a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/LogsResources.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/LogsResources.java
@@ -21,6 +21,7 @@ package org.apache.drill.exec.server.rest;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.exec.ExecConstants;
@@ -48,7 +49,6 @@ import java.io.FileReader;
 import java.io.FilenameFilter;
 import java.io.IOException;
 import java.util.Collection;
-import java.util.Comparator;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
@@ -111,19 +111,20 @@ public class LogsResources {
     final int maxLines = work.getContext().getOptionManager().getOption(ExecConstants.WEB_LOGS_MAX_LINES).num_val.intValue();
 
     try (BufferedReader br = new BufferedReader(new FileReader(file))) {
-      Map<String, String> cache = new LinkedHashMap<String, String>(maxLines, .75f, true) {
+      Map<Integer, String> cache = new LinkedHashMap<Integer, String>(maxLines, .75f, true) {
         @Override
-        protected boolean removeEldestEntry(Map.Entry<String, String> eldest) {
+        protected boolean removeEldestEntry(Map.Entry<Integer, String> eldest) {
           return size() > maxLines;
         }
       };
 
       String line;
+      int i = 0;
       while ((line = br.readLine()) != null) {
-        cache.put(line, null);
+        cache.put(i++, line);
       }
 
-      return new LogContent(file.getName(), cache.keySet(), maxLines);
+      return new LogContent(file.getName(), cache.values(), maxLines);
     }
   }
 
@@ -133,12 +134,12 @@ public class LogsResources {
   public Response getFullLog(@PathParam("name") final String name) {
     File file = getFileByName(getLogFolder(), name);
     Response.ResponseBuilder response = Response.ok(file);
-    response.header("Content-Disposition", String.format("attachment;filename\"%s\"", name));
+    response.header("Content-Disposition", String.format("attachment;filename=\"%s\"", name));
     return response.build();
   }
 
   private File getLogFolder() {
-    return new File(System.getenv("DRILL_LOG_DIR"));
+    return new File(Preconditions.checkNotNull(System.getenv("DRILL_LOG_DIR"), "DRILL_LOG_DIR variable is not set"));
   }
 
   private File getFileByName(File folder, final String name) {

http://git-wip-us.apache.org/repos/asf/drill/blob/7a8bb16c/exec/java-exec/src/main/resources/rest/logs/log.ftl
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/resources/rest/logs/log.ftl b/exec/java-exec/src/main/resources/rest/logs/log.ftl
index b09b57a..f5386bd 100644
--- a/exec/java-exec/src/main/resources/rest/logs/log.ftl
+++ b/exec/java-exec/src/main/resources/rest/logs/log.ftl
@@ -24,9 +24,9 @@
     <#if (model.getLines()?size > 0)>
     <pre>
         <#list model.getLines() as line>
-${line}
-            </#list>
-        </pre>
+${line?html}
+        </#list>
+     </pre>
     <#else>
     <div id="message" class="alert alert-info">
         <strong>Log is empty.</strong>

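The "missing lines" half of DRILL-4701 came from the cache's key choice: the
old code keyed the LinkedHashMap by the log line text itself, so repeated
lines collapsed into a single entry. Keying by a running counter preserves
duplicates while removeEldestEntry still caps memory at maxLines; the same
commit also adds the missing '=' in the Content-Disposition header and
HTML-escapes each line in the template. A standalone sketch of the
tail-buffer idiom the fix settles on (illustrative, not the Drill class):

    import java.util.LinkedHashMap;
    import java.util.Map;

    class LogTailSketch {
      static Iterable<String> tail(Iterable<String> lines, final int maxLines) {
        Map<Integer, String> cache = new LinkedHashMap<Integer, String>(maxLines, .75f, true) {
          @Override
          protected boolean removeEldestEntry(Map.Entry<Integer, String> eldest) {
            return size() > maxLines;  // evict the oldest line beyond the cap
          }
        };
        int i = 0;
        for (String line : lines) {
          cache.put(i++, line);        // duplicate lines now get distinct keys
        }
        return cache.values();         // the last maxLines lines, in order
      }
    }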

[6/7] drill git commit: DRILL-4733: max(dir0) reading more columns than necessary

Posted by am...@apache.org.
DRILL-4733: max(dir0) reading more columns than necessary


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/19cdcd95
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/19cdcd95
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/19cdcd95

Branch: refs/heads/1.7.0
Commit: 19cdcd95808ddb6f9e19bbccb753017993ad78d6
Parents: 4687a8b
Author: Arina Ielchiieva <ar...@gmail.com>
Authored: Tue Jun 21 12:33:32 2016 +0000
Committer: Aman Sinha <as...@maprtech.com>
Committed: Thu Jun 23 10:29:38 2016 -0700

----------------------------------------------------------------------
 .../drill/exec/store/ImplicitColumnExplorer.java  |   6 ------
 .../exec/store/dfs/easy/EasyFormatPlugin.java     |   7 ++++++-
 .../drill/exec/store/TestImplicitFileColumns.java |  17 +++++++++++++++++
 .../parquetWithSchemaChange/voter5/voter5.parquet | Bin 0 -> 1250 bytes
 .../voter50/voter50.parquet                       | Bin 0 -> 2960 bytes
 5 files changed, 23 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/19cdcd95/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java
index 94a0dca..af74eb7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java
@@ -135,12 +135,6 @@ public class ImplicitColumnExplorer {
           tableColumns.add(column);
         }
       }
-
-      // We must make sure to pass a table column(not to be confused with partition column) to the underlying record
-      // reader.
-      if (tableColumns.size() == 0) {
-        tableColumns.add(AbstractRecordReader.STAR_COLUMN);
-      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/19cdcd95/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
index 5881d33..f56f445 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
@@ -41,6 +41,7 @@ import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.record.CloseableRecordBatch;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.store.AbstractRecordReader;
 import org.apache.drill.exec.store.ImplicitColumnExplorer;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.store.RecordWriter;
@@ -126,8 +127,12 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
     final ImplicitColumnExplorer columnExplorer = new ImplicitColumnExplorer(context, scan.getColumns());
 
     if (!columnExplorer.isSelectAllColumns()) {
+      // We must make sure to pass a table column (not to be confused with implicit column) to the underlying record reader.
+      List<SchemaPath> tableColumns =
+          columnExplorer.getTableColumns().size() == 0 ?
+              Lists.<SchemaPath>newArrayList(AbstractRecordReader.STAR_COLUMN) : columnExplorer.getTableColumns();
       scan = new EasySubScan(scan.getUserName(), scan.getWorkUnits(), scan.getFormatPlugin(),
-          columnExplorer.getTableColumns(), scan.getSelectionRoot());
+          tableColumns, scan.getSelectionRoot());
       scan.setOperatorId(scan.getOperatorId());
     }
 
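
The star-column substitution moves here, out of ImplicitColumnExplorer, so it
now applies only to easy-format readers; Parquet no longer receives a forced
star column, which is what stops max(dir0) from reading table columns it does
not need. The query shape this handles (mirroring the new test below; the
path is illustrative) projects nothing but a partition column, so
columnExplorer.getTableColumns() comes back empty:

    String sql = "select max(dir0) as max_dir"
        + " from dfs_test.`/multilevel/parquetWithSchemaChange`";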

http://git-wip-us.apache.org/repos/asf/drill/blob/19cdcd95/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
index 6900da9..ce0f5e9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
@@ -20,6 +20,7 @@ package org.apache.drill.exec.store;
 import com.google.common.base.Charsets;
 import com.google.common.io.Files;
 import org.apache.drill.BaseTestQuery;
+import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.util.JsonStringArrayList;
 import org.apache.drill.exec.util.Text;
 import org.apache.hadoop.fs.Path;
@@ -110,4 +111,20 @@ public class TestImplicitFileColumns extends BaseTestQuery {
         .go();
   }
 
+  @Test // DRILL-4733
+  public void testMultilevelParquetWithSchemaChange() throws Exception {
+    try {
+      test("alter session set `planner.enable_decimal_data_type` = true");
+      testBuilder()
+          .sqlQuery(String.format("select max(dir0) as max_dir from dfs_test.`%s/src/test/resources/multilevel/parquetWithSchemaChange`",
+              TestTools.getWorkingPath()))
+          .unOrdered()
+          .baselineColumns("max_dir")
+          .baselineValues("voter50")
+          .go();
+    } finally {
+      test("alter session set `planner.enable_decimal_data_type` = false");
+    }
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/19cdcd95/exec/java-exec/src/test/resources/multilevel/parquetWithSchemaChange/voter5/voter5.parquet
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/multilevel/parquetWithSchemaChange/voter5/voter5.parquet b/exec/java-exec/src/test/resources/multilevel/parquetWithSchemaChange/voter5/voter5.parquet
new file mode 100644
index 0000000..cc76280
Binary files /dev/null and b/exec/java-exec/src/test/resources/multilevel/parquetWithSchemaChange/voter5/voter5.parquet differ

http://git-wip-us.apache.org/repos/asf/drill/blob/19cdcd95/exec/java-exec/src/test/resources/multilevel/parquetWithSchemaChange/voter50/voter50.parquet
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/multilevel/parquetWithSchemaChange/voter50/voter50.parquet b/exec/java-exec/src/test/resources/multilevel/parquetWithSchemaChange/voter50/voter50.parquet
new file mode 100644
index 0000000..ff66b42
Binary files /dev/null and b/exec/java-exec/src/test/resources/multilevel/parquetWithSchemaChange/voter50/voter50.parquet differ


[2/7] drill git commit: DRILL-3474: Add implicit file columns support

Posted by am...@apache.org.
DRILL-3474: Add implicit file columns support


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/1ddd0ce7
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/1ddd0ce7
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/1ddd0ce7

Branch: refs/heads/1.7.0
Commit: 1ddd0ce7186afd164052d6aa8f63a75fd01148f4
Parents: c5216a6
Author: Arina Ielchiieva <ar...@gmail.com>
Authored: Mon Apr 18 19:36:52 2016 +0300
Committer: Aman Sinha <as...@maprtech.com>
Committed: Wed Jun 22 15:15:28 2016 -0700

----------------------------------------------------------------------
 .../hive/HiveDrillNativeScanBatchCreator.java   |  34 ++-
 .../org/apache/drill/exec/ExecConstants.java    |  12 ++
 .../drill/exec/physical/impl/ScanBatch.java     |  88 ++++----
 .../impl/project/ProjectRecordBatch.java        |  15 +-
 .../server/options/SystemOptionManager.java     |   6 +-
 .../exec/store/ImplicitColumnExplorer.java      | 206 +++++++++++++++++++
 .../exec/store/dfs/easy/EasyFormatPlugin.java   |  81 +++-----
 .../store/parquet/ParquetScanBatchCreator.java  |  81 +++-----
 .../exec/store/TestImplicitFileColumns.java     | 113 ++++++++++
 9 files changed, 461 insertions(+), 175 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/1ddd0ce7/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveDrillNativeScanBatchCreator.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveDrillNativeScanBatchCreator.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveDrillNativeScanBatchCreator.java
index ab321ba..a9575ba 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveDrillNativeScanBatchCreator.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveDrillNativeScanBatchCreator.java
@@ -20,10 +20,10 @@ package org.apache.drill.exec.store.hive;
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import com.google.common.base.Functions;
 import org.apache.drill.common.AutoCloseables;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.expression.SchemaPath;
@@ -67,17 +67,15 @@ public class HiveDrillNativeScanBatchCreator implements BatchCreator<HiveDrillNa
     final List<SchemaPath> columns = config.getColumns();
     final String partitionDesignator = context.getOptions()
         .getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL).string_val;
+    List<Map<String, String>> implicitColumns = Lists.newLinkedList();
+    boolean selectAllQuery = AbstractRecordReader.isStarQuery(columns);
 
     final boolean hasPartitions = (partitions != null && partitions.size() > 0);
 
     final List<String[]> partitionColumns = Lists.newArrayList();
     final List<Integer> selectedPartitionColumns = Lists.newArrayList();
     List<SchemaPath> newColumns = columns;
-    if (AbstractRecordReader.isStarQuery(columns)) {
-      for (int i = 0; i < table.getPartitionKeys().size(); i++) {
-        selectedPartitionColumns.add(i);
-      }
-    } else {
+    if (!selectAllQuery) {
       // Separate out the partition and non-partition columns. Non-partition columns are passed directly to the
       // ParquetRecordReader. Partition columns are passed to ScanBatch.
       newColumns = Lists.newArrayList();
@@ -86,7 +84,7 @@ public class HiveDrillNativeScanBatchCreator implements BatchCreator<HiveDrillNa
         Matcher m = pattern.matcher(column.getAsUnescapedPath());
         if (m.matches()) {
           selectedPartitionColumns.add(
-              Integer.parseInt(column.getAsUnescapedPath().toString().substring(partitionDesignator.length())));
+              Integer.parseInt(column.getAsUnescapedPath().substring(partitionDesignator.length())));
         } else {
           newColumns.add(column);
         }
@@ -103,6 +101,7 @@ public class HiveDrillNativeScanBatchCreator implements BatchCreator<HiveDrillNa
     // TODO: In future we can get this cache from Metadata cached on filesystem.
     final Map<String, ParquetMetadata> footerCache = Maps.newHashMap();
 
+    Map<String, String> mapWithMaxColumns = Maps.newLinkedHashMap();
     try {
       for (InputSplit split : splits) {
         final FileSplit fileSplit = (FileSplit) split;
@@ -128,10 +127,19 @@ public class HiveDrillNativeScanBatchCreator implements BatchCreator<HiveDrillNa
                   parquetMetadata,
                   newColumns)
           );
+          Map<String, String> implicitValues = Maps.newLinkedHashMap();
 
           if (hasPartitions) {
-            Partition p = partitions.get(currentPartitionIndex);
-            partitionColumns.add(p.getValues().toArray(new String[0]));
+            List<String> values = partitions.get(currentPartitionIndex).getValues();
+            for (int i = 0; i < values.size(); i++) {
+              if (selectAllQuery || selectedPartitionColumns.contains(i)) {
+                implicitValues.put(partitionDesignator + i, values.get(i));
+              }
+            }
+          }
+          implicitColumns.add(implicitValues);
+          if (implicitValues.size() > mapWithMaxColumns.size()) {
+            mapWithMaxColumns = implicitValues;
           }
         }
         currentPartitionIndex++;
@@ -141,6 +149,12 @@ public class HiveDrillNativeScanBatchCreator implements BatchCreator<HiveDrillNa
       throw new ExecutionSetupException("Failed to create RecordReaders. " + e.getMessage(), e);
     }
 
+    // All readers should have the same number of implicit columns; add the missing ones with value null
+    mapWithMaxColumns = Maps.transformValues(mapWithMaxColumns, Functions.constant((String) null));
+    for (Map<String, String> map : implicitColumns) {
+      map.putAll(Maps.difference(map, mapWithMaxColumns).entriesOnlyOnRight());
+    }
+
     // If there are no readers created (which is possible when the table is empty or no row groups are matched),
     // create an empty RecordReader to output the schema
     if (readers.size() == 0) {
@@ -148,7 +162,7 @@ public class HiveDrillNativeScanBatchCreator implements BatchCreator<HiveDrillNa
         ImpersonationUtil.createProxyUgi(config.getUserName(), context.getQueryUserName())));
     }
 
-    return new ScanBatch(config, context, oContext, readers.iterator(), partitionColumns, selectedPartitionColumns);
+    return new ScanBatch(config, context, oContext, readers.iterator(), implicitColumns);
   }
 
   /**

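A concrete illustration of the backfill step above, with hedged example
values: suppose the widest reader saw dir0 and dir1 while another saw only
dir0; after normalization every reader exposes the same implicit columns:

    import java.util.Map;

    import com.google.common.base.Functions;
    import com.google.common.collect.Maps;

    class BackfillSketch {
      static void demo() {
        Map<String, String> widest = Maps.newLinkedHashMap();
        widest.put("dir0", "2016");
        widest.put("dir1", "06");
        // Null out the values to get a pure key template.
        Map<String, String> template =
            Maps.transformValues(widest, Functions.constant((String) null));

        Map<String, String> narrow = Maps.newLinkedHashMap();
        narrow.put("dir0", "2016");
        // Copy in only the keys the narrow map is missing.
        narrow.putAll(Maps.difference(narrow, template).entriesOnlyOnRight());
        // narrow is now {dir0=2016, dir1=null}
      }
    }
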
http://git-wip-us.apache.org/repos/asf/drill/blob/1ddd0ce7/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index 6eb8a3a..0bc8a07 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -149,6 +149,18 @@ public interface ExecConstants {
   String FILESYSTEM_PARTITION_COLUMN_LABEL = "drill.exec.storage.file.partition.column.label";
   OptionValidator FILESYSTEM_PARTITION_COLUMN_LABEL_VALIDATOR = new StringValidator(FILESYSTEM_PARTITION_COLUMN_LABEL, "dir");
 
+  /**
+   * Implicit file columns
+   */
+  String IMPLICIT_FILENAME_COLUMN_LABEL = "drill.exec.storage.implicit.filename.column.label";
+  OptionValidator IMPLICIT_FILENAME_COLUMN_LABEL_VALIDATOR = new StringValidator(IMPLICIT_FILENAME_COLUMN_LABEL, "filename");
+  String IMPLICIT_SUFFIX_COLUMN_LABEL = "drill.exec.storage.implicit.suffix.column.label";
+  OptionValidator IMPLICIT_SUFFIX_COLUMN_LABEL_VALIDATOR = new StringValidator(IMPLICIT_SUFFIX_COLUMN_LABEL, "suffix");
+  String IMPLICIT_FQN_COLUMN_LABEL = "drill.exec.storage.implicit.fqn.column.label";
+  OptionValidator IMPLICIT_FQN_COLUMN_LABEL_VALIDATOR = new StringValidator(IMPLICIT_FQN_COLUMN_LABEL, "fqn");
+  String IMPLICIT_FILEPATH_COLUMN_LABEL = "drill.exec.storage.implicit.filepath.column.label";
+  OptionValidator IMPLICIT_FILEPATH_COLUMN_LABEL_VALIDATOR = new StringValidator(IMPLICIT_FILEPATH_COLUMN_LABEL, "filepath");
+
   String JSON_READ_NUMBERS_AS_DOUBLE = "store.json.read_numbers_as_double";
   BooleanValidator JSON_READ_NUMBERS_AS_DOUBLE_VALIDATOR = new BooleanValidator(JSON_READ_NUMBERS_AS_DOUBLE, false);
 
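Each new label is an ordinary string option registered with the option
manager (see the SystemOptionManager change below), so it can be remapped per
session when a dataset already has a column with the default name. A hedged
example using standard ALTER SESSION syntax (the replacement label is
illustrative):

    String remap = "ALTER SESSION SET"
        + " `drill.exec.storage.implicit.filename.column.label` = 'file_name'";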

http://git-wip-us.apache.org/repos/asf/drill/blob/1ddd0ce7/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
index c1cd469..43fabba 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
@@ -29,7 +29,6 @@ import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
-import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.TypeHelper;
@@ -46,7 +45,6 @@ import org.apache.drill.exec.record.VectorWrapper;
 import org.apache.drill.exec.record.WritableBatch;
 import org.apache.drill.exec.record.selection.SelectionVector2;
 import org.apache.drill.exec.record.selection.SelectionVector4;
-import org.apache.drill.exec.server.options.OptionValue;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.testing.ControlsInjector;
 import org.apache.drill.exec.testing.ControlsInjectorFactory;
@@ -56,7 +54,6 @@ import org.apache.drill.exec.vector.NullableVarCharVector;
 import org.apache.drill.exec.vector.SchemaChangeCallBack;
 import org.apache.drill.exec.vector.ValueVector;
 
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
 /**
@@ -80,20 +77,16 @@ public class ScanBatch implements CloseableRecordBatch {
   private RecordReader currentReader;
   private BatchSchema schema;
   private final Mutator mutator = new Mutator();
-  private Iterator<String[]> partitionColumns;
-  private String[] partitionValues;
-  private List<ValueVector> partitionVectors;
-  private List<Integer> selectedPartitionColumns;
-  private String partitionColumnDesignator;
   private boolean done = false;
   private SchemaChangeCallBack callBack = new SchemaChangeCallBack();
   private boolean hasReadNonEmptyFile = false;
-
+  private Map<String, ValueVector> implicitVectors;
+  private Iterator<Map<String, String>> implicitColumns;
+  private Map<String, String> implicitValues;
 
   public ScanBatch(PhysicalOperator subScanConfig, FragmentContext context,
                    OperatorContext oContext, Iterator<RecordReader> readers,
-                   List<String[]> partitionColumns,
-                   List<Integer> selectedPartitionColumns) throws ExecutionSetupException {
+                   List<Map<String, String>> implicitColumns) throws ExecutionSetupException {
     this.context = context;
     this.readers = readers;
     if (!readers.hasNext()) {
@@ -118,16 +111,10 @@ public class ScanBatch implements CloseableRecordBatch {
       }
       oContext.getStats().stopProcessing();
     }
-    this.partitionColumns = partitionColumns.iterator();
-    partitionValues = this.partitionColumns.hasNext() ? this.partitionColumns.next() : null;
-    this.selectedPartitionColumns = selectedPartitionColumns;
-
-    // TODO Remove null check after DRILL-2097 is resolved. That JIRA refers to test cases that do not initialize
-    // options; so labelValue = null.
-    final OptionValue labelValue = context.getOptions().getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL);
-    partitionColumnDesignator = labelValue == null ? "dir" : labelValue.string_val;
+    this.implicitColumns = implicitColumns.iterator();
+    this.implicitValues = this.implicitColumns.hasNext() ? this.implicitColumns.next() : null;
 
-    addPartitionVectors();
+    addImplicitVectors();
   }
 
   public ScanBatch(PhysicalOperator subScanConfig, FragmentContext context,
@@ -135,7 +122,7 @@ public class ScanBatch implements CloseableRecordBatch {
       throws ExecutionSetupException {
     this(subScanConfig, context,
         context.newOperatorContext(subScanConfig),
-        readers, Collections.<String[]> emptyList(), Collections.<Integer> emptyList());
+        readers, Collections.<Map<String, String>> emptyList());
   }
 
   @Override
@@ -221,7 +208,7 @@ public class ScanBatch implements CloseableRecordBatch {
 
           currentReader.close();
           currentReader = readers.next();
-          partitionValues = partitionColumns.hasNext() ? partitionColumns.next() : null;
+          implicitValues = implicitColumns.hasNext() ? implicitColumns.next() : null;
           currentReader.setup(oContext, mutator);
           try {
             currentReader.allocate(fieldVectorMap);
@@ -230,8 +217,7 @@ public class ScanBatch implements CloseableRecordBatch {
             clearFieldVectorMap();
             return IterOutcome.OUT_OF_MEMORY;
           }
-          addPartitionVectors();
-
+          addImplicitVectors();
         } catch (ExecutionSetupException e) {
           this.context.fail(e);
           releaseAssets();
@@ -241,7 +227,7 @@ public class ScanBatch implements CloseableRecordBatch {
       // At this point, the current reader has read 1 or more rows.
 
       hasReadNonEmptyFile = true;
-      populatePartitionVectors();
+      populateImplicitVectors();
 
       for (VectorWrapper w : container) {
         w.getValueVector().getMutator().setValueCount(recordCount);
@@ -271,41 +257,43 @@ public class ScanBatch implements CloseableRecordBatch {
     }
   }
 
-  private void addPartitionVectors() throws ExecutionSetupException {
+  private void addImplicitVectors() throws ExecutionSetupException {
     try {
-      if (partitionVectors != null) {
-        for (ValueVector v : partitionVectors) {
+      if (implicitVectors != null) {
+        for (ValueVector v : implicitVectors.values()) {
           v.clear();
         }
       }
-      partitionVectors = Lists.newArrayList();
-      for (int i : selectedPartitionColumns) {
-        final MaterializedField field =
-            MaterializedField.create(SchemaPath.getSimplePath(partitionColumnDesignator + i).getAsUnescapedPath(),
-                                     Types.optional(MinorType.VARCHAR));
-        final ValueVector v = mutator.addField(field, NullableVarCharVector.class);
-        partitionVectors.add(v);
+      implicitVectors = Maps.newHashMap();
+
+      if (implicitValues != null) {
+        for (String column : implicitValues.keySet()) {
+          final MaterializedField field = MaterializedField.create(column, Types.optional(MinorType.VARCHAR));
+          final ValueVector v = mutator.addField(field, NullableVarCharVector.class);
+          implicitVectors.put(column, v);
+        }
       }
     } catch(SchemaChangeException e) {
       throw new ExecutionSetupException(e);
     }
   }
 
-  private void populatePartitionVectors() {
-    for (int index = 0; index < selectedPartitionColumns.size(); index++) {
-      final int i = selectedPartitionColumns.get(index);
-      final NullableVarCharVector v = (NullableVarCharVector) partitionVectors.get(index);
-      if (partitionValues.length > i) {
-        final String val = partitionValues[i];
-        AllocationHelper.allocate(v, recordCount, val.length());
-        final byte[] bytes = val.getBytes();
-        for (int j = 0; j < recordCount; j++) {
-          v.getMutator().setSafe(j, bytes, 0, bytes.length);
+  private void populateImplicitVectors() {
+    if (implicitValues != null) {
+      for (Map.Entry<String, String> entry : implicitValues.entrySet()) {
+        final NullableVarCharVector v = (NullableVarCharVector) implicitVectors.get(entry.getKey());
+        String val;
+        if ((val = entry.getValue()) != null) {
+          AllocationHelper.allocate(v, recordCount, val.length());
+          final byte[] bytes = val.getBytes();
+          for (int j = 0; j < recordCount; j++) {
+            v.getMutator().setSafe(j, bytes, 0, bytes.length);
+          }
+          v.getMutator().setValueCount(recordCount);
+        } else {
+          AllocationHelper.allocate(v, recordCount, 0);
+          v.getMutator().setValueCount(recordCount);
         }
-        v.getMutator().setValueCount(recordCount);
-      } else {
-        AllocationHelper.allocate(v, recordCount, 0);
-        v.getMutator().setValueCount(recordCount);
       }
     }
   }
@@ -418,7 +406,7 @@ public class ScanBatch implements CloseableRecordBatch {
   @Override
   public void close() throws Exception {
     container.clear();
-    for (final ValueVector v : partitionVectors) {
+    for (final ValueVector v : implicitVectors.values()) {
       v.clear();
     }
     fieldVectorMap.clear();

http://git-wip-us.apache.org/repos/asf/drill/blob/1ddd0ce7/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
index 4ad5b8b..7892f75 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
@@ -60,6 +60,7 @@ import org.apache.drill.exec.record.TransferPair;
 import org.apache.drill.exec.record.TypedFieldId;
 import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.record.VectorWrapper;
+import org.apache.drill.exec.store.ImplicitColumnExplorer;
 import org.apache.drill.exec.vector.AllocationHelper;
 import org.apache.drill.exec.vector.FixedWidthVector;
 import org.apache.drill.exec.vector.ValueVector;
@@ -73,7 +74,6 @@ import com.google.common.collect.Maps;
 
 public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ProjectRecordBatch.class);
-
   private Projector projector;
   private List<ValueVector> allocationVectors;
   private List<ComplexWriter> complexWriters;
@@ -351,6 +351,11 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
               if (name == EMPTY_STRING) {
                 continue;
               }
+
+              if (isImplicitFileColumn(vvIn)) {
+                continue;
+              }
+
               final FieldReference ref = new FieldReference(name);
               final ValueVector vvOut = container.addOrGet(MaterializedField.create(ref.getAsNamePart().getName(), vvIn.getField().getType()), callBack);
               final TransferPair tp = vvIn.makeTransferPair(vvOut);
@@ -369,6 +374,10 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
                 continue;
               }
 
+              if (isImplicitFileColumn(vvIn)) {
+                continue;
+              }
+
               final LogicalExpression expr = ExpressionTreeMaterializer.materialize(originalPath, incoming, collector, context.getFunctionRegistry() );
               if (collector.hasErrors()) {
                 throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema.  Errors:\n %s.", collector.toErrorString()));
@@ -485,6 +494,10 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project> {
     }
   }
 
+  private boolean isImplicitFileColumn(ValueVector vvIn) {
+    return ImplicitColumnExplorer.initImplicitFileColumns(context.getOptions()).get(vvIn.getField().getName()) != null;
+  }
+
   private List<NamedExpression> getExpressionList() {
     if (popConfig.getExprs() != null) {
       return popConfig.getExprs();

http://git-wip-us.apache.org/repos/asf/drill/blob/1ddd0ce7/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
index 579276e..119de98 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
@@ -137,7 +137,11 @@ public class SystemOptionManager extends BaseOptionManager implements AutoClosea
       ClassTransformer.SCALAR_REPLACEMENT_VALIDATOR,
       ExecConstants.ENABLE_NEW_TEXT_READER,
       ExecConstants.ENABLE_BULK_LOAD_TABLE_LIST,
-      ExecConstants.WEB_LOGS_MAX_LINES_VALIDATOR
+      ExecConstants.WEB_LOGS_MAX_LINES_VALIDATOR,
+      ExecConstants.IMPLICIT_FILENAME_COLUMN_LABEL_VALIDATOR,
+      ExecConstants.IMPLICIT_SUFFIX_COLUMN_LABEL_VALIDATOR,
+      ExecConstants.IMPLICIT_FQN_COLUMN_LABEL_VALIDATOR,
+      ExecConstants.IMPLICIT_FILEPATH_COLUMN_LABEL_VALIDATOR
     };
     final Map<String, OptionValidator> tmp = new HashMap<>();
     for (final OptionValidator validator : validators) {

http://git-wip-us.apache.org/repos/asf/drill/blob/1ddd0ce7/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java
new file mode 100644
index 0000000..94a0dca
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java
@@ -0,0 +1,206 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.io.Files;
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.map.CaseInsensitiveMap;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionValue;
+import org.apache.drill.exec.store.dfs.easy.FileWork;
+import org.apache.hadoop.fs.Path;
+
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class ImplicitColumnExplorer {
+
+  private final String partitionDesignator;
+  private final List<SchemaPath> columns;
+  private final boolean selectAllColumns;
+  private final List<Integer> selectedPartitionColumns;
+  private final List<SchemaPath> tableColumns;
+  private final Map<String, ImplicitFileColumns> allImplicitColumns;
+  private final Map<String, ImplicitFileColumns> selectedImplicitColumns;
+
+
+  /**
+   * Helper class that encapsulates the logic for sorting columns into
+   * actual table columns, partition columns and implicit file columns.
+   * Also populates a map with implicit column names as keys and their values.
+   */
+  public ImplicitColumnExplorer(FragmentContext context, List<SchemaPath> columns) {
+    this.partitionDesignator = context.getOptions().getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL).string_val;
+    this.columns = columns;
+    this.selectAllColumns = columns != null && AbstractRecordReader.isStarQuery(columns);
+    this.selectedPartitionColumns = Lists.newArrayList();
+    this.tableColumns = Lists.newArrayList();
+    this.allImplicitColumns = initImplicitFileColumns(context.getOptions());
+    this.selectedImplicitColumns = CaseInsensitiveMap.newHashMap();
+
+    init();
+  }
+
+  /**
+   * Creates a case-insensitive map with implicit file column labels as keys and the corresponding ImplicitFileColumns enum constants as values
+   */
+  public static Map<String, ImplicitFileColumns> initImplicitFileColumns(OptionManager optionManager) {
+    Map<String, ImplicitFileColumns> map = CaseInsensitiveMap.newHashMap();
+    for (ImplicitFileColumns e : ImplicitFileColumns.values()) {
+      OptionValue optionValue;
+      if ((optionValue = optionManager.getOption(e.name)) != null) {
+        map.put(optionValue.string_val, e);
+      }
+    }
+    return map;
+  }
+
+  /**
+   * Compares the selection root and the actual file path to determine partition column values.
+   * Adds implicit file columns according to columns list.
+   *
+   * @return map with columns names as keys and their values
+   */
+  public Map<String, String> populateImplicitColumns(FileWork work, String selectionRoot) {
+    Map<String, String> implicitValues = Maps.newLinkedHashMap();
+    if (selectionRoot != null) {
+      String[] r = Path.getPathWithoutSchemeAndAuthority(new Path(selectionRoot)).toString().split("/");
+      Path path = Path.getPathWithoutSchemeAndAuthority(new Path(work.getPath()));
+      String[] p = path.toString().split("/");
+      if (p.length > r.length) {
+        String[] q = ArrayUtils.subarray(p, r.length, p.length - 1);
+        for (int a = 0; a < q.length; a++) {
+          if (selectAllColumns || selectedPartitionColumns.contains(a)) {
+            implicitValues.put(partitionDesignator + a, q[a]);
+          }
+        }
+      }
+      //add implicit file columns
+      for (Map.Entry<String, ImplicitFileColumns> entry : selectedImplicitColumns.entrySet()) {
+        implicitValues.put(entry.getKey(), entry.getValue().getValue(path));
+      }
+    }
+    return implicitValues;
+  }
+
+  public boolean isSelectAllColumns() {
+    return selectAllColumns;
+  }
+
+  public List<SchemaPath> getTableColumns() {
+    return tableColumns;
+  }
+
+  /**
+   * If the query is not a select-all query, sorts columns into three categories:
+   * 1. table columns
+   * 2. partition columns
+   * 3. implicit file columns
+   */
+  private void init() {
+    if (selectAllColumns) {
+      selectedImplicitColumns.putAll(allImplicitColumns);
+    } else {
+      Pattern pattern = Pattern.compile(String.format("%s[0-9]+", partitionDesignator));
+      for (SchemaPath column : columns) {
+        String path = column.getAsUnescapedPath();
+        Matcher m = pattern.matcher(path);
+        if (m.matches()) {
+          selectedPartitionColumns.add(Integer.parseInt(path.substring(partitionDesignator.length())));
+        } else if (allImplicitColumns.get(path) != null) {
+          selectedImplicitColumns.put(path, allImplicitColumns.get(path));
+        } else {
+          tableColumns.add(column);
+        }
+      }
+
+      // We must make sure to pass a table column (not to be confused with a partition column) to the
+      // underlying record reader.
+      if (tableColumns.size() == 0) {
+        tableColumns.add(AbstractRecordReader.STAR_COLUMN);
+      }
+    }
+  }
+
+  /**
+   * Columns that provide information about where the file data comes from.
+   * These columns are implicit, so they must be referenced explicitly in a query.
+   */
+  public enum ImplicitFileColumns {
+
+    /**
+     * Fully qualified name, contains full path to file and file name
+     */
+    FQN (ExecConstants.IMPLICIT_FQN_COLUMN_LABEL) {
+      @Override
+      public String getValue(Path path) {
+        return path.toString();
+      }
+    },
+
+    /**
+     * Full path to file without file name
+     */
+    FILEPATH (ExecConstants.IMPLICIT_FILEPATH_COLUMN_LABEL) {
+      @Override
+      public String getValue(Path path) {
+        return path.getParent().toString();
+      }
+    },
+
+    /**
+     * File name with extension without path
+     */
+    FILENAME (ExecConstants.IMPLICIT_FILENAME_COLUMN_LABEL) {
+      @Override
+      public String getValue(Path path) {
+        return path.getName();
+      }
+    },
+
+    /**
+     * File suffix (without dot at the beginning)
+     */
+    SUFFIX (ExecConstants.IMPLICIT_SUFFIX_COLUMN_LABEL) {
+      @Override
+      public String getValue(Path path) {
+        return Files.getFileExtension(path.getName());
+      }
+    };
+
+    String name;
+
+    ImplicitFileColumns(String name) {
+      this.name = name;
+    }
+
+    /**
+     * Calculates the value of this implicit file column from the given file path.
+     */
+    public abstract String getValue(Path path);
+
+  }
+
+}
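
A minimal, self-contained sketch of the column-sorting step in init() above, for readers skimming the diff. Illustrative only: it assumes the default partition designator "dir" and uses a hard-coded list in place of the implicit-column lookup that the real class drives from OptionManager.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ColumnSortingSketch {
  public static void main(String[] args) {
    // Assumed default for drill.exec.storage.file.partition.column.label.
    String partitionDesignator = "dir";
    Pattern pattern = Pattern.compile(String.format("%s[0-9]+", partitionDesignator));

    List<String> selected = Arrays.asList("dir0", "dir1", "filename", "columns");
    List<String> implicitNames = Arrays.asList("fqn", "filepath", "filename", "suffix");

    List<Integer> partitionIndexes = new ArrayList<>();
    List<String> implicitColumns = new ArrayList<>();
    List<String> tableColumns = new ArrayList<>();

    for (String path : selected) {
      Matcher m = pattern.matcher(path);
      if (m.matches()) {
        // "dir1" -> partition index 1, parsed from the trailing digits.
        partitionIndexes.add(Integer.parseInt(path.substring(partitionDesignator.length())));
      } else if (implicitNames.contains(path)) {
        implicitColumns.add(path);
      } else {
        tableColumns.add(path);
      }
    }
    // partitions=[0, 1], implicit=[filename], table=[columns]
    System.out.println("partitions=" + partitionIndexes
        + ", implicit=" + implicitColumns + ", table=" + tableColumns);
  }
}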

http://git-wip-us.apache.org/repos/asf/drill/blob/1ddd0ce7/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
index ec3cae8..5881d33 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
@@ -19,16 +19,15 @@ package org.apache.drill.exec.store.dfs.easy;
 
 import java.io.IOException;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
-import org.apache.commons.lang3.ArrayUtils;
+import com.google.common.base.Functions;
+import com.google.common.collect.Maps;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
-import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.physical.base.AbstractGroupScan;
@@ -42,7 +41,7 @@ import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.record.CloseableRecordBatch;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.store.AbstractRecordReader;
+import org.apache.drill.exec.store.ImplicitColumnExplorer;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.store.RecordWriter;
 import org.apache.drill.exec.store.StoragePluginOptimizerRule;
@@ -53,7 +52,6 @@ import org.apache.drill.exec.store.dfs.FormatMatcher;
 import org.apache.drill.exec.store.dfs.FormatPlugin;
 import org.apache.drill.exec.store.schedule.CompleteFileWork;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
 
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Lists;
@@ -125,41 +123,14 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
       List<SchemaPath> columns, String userName) throws ExecutionSetupException;
 
   CloseableRecordBatch getReaderBatch(FragmentContext context, EasySubScan scan) throws ExecutionSetupException {
-    String partitionDesignator = context.getOptions()
-      .getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL).string_val;
-    List<SchemaPath> columns = scan.getColumns();
-    List<RecordReader> readers = Lists.newArrayList();
-    List<String[]> partitionColumns = Lists.newArrayList();
-    List<Integer> selectedPartitionColumns = Lists.newArrayList();
-    boolean selectAllColumns = false;
-
-    if (columns == null || columns.size() == 0 || AbstractRecordReader.isStarQuery(columns)) {
-      selectAllColumns = true;
-    } else {
-      List<SchemaPath> newColumns = Lists.newArrayList();
-      Pattern pattern = Pattern.compile(String.format("%s[0-9]+", partitionDesignator));
-      for (SchemaPath column : columns) {
-        Matcher m = pattern.matcher(column.getAsUnescapedPath());
-        if (m.matches()) {
-          selectedPartitionColumns.add(Integer.parseInt(column.getAsUnescapedPath().toString().substring(partitionDesignator.length())));
-        } else {
-          newColumns.add(column);
-        }
-      }
+    final ImplicitColumnExplorer columnExplorer = new ImplicitColumnExplorer(context, scan.getColumns());
 
-      // We must make sure to pass a table column(not to be confused with partition column) to the underlying record
-      // reader.
-      if (newColumns.size()==0) {
-        newColumns.add(AbstractRecordReader.STAR_COLUMN);
-      }
-      // Create a new sub scan object with the new set of columns;
-      EasySubScan newScan = new EasySubScan(scan.getUserName(), scan.getWorkUnits(), scan.getFormatPlugin(),
-          newColumns, scan.getSelectionRoot());
-      newScan.setOperatorId(scan.getOperatorId());
-      scan = newScan;
+    if (!columnExplorer.isSelectAllColumns()) {
+      scan = new EasySubScan(scan.getUserName(), scan.getWorkUnits(), scan.getFormatPlugin(),
+          columnExplorer.getTableColumns(), scan.getSelectionRoot());
+      scan.setOperatorId(scan.getOperatorId());
     }
 
-    int numParts = 0;
     OperatorContext oContext = context.newOperatorContext(scan);
     final DrillFileSystem dfs;
     try {
@@ -168,30 +139,26 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
       throw new ExecutionSetupException(String.format("Failed to create FileSystem: %s", e.getMessage()), e);
     }
 
-    for(FileWork work : scan.getWorkUnits()){
-      readers.add(getRecordReader(context, dfs, work, scan.getColumns(), scan.getUserName()));
-      if (scan.getSelectionRoot() != null) {
-        String[] r = Path.getPathWithoutSchemeAndAuthority(new Path(scan.getSelectionRoot())).toString().split("/");
-        String[] p = Path.getPathWithoutSchemeAndAuthority(new Path(work.getPath())).toString().split("/");
-        if (p.length > r.length) {
-          String[] q = ArrayUtils.subarray(p, r.length, p.length - 1);
-          partitionColumns.add(q);
-          numParts = Math.max(numParts, q.length);
-        } else {
-          partitionColumns.add(new String[] {});
-        }
-      } else {
-        partitionColumns.add(new String[] {});
+    List<RecordReader> readers = Lists.newArrayList();
+    List<Map<String, String>> implicitColumns = Lists.newArrayList();
+    Map<String, String> mapWithMaxColumns = Maps.newLinkedHashMap();
+    for(FileWork work : scan.getWorkUnits()) {
+      RecordReader recordReader = getRecordReader(context, dfs, work, scan.getColumns(), scan.getUserName());
+      readers.add(recordReader);
+      Map<String, String> implicitValues = columnExplorer.populateImplicitColumns(work, scan.getSelectionRoot());
+      implicitColumns.add(implicitValues);
+      if (implicitValues.size() > mapWithMaxColumns.size()) {
+        mapWithMaxColumns = implicitValues;
       }
     }
 
-    if (selectAllColumns) {
-      for (int i = 0; i < numParts; i++) {
-        selectedPartitionColumns.add(i);
-      }
+    // all readers should have the same number of implicit columns, add missing ones with value null
+    Map<String, String> diff = Maps.transformValues(mapWithMaxColumns, Functions.constant((String) null));
+    for (Map<String, String> map : implicitColumns) {
+      map.putAll(Maps.difference(map, diff).entriesOnlyOnRight());
     }
 
-    return new ScanBatch(scan, context, oContext, readers.iterator(), partitionColumns, selectedPartitionColumns);
+    return new ScanBatch(scan, context, oContext, readers.iterator(), implicitColumns);
   }
 
   public abstract RecordWriter getRecordWriter(FragmentContext context, EasyWriter writer) throws IOException;
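
The Guava-based null-fill at the end of getReaderBatch() is compact but easy to misread. A standalone sketch of just that normalization, with hypothetical values; it needs only Guava on the classpath:

import com.google.common.base.Functions;
import com.google.common.collect.Maps;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class NullFillSketch {
  public static void main(String[] args) {
    Map<String, String> a = Maps.newLinkedHashMap();
    a.put("fqn", "/data/t/f1.csv");
    a.put("suffix", "csv");
    Map<String, String> b = Maps.newLinkedHashMap();
    b.put("fqn", "/data/t/f2.csv");

    List<Map<String, String>> implicitColumns = Arrays.asList(a, b);
    Map<String, String> mapWithMaxColumns = a; // the widest map seen in the loop

    // Template view mapping every key of the widest map to null ...
    Map<String, String> diff = Maps.transformValues(mapWithMaxColumns, Functions.constant((String) null));
    // ... so each narrower map gains the keys it is missing, with value null.
    for (Map<String, String> map : implicitColumns) {
      map.putAll(Maps.difference(map, diff).entriesOnlyOnRight());
    }
    // b now carries suffix=null, matching a's key set.
    System.out.println(implicitColumns);
  }
}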

http://git-wip-us.apache.org/repos/asf/drill/blob/1ddd0ce7/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetScanBatchCreator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetScanBatchCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetScanBatchCreator.java
index c730bc9..4d4719b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetScanBatchCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetScanBatchCreator.java
@@ -18,25 +18,21 @@
 package org.apache.drill.exec.store.parquet;
 
 import java.io.IOException;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
+import com.google.common.base.Functions;
 import com.google.common.base.Stopwatch;
-import org.apache.commons.lang3.ArrayUtils;
+import com.google.common.collect.Maps;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
-import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.OperatorContext;
-import org.apache.drill.exec.physical.base.GroupScan;
 import org.apache.drill.exec.physical.impl.BatchCreator;
 import org.apache.drill.exec.physical.impl.ScanBatch;
 import org.apache.drill.exec.record.RecordBatch;
-import org.apache.drill.exec.store.AbstractRecordReader;
+import org.apache.drill.exec.store.ImplicitColumnExplorer;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.drill.exec.store.parquet.columnreaders.ParquetRecordReader;
@@ -65,33 +61,14 @@ public class ParquetScanBatchCreator implements BatchCreator<ParquetRowGroupScan
   public ScanBatch getBatch(FragmentContext context, ParquetRowGroupScan rowGroupScan, List<RecordBatch> children)
       throws ExecutionSetupException {
     Preconditions.checkArgument(children.isEmpty());
-    String partitionDesignator = context.getOptions()
-      .getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL).string_val;
-    List<SchemaPath> columns = rowGroupScan.getColumns();
-    List<RecordReader> readers = Lists.newArrayList();
     OperatorContext oContext = context.newOperatorContext(rowGroupScan);
 
-    List<String[]> partitionColumns = Lists.newArrayList();
-    List<Integer> selectedPartitionColumns = Lists.newArrayList();
-    boolean selectAllColumns = AbstractRecordReader.isStarQuery(columns);
-
-    List<SchemaPath> newColumns = columns;
-    if (!selectAllColumns) {
-      newColumns = Lists.newArrayList();
-      Pattern pattern = Pattern.compile(String.format("%s[0-9]+", partitionDesignator));
-      for (SchemaPath column : columns) {
-        Matcher m = pattern.matcher(column.getAsUnescapedPath());
-        if (m.matches()) {
-          selectedPartitionColumns.add(Integer.parseInt(column.getAsUnescapedPath().toString().substring(partitionDesignator.length())));
-        } else {
-          newColumns.add(column);
-        }
-      }
-      final int id = rowGroupScan.getOperatorId();
-      // Create the new row group scan with the new columns
+    final ImplicitColumnExplorer columnExplorer = new ImplicitColumnExplorer(context, rowGroupScan.getColumns());
+
+    if (!columnExplorer.isSelectAllColumns()) {
       rowGroupScan = new ParquetRowGroupScan(rowGroupScan.getUserName(), rowGroupScan.getStorageEngine(),
-          rowGroupScan.getRowGroupReadEntries(), newColumns, rowGroupScan.getSelectionRoot());
-      rowGroupScan.setOperatorId(id);
+          rowGroupScan.getRowGroupReadEntries(), columnExplorer.getTableColumns(), rowGroupScan.getSelectionRoot());
+      rowGroupScan.setOperatorId(rowGroupScan.getOperatorId());
     }
 
     DrillFileSystem fs;
@@ -106,8 +83,10 @@ public class ParquetScanBatchCreator implements BatchCreator<ParquetRowGroupScan
     conf.setBoolean(ENABLE_TIME_READ_COUNTER, false);
 
     // keep footers in a map to avoid re-reading them
-    Map<String, ParquetMetadata> footers = new HashMap<String, ParquetMetadata>();
-    int numParts = 0;
+    Map<String, ParquetMetadata> footers = Maps.newHashMap();
+    List<RecordReader> readers = Lists.newArrayList();
+    List<Map<String, String>> implicitColumns = Lists.newArrayList();
+    Map<String, String> mapWithMaxColumns = Maps.newLinkedHashMap();
     for(RowGroupReadEntry e : rowGroupScan.getRowGroupReadEntries()){
       /*
       Here we could store a map from file names to footers, to prevent re-reading the footer for each row group in a file
@@ -118,7 +97,7 @@ public class ParquetScanBatchCreator implements BatchCreator<ParquetRowGroupScan
       */
       try {
         Stopwatch timer = Stopwatch.createUnstarted();
-        if ( ! footers.containsKey(e.getPath())){
+        if (!footers.containsKey(e.getPath())){
           timer.start();
           ParquetMetadata footer = ParquetFileReader.readFooter(conf, new Path(e.getPath()));
           long timeToRead = timer.elapsed(TimeUnit.MICROSECONDS);
@@ -138,37 +117,27 @@ public class ParquetScanBatchCreator implements BatchCreator<ParquetRowGroupScan
           );
         } else {
           ParquetMetadata footer = footers.get(e.getPath());
-          readers.add(new DrillParquetReader(context, footer, e, newColumns, fs));
+          readers.add(new DrillParquetReader(context, footer, e, columnExplorer.getTableColumns(), fs));
         }
-        if (rowGroupScan.getSelectionRoot() != null) {
-          String[] r = Path.getPathWithoutSchemeAndAuthority(new Path(rowGroupScan.getSelectionRoot())).toString().split("/");
-          String[] p = Path.getPathWithoutSchemeAndAuthority(new Path(e.getPath())).toString().split("/");
-          if (p.length > r.length) {
-            String[] q = ArrayUtils.subarray(p, r.length, p.length - 1);
-            partitionColumns.add(q);
-            numParts = Math.max(numParts, q.length);
-          } else {
-            partitionColumns.add(new String[] {});
-          }
-        } else {
-          partitionColumns.add(new String[] {});
+
+        Map<String, String> implicitValues = columnExplorer.populateImplicitColumns(e, rowGroupScan.getSelectionRoot());
+        implicitColumns.add(implicitValues);
+        if (implicitValues.size() > mapWithMaxColumns.size()) {
+          mapWithMaxColumns = implicitValues;
         }
+
       } catch (IOException e1) {
         throw new ExecutionSetupException(e1);
       }
     }
 
-    if (selectAllColumns) {
-      for (int i = 0; i < numParts; i++) {
-        selectedPartitionColumns.add(i);
-      }
+    // all readers should have the same number of implicit columns, add missing ones with value null
+    Map<String, String> diff = Maps.transformValues(mapWithMaxColumns, Functions.constant((String) null));
+    for (Map<String, String> map : implicitColumns) {
+      map.putAll(Maps.difference(map, diff).entriesOnlyOnRight());
     }
 
-    ScanBatch s =
-        new ScanBatch(rowGroupScan, context, oContext, readers.iterator(), partitionColumns, selectedPartitionColumns);
-
-
-    return s;
+    return new ScanBatch(rowGroupScan, context, oContext, readers.iterator(), implicitColumns);
   }
 
   private static boolean isComplex(ParquetMetadata footer) {
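
The footers map above is plain per-path memoization. A toy sketch of the same idea, with a String standing in for ParquetMetadata and a fake reader in place of ParquetFileReader.readFooter():

import java.util.HashMap;
import java.util.Map;

public class FooterCacheSketch {
  private final Map<String, String> footers = new HashMap<>();

  // Stand-in for the expensive ParquetFileReader.readFooter(conf, path) call.
  private String readFooter(String path) {
    System.out.println("reading footer for " + path);
    return "footer(" + path + ")";
  }

  public String getFooter(String path) {
    // Read each footer once; later row groups in the same file reuse it.
    if (!footers.containsKey(path)) {
      footers.put(path, readFooter(path));
    }
    return footers.get(path);
  }

  public static void main(String[] args) {
    FooterCacheSketch cache = new FooterCacheSketch();
    cache.getFooter("/data/t/a.parquet"); // reads
    cache.getFooter("/data/t/a.parquet"); // cache hit, no second read
  }
}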

http://git-wip-us.apache.org/repos/asf/drill/blob/1ddd0ce7/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
new file mode 100644
index 0000000..6900da9
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import com.google.common.base.Charsets;
+import com.google.common.io.Files;
+import org.apache.drill.BaseTestQuery;
+import org.apache.drill.exec.util.JsonStringArrayList;
+import org.apache.drill.exec.util.Text;
+import org.apache.hadoop.fs.Path;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import java.io.File;
+
+public class TestImplicitFileColumns extends BaseTestQuery {
+
+  public static final String MAIN = "main";
+  public static final String NESTED = "nested";
+  public static final String CSV = "csv";
+
+  private static final JsonStringArrayList<Text> mainColumnValues = new JsonStringArrayList<Text>() {{
+    add(new Text(MAIN));
+  }};
+  private static final JsonStringArrayList<Text> nestedColumnValues = new JsonStringArrayList<Text>() {{
+    add(new Text(NESTED));
+  }};
+
+  @Rule
+  public TemporaryFolder testFolder = new TemporaryFolder();
+
+  private File mainFile;
+  private File nestedFolder;
+  private File nestedFile;
+
+  @Before
+  public void setup() throws Exception {
+    mainFile = testFolder.newFile(MAIN + "." + CSV);
+    Files.write(MAIN, mainFile, Charsets.UTF_8);
+    nestedFolder = testFolder.newFolder(NESTED);
+    nestedFile = new File(nestedFolder, NESTED + "." + CSV);
+    Files.write(NESTED, nestedFile, Charsets.UTF_8);
+  }
+
+  @Test
+  public void testImplicitColumns() throws Exception {
+    testBuilder()
+        .sqlQuery("select *, filename, suffix, fqn, filepath from dfs.`" + testFolder.getRoot().getPath() + "` order by filename")
+        .ordered()
+        .baselineColumns("columns", "dir0", "filename", "suffix", "fqn", "filepath")
+        .baselineValues(mainColumnValues, null, mainFile.getName(), CSV, new Path(mainFile.getPath()).toString(), new Path(mainFile.getParent()).toString())
+        .baselineValues(nestedColumnValues, NESTED, nestedFile.getName(), CSV, new Path(nestedFile.getPath()).toString(), new Path(nestedFile.getParent()).toString())
+        .go();
+  }
+
+  @Test
+  public void testImplicitColumnInWhereClause() throws Exception {
+    testBuilder()
+        .sqlQuery("select * from dfs.`%s` where filename = '%s'", nestedFolder.getPath(), nestedFile.getName())
+        .unOrdered()
+        .baselineColumns("columns")
+        .baselineValues(nestedColumnValues)
+        .go();
+  }
+
+  @Test
+  public void testImplicitColumnAlone() throws Exception {
+    testBuilder()
+        .sqlQuery("select filename from dfs.`" + nestedFolder.getPath() + "`")
+        .unOrdered()
+        .baselineColumns("filename")
+        .baselineValues(nestedFile.getName())
+        .go();
+  }
+
+  @Test
+  public void testImplicitColumnWithTableColumns() throws Exception {
+    testBuilder()
+        .sqlQuery("select columns, filename from dfs.`" + nestedFolder.getPath() + "`")
+        .unOrdered()
+        .baselineColumns("columns", "filename")
+        .baselineValues(nestedColumnValues, nestedFile.getName())
+        .go();
+  }
+
+  @Test
+  public void testImplicitColumnsForParquet() throws Exception {
+    testBuilder()
+        .sqlQuery("select filename, suffix from cp.`tpch/region.parquet` limit 1")
+        .unOrdered()
+        .baselineColumns("filename", "suffix")
+        .baselineValues("region.parquet", "parquet")
+        .go();
+  }
+
+}
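
Outside the test harness, one way to exercise these columns end to end is through Drill's JDBC driver. A hypothetical snippet, assuming an embedded Drillbit (zk=local URL) and mirroring the Parquet test above:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class ImplicitColumnsJdbcSketch {
  public static void main(String[] args) throws Exception {
    // The classpath table and expected values come from testImplicitColumnsForParquet.
    try (Connection conn = DriverManager.getConnection("jdbc:drill:zk=local");
         Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery(
             "select filename, suffix from cp.`tpch/region.parquet` limit 1")) {
      while (rs.next()) {
        // Expected per the test: region.parquet | parquet
        System.out.println(rs.getString("filename") + " | " + rs.getString("suffix"));
      }
    }
  }
}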


[4/7] drill git commit: DRILL-4716: status.json doesn't work in drill ui

Posted by am...@apache.org.
DRILL-4716: status.json doesn't work in drill ui


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/23a46a09
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/23a46a09
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/23a46a09

Branch: refs/heads/1.7.0
Commit: 23a46a09f28cae4a6e94b633e84f256ec816ff9c
Parents: 7a8bb16
Author: Arina Ielchiieva <ar...@gmail.com>
Authored: Sun Jun 12 16:10:52 2016 +0000
Committer: Aman Sinha <as...@maprtech.com>
Committed: Wed Jun 22 15:15:47 2016 -0700

----------------------------------------------------------------------
 .../apache/drill/exec/server/rest/StatusResources.java   | 11 ++++++++++-
 exec/java-exec/src/main/resources/rest/status.ftl        |  3 +--
 2 files changed, 11 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/23a46a09/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java
index 439cb7f..d0007d3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java
@@ -33,6 +33,8 @@ import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.SecurityContext;
 import javax.xml.bind.annotation.XmlRootElement;
 
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.drill.exec.server.options.OptionValue;
 import org.apache.drill.exec.server.options.OptionValue.Kind;
 import org.apache.drill.exec.server.rest.DrillRestServer.UserAuthEnabled;
@@ -53,10 +55,17 @@ public class StatusResources {
   @Inject SecurityContext sc;
 
   @GET
+  @Path("/status.json")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Pair<String, String> getStatusJSON() {
+    return new ImmutablePair<>("status", "Running!");
+  }
+
+  @GET
   @Path("/status")
   @Produces(MediaType.TEXT_HTML)
   public Viewable getStatus() {
-    return ViewableWithPermissions.create(authEnabled.get(), "/rest/status.ftl", sc, "Running!");
+    return ViewableWithPermissions.create(authEnabled.get(), "/rest/status.ftl", sc, getStatusJSON());
   }
 
   @GET

http://git-wip-us.apache.org/repos/asf/drill/blob/23a46a09/exec/java-exec/src/main/resources/rest/status.ftl
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/resources/rest/status.ftl b/exec/java-exec/src/main/resources/rest/status.ftl
index cafa523..c5992fb 100644
--- a/exec/java-exec/src/main/resources/rest/status.ftl
+++ b/exec/java-exec/src/main/resources/rest/status.ftl
@@ -17,10 +17,9 @@
   <a href="/queries">back</a><br/>
   <div class="page-header">
     <div class="alert alert-success">
-      <strong>${model}</strong>
+      <strong>${model.getValue()}</strong>
     </div>
   </div>
-  <a href="/status/options"> System Options </a>
 </#macro>
 
 <@page_html/>
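
The fix is also a reusable pattern: serve the model from a ".json" sibling path and let the HTML view reuse it, so both endpoints can never disagree. A minimal hypothetical JAX-RS resource in the same style (names are illustrative, not Drill's):

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

@Path("/")
public class PingResources {
  @GET
  @Path("/ping.json")
  @Produces(MediaType.APPLICATION_JSON)
  public Pair<String, String> getPingJSON() {
    return new ImmutablePair<>("ping", "pong");
  }

  @GET
  @Path("/ping")
  @Produces(MediaType.TEXT_HTML)
  public String getPing() {
    // The HTML view renders the same model the JSON endpoint returns.
    Pair<String, String> model = getPingJSON();
    return "<strong>" + model.getValue() + "</strong>";
  }
}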


[5/7] drill git commit: DRILL-2593: 500 error when crc for a query profile is out of sync

Posted by am...@apache.org.
DRILL-2593: 500 error when crc for a query profile is out of sync


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/4687a8b5
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/4687a8b5
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/4687a8b5

Branch: refs/heads/1.7.0
Commit: 4687a8b50c9cb032229c51084e56a5fab006ffc5
Parents: 23a46a0
Author: Arina Ielchiieva <ar...@gmail.com>
Authored: Sun Jun 12 13:48:54 2016 +0000
Committer: Aman Sinha <as...@maprtech.com>
Committed: Wed Jun 22 15:15:54 2016 -0700

----------------------------------------------------------------------
 .../server/rest/profile/ProfileResources.java   | 55 +++++++++++++-------
 .../src/main/resources/rest/profile/list.ftl    |  9 ++++
 2 files changed, 46 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/4687a8b5/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java
index 05441c0..d2e953d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java
@@ -18,9 +18,9 @@
 package org.apache.drill.exec.server.rest.profile;
 
 import java.text.SimpleDateFormat;
-import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Date;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
@@ -84,7 +84,7 @@ public class ProfileResources {
       this.time = new Date(time);
       this.foreman = foreman;
       this.location = "http://localhost:8047/profile/" + queryId + ".json";
-      this.query = query = query.substring(0,  Math.min(query.length(), 150));
+      this.query = query.substring(0,  Math.min(query.length(), 150));
       this.state = state;
       this.user = user;
     }
@@ -137,10 +137,12 @@ public class ProfileResources {
   public class QProfiles {
     private List<ProfileInfo> runningQueries;
     private List<ProfileInfo> finishedQueries;
+    private List<String> errors;
 
-    public QProfiles(List<ProfileInfo> runningQueries, List<ProfileInfo> finishedQueries) {
+    public QProfiles(List<ProfileInfo> runningQueries, List<ProfileInfo> finishedQueries, List<String> errors) {
       this.runningQueries = runningQueries;
       this.finishedQueries = finishedQueries;
+      this.errors = errors;
     }
 
     public List<ProfileInfo> getRunningQueries() {
@@ -150,6 +152,8 @@ public class ProfileResources {
     public List<ProfileInfo> getFinishedQueries() {
       return finishedQueries;
     }
+
+    public List<String> getErrors() { return errors; }
   }
 
   @GET
@@ -160,33 +164,48 @@ public class ProfileResources {
       final PersistentStore<QueryProfile> completed = getProvider().getOrCreateStore(QueryManager.QUERY_PROFILE);
       final TransientStore<QueryInfo> running = getCoordinator().getOrCreateTransientStore(QueryManager.RUNNING_QUERY_INFO);
 
+      final List<String> errors = Lists.newArrayList();
+
       final List<ProfileInfo> runningQueries = Lists.newArrayList();
 
-      for (final Map.Entry<String, QueryInfo> entry: Lists.newArrayList(running.entries())) {
-        final QueryInfo profile = entry.getValue();
-        if (principal.canManageProfileOf(profile.getUser())) {
-          runningQueries.add(new ProfileInfo(entry.getKey(), profile.getStart(), profile.getForeman().getAddress(),
-              profile.getQuery(), profile.getState().name(), profile.getUser()));
+      final Iterator<Map.Entry<String, QueryInfo>> runningEntries = running.entries();
+      while (runningEntries.hasNext()) {
+        try {
+          final Map.Entry<String, QueryInfo> runningEntry = runningEntries.next();
+          final QueryInfo profile = runningEntry.getValue();
+          if (principal.canManageProfileOf(profile.getUser())) {
+            runningQueries.add(new ProfileInfo(runningEntry.getKey(), profile.getStart(), profile.getForeman().getAddress(), profile.getQuery(), profile.getState().name(), profile.getUser()));
+          }
+        } catch (Exception e) {
+          errors.add(e.getMessage());
+          logger.error("Error getting running query info.", e);
         }
       }
 
       Collections.sort(runningQueries, Collections.reverseOrder());
 
-      List<ProfileInfo> finishedQueries = Lists.newArrayList();
-      for (Map.Entry<String, QueryProfile> entry : Lists.newArrayList(completed.getRange(0, MAX_PROFILES))) {
-        QueryProfile profile = entry.getValue();
-        if (principal.canManageProfileOf(profile.getUser())) {
-          finishedQueries.add(new ProfileInfo(entry.getKey(), profile.getStart(), profile.getForeman().getAddress(),
-              profile.getQuery(), profile.getState().name(), profile.getUser()));
+      final List<ProfileInfo> finishedQueries = Lists.newArrayList();
+
+      final Iterator<Map.Entry<String, QueryProfile>> range = completed.getRange(0, MAX_PROFILES);
+      while (range.hasNext()) {
+        try {
+          final Map.Entry<String, QueryProfile> profileEntry = range.next();
+          final QueryProfile profile = profileEntry.getValue();
+          if (principal.canManageProfileOf(profile.getUser())) {
+            finishedQueries.add(new ProfileInfo(profileEntry.getKey(), profile.getStart(), profile.getForeman().getAddress(), profile.getQuery(), profile.getState().name(), profile.getUser()));
+          }
+        } catch (Exception e) {
+          errors.add(e.getMessage());
+          logger.error("Error getting finished query profile.", e);
         }
       }
 
-      return new QProfiles(runningQueries, finishedQueries);
+      return new QProfiles(runningQueries, finishedQueries, errors);
     } catch (Exception e) {
-      logger.debug("Failed to get profiles from persistent or ephemeral store.");
-      return new QProfiles(new ArrayList<ProfileInfo>(), new ArrayList<ProfileInfo>());
+      throw UserException.resourceError(e)
+          .message("Failed to get profiles from persistent or ephemeral store.")
+          .build(logger);
     }
-
   }
 
   @GET

http://git-wip-us.apache.org/repos/asf/drill/blob/4687a8b5/exec/java-exec/src/main/resources/rest/profile/list.ftl
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/resources/rest/profile/list.ftl b/exec/java-exec/src/main/resources/rest/profile/list.ftl
index cf92ede..88d1407 100644
--- a/exec/java-exec/src/main/resources/rest/profile/list.ftl
+++ b/exec/java-exec/src/main/resources/rest/profile/list.ftl
@@ -17,6 +17,15 @@
   <a href="/queries">back</a><br/>
   <div class="page-header">
   </div>
+  <#if (model.getErrors()?size > 0) >
+    <div id="message" class="alert alert-danger alert-dismissable">
+        <button type="button" class="close" data-dismiss="alert" aria-hidden="true">&times;</button>
+        <strong>Failed to get profiles:</strong><br>
+        <#list model.getErrors() as error>
+          ${error}<br>
+        </#list>
+    </div>
+  </#if>
   <#if (model.getRunningQueries()?size > 0) >
     <h3>Running Queries</h3>
     <div class="table-responsive">
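
The shape of this fix generalizes: wrap each store entry in its own try/catch, collect the messages, and render them next to the partial listing instead of failing the whole page. A self-contained sketch of that error-isolation loop, with toy data:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class ResilientListingSketch {
  public static void main(String[] args) {
    List<String> results = new ArrayList<>();
    List<String> errors = new ArrayList<>();

    // Stand-in for the profile store entries; "boom" simulates a corrupt profile.
    Iterator<String> entries = Arrays.asList("ok-1", "boom", "ok-2").iterator();
    while (entries.hasNext()) {
      try {
        String entry = entries.next();
        if ("boom".equals(entry)) {
          throw new IllegalStateException("corrupt profile: " + entry);
        }
        results.add(entry);
      } catch (Exception e) {
        errors.add(e.getMessage()); // collect and keep going instead of failing the page
      }
    }
    // results=[ok-1, ok-2], errors=[corrupt profile: boom]
    System.out.println("results=" + results + ", errors=" + errors);
  }
}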


[7/7] drill git commit: Added Aman's GPG key.

Posted by am...@apache.org.
Added Aman's GPG key.


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/552129fa
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/552129fa
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/552129fa

Branch: refs/heads/1.7.0
Commit: 552129fa67102b1013aafafcd98ff585c9288005
Parents: 19cdcd9
Author: Aman Sinha <as...@maprtech.com>
Authored: Mon Jun 20 14:39:04 2016 -0700
Committer: Aman Sinha <as...@maprtech.com>
Committed: Thu Jun 23 10:47:48 2016 -0700

----------------------------------------------------------------------
 KEYS | 58 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 58 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/552129fa/KEYS
----------------------------------------------------------------------
diff --git a/KEYS b/KEYS
index 8fbd08e..caf8d74 100644
--- a/KEYS
+++ b/KEYS
@@ -420,3 +420,61 @@ Qz0+40iT9Pb/wH9uVjxfJXoFvt5XQbfGzPpz9f0XecReDmQJjMy1iKFHps/VYGv5
 K1/IVGDMGfv8Wr0=
 =+a82
 -----END PGP PUBLIC KEY BLOCK-----
+pub   4096R/7489BEF5 2016-06-14 [expires: 2020-06-14]
+uid       [ultimate] Aman Sinha <am...@apache.org>
+sig 3        7489BEF5 2016-06-14  Aman Sinha <am...@apache.org>
+sub   4096R/6925B2D4 2016-06-14 [expires: 2020-06-14]
+sig          7489BEF5 2016-06-14  Aman Sinha <am...@apache.org>
+
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Comment: GPGTools - https://gpgtools.org
+
+mQINBFdgZh0BEADUVC9Xi3KEuAQZ9qG/wWu7F/gfGyzJmtGp1EmARrZ12kADtCVf
+MMHJeHIGHepW7xnMj9RH/6uEEY4fksnoRwXhjzndSCC+V3wuwDbTqhn++YM0p693
+AfZgGm8pSLdUa+Z1gRH0/pAfrViLqRVTxf1cbElegICBMz7c7LA4Vh1S6ApZvPGa
+AxFZ1NujZ6fbP1+3LAIRWxORmn5IRkrsysmVsW6Us3X0h6/YI5Q4mI1hHWPorTN6
+y/7NsuHmhE5fdPDU+wWDz1DRDARlSIggpaCq2l7ibD18fqaV8qXJtziFTDPSYZXw
+725LQtKfkAs3RVgtlepnQcjA1yaorInSRjYLy1l8Kti2xPUovfNUbf0ZR/752FJi
+usIiPJdMGJJ3wrXEg+O0d1+KiAm2g0fQUJ2Xa4ObTNgwbjn+pYaH30pJfc2BOSMa
+IUONwNAEGFMrp3yidp5f03Lsbvrph71m1R2nwEyrZ28VVt7tWLUk3T+WOG3GG9VU
+k9xovH6MNqjSEPAlqN4w7bTJDvtQ5jXZP2HCaFzILBPULlNaRlxvFsGdmCKWSf/a
+xpYPN1nvY+UzobgogVSPzPaQVJO9ZiBYM7rjfzGXqFftQsjbA+0//j7Wptid/roh
+/L5ud1VQZPQ5n7uOfct1/4gk91jYYqs2lq1qUhOXAVoqkWGgsAnfcC7unQARAQAB
+tCFBbWFuIFNpbmhhIDxhbWFuc2luaGFAYXBhY2hlLm9yZz6JAj0EEwEKACcFAldg
+Zh0CGwMFCQeGH4AFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQytLn6HSJvvWA
+Qg//cBYDCNAmVMuLx/5bGS3PJmBmKhWXxwdnRVA9SUYsei5fLx7JIw4pbQq1LTd1
+qz9XbXVlhKoarNqFEsqY2c8BOLZiQn+DJrM6vXCGAM6c7z/4PnrjoYmKcHKUqhWW
++/SLn6sR0PNHBOqLU8chrstViCL/ZlQ69SjIJQgM9PiAk6IxNK/1MF9L74qtdIaa
+qpmq3HB/3y8Ycw4d9cx9inyxFl676x/TduZqcGAyxe/7Dl0PIWjrD9pTt4/yvQXD
+1QpgydhCqDOssUUUz0m6wikxIfw6cmfkJrhNarfJXm0h7bmMWfxs6vRTMIq57duF
+xcP/49S1nySEoDL+bLqgTO4n5kE5WPeJUnWEQKySjrUvlqEUKOs/8rsxBsqMAxfw
+MeQwYWuAS95A8C0P87GZmKPQBmspTo7uwwk/LHyOfk2qdlUKW0/rs4b2PXhU5p7d
+iRO4Z8QLjqzXDTOPIFkAyAYJAT5VkEavtOkhNq+zf+QRKxtUSBgupoC21+Wo3nEu
+fHv6ViY1O4mGTn0IUdqdGqb5YlxTG9i4nfmFZxZIVqvfc0BDDn2P4jl/M9b0yR4U
+ejiDGOQ8nytXmUYJyukqu6vbltAxQaRiJBCE2bRauF3LQ0TclNP2DNkSVT1nEBxx
+V2sFTditHHAl1w/eFlVLSXtUm6KD1GANGKZkrnwT9jTuWWu5Ag0EV2BmHQEQAMc8
+7DX/sLQdOewZLtxOmobPc2InfwTjV8AyQ95iVxePl8pCTwTjn6SH52mK88IAm5ur
+kH/a696bave77xJGxj7XdH5iamBmHPWLQs0LacTM/GRuPR3dS1VD2NgEwL2Vuqiw
+D+B03SaOyhTj/3aMiiW/Hjkn1a7BUris7J4JgDPGRlbYWjh7PZjHvT1EGMwJmadB
+/VRShK0vkER9KhWcnFnRDsAW0C2iyAeaDcO1WfNzP0eseHpIvAi/Oh0yc8jjhmvg
+Y6G5F46jnbYRxXdQlPtp/OO/BRgs4NPguYEraAsVmhYZRz4kfbcbJSbLcp1sSB//
+mU9dpNO6LkkjSGM9xXzVSgyAz0g9B0pANQGwicCpIGE7Uxlm4vSGcBBUp97SX1eb
+MwEcAd+ATDQiaUH6evtsXNf5jdMLRd790sOCdet+hcwSWXT1OFQjm1WF0T7Nal2c
+64nm+K2fMXP3+5cXc/Giwa4nsVpnuO4OGdVIo/rl9lT5Igm7go3GK4XLs19+J+t6
+lvkxhZGgrLVEXoQEJjZ6bbZriWAdB4YBq0wNFJLuvHTPiHt5L7K9rJ0C+6ZgxE33
+CfOjLAAmG2aVeu6Ot024uxWAWz8yqNAbfnLv8fPijaRW3ZMAwS23znBfKX6KnFiV
+cGHhTYu4MfX5UCjupR3u3Bbm6bKOfYFSg5xaLqVpABEBAAGJAiUEGAEKAA8FAldg
+Zh0CGwwFCQeGH4AACgkQytLn6HSJvvUQcQ/7B1gCL3OmeznU/HnAogm2w5gtr5Ch
+DX2CWFdCkfNBINTUX09BtN/Omwc4CPVYmiLOatB1hWaCSNzxH8hFwOqDVoZ13+Xj
+C3lcsnFj/pdOS49qk8kiyB/6d9altrpG8qwXXqG7PI6BbD7MYDRqXD7fbsXDyqyU
+OJjjrIfBmgsEVnVGKBxSolX5BZPVIPBXTYrhesIXafscZ2P6crsW1uFnSTlx29Vc
+62NtXUW/8+AjHG+KRGlEL7++fka71Ux28d1yttcMOkUXIGxFhb1SU8CroI5VCSG4
+sIryOd/2lGISjBAgnxzaujp8aqRUr2xb6jam+pq48jjIiyJevvZGBuTIKKCAWSxz
+eEQVSAjquOA25bS2wXP56dS6Dg33BriSPtagnrpQWvBjpkt0kvoe4V9/qhJVJAyU
+NVtafW+/NkBfvshEMDf8KHKxttABpa3jRzecIk8L/tSIBqQuJJ9og4efZ2W5sGFO
+6FC70pT0UsUrFhDARxbAp8sICWutnIqilPFBr5DZdhwqODduhKCeMwEtYVvLwUWy
+7Yoi9fbl2xGUDruL8o2bCdQuft0vPjEctjoC8QtGQO7YDXExGYQLwEM/90mfi6V2
+04HOFZ/J3GmPcgS7SVMd1vnDKVKyKkI8Ns2UJ2ZKGWqWG6AY/ZKYF9d3XWd/OrBe
+U6G66UJWu+uWu7c=
+=V7Rj
+-----END PGP PUBLIC KEY BLOCK-----