Posted to commits@carbondata.apache.org by ra...@apache.org on 2016/08/13 14:40:34 UTC

[1/2] incubator-carbondata git commit: fix the bug of block prune

Repository: incubator-carbondata
Updated Branches:
  refs/heads/master d92aa9907 -> c2ee9cd45


fix the bug of block prune

fix review comments

fix review comments

fix review comments


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/d03af4a5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/d03af4a5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/d03af4a5

Branch: refs/heads/master
Commit: d03af4a5523ee7fc2ac27271ed1787a88861707c
Parents: d92aa99
Author: Zhangshunyu <zh...@huawei.com>
Authored: Wed Aug 10 11:09:20 2016 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Sat Aug 13 20:07:39 2016 +0530

----------------------------------------------------------------------
 .../org/carbondata/scan/filter/FilterUtil.java  | 103 +++++++++++------
 .../filter/resolver/AndFilterResolverImpl.java  |  17 +--
 .../resolver/ConditionalFilterResolverImpl.java |  16 ++-
 .../filter/resolver/FilterResolverIntf.java     |   7 +-
 .../resolver/LogicalFilterResolverImpl.java     |  14 ++-
 .../resolver/RestructureFilterResolverImpl.java |   6 +-
 .../RowLevelRangeFilterResolverImpl.java        |  25 ++--
 .../blockprune/BlockPruneQueryTestCase.scala    | 115 +++++++++++++++++++
 8 files changed, 232 insertions(+), 71 deletions(-)
----------------------------------------------------------------------
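
The heart of the change is in FilterUtil: every resolved filter column now records its own
candidate start/end key array into startKeyList/endKeyList, and the final range used for
block pruning is built column by column from those candidates (the maximum per column for
the end key, the minimum minus one per column for the start key, so a block whose start key
sits just below the filter value is not pruned away). A minimal, self-contained sketch of
that aggregation follows; the class and method names are hypothetical, and the sort-then-pick
step from the patch is simplified to a direct min/max:

    import java.util.Arrays;
    import java.util.List;

    public final class KeyRangeSketch {

      // End key: for every column ordinal keep the largest candidate value.
      static long[] mergeEndKeys(List<long[]> endKeyList, int keyLength) {
        long[] endKey = new long[keyLength];
        for (int i = 0; i < keyLength; i++) {
          long max = Long.MIN_VALUE;
          for (long[] candidate : endKeyList) {
            max = Math.max(max, candidate[i]);
          }
          endKey[i] = max;
        }
        return endKey;
      }

      // Start key: keep the smallest candidate per column, minus one, so a block
      // holding e.g. values 5 and 6 (start key 5) still survives a filter on 6.
      static long[] mergeStartKeys(List<long[]> startKeyList, int keyLength) {
        long[] startKey = new long[keyLength];
        for (int i = 0; i < keyLength; i++) {
          long min = Long.MAX_VALUE;
          for (long[] candidate : startKeyList) {
            min = Math.min(min, candidate[i]);
          }
          startKey[i] = min - 1;
        }
        return startKey;
      }

      public static void main(String[] args) {
        List<long[]> starts = Arrays.asList(new long[]{6, 1}, new long[]{1, 3});
        List<long[]> ends = Arrays.asList(new long[]{6, 9}, new long[]{8, 3});
        System.out.println(Arrays.toString(mergeStartKeys(starts, 2)));  // [0, 0]
        System.out.println(Arrays.toString(mergeEndKeys(ends, 2)));      // [8, 9]
      }
    }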


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/d03af4a5/core/src/main/java/org/carbondata/scan/filter/FilterUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/FilterUtil.java b/core/src/main/java/org/carbondata/scan/filter/FilterUtil.java
index 23244f8..e172d25 100644
--- a/core/src/main/java/org/carbondata/scan/filter/FilterUtil.java
+++ b/core/src/main/java/org/carbondata/scan/filter/FilterUtil.java
@@ -669,27 +669,13 @@ public final class FilterUtil {
    * @param segmentProperties
    * @return long[] start key
    */
-  public static long[] getStartKey(DimColumnResolvedFilterInfo dimColResolvedFilterInfo,
-      SegmentProperties segmentProperties, long[] startKey) {
-    Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter =
-        dimColResolvedFilterInfo.getDimensionResolvedFilterInstance();
-    for (Map.Entry<CarbonDimension, List<DimColumnFilterInfo>> entry : dimensionFilter.entrySet()) {
-      List<DimColumnFilterInfo> values = entry.getValue();
-      if (null == values || !entry.getKey().hasEncoding(Encoding.DICTIONARY)) {
-        continue;
-      }
-      boolean isExcludePresent = false;
-      for (DimColumnFilterInfo info : values) {
-        if (!info.isIncludeFilter()) {
-          isExcludePresent = true;
-        }
-      }
-      if (isExcludePresent) {
-        continue;
-      }
-      getStartKeyBasedOnFilterResoverInfo(dimensionFilter, startKey);
+  public static void getStartKey(Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter,
+      long[] startKey, List<long[]> startKeyList) throws QueryExecutionException {
+    for(int i = 0; i < startKey.length; i++) {
+      // The min surrogate key is 1, set it as the initial value for the start key of each column level
+      startKey[i] = 1;
     }
-    return startKey;
+    getStartKeyWithFilter(dimensionFilter, startKey, startKeyList);
   }
 
   /**
@@ -710,7 +696,7 @@ public final class FilterUtil {
    * @return
    */
   public static void getStartKeyForNoDictionaryDimension(
-      DimColumnResolvedFilterInfo dimColResolvedFilterInfo, SegmentProperties segmentProperties,
+      DimColumnResolvedFilterInfo dimColResolvedFilterInfo,
       SortedMap<Integer, byte[]> setOfStartKeyByteArray) {
     Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter =
         dimColResolvedFilterInfo.getDimensionResolvedFilterInstance();
@@ -765,7 +751,7 @@ public final class FilterUtil {
    * @return end key array
    */
   public static void getEndKeyForNoDictionaryDimension(
-      DimColumnResolvedFilterInfo dimColResolvedFilterInfo, SegmentProperties segmentProperties,
+      DimColumnResolvedFilterInfo dimColResolvedFilterInfo,
       SortedMap<Integer, byte[]> setOfEndKeyByteArray) {
 
     Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter =
@@ -832,11 +818,12 @@ public final class FilterUtil {
    * @param dimensionFilter
    * @param startKey
    */
-  private static void getStartKeyBasedOnFilterResoverInfo(
-      Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter, long[] startKey) {
+  private static void getStartKeyWithFilter(
+      Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter, long[] startKey,
+      List<long[]> startKeyList) {
     for (Map.Entry<CarbonDimension, List<DimColumnFilterInfo>> entry : dimensionFilter.entrySet()) {
       List<DimColumnFilterInfo> values = entry.getValue();
-      if (null == values) {
+      if (null == values || !entry.getKey().hasEncoding(Encoding.DICTIONARY)) {
         continue;
       }
       boolean isExcludePresent = false;
@@ -853,12 +840,15 @@ public final class FilterUtil {
           startKey[entry.getKey().getKeyOrdinal()] = info.getFilterList().get(0);
         }
       }
+      long[] newStartKey = new long[startKey.length];
+      System.arraycopy(startKey, 0, newStartKey, 0, startKey.length);
+      startKeyList.add(newStartKey);
     }
   }
 
   public static void getEndKey(Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter,
-      AbsoluteTableIdentifier tableIdentifier, long[] endKey, SegmentProperties segmentProperties)
-      throws QueryExecutionException {
+      AbsoluteTableIdentifier tableIdentifier, long[] endKey, SegmentProperties segmentProperties,
+      List<long[]> endKeyList) throws QueryExecutionException {
 
     List<CarbonDimension> updatedDimListBasedOnKeyGenerator =
         getCarbonDimsMappedToKeyGenerator(segmentProperties.getDimensions());
@@ -866,7 +856,7 @@ public final class FilterUtil {
       endKey[i] = getMaxValue(tableIdentifier, updatedDimListBasedOnKeyGenerator.get(i),
           segmentProperties.getDimColumnsCardinality());
     }
-    getEndKeyWithFilter(dimensionFilter, endKey);
+    getEndKeyWithFilter(dimensionFilter, endKey, endKeyList);
 
   }
 
@@ -885,7 +875,8 @@ public final class FilterUtil {
   }
 
   private static void getEndKeyWithFilter(
-      Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter, long[] endKey) {
+      Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionFilter, long[] endKey,
+      List<long[]> endKeyList) {
     for (Map.Entry<CarbonDimension, List<DimColumnFilterInfo>> entry : dimensionFilter.entrySet()) {
       List<DimColumnFilterInfo> values = entry.getValue();
       if (null == values || !entry.getKey().hasEncoding(Encoding.DICTIONARY)) {
@@ -908,6 +899,9 @@ public final class FilterUtil {
               info.getFilterList().get(info.getFilterList().size() - 1);
         }
       }
+      long[] newEndKey = new long[endKey.length];
+      System.arraycopy(endKey, 0, newEndKey, 0, endKey.length);
+      endKeyList.add(newEndKey);
     }
 
   }
@@ -1157,8 +1151,42 @@ public final class FilterUtil {
     SortedMap<Integer, byte[]> setOfEndKeyByteArray = new TreeMap<Integer, byte[]>();
     SortedMap<Integer, byte[]> defaultStartValues = new TreeMap<Integer, byte[]>();
     SortedMap<Integer, byte[]> defaultEndValues = new TreeMap<Integer, byte[]>();
+    List<long[]> startKeyList = new ArrayList<long[]>();
+    List<long[]> endKeyList = new ArrayList<long[]>();
     traverseResolverTreeAndPopulateStartAndEndKeys(filterResolver, tableIdentifier,
-        segmentProperties, startKey, setOfStartKeyByteArray, endKey, setOfEndKeyByteArray);
+        segmentProperties, startKey, setOfStartKeyByteArray, endKey, setOfEndKeyByteArray,
+        startKeyList, endKeyList);
+    if (endKeyList.size() > 0) {
+      //get the new end key from list
+      for (int i = 0; i < endKey.length; i++) {
+        long[] endkeyColumnLevel = new long[endKeyList.size()];
+        int j = 0;
+        for (long[] oneEndKey : endKeyList) {
+          //get each column level end key
+          endkeyColumnLevel[j++] = oneEndKey[i];
+        }
+        Arrays.sort(endkeyColumnLevel);
+        // get the max one as end of this column level
+        endKey[i] = endkeyColumnLevel[endkeyColumnLevel.length - 1];
+      }
+    }
+
+    if (startKeyList.size() > 0) {
+      //get the new start key from list
+      for (int i = 0; i < startKey.length; i++) {
+        long[] startkeyColumnLevel = new long[startKeyList.size()];
+        int j = 0;
+        for (long[] oneStartKey : startKeyList) {
+          //get each column level start key
+          startkeyColumnLevel[j++] = oneStartKey[i];
+        }
+        Arrays.sort(startkeyColumnLevel);
+        // get the min - 1 as the start of this column level; for example, if a block contains 5 and 6
+        // and the filter is 6, that block's start key is 5, so without the -1 this block would be missed.
+        startKey[i] = startkeyColumnLevel[0] - 1;
+      }
+    }
+
     fillDefaultStartValue(defaultStartValues, segmentProperties);
     fillDefaultEndValue(defaultEndValues, segmentProperties);
     fillNullValuesStartIndexWithDefaultKeys(setOfStartKeyByteArray, segmentProperties);
@@ -1283,18 +1311,21 @@ public final class FilterUtil {
       FilterResolverIntf filterResolverTree, AbsoluteTableIdentifier tableIdentifier,
       SegmentProperties segmentProperties, long[] startKeys,
       SortedMap<Integer, byte[]> setOfStartKeyByteArray, long[] endKeys,
-      SortedMap<Integer, byte[]> setOfEndKeyByteArray) throws QueryExecutionException {
+      SortedMap<Integer, byte[]> setOfEndKeyByteArray, List<long[]> startKeyList,
+      List<long[]> endKeyList) throws QueryExecutionException {
     if (null == filterResolverTree) {
       return;
     }
     traverseResolverTreeAndPopulateStartAndEndKeys(filterResolverTree.getLeft(), tableIdentifier,
-        segmentProperties, startKeys, setOfStartKeyByteArray, endKeys, setOfEndKeyByteArray);
-
-    filterResolverTree.getStartKey(segmentProperties, startKeys, setOfStartKeyByteArray);
-    filterResolverTree.getEndKey(segmentProperties, tableIdentifier, endKeys, setOfEndKeyByteArray);
+        segmentProperties, startKeys, setOfStartKeyByteArray, endKeys, setOfEndKeyByteArray,
+        startKeyList, endKeyList);
+    filterResolverTree.getStartKey(startKeys, setOfStartKeyByteArray, startKeyList);
+    filterResolverTree.getEndKey(segmentProperties, tableIdentifier, endKeys, setOfEndKeyByteArray,
+        endKeyList);
 
     traverseResolverTreeAndPopulateStartAndEndKeys(filterResolverTree.getRight(), tableIdentifier,
-        segmentProperties, startKeys, setOfStartKeyByteArray, endKeys, setOfEndKeyByteArray);
+        segmentProperties, startKeys, setOfStartKeyByteArray, endKeys, setOfEndKeyByteArray,
+        startKeyList, endKeyList);
   }
 
   /**

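For reference, the resolver tree is still walked in order (left subtree, node, right subtree);
the only difference after this change is that the per-node key lists ride along with the
traversal. A compilable sketch of that pattern, with a hypothetical stripped-down Resolver
interface standing in for FilterResolverIntf:

    import java.util.List;

    public class ResolverTraversalSketch {

      // Hypothetical, simplified stand-in for FilterResolverIntf.
      interface Resolver {
        Resolver getLeft();
        Resolver getRight();
        void collectKeys(List<long[]> startKeyList, List<long[]> endKeyList);
      }

      // In-order walk mirroring traverseResolverTreeAndPopulateStartAndEndKeys:
      // every node appends its candidate start/end keys to the shared lists, so
      // AND/OR sub-filters on different columns each contribute their own range.
      static void traverse(Resolver node, List<long[]> startKeyList, List<long[]> endKeyList) {
        if (node == null) {
          return;
        }
        traverse(node.getLeft(), startKeyList, endKeyList);
        node.collectKeys(startKeyList, endKeyList);
        traverse(node.getRight(), startKeyList, endKeyList);
      }
    }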
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/d03af4a5/core/src/main/java/org/carbondata/scan/filter/resolver/AndFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/resolver/AndFilterResolverImpl.java b/core/src/main/java/org/carbondata/scan/filter/resolver/AndFilterResolverImpl.java
index 1be7595..3485bb8 100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/AndFilterResolverImpl.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/AndFilterResolverImpl.java
@@ -18,6 +18,7 @@
  */
 package org.carbondata.scan.filter.resolver;
 
+import java.util.List;
 import java.util.SortedMap;
 
 import org.carbondata.core.carbon.AbsoluteTableIdentifier;
@@ -38,16 +39,18 @@ public class AndFilterResolverImpl extends LogicalFilterResolverImpl {
     super(leftEvalutor, rightEvalutor, expression);
   }
 
-  @Override public void getStartKey(SegmentProperties segmentProperties, long[] startKeys,
-      SortedMap<Integer, byte[]> noDicStartKeys) {
-    leftEvalutor.getStartKey(segmentProperties, startKeys, noDicStartKeys);
-    rightEvalutor.getStartKey(segmentProperties, startKeys, noDicStartKeys);
+  @Override public void getStartKey(long[] startKeys,
+      SortedMap<Integer, byte[]> noDicStartKeys, List<long[]> startKeyList)
+      throws QueryExecutionException {
+    leftEvalutor.getStartKey(startKeys, noDicStartKeys, startKeyList);
+    rightEvalutor.getStartKey(startKeys, noDicStartKeys, startKeyList);
   }
 
   @Override public void getEndKey(SegmentProperties segmentProperties,
       AbsoluteTableIdentifier tableIdentifier, long[] endKeys,
-      SortedMap<Integer, byte[]> noDicEndKeys) throws QueryExecutionException {
-    leftEvalutor.getEndKey(segmentProperties, tableIdentifier, endKeys, noDicEndKeys);
-    rightEvalutor.getEndKey(segmentProperties, tableIdentifier, endKeys, noDicEndKeys);
+      SortedMap<Integer, byte[]> noDicEndKeys, List<long[]> endKeyList)
+      throws QueryExecutionException {
+    leftEvalutor.getEndKey(segmentProperties, tableIdentifier, endKeys, noDicEndKeys, endKeyList);
+    rightEvalutor.getEndKey(segmentProperties, tableIdentifier, endKeys, noDicEndKeys, endKeyList);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/d03af4a5/core/src/main/java/org/carbondata/scan/filter/resolver/ConditionalFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/resolver/ConditionalFilterResolverImpl.java b/core/src/main/java/org/carbondata/scan/filter/resolver/ConditionalFilterResolverImpl.java
index ff8d531..e943ff1 100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/ConditionalFilterResolverImpl.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/ConditionalFilterResolverImpl.java
@@ -186,10 +186,13 @@ public class ConditionalFilterResolverImpl implements FilterResolverIntf {
   /**
    * method will calculates the start key based on the filter surrogates
    */
-  public void getStartKey(SegmentProperties segmentProperties, long[] startKey,
-      SortedMap<Integer, byte[]> setOfStartKeyByteArray) {
+  public void getStartKey(long[] startKey,
+      SortedMap<Integer, byte[]> setOfStartKeyByteArray, List<long[]> startKeyList)
+      throws QueryExecutionException {
     if (null == dimColResolvedFilterInfo.getStarIndexKey()) {
-      FilterUtil.getStartKeyForNoDictionaryDimension(dimColResolvedFilterInfo, segmentProperties,
+      FilterUtil.getStartKey(dimColResolvedFilterInfo.getDimensionResolvedFilterInstance(),
+          startKey, startKeyList);
+      FilterUtil.getStartKeyForNoDictionaryDimension(dimColResolvedFilterInfo,
           setOfStartKeyByteArray);
     }
   }
@@ -202,11 +205,12 @@ public class ConditionalFilterResolverImpl implements FilterResolverIntf {
    */
   @Override public void getEndKey(SegmentProperties segmentProperties,
       AbsoluteTableIdentifier absoluteTableIdentifier, long[] endKeys,
-      SortedMap<Integer, byte[]> setOfEndKeyByteArray) throws QueryExecutionException {
+      SortedMap<Integer, byte[]> setOfEndKeyByteArray, List<long[]> endKeyList)
+      throws QueryExecutionException {
     if (null == dimColResolvedFilterInfo.getEndIndexKey()) {
       FilterUtil.getEndKey(dimColResolvedFilterInfo.getDimensionResolvedFilterInstance(),
-          absoluteTableIdentifier, endKeys, segmentProperties);
-      FilterUtil.getEndKeyForNoDictionaryDimension(dimColResolvedFilterInfo, segmentProperties,
+          absoluteTableIdentifier, endKeys, segmentProperties, endKeyList);
+      FilterUtil.getEndKeyForNoDictionaryDimension(dimColResolvedFilterInfo,
           setOfEndKeyByteArray);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/d03af4a5/core/src/main/java/org/carbondata/scan/filter/resolver/FilterResolverIntf.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/resolver/FilterResolverIntf.java b/core/src/main/java/org/carbondata/scan/filter/resolver/FilterResolverIntf.java
index 4981126..e8bb24d 100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/FilterResolverIntf.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/FilterResolverIntf.java
@@ -19,6 +19,7 @@
 package org.carbondata.scan.filter.resolver;
 
 import java.io.Serializable;
+import java.util.List;
 import java.util.SortedMap;
 
 import org.carbondata.core.carbon.AbsoluteTableIdentifier;
@@ -72,8 +73,8 @@ public interface FilterResolverIntf extends Serializable {
    * @param startKey
    * @param setOfStartKeyByteArray
    */
-  void getStartKey(SegmentProperties segmentProperties, long[] startKey,
-      SortedMap<Integer, byte[]> setOfStartKeyByteArray);
+  void getStartKey(long[] startKey, SortedMap<Integer, byte[]> setOfStartKeyByteArray,
+      List<long[]> startKeyList) throws QueryExecutionException;
 
   /**
    * API will read the end key based on the max surrogate of
@@ -85,7 +86,7 @@ public interface FilterResolverIntf extends Serializable {
    * @throws QueryExecutionException
    */
   void getEndKey(SegmentProperties segmentProperties, AbsoluteTableIdentifier tableIdentifier,
-      long[] endKeys, SortedMap<Integer, byte[]> setOfEndKeyByteArray)
+      long[] endKeys, SortedMap<Integer, byte[]> setOfEndKeyByteArray, List<long[]> endKeyList)
       throws QueryExecutionException;
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/d03af4a5/core/src/main/java/org/carbondata/scan/filter/resolver/LogicalFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/resolver/LogicalFilterResolverImpl.java b/core/src/main/java/org/carbondata/scan/filter/resolver/LogicalFilterResolverImpl.java
index 8b18a21..31c4be6 100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/LogicalFilterResolverImpl.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/LogicalFilterResolverImpl.java
@@ -19,6 +19,7 @@
 
 package org.carbondata.scan.filter.resolver;
 
+import java.util.List;
 import java.util.SortedMap;
 
 import org.carbondata.core.carbon.AbsoluteTableIdentifier;
@@ -86,17 +87,20 @@ public class LogicalFilterResolverImpl implements FilterResolverIntf {
     return null;
   }
 
-  @Override public void getStartKey(SegmentProperties segmentProperties, long[] startKey,
-      SortedMap<Integer, byte[]> setOfStartKeyByteArray) {
+  @Override public void getStartKey(long[] startKey,
+      SortedMap<Integer, byte[]> setOfStartKeyByteArray, List<long[]> startKeyList)
+      throws QueryExecutionException {
 
   }
 
   @Override public void getEndKey(SegmentProperties segmentProperties,
       AbsoluteTableIdentifier tableIdentifier, long[] endKeys,
-      SortedMap<Integer, byte[]> setOfEndKeyByteArray) throws QueryExecutionException {
-  }
+      SortedMap<Integer, byte[]> setOfEndKeyByteArray, List<long[]> endKeyList)
+      throws QueryExecutionException {
+
+    }
 
-  @Override public FilterExecuterType getFilterExecuterType() {
+    @Override public FilterExecuterType getFilterExecuterType() {
     switch (filterExpressionType) {
       case OR:
         return FilterExecuterType.OR;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/d03af4a5/core/src/main/java/org/carbondata/scan/filter/resolver/RestructureFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/resolver/RestructureFilterResolverImpl.java b/core/src/main/java/org/carbondata/scan/filter/resolver/RestructureFilterResolverImpl.java
index da71c22..6335377 100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/RestructureFilterResolverImpl.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/RestructureFilterResolverImpl.java
@@ -174,8 +174,8 @@ public class RestructureFilterResolverImpl implements FilterResolverIntf {
    * For restructure resolver no implementation is required for getting
    * the start key since it already has default values
    */
-  @Override public void getStartKey(SegmentProperties segmentProperties, long[] startKeys,
-      SortedMap<Integer, byte[]> noDicStartKeys) {
+  @Override public void getStartKey(long[] startKeys,
+      SortedMap<Integer, byte[]> noDicStartKeys, List<long[]> startKeyList) {
 
   }
 
@@ -187,7 +187,7 @@ public class RestructureFilterResolverImpl implements FilterResolverIntf {
    */
   @Override public void getEndKey(SegmentProperties segmentProperties,
       AbsoluteTableIdentifier tableIdentifier, long[] endKeys,
-      SortedMap<Integer, byte[]> noDicEndKeys) {
+      SortedMap<Integer, byte[]> noDicEndKeys, List<long[]> endKeyList) {
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/d03af4a5/core/src/main/java/org/carbondata/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java b/core/src/main/java/org/carbondata/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
index cf2a998..f88040d 100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
@@ -99,13 +99,17 @@ public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverIm
    *
    * @return start IndexKey
    */
-  public void getStartKey(SegmentProperties segmentProperties, long[] startKey,
-      SortedMap<Integer, byte[]> noDictStartKeys) {
+  public void getStartKey(long[] startKey,
+      SortedMap<Integer, byte[]> noDictStartKeys, List<long[]> startKeyList) {
     if (null == dimColEvaluatorInfoList.get(0).getStarIndexKey()) {
-      FilterUtil.getStartKey(dimColEvaluatorInfoList.get(0), segmentProperties, startKey);
-      FilterUtil
-          .getStartKeyForNoDictionaryDimension(dimColEvaluatorInfoList.get(0), segmentProperties,
-              noDictStartKeys);
+      try {
+        FilterUtil.getStartKey(dimColEvaluatorInfoList.get(0).getDimensionResolvedFilterInstance(),
+            startKey, startKeyList);
+        FilterUtil
+            .getStartKeyForNoDictionaryDimension(dimColEvaluatorInfoList.get(0), noDictStartKeys);
+      } catch (QueryExecutionException e) {
+        LOGGER.error("Can not get the start key during block prune");
+      }
     }
   }
 
@@ -116,17 +120,16 @@ public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverIm
    */
   @Override public void getEndKey(SegmentProperties segmentProperties,
       AbsoluteTableIdentifier absoluteTableIdentifier, long[] endKeys,
-      SortedMap<Integer, byte[]> noDicEndKeys) {
+      SortedMap<Integer, byte[]> noDicEndKeys, List<long[]> endKeyList) {
     if (null == dimColEvaluatorInfoList.get(0).getEndIndexKey()) {
       try {
         FilterUtil.getEndKey(dimColEvaluatorInfoList.get(0).getDimensionResolvedFilterInstance(),
-            absoluteTableIdentifier, endKeys, segmentProperties);
+            absoluteTableIdentifier, endKeys, segmentProperties, endKeyList);
         FilterUtil
-            .getEndKeyForNoDictionaryDimension(dimColEvaluatorInfoList.get(0), segmentProperties,
-                noDicEndKeys);
+            .getEndKeyForNoDictionaryDimension(dimColEvaluatorInfoList.get(0), noDicEndKeys);
       } catch (QueryExecutionException e) {
         // TODO Auto-generated catch block
-        e.printStackTrace();
+        LOGGER.error("Can not get the end key during block prune");
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/d03af4a5/integration/spark/src/test/scala/org/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
new file mode 100644
index 0000000..4455d62
--- /dev/null
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.carbondata.spark.testsuite.blockprune
+
+import java.io.{DataOutputStream, File}
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.common.util.CarbonHiveContext._
+import org.apache.spark.sql.common.util.QueryTest
+import org.carbondata.core.datastorage.store.impl.FileFactory
+import org.scalatest.BeforeAndAfterAll
+
+/**
+  * This class contains test cases for block prune query
+  */
+class BlockPruneQueryTestCase extends QueryTest with BeforeAndAfterAll {
+  def currentPath: String = new File(this.getClass.getResource("/").getPath + "/../../")
+    .getCanonicalPath
+  val outputPath = currentPath + "/src/test/resources/block_prune_test.csv"
+  override def beforeAll {
+    // Since the data needed for block prune is big, we need to create a temp data file
+    val testData: Array[String]= new Array[String](3);
+    testData(0) = "a"
+    testData(1) = "b"
+    testData(2) = "c"
+    var writer: DataOutputStream = null
+    try {
+      val fileType = FileFactory.getFileType(outputPath)
+      val file = FileFactory.getCarbonFile(outputPath, fileType)
+      if (!file.exists()) {
+        file.createNewFile()
+      }
+      writer = FileFactory.getDataOutputStream(outputPath, fileType)
+      for (i <- 0 to 2) {
+        for (j <- 0 to 240000) {
+          writer.writeBytes(testData(i) + "," + j + "\n")
+        }
+      }
+    } catch {
+      case ex: Exception =>
+        logError("Build test file for block prune failed" + ex)
+    } finally {
+      if (writer != null) {
+        try {
+          writer.close()
+        } catch {
+          case ex: Exception =>
+            logError("Close output stream catching exception:" + ex)
+        }
+      }
+    }
+
+    sql("DROP TABLE IF EXISTS blockprune")
+  }
+
+  test("test block prune query") {
+    sql(
+      """
+        CREATE TABLE IF NOT EXISTS blockprune (name string, id int)
+        STORED BY 'org.apache.carbondata.format'
+      """)
+    sql(
+        s"LOAD DATA LOCAL INPATH '$outputPath' INTO table blockprune options('FILEHEADER'='name,id')"
+      )
+    // data is in all 7 blocks
+    checkAnswer(
+      sql(
+        """
+          select name,count(name) as amount from blockprune
+          where name='c' or name='b' or name='a' group by name
+        """),
+      Seq(Row("a", 240001), Row("b", 240001), Row("c", 240001)))
+
+    // data only in middle 3/4/5 blocks
+    checkAnswer(
+      sql(
+        """
+          select name,count(name) as amount from blockprune
+          where name='b' group by name
+        """),
+      Seq(Row("b", 240001)))
+  }
+
+  override def afterAll {
+    // delete the temp data file
+    try {
+      val fileType = FileFactory.getFileType(outputPath)
+      val file = FileFactory.getCarbonFile(outputPath, fileType)
+      if (file.exists()) {
+        file.delete()
+      }
+    } catch {
+      case ex: Exception =>
+        logError("Delete temp test data file for block prune catching exception:" + ex)
+    }
+    sql("DROP TABLE IF EXISTS blockprune")
+  }
+
+}


[2/2] incubator-carbondata git commit: [CARBONDATA-154] Fix the block prune bug that causes wrong query results. This closes #70

Posted by ra...@apache.org.
[CARBONDATA-154] Fix the block prune bug that causes wrong query results. This closes #70


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/c2ee9cd4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/c2ee9cd4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/c2ee9cd4

Branch: refs/heads/master
Commit: c2ee9cd459c0c575703ee29f9f60b956fa97d8cf
Parents: d92aa99 d03af4a
Author: ravipesala <ra...@gmail.com>
Authored: Sat Aug 13 20:09:51 2016 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Sat Aug 13 20:09:51 2016 +0530

----------------------------------------------------------------------
 .../org/carbondata/scan/filter/FilterUtil.java  | 103 +++++++++++------
 .../filter/resolver/AndFilterResolverImpl.java  |  17 +--
 .../resolver/ConditionalFilterResolverImpl.java |  16 ++-
 .../filter/resolver/FilterResolverIntf.java     |   7 +-
 .../resolver/LogicalFilterResolverImpl.java     |  14 ++-
 .../resolver/RestructureFilterResolverImpl.java |   6 +-
 .../RowLevelRangeFilterResolverImpl.java        |  25 ++--
 .../blockprune/BlockPruneQueryTestCase.scala    | 115 +++++++++++++++++++
 8 files changed, 232 insertions(+), 71 deletions(-)
----------------------------------------------------------------------