Posted to commits@carbondata.apache.org by ra...@apache.org on 2018/09/18 11:31:33 UTC

[1/2] carbondata git commit: [CARBONDATA-2942] Add read and write support for writing min max based on configurable bytes count

Repository: carbondata
Updated Branches:
  refs/heads/master 6e9ba6d19 -> 04084c73f


http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/stream/StreamPruner.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/stream/StreamPruner.java b/core/src/main/java/org/apache/carbondata/core/stream/StreamPruner.java
index b40355b..178b9f1 100644
--- a/core/src/main/java/org/apache/carbondata/core/stream/StreamPruner.java
+++ b/core/src/main/java/org/apache/carbondata/core/stream/StreamPruner.java
@@ -100,7 +100,8 @@ public class StreamPruner {
     }
     byte[][] maxValue = streamFile.getMinMaxIndex().getMaxValues();
     byte[][] minValue = streamFile.getMinMaxIndex().getMinValues();
-    BitSet bitSet = filterExecuter.isScanRequired(maxValue, minValue);
+    BitSet bitSet = filterExecuter
+        .isScanRequired(maxValue, minValue, streamFile.getMinMaxIndex().getIsMinMaxSet());
     if (!bitSet.isEmpty()) {
       return true;
     } else {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
index 1143ed5..b1dd580 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
@@ -19,6 +19,7 @@ package org.apache.carbondata.core.util;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.BitSet;
 import java.util.List;
 import java.util.Map;
@@ -285,10 +286,21 @@ public abstract class AbstractDataFileFooterConverter {
     byte[][] currentMaxValue = blockletIndexList.get(0).getMinMaxIndex().getMaxValues().clone();
     byte[][] minValue = null;
     byte[][] maxValue = null;
+    boolean[] blockletMinMaxFlag = null;
+    // flag at block level
+    boolean[] blockMinMaxFlag = blockletIndexList.get(0).getMinMaxIndex().getIsMinMaxSet();
     for (int i = 1; i < blockletIndexList.size(); i++) {
       minValue = blockletIndexList.get(i).getMinMaxIndex().getMinValues();
       maxValue = blockletIndexList.get(i).getMinMaxIndex().getMaxValues();
+      blockletMinMaxFlag = blockletIndexList.get(i).getMinMaxIndex().getIsMinMaxSet();
       for (int j = 0; j < maxValue.length; j++) {
+        // can be null for stores < 1.5.0 version
+        if (null != blockletMinMaxFlag && !blockletMinMaxFlag[j]) {
+          blockMinMaxFlag[j] = blockletMinMaxFlag[j];
+          currentMaxValue[j] = new byte[0];
+          currentMinValue[j] = new byte[0];
+          continue;
+        }
         if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(currentMinValue[j], minValue[j]) > 0) {
           currentMinValue[j] = minValue[j].clone();
         }
@@ -297,10 +309,14 @@ public abstract class AbstractDataFileFooterConverter {
         }
       }
     }
-
+    if (null == blockMinMaxFlag) {
+      blockMinMaxFlag = new boolean[currentMaxValue.length];
+      Arrays.fill(blockMinMaxFlag, true);
+    }
     BlockletMinMaxIndex minMax = new BlockletMinMaxIndex();
     minMax.setMaxValues(currentMaxValue);
     minMax.setMinValues(currentMinValue);
+    minMax.setIsMinMaxSet(blockMinMaxFlag);
     blockletIndex.setMinMaxIndex(minMax);
     return blockletIndex;
   }
@@ -418,9 +434,19 @@ public abstract class AbstractDataFileFooterConverter {
         blockletIndexThrift.getB_tree_index();
     org.apache.carbondata.format.BlockletMinMaxIndex minMaxIndex =
         blockletIndexThrift.getMin_max_index();
+    List<Boolean> isMinMaxSet = null;
+    // Below logic is added to handle backward compatibility
+    if (minMaxIndex.isSetMin_max_presence()) {
+      isMinMaxSet = minMaxIndex.getMin_max_presence();
+    } else {
+      Boolean[] minMaxFlag = new Boolean[minMaxIndex.getMax_values().size()];
+      Arrays.fill(minMaxFlag, true);
+      isMinMaxSet = Arrays.asList(minMaxFlag);
+    }
     return new BlockletIndex(
         new BlockletBTreeIndex(btreeIndex.getStart_key(), btreeIndex.getEnd_key()),
-        new BlockletMinMaxIndex(minMaxIndex.getMin_values(), minMaxIndex.getMax_values()));
+        new BlockletMinMaxIndex(minMaxIndex.getMin_values(), minMaxIndex.getMax_values(),
+            isMinMaxSet));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/util/BlockletDataMapUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/BlockletDataMapUtil.java b/core/src/main/java/org/apache/carbondata/core/util/BlockletDataMapUtil.java
index 4a87e91..ac53b56 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/BlockletDataMapUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/BlockletDataMapUtil.java
@@ -48,6 +48,7 @@ import org.apache.carbondata.core.indexstore.blockletindex.BlockletDataMapDistri
 import org.apache.carbondata.core.indexstore.blockletindex.BlockletDataMapFactory;
 import org.apache.carbondata.core.indexstore.blockletindex.SegmentIndexFileStore;
 import org.apache.carbondata.core.metadata.blocklet.DataFileFooter;
+import org.apache.carbondata.core.metadata.blocklet.index.BlockletMinMaxIndex;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
@@ -485,4 +486,23 @@ public class BlockletDataMapUtil {
     }
     return false;
   }
+
+  /**
+   * Method to update the min max flag. For CACHE_LEVEL=BLOCK, if min/max is not written for a
+   * column in any of the blocklets, then the flag for that column will be false for the
+   * complete block
+   *
+   * @param minMaxIndex
+   * @param minMaxFlag
+   */
+  public static void updateMinMaxFlag(BlockletMinMaxIndex minMaxIndex, boolean[] minMaxFlag) {
+    boolean[] isMinMaxSet = minMaxIndex.getIsMinMaxSet();
+    if (null != isMinMaxSet) {
+      for (int i = 0; i < minMaxFlag.length; i++) {
+        if (!isMinMaxSet[i]) {
+          minMaxFlag[i] = isMinMaxSet[i];
+        }
+      }
+    }
+  }
 }
\ No newline at end of file
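
For context, here is a tiny standalone sketch (my own illustration, not code from this commit) of the block-level merge that updateMinMaxFlag above performs: the block flag for a column stays true only while every blocklet reports min/max for that column.

  public class MinMaxFlagMergeSketch {
    public static void main(String[] args) {
      boolean[] blockFlag = {true, true, true};
      boolean[][] blockletFlags = {{true, true, true}, {true, false, true}};
      for (boolean[] blockletFlag : blockletFlags) {
        for (int i = 0; i < blockFlag.length; i++) {
          // once any blocklet has no min/max for column i, the block-level flag becomes false
          blockFlag[i] = blockFlag[i] && blockletFlag[i];
        }
      }
      System.out.println(java.util.Arrays.toString(blockFlag)); // prints [true, false, true]
    }
  }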

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
index 4be4f78..0167c9a 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
@@ -19,12 +19,16 @@ package org.apache.carbondata.core.util;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datastore.blocklet.BlockletEncodedColumnPage;
 import org.apache.carbondata.core.datastore.blocklet.EncodedBlocklet;
 import org.apache.carbondata.core.datastore.compression.CompressorFactory;
 import org.apache.carbondata.core.datastore.page.encoding.EncodedColumnPage;
+import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
 import org.apache.carbondata.core.datastore.page.statistics.TablePageStatistics;
 import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
 import org.apache.carbondata.core.metadata.datatype.DataType;
@@ -52,6 +56,9 @@ import org.apache.carbondata.format.SegmentInfo;
  */
 public class CarbonMetadataUtil {
 
+  private static final LogService LOGGER =
+      LogServiceFactory.getLogService(CarbonMetadataUtil.class.getName());
+
   private CarbonMetadataUtil() {
   }
 
@@ -105,9 +112,16 @@ public class CarbonMetadataUtil {
     if (minMaxIndex == null) {
       return null;
     }
-
+    List<Boolean> isMinMaxSet = null;
+    if (minMaxIndex.isSetMin_max_presence()) {
+      isMinMaxSet = minMaxIndex.getMin_max_presence();
+    } else {
+      Boolean[] minMaxFlag = new Boolean[minMaxIndex.getMax_values().size()];
+      Arrays.fill(minMaxFlag, true);
+      isMinMaxSet = Arrays.asList(minMaxFlag);
+    }
     return new org.apache.carbondata.core.metadata.blocklet.index.BlockletMinMaxIndex(
-            minMaxIndex.getMin_values(), minMaxIndex.getMax_values());
+        minMaxIndex.getMin_values(), minMaxIndex.getMax_values(), isMinMaxSet);
   }
 
   /**
@@ -124,6 +138,7 @@ public class CarbonMetadataUtil {
     for (int i = 0; i < minMaxIndex.getMaxValues().length; i++) {
       blockletMinMaxIndex.addToMax_values(ByteBuffer.wrap(minMaxIndex.getMaxValues()[i]));
       blockletMinMaxIndex.addToMin_values(ByteBuffer.wrap(minMaxIndex.getMinValues()[i]));
+      blockletMinMaxIndex.addToMin_max_presence(minMaxIndex.getIsMinMaxSet()[i]);
     }
 
     return blockletMinMaxIndex;
@@ -177,20 +192,28 @@ public class CarbonMetadataUtil {
   public static BlockletIndex getBlockletIndex(EncodedBlocklet encodedBlocklet,
       List<CarbonMeasure> carbonMeasureList) {
     BlockletMinMaxIndex blockletMinMaxIndex = new BlockletMinMaxIndex();
-
+    // merge writeMinMax flag for all the dimensions
+    List<Boolean> writeMinMaxFlag =
+        mergeWriteMinMaxFlagForAllPages(blockletMinMaxIndex, encodedBlocklet);
     // Calculating min/max for every each column.
     TablePageStatistics stats =
         new TablePageStatistics(getEncodedColumnPages(encodedBlocklet, true, 0),
             getEncodedColumnPages(encodedBlocklet, false, 0));
     byte[][] minCol = stats.getDimensionMinValue().clone();
     byte[][] maxCol = stats.getDimensionMaxValue().clone();
-
     for (int pageIndex = 0; pageIndex < encodedBlocklet.getNumberOfPages(); pageIndex++) {
       stats = new TablePageStatistics(getEncodedColumnPages(encodedBlocklet, true, pageIndex),
           getEncodedColumnPages(encodedBlocklet, false, pageIndex));
       byte[][] columnMaxData = stats.getDimensionMaxValue();
       byte[][] columnMinData = stats.getDimensionMinValue();
       for (int i = 0; i < maxCol.length; i++) {
+        // if writeMinMaxFlag is set to false for the dimension at index i, then update the page
+        // and blocklet min/max with empty byte array
+        if (!writeMinMaxFlag.get(i)) {
+          maxCol[i] = new byte[0];
+          minCol[i] = new byte[0];
+          continue;
+        }
         if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(columnMaxData[i], maxCol[i]) > 0) {
           maxCol[i] = columnMaxData[i];
         }
@@ -250,6 +273,41 @@ public class CarbonMetadataUtil {
   }
 
   /**
+   * This method will combine the writeMinMax flag from all the pages. If any page for a given
+   * dimension has the writeMinMax flag set to false, then min/max for that dimension will not be
+   * written in any of the pages or in the metadata
+   *
+   * @param blockletMinMaxIndex
+   * @param encodedBlocklet
+   */
+  private static List<Boolean> mergeWriteMinMaxFlagForAllPages(
+      BlockletMinMaxIndex blockletMinMaxIndex, EncodedBlocklet encodedBlocklet) {
+    Boolean[] mergedWriteMinMaxFlag =
+        new Boolean[encodedBlocklet.getNumberOfDimension() + encodedBlocklet.getNumberOfMeasure()];
+    // set writeMinMax flag to true for all the columns by default and then update it if the
+    // stats object has this flag set to false
+    Arrays.fill(mergedWriteMinMaxFlag, true);
+    for (int i = 0; i < encodedBlocklet.getNumberOfDimension(); i++) {
+      for (int pageIndex = 0; pageIndex < encodedBlocklet.getNumberOfPages(); pageIndex++) {
+        EncodedColumnPage encodedColumnPage =
+            encodedBlocklet.getEncodedDimensionColumnPages().get(i).getEncodedColumnPageList()
+                .get(pageIndex);
+        SimpleStatsResult stats = encodedColumnPage.getStats();
+        if (!stats.writeMinMax()) {
+          mergedWriteMinMaxFlag[i] = stats.writeMinMax();
+          String columnName = encodedColumnPage.getActualPage().getColumnSpec().getFieldName();
+          LOGGER.info("Min Max writing of blocklet ignored for column with name " + columnName);
+          break;
+        }
+      }
+    }
+    List<Boolean> min_max_presence = Arrays.asList(mergedWriteMinMaxFlag);
+    blockletMinMaxIndex.setMin_max_presence(min_max_presence);
+    return min_max_presence;
+  }
+
+  /**
+   * Right now it is set to default values. We may use this in the future
    * set the compressor.
    * before 1.5.0, we set a enum 'compression_codec';
    * after 1.5.0, we use string 'compressor_name' instead

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
index 559320a..3438c4e 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
@@ -41,6 +41,7 @@ import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_DATA_FILE_VERSION;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_DATE_FORMAT;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_DYNAMIC_ALLOCATION_SCHEDULER_TIMEOUT;
+import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_PREFETCH_BUFFERSIZE;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_SCHEDULER_MIN_REGISTERED_RESOURCES_RATIO;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_SCHEDULER_MIN_REGISTERED_RESOURCES_RATIO_DEFAULT;
@@ -194,6 +195,9 @@ public final class CarbonProperties {
       case CARBON_LOAD_SORT_MEMORY_SPILL_PERCENTAGE:
         validateSortMemorySpillPercentage();
         break;
+      case CARBON_MINMAX_ALLOWED_BYTE_COUNT:
+        validateStringCharacterLimit();
+        break;
       // TODO : Validation for carbon.lock.type should be handled for addProperty flow
       default:
         // none
@@ -258,6 +262,7 @@ public final class CarbonProperties {
     validateSortStorageMemory();
     validateEnableQueryStatistics();
     validateSortMemorySpillPercentage();
+    validateStringCharacterLimit();
   }
 
   /**
@@ -1551,4 +1556,36 @@ public final class CarbonProperties {
           CarbonLoadOptionConstants.CARBON_LOAD_SORT_MEMORY_SPILL_PERCENTAGE_DEFAULT);
     }
   }
+
+  /**
+   * This method validates the allowed byte count limit for storing min/max for string/varchar type columns
+   */
+  private void validateStringCharacterLimit() {
+    int allowedCharactersLimit = 0;
+    try {
+      allowedCharactersLimit = Integer.parseInt(carbonProperties
+          .getProperty(CARBON_MINMAX_ALLOWED_BYTE_COUNT,
+              CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT_DEFAULT));
+      if (allowedCharactersLimit < CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT_MIN
+          || allowedCharactersLimit
+          > CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT_MAX) {
+        LOGGER.info("The min max byte limit for string type value \"" + allowedCharactersLimit
+            + "\" is invalid. Using the default value \""
+            + CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT_DEFAULT);
+        carbonProperties.setProperty(CARBON_MINMAX_ALLOWED_BYTE_COUNT,
+            CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT_DEFAULT);
+      } else {
+        LOGGER.info(
+            "Considered value for min max byte limit for string is: " + allowedCharactersLimit);
+        carbonProperties
+            .setProperty(CARBON_MINMAX_ALLOWED_BYTE_COUNT, allowedCharactersLimit + "");
+      }
+    } catch (NumberFormatException e) {
+      LOGGER.info("The min max byte limit for string type value \"" + allowedCharactersLimit
+          + "\" is invalid. Using the default value \""
+          + CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT_DEFAULT);
+      carbonProperties.setProperty(CARBON_MINMAX_ALLOWED_BYTE_COUNT,
+          CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT_DEFAULT);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletDataMap.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletDataMap.java b/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletDataMap.java
index 85de7c4..fee2e9d 100644
--- a/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletDataMap.java
+++ b/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletDataMap.java
@@ -35,7 +35,7 @@ public class TestBlockletDataMap extends AbstractDictionaryCacheTest {
 
     new MockUp<ImplicitIncludeFilterExecutorImpl>() {
       @Mock BitSet isFilterValuesPresentInBlockOrBlocklet(byte[][] maxValue, byte[][] minValue,
-          String uniqueBlockPath) {
+          String uniqueBlockPath, boolean[] isMinMaxSet) {
         BitSet bitSet = new BitSet(1);
         bitSet.set(8);
         return bitSet;
@@ -45,13 +45,14 @@ public class TestBlockletDataMap extends AbstractDictionaryCacheTest {
     BlockDataMap blockletDataMap = new BlockletDataMap();
     Method method = BlockDataMap.class
         .getDeclaredMethod("addBlockBasedOnMinMaxValue", FilterExecuter.class, byte[][].class,
-            byte[][].class, String.class, int.class);
+            byte[][].class, boolean[].class, String.class, int.class);
     method.setAccessible(true);
 
     byte[][] minValue = { ByteUtil.toBytes("sfds") };
     byte[][] maxValue = { ByteUtil.toBytes("resa") };
+    boolean[] minMaxFlag = new boolean[] {true};
     Object result = method
-        .invoke(blockletDataMap, implicitIncludeFilterExecutor, minValue, maxValue,
+        .invoke(blockletDataMap, implicitIncludeFilterExecutor, minValue, maxValue, minMaxFlag,
             "/opt/store/default/carbon_table/Fact/Part0/Segment_0/part-0-0_batchno0-0-1514989110586.carbondata",
             0);
     assert ((boolean) result);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java b/core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java
index 14cd57a..7aa2236 100644
--- a/core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java
@@ -172,10 +172,13 @@ public class CarbonMetadataUtilTest {
     List<ByteBuffer> maxList = new ArrayList<>();
     maxList.add(ByteBuffer.wrap(byteArr1));
 
+    List<Boolean> isMinMaxSet = new ArrayList<>();
+    isMinMaxSet.add(true);
+
     org.apache.carbondata.core.metadata.blocklet.index.BlockletMinMaxIndex
         blockletMinMaxIndex =
         new org.apache.carbondata.core.metadata.blocklet.index.BlockletMinMaxIndex(minList,
-            maxList);
+            maxList, isMinMaxSet);
     org.apache.carbondata.core.metadata.blocklet.index.BlockletBTreeIndex
         blockletBTreeIndex =
         new org.apache.carbondata.core.metadata.blocklet.index.BlockletBTreeIndex(startKey,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/test/java/org/apache/carbondata/core/util/RangeFilterProcessorTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/util/RangeFilterProcessorTest.java b/core/src/test/java/org/apache/carbondata/core/util/RangeFilterProcessorTest.java
index 9b8be79..3fdce4e 100644
--- a/core/src/test/java/org/apache/carbondata/core/util/RangeFilterProcessorTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/util/RangeFilterProcessorTest.java
@@ -320,7 +320,7 @@ public class RangeFilterProcessorTest {
     Deencapsulation.setField(range, "isDimensionPresentInCurrentBlock", true);
     Deencapsulation.setField(range, "lessThanExp", true);
     Deencapsulation.setField(range, "greaterThanExp", true);
-    result = range.isScanRequired(BlockMin, BlockMax, filterMinMax);
+    result = range.isScanRequired(BlockMin, BlockMax, filterMinMax, true);
     Assert.assertFalse(result);
   }
 
@@ -336,7 +336,7 @@ public class RangeFilterProcessorTest {
     Deencapsulation.setField(range, "isDimensionPresentInCurrentBlock", true);
     Deencapsulation.setField(range, "lessThanExp", true);
     Deencapsulation.setField(range, "greaterThanExp", true);
-    result = range.isScanRequired(BlockMin, BlockMax, filterMinMax);
+    result = range.isScanRequired(BlockMin, BlockMax, filterMinMax, true);
     Assert.assertFalse(result);
   }
 
@@ -352,7 +352,7 @@ public class RangeFilterProcessorTest {
     Deencapsulation.setField(range, "isDimensionPresentInCurrentBlock", true);
     Deencapsulation.setField(range, "lessThanExp", true);
     Deencapsulation.setField(range, "greaterThanExp", true);
-    result = range.isScanRequired(BlockMin, BlockMax, filterMinMax);
+    result = range.isScanRequired(BlockMin, BlockMax, filterMinMax, true);
     Assert.assertTrue(result);
   }
 
@@ -370,7 +370,7 @@ public class RangeFilterProcessorTest {
     Deencapsulation.setField(range, "lessThanExp", true);
     Deencapsulation.setField(range, "greaterThanExp", true);
 
-    result = range.isScanRequired(BlockMin, BlockMax, filterMinMax);
+    result = range.isScanRequired(BlockMin, BlockMax, filterMinMax, true);
     rangeCovered = Deencapsulation.getField(range, "isRangeFullyCoverBlock");
     Assert.assertTrue(result);
     Assert.assertTrue(rangeCovered);
@@ -390,7 +390,7 @@ public class RangeFilterProcessorTest {
     Deencapsulation.setField(range, "lessThanExp", true);
     Deencapsulation.setField(range, "greaterThanExp", true);
 
-    result = range.isScanRequired(BlockMin, BlockMax, filterMinMax);
+    result = range.isScanRequired(BlockMin, BlockMax, filterMinMax, true);
     startBlockMinIsDefaultStart = Deencapsulation.getField(range, "startBlockMinIsDefaultStart");
     Assert.assertTrue(result);
     Assert.assertTrue(startBlockMinIsDefaultStart);
@@ -410,7 +410,7 @@ public class RangeFilterProcessorTest {
     Deencapsulation.setField(range, "lessThanExp", true);
     Deencapsulation.setField(range, "greaterThanExp", true);
 
-    result = range.isScanRequired(BlockMin, BlockMax, filterMinMax);
+    result = range.isScanRequired(BlockMin, BlockMax, filterMinMax, true);
     endBlockMaxisDefaultEnd = Deencapsulation.getField(range, "endBlockMaxisDefaultEnd");
     Assert.assertTrue(result);
     Assert.assertTrue(endBlockMaxisDefaultEnd);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
index 5460247..40dc975 100644
--- a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
+++ b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
@@ -140,7 +140,7 @@ public class MinMaxIndexDataMap extends CoarseGrainDataMap {
 
           BitSet bitSet = filterExecuter.isScanRequired(
               readMinMaxDataMap[blkIdx][blkltIdx].getMaxValues(),
-              readMinMaxDataMap[blkIdx][blkltIdx].getMinValues());
+              readMinMaxDataMap[blkIdx][blkltIdx].getMinValues(), null);
           if (!bitSet.isEmpty()) {
             String blockFileName = indexFilePath[blkIdx].substring(
                 indexFilePath[blkIdx].lastIndexOf(File.separatorChar) + 1,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/format/src/main/thrift/carbondata.thrift
----------------------------------------------------------------------
diff --git a/format/src/main/thrift/carbondata.thrift b/format/src/main/thrift/carbondata.thrift
index 2423ffa..7130066 100644
--- a/format/src/main/thrift/carbondata.thrift
+++ b/format/src/main/thrift/carbondata.thrift
@@ -45,6 +45,7 @@ struct BlockletBTreeIndex{
 struct BlockletMinMaxIndex{
     1: required list<binary> min_values; //Min value of all columns of one blocklet Bit-Packed
     2: required list<binary> max_values; //Max value of all columns of one blocklet Bit-Packed
+    3: optional list<bool> min_max_presence; // flag to specify whether min max is written for a column or not
 }
 
 /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestCreateTableUsingSparkCarbonFileFormat.scala
----------------------------------------------------------------------
diff --git a/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestCreateTableUsingSparkCarbonFileFormat.scala b/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestCreateTableUsingSparkCarbonFileFormat.scala
index 6a803fc..e6d4d48 100644
--- a/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestCreateTableUsingSparkCarbonFileFormat.scala
+++ b/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestCreateTableUsingSparkCarbonFileFormat.scala
@@ -19,24 +19,32 @@ package org.apache.spark.sql.carbondata.datasource
 
 import java.io.File
 import java.text.SimpleDateFormat
+import java.util
 import java.util.{Date, Random}
 
+import scala.collection.JavaConverters._
+
 import org.apache.commons.io.FileUtils
 import org.apache.commons.lang.RandomStringUtils
 import org.scalatest.{BeforeAndAfterAll, FunSuite}
 import org.apache.spark.util.SparkUtil
 import org.apache.spark.sql.carbondata.datasource.TestUtil.{spark, _}
+
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.constants.{CarbonCommonConstants, CarbonV3DataFormatConstants}
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.metadata.datatype.DataTypes
-import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
+import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil, DataFileFooterConverter}
 import org.apache.carbondata.sdk.file.{CarbonWriter, Field, Schema}
 import org.apache.hadoop.conf.Configuration
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.carbondata.execution.datasources.CarbonFileIndexReplaceRule
 
+import org.apache.carbondata.core.datamap.DataMapStoreManager
+import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
+import org.apache.carbondata.core.metadata.blocklet.DataFileFooter
+
 class TestCreateTableUsingSparkCarbonFileFormat extends FunSuite with BeforeAndAfterAll {
 
 
@@ -46,6 +54,9 @@ class TestCreateTableUsingSparkCarbonFileFormat extends FunSuite with BeforeAndA
   }
 
   override def afterAll(): Unit = {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT,
+        CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT_DEFAULT)
     spark.sql("DROP TABLE IF EXISTS sdkOutputTable")
   }
 
@@ -328,8 +339,8 @@ class TestCreateTableUsingSparkCarbonFileFormat extends FunSuite with BeforeAndA
     assert(new File(filePath).exists())
     cleanTestData()
   }
-  test("Read data having multi blocklet ") {
-    buildTestDataMuliBlockLet(700000)
+  test("Read data having multi blocklet and validate min max flag") {
+    buildTestDataMuliBlockLet(750000, 50000)
     assert(new File(writerPath).exists())
     spark.sql("DROP TABLE IF EXISTS sdkOutputTable")
 
@@ -342,41 +353,90 @@ class TestCreateTableUsingSparkCarbonFileFormat extends FunSuite with BeforeAndA
         s"""CREATE TABLE sdkOutputTable USING carbon LOCATION
            |'$writerPath' """.stripMargin)
     }
-    spark.sql("select count(*) from sdkOutputTable").show(false)
-    val result=checkAnswer(spark.sql("select count(*) from sdkOutputTable"),Seq(Row(700000)))
+    val result=checkAnswer(spark.sql("select count(*) from sdkOutputTable"),Seq(Row(800000)))
     if(result.isDefined){
       assert(false,result.get)
     }
+    checkAnswer(spark
+      .sql(
+        "select count(*) from sdkOutputTable where from_email='Email for testing min max for " +
+        "allowed chars'"),
+      Seq(Row(50000)))
+    // expected answer for min max flag. Finally there should be 2 blocklets, one blocklet
+    // having the min max flag as false for the email column and the other as true
+    val blocklet1MinMaxFlag = Array(true, true, true, true, true, false, true, true, true)
+    val blocklet2MinMaxFlag = Array(true, true, true, true, true, true, true, true, true)
+    val expectedMinMaxFlag = Array(blocklet1MinMaxFlag, blocklet2MinMaxFlag)
+    validateMinMaxFlag(expectedMinMaxFlag, 2)
+
     spark.sql("DROP TABLE sdkOutputTable")
     // drop table should not delete the files
     assert(new File(writerPath).exists())
+    clearDataMapCache
     cleanTestData()
   }
-  def buildTestDataMuliBlockLet(records :Int): Unit ={
+  def buildTestDataMuliBlockLet(recordsInBlocklet1 :Int, recordsInBlocklet2 :Int): Unit ={
     FileUtils.deleteDirectory(new File(writerPath))
     val fields=new Array[Field](8)
-    fields(0)=new Field("myid",DataTypes.INT);
-    fields(1)=new Field("event_id",DataTypes.STRING);
-    fields(2)=new Field("eve_time",DataTypes.DATE);
-    fields(3)=new Field("ingestion_time",DataTypes.TIMESTAMP);
-    fields(4)=new Field("alldate",DataTypes.createArrayType(DataTypes.DATE));
-    fields(5)=new Field("subject",DataTypes.STRING);
-    fields(6)=new Field("from_email",DataTypes.STRING);
-    fields(7)=new Field("sal",DataTypes.DOUBLE);
+    fields(0)=new Field("myid",DataTypes.INT)
+    fields(1)=new Field("event_id",DataTypes.STRING)
+    fields(2)=new Field("eve_time",DataTypes.DATE)
+    fields(3)=new Field("ingestion_time",DataTypes.TIMESTAMP)
+    fields(4)=new Field("alldate",DataTypes.createArrayType(DataTypes.DATE))
+    fields(5)=new Field("subject",DataTypes.STRING)
+    fields(6)=new Field("from_email",DataTypes.STRING)
+    fields(7)=new Field("sal",DataTypes.DOUBLE)
     import scala.collection.JavaConverters._
+    val emailDataBlocklet1 = "FromEmail"
+    val emailDataBlocklet2 = "Email for testing min max for allowed chars"
     try{
       val options=Map("bad_records_action"->"FORCE","complex_delimiter_level_1"->"$").asJava
       val writer=CarbonWriter.builder().outputPath(writerPath).withBlockletSize(16).sortBy(Array("myid","ingestion_time","event_id")).withLoadOptions(options).buildWriterForCSVInput(new Schema(fields),spark.sessionState.newHadoopConf())
       val timeF=new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
       val date_F=new SimpleDateFormat("yyyy-MM-dd")
-      for(i<-1 to records){
+      for(i<- 1 to recordsInBlocklet1){
+        val time=new Date(System.currentTimeMillis())
+        writer.write(Array(""+i,"event_"+i,""+date_F.format(time),""+timeF.format(time),""+date_F.format(time)+"$"+date_F.format(time),"Subject_0",emailDataBlocklet1,""+new Random().nextDouble()))
+      }
+      for(i<- 1 to recordsInBlocklet2){
         val time=new Date(System.currentTimeMillis())
-        writer.write(Array(""+i,"event_"+i,""+date_F.format(time),""+timeF.format(time),""+date_F.format(time)+"$"+date_F.format(time),"Subject_0","FromEmail",""+new Random().nextDouble()))
+        writer.write(Array(""+i,"event_"+i,""+date_F.format(time),""+timeF.format(time),""+date_F.format(time)+"$"+date_F.format(time),"Subject_0",emailDataBlocklet2,""+new Random().nextDouble()))
       }
       writer.close()
     }
   }
 
+  /**
+   * read the carbon index file and validate the min max flag written in each blocklet
+   *
+   * @param expectedMinMaxFlag
+   * @param numBlocklets
+   */
+  private def validateMinMaxFlag(expectedMinMaxFlag: Array[Array[Boolean]],
+      numBlocklets: Int): Unit = {
+    val carbonFiles: Array[File] = new File(writerPath).listFiles()
+    val carbonIndexFile = carbonFiles.filter(file => file.getName.endsWith(".carbonindex"))(0)
+    val converter: DataFileFooterConverter = new DataFileFooterConverter(spark.sessionState
+      .newHadoopConf())
+    val carbonIndexFilePath = FileFactory.getUpdatedFilePath(carbonIndexFile.getCanonicalPath)
+    val indexMetadata: List[DataFileFooter] = converter
+      .getIndexInfo(carbonIndexFilePath, null, false).asScala.toList
+    assert(indexMetadata.size == numBlocklets)
+    indexMetadata.zipWithIndex.foreach { filefooter =>
+      val isMinMaxSet: Array[Boolean] = filefooter._1.getBlockletIndex.getMinMaxIndex.getIsMinMaxSet
+      assert(isMinMaxSet.sameElements(expectedMinMaxFlag(filefooter._2)))
+    }
+  }
+
+  private def clearDataMapCache(): Unit = {
+    if (!spark.sparkContext.version.startsWith("2.1")) {
+      val mapSize = DataMapStoreManager.getInstance().getAllDataMaps.size()
+      DataMapStoreManager.getInstance()
+        .clearDataMaps(AbsoluteTableIdentifier.from(writerPath))
+      assert(mapSize > DataMapStoreManager.getInstance().getAllDataMaps.size())
+    }
+  }
+
   test("Test with long string columns") {
     FileUtils.deleteDirectory(new File(writerPath))
     // here we specify the long string column as varchar
@@ -434,6 +494,7 @@ class TestCreateTableUsingSparkCarbonFileFormat extends FunSuite with BeforeAndA
     val op1 = spark.sql("select address from sdkOutputTableWithoutSchema limit 1").collectAsList()
     assert(op1.get(0).getString(0).length == 75000)
     spark.sql("DROP TABLE sdkOutputTableWithoutSchema")
+    clearDataMapCache
     cleanTestData()
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala b/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala
index 6727ca7..8b0eca8 100644
--- a/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala
+++ b/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala
@@ -25,6 +25,9 @@ import org.apache.spark.sql.{DataFrame, Row, SparkSession}
 import org.apache.spark.sql.catalyst.plans.logical
 import org.apache.spark.sql.catalyst.util.sideBySide
 
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
 object TestUtil {
 
   val rootPath = new File(this.getClass.getResource("/").getPath
@@ -44,6 +47,8 @@ object TestUtil {
   if (!spark.sparkContext.version.startsWith("2.1")) {
     spark.experimental.extraOptimizations = Seq(new CarbonFileIndexReplaceRule)
   }
+  CarbonProperties.getInstance()
+    .addProperty(CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT, "40")
 
   def checkAnswer(df: DataFrame, expectedAnswer: java.util.List[Row]):Unit = {
     checkAnswer(df, expectedAnswer.asScala) match {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/integration/spark2/src/main/scala/org/apache/carbondata/stream/CarbonStreamRecordReader.java
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/stream/CarbonStreamRecordReader.java b/integration/spark2/src/main/scala/org/apache/carbondata/stream/CarbonStreamRecordReader.java
index 6d69eb5..6c65285 100644
--- a/integration/spark2/src/main/scala/org/apache/carbondata/stream/CarbonStreamRecordReader.java
+++ b/integration/spark2/src/main/scala/org/apache/carbondata/stream/CarbonStreamRecordReader.java
@@ -421,8 +421,9 @@ public class CarbonStreamRecordReader extends RecordReader<Void, Object> {
       BlockletMinMaxIndex minMaxIndex = CarbonMetadataUtil.convertExternalMinMaxIndex(
           header.getBlocklet_index().getMin_max_index());
       if (minMaxIndex != null) {
-        BitSet bitSet =
-            filter.isScanRequired(minMaxIndex.getMaxValues(), minMaxIndex.getMinValues());
+        BitSet bitSet = filter
+            .isScanRequired(minMaxIndex.getMaxValues(), minMaxIndex.getMinValues(),
+                minMaxIndex.getIsMinMaxSet());
         if (bitSet.isEmpty()) {
           return false;
         } else {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/integration/spark2/src/test/scala/org/apache/spark/sql/CarbonGetTableDetailComandTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/sql/CarbonGetTableDetailComandTestCase.scala b/integration/spark2/src/test/scala/org/apache/spark/sql/CarbonGetTableDetailComandTestCase.scala
index a49d5bb..ad6823d 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/sql/CarbonGetTableDetailComandTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/sql/CarbonGetTableDetailComandTestCase.scala
@@ -43,9 +43,9 @@ class CarbonGetTableDetailCommandTestCase extends QueryTest with BeforeAndAfterA
     assertResult(2)(result.length)
     assertResult("table_info1")(result(0).getString(0))
     // 2087 is the size of carbon table. Note that since 1.5.0, we add additional compressor name in metadata
-    assertResult(2187)(result(0).getLong(1))
+    assertResult(2216)(result(0).getLong(1))
     assertResult("table_info2")(result(1).getString(0))
-    assertResult(2187)(result(1).getLong(1))
+    assertResult(2216)(result(1).getLong(1))
   }
 
   override def afterAll: Unit = {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
index 8622fcd..25204cb 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
@@ -327,9 +327,10 @@ public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter {
           model.getSegmentProperties().getDimensions().size());
       BlockletBTreeIndex bTreeIndex = new BlockletBTreeIndex(index.b_tree_index.getStart_key(),
           index.b_tree_index.getEnd_key());
-      BlockletMinMaxIndex minMaxIndex = new BlockletMinMaxIndex();
-      minMaxIndex.setMinValues(toByteArray(index.getMin_max_index().getMin_values()));
-      minMaxIndex.setMaxValues(toByteArray(index.getMin_max_index().getMax_values()));
+      BlockletMinMaxIndex minMaxIndex =
+          new BlockletMinMaxIndex(index.getMin_max_index().getMin_values(),
+              index.getMin_max_index().getMax_values(),
+              index.getMin_max_index().getMin_max_presence());
       org.apache.carbondata.core.metadata.blocklet.index.BlockletIndex bIndex =
           new org.apache.carbondata.core.metadata.blocklet.index.BlockletIndex(bTreeIndex,
               minMaxIndex);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/streaming/src/main/java/org/apache/carbondata/streaming/segment/StreamSegment.java
----------------------------------------------------------------------
diff --git a/streaming/src/main/java/org/apache/carbondata/streaming/segment/StreamSegment.java b/streaming/src/main/java/org/apache/carbondata/streaming/segment/StreamSegment.java
index 744915d..51417c4 100644
--- a/streaming/src/main/java/org/apache/carbondata/streaming/segment/StreamSegment.java
+++ b/streaming/src/main/java/org/apache/carbondata/streaming/segment/StreamSegment.java
@@ -20,6 +20,7 @@ package org.apache.carbondata.streaming.segment;
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -257,6 +258,10 @@ public class StreamSegment {
           CarbonUtil.getValueAsBytes(mrsStats[index].getDataType(), mrsStats[index].getMin());
     }
     minMaxIndex.setMinValues(minIndexes);
+    // TODO: handle the min max writing for string type based on character limit for streaming
+    boolean[] isMinMaxSet = new boolean[dimStats.length + mrsStats.length];
+    Arrays.fill(isMinMaxSet, true);
+    minMaxIndex.setIsMinMaxSet(isMinMaxSet);
     return minMaxIndex;
   }
 


[2/2] carbondata git commit: [CARBONDATA-2942] Add read and write support for writing min max based on configurable bytes count

Posted by ra...@apache.org.
[CARBONDATA-2942] Add read and write support for writing min max based on configurable bytes count

This PR adds support for writing min/max based on a configurable byte count for transactional and non-transactional tables, covering the standard carbon table, the file format and the SDK.
A new system-level configurable property, carbon.minmax.allowed.byte.count, is introduced (default 200 bytes, allowed range 10 to 1000). Please find the details below.

This closes #2725
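
As a rough illustration (not part of this commit), the new property can be set programmatically before writing data. The snippet below assumes the CarbonProperties and CarbonCommonConstants APIs shown in this diff; the class name and the limit of 40 bytes are made up for the example.

  import org.apache.carbondata.core.constants.CarbonCommonConstants;
  import org.apache.carbondata.core.util.CarbonProperties;

  public class MinMaxLimitConfigExample {
    public static void main(String[] args) {
      // values outside the allowed range [10, 1000] fall back to the default of 200 bytes
      CarbonProperties.getInstance()
          .addProperty(CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT, "40");
    }
  }

String/varchar values longer than the configured limit are then excluded from min/max collection, and the new min_max_presence flag records for readers which columns actually have min/max written.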


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/04084c73
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/04084c73
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/04084c73

Branch: refs/heads/master
Commit: 04084c73f0390908d7e19a7e8483114dda779229
Parents: 6e9ba6d
Author: manishgupta88 <to...@gmail.com>
Authored: Fri Sep 14 10:43:20 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Sep 18 17:01:21 2018 +0530

----------------------------------------------------------------------
 .../core/constants/CarbonCommonConstants.java   | 10 +++
 .../carbondata/core/datastore/DataRefNode.java  |  8 ++
 .../datastore/chunk/AbstractRawColumnChunk.java |  9 ++
 ...mpressedDimensionChunkFileBasedReaderV3.java | 10 +++
 .../page/encoding/ColumnPageEncoder.java        |  1 +
 .../page/statistics/DummyStatsCollector.java    |  4 +
 .../page/statistics/KeyPageStatsCollector.java  |  4 +
 .../page/statistics/LVStringStatsCollector.java | 43 ++++++++-
 .../statistics/PrimitivePageStatsCollector.java |  4 +
 .../page/statistics/SimpleStatsResult.java      |  2 +
 .../page/statistics/TablePageStatistics.java    | 13 +++
 .../core/indexstore/UnsafeMemoryDMStore.java    |  5 ++
 .../indexstore/blockletindex/BlockDataMap.java  | 89 +++++++++++++++----
 .../blockletindex/BlockletDataMap.java          | 21 +++++
 .../BlockletDataMapRowIndexes.java              | 12 ++-
 .../blockletindex/BlockletDataRefNode.java      | 11 +++
 .../core/indexstore/row/DataMapRow.java         |  4 +
 .../core/indexstore/row/DataMapRowImpl.java     |  9 ++
 .../core/indexstore/row/UnsafeDataMapRow.java   | 16 ++++
 .../core/indexstore/schema/SchemaGenerator.java | 28 ++++++
 .../blocklet/index/BlockletMinMaxIndex.java     | 19 +++-
 .../scan/filter/FilterExpressionProcessor.java  | 11 +--
 .../filter/executer/AndFilterExecuterImpl.java  | 28 +++---
 .../executer/ExcludeFilterExecuterImpl.java     |  3 +-
 .../filter/executer/FalseFilterExecutor.java    |  3 +-
 .../scan/filter/executer/FilterExecuter.java    |  6 +-
 .../executer/ImplicitColumnFilterExecutor.java  |  5 +-
 .../ImplicitIncludeFilterExecutorImpl.java      |  9 +-
 .../executer/IncludeFilterExecuterImpl.java     | 15 +++-
 .../filter/executer/OrFilterExecuterImpl.java   |  7 +-
 .../executer/RangeValueFilterExecuterImpl.java  | 17 ++--
 .../RestructureExcludeFilterExecutorImpl.java   |  3 +-
 .../RestructureIncludeFilterExecutorImpl.java   |  2 +-
 .../executer/RowLevelFilterExecuterImpl.java    |  3 +-
 .../RowLevelRangeGrtThanFiterExecuterImpl.java  | 16 ++--
 ...elRangeGrtrThanEquaToFilterExecuterImpl.java | 15 +++-
 ...velRangeLessThanEqualFilterExecuterImpl.java | 15 +++-
 ...RowLevelRangeLessThanFilterExecuterImpl.java | 15 +++-
 .../filter/executer/TrueFilterExecutor.java     |  3 +-
 .../scanner/impl/BlockletFilterScanner.java     |  4 +-
 .../carbondata/core/stream/StreamPruner.java    |  3 +-
 .../util/AbstractDataFileFooterConverter.java   | 30 ++++++-
 .../core/util/BlockletDataMapUtil.java          | 20 +++++
 .../core/util/CarbonMetadataUtil.java           | 66 +++++++++++++-
 .../carbondata/core/util/CarbonProperties.java  | 37 ++++++++
 .../blockletindex/TestBlockletDataMap.java      |  7 +-
 .../core/util/CarbonMetadataUtilTest.java       |  5 +-
 .../core/util/RangeFilterProcessorTest.java     | 12 +--
 .../datamap/examples/MinMaxIndexDataMap.java    |  2 +-
 format/src/main/thrift/carbondata.thrift        |  1 +
 ...tCreateTableUsingSparkCarbonFileFormat.scala | 93 ++++++++++++++++----
 .../sql/carbondata/datasource/TestUtil.scala    |  5 ++
 .../stream/CarbonStreamRecordReader.java        |  5 +-
 .../CarbonGetTableDetailComandTestCase.scala    |  4 +-
 .../writer/v3/CarbonFactDataWriterImplV3.java   |  7 +-
 .../streaming/segment/StreamSegment.java        |  5 ++
 56 files changed, 675 insertions(+), 129 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index 7a34c98..21f1f34 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -1831,6 +1831,16 @@ public final class CarbonCommonConstants {
 
   public static final short LOCAL_DICT_ENCODED_BYTEARRAY_SIZE = 3;
 
+  /**
+   * property to specify the maximum byte limit for string/varchar data types up to which
+   * storing min/max in the data file will be considered
+   */
+  public static final String CARBON_MINMAX_ALLOWED_BYTE_COUNT =
+      "carbon.minmax.allowed.byte.count";
+  public static final String CARBON_MINMAX_ALLOWED_BYTE_COUNT_DEFAULT = "200";
+  public static final int CARBON_MINMAX_ALLOWED_BYTE_COUNT_MIN = 10;
+  public static final int CARBON_MINMAX_ALLOWED_BYTE_COUNT_MAX = 1000;
+
   private CarbonCommonConstants() {
   }
 }
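
A minimal standalone sketch (my own illustration, not code from this commit) of how the range check on this property behaves, based on the default/min/max constants introduced above; CarbonProperties applies the same fallback to the default value during validation.

  public class MinMaxByteCountCheck {
    public static void main(String[] args) {
      System.out.println(normalize("40"));    // 40  -> accepted
      System.out.println(normalize("5"));     // 200 -> below the minimum of 10, falls back to the default
      System.out.println(normalize("5000"));  // 200 -> above the maximum of 1000, falls back to the default
      System.out.println(normalize("abc"));   // 200 -> not a number, falls back to the default
    }

    // mirrors the validation done for carbon.minmax.allowed.byte.count
    static int normalize(String configured) {
      try {
        int value = Integer.parseInt(configured);
        return (value < 10 || value > 1000) ? 200 : value;
      } catch (NumberFormatException e) {
        return 200;
      }
    }
  }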

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/datastore/DataRefNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/DataRefNode.java b/core/src/main/java/org/apache/carbondata/core/datastore/DataRefNode.java
index df0896a..3869333 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/DataRefNode.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/DataRefNode.java
@@ -125,4 +125,12 @@ public interface DataRefNode {
    * @return
    */
   BitSetGroup getIndexedData();
+
+  /**
+   * Return the array which contains the flag for each column whether the min max for that column
+   * is written in metadata or not
+   *
+   * @return min max flag for each column
+   */
+  boolean[] minMaxFlagArray();
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/datastore/chunk/AbstractRawColumnChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/AbstractRawColumnChunk.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/AbstractRawColumnChunk.java
index af1c811..e140823 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/AbstractRawColumnChunk.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/AbstractRawColumnChunk.java
@@ -29,6 +29,8 @@ public abstract class AbstractRawColumnChunk {
 
   private byte[][] maxValues;
 
+  private boolean[] minMaxFlagArray;
+
   protected ByteBuffer rawData;
 
   private int[] offsets;
@@ -120,4 +122,11 @@ public abstract class AbstractRawColumnChunk {
     this.dataChunkV3 = dataChunkV3;
   }
 
+  public boolean[] getMinMaxFlagArray() {
+    return minMaxFlagArray;
+  }
+
+  public void setMinMaxFlagArray(boolean[] minMaxFlagArray) {
+    this.minMaxFlagArray = minMaxFlagArray;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
index dc0f171..486cc2d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
@@ -18,6 +18,7 @@ package org.apache.carbondata.core.datastore.chunk.reader.dimension.v3;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -120,6 +121,8 @@ public class CompressedDimensionChunkFileBasedReaderV3 extends AbstractChunkRead
     int numberOfPages = dataChunk.getPage_length().size();
     byte[][] maxValueOfEachPage = new byte[numberOfPages][];
     byte[][] minValueOfEachPage = new byte[numberOfPages][];
+    boolean[] minMaxFlag = new boolean[minValueOfEachPage.length];
+    Arrays.fill(minMaxFlag, true);
     int[] eachPageLength = new int[numberOfPages];
     for (int i = 0; i < minValueOfEachPage.length; i++) {
       maxValueOfEachPage[i] =
@@ -127,12 +130,19 @@ public class CompressedDimensionChunkFileBasedReaderV3 extends AbstractChunkRead
       minValueOfEachPage[i] =
           dataChunk.getData_chunk_list().get(i).getMin_max().getMin_values().get(0).array();
       eachPageLength[i] = dataChunk.getData_chunk_list().get(i).getNumberOfRowsInpage();
+      boolean isMinMaxFlagSet =
+          dataChunk.getData_chunk_list().get(i).getMin_max().isSetMin_max_presence();
+      if (isMinMaxFlagSet) {
+        minMaxFlag[i] =
+            dataChunk.getData_chunk_list().get(i).getMin_max().getMin_max_presence().get(0);
+      }
     }
     rawColumnChunk.setDataChunkV3(dataChunk);
     rawColumnChunk.setFileReader(fileReader);
     rawColumnChunk.setPagesCount(dataChunk.getPage_length().size());
     rawColumnChunk.setMaxValues(maxValueOfEachPage);
     rawColumnChunk.setMinValues(minValueOfEachPage);
+    rawColumnChunk.setMinMaxFlagArray(minMaxFlag);
     rawColumnChunk.setRowCount(eachPageLength);
     rawColumnChunk.setOffsets(ArrayUtils
         .toPrimitive(dataChunk.page_offset.toArray(new Integer[dataChunk.page_offset.size()])));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java
index 2ed12a0..5b560ab 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java
@@ -131,6 +131,7 @@ public abstract class ColumnPageEncoder {
         CarbonUtil.getValueAsBytes(inputPage.getDataType(), inputPage.getStatistics().getMin()));
     index.addToMax_values(max);
     index.addToMin_values(min);
+    index.addToMin_max_presence(inputPage.getStatistics().writeMinMax());
     return index;
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/DummyStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/DummyStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/DummyStatsCollector.java
index a8bc5f1..fc91489 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/DummyStatsCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/DummyStatsCollector.java
@@ -48,6 +48,10 @@ public class DummyStatsCollector implements ColumnPageStatsCollector {
       return BYTE_ARRAY;
     }
 
+    @Override public boolean writeMinMax() {
+      return true;
+    }
+
   };
 
   @Override public void updateNull(int rowId) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
index 22537db..30edc53 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
@@ -107,6 +107,10 @@ public class KeyPageStatsCollector implements ColumnPageStatsCollector {
         return dataType;
       }
 
+      @Override public boolean writeMinMax() {
+        return true;
+      }
+
     };
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
index e1ac676..b04f2ee 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
@@ -19,13 +19,30 @@ package org.apache.carbondata.core.datastore.page.statistics;
 
 import java.math.BigDecimal;
 
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.util.ByteUtil;
+import org.apache.carbondata.core.util.CarbonProperties;
 
 public abstract class LVStringStatsCollector implements ColumnPageStatsCollector {
 
+  /**
+   * allowed byte count limit for a value to be considered while storing min/max
+   */
+  protected static final int allowedMinMaxByteLimit = Integer.parseInt(
+      CarbonProperties.getInstance()
+          .getProperty(CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT));
+  /**
+   * variables for storing min max value
+   */
   private byte[] min, max;
+  /**
+   * This flag decides whether min/max will be written in the metadata or not. Skipping min/max
+   * helps reduce the store size when string/varchar type columns contain very long values
+   */
+  private boolean ignoreWritingMinMax;
 
   @Override
   public void updateNull(int rowId) {
@@ -66,34 +83,50 @@ public abstract class LVStringStatsCollector implements ColumnPageStatsCollector
 
   @Override
   public void update(byte[] value) {
+    // return if min/max need not be written
+    if (isIgnoreMinMaxFlagSet(value)) {
+      return;
+    }
     // input value is LV encoded
     byte[] newValue = getActualValue(value);
     if (min == null) {
       min = newValue;
     }
-
     if (null == max) {
       max = newValue;
     }
-
     if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, newValue) > 0) {
       min = newValue;
     }
-
     if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, newValue) < 0) {
       max = newValue;
     }
   }
 
+  private boolean isIgnoreMinMaxFlagSet(byte[] value) {
+    if (!ignoreWritingMinMax) {
+      if (null != value && value.length > allowedMinMaxByteLimit) {
+        ignoreWritingMinMax = true;
+      }
+    }
+    return ignoreWritingMinMax;
+  }
+
   @Override
   public SimpleStatsResult getPageStats() {
     return new SimpleStatsResult() {
 
       @Override public Object getMin() {
+        if (null == min || ignoreWritingMinMax) {
+          min = new byte[0];
+        }
         return min;
       }
 
       @Override public Object getMax() {
+        if (null == max || ignoreWritingMinMax) {
+          max = new byte[0];
+        }
         return max;
       }
 
@@ -105,6 +138,10 @@ public abstract class LVStringStatsCollector implements ColumnPageStatsCollector
         return DataTypes.STRING;
       }
 
+      @Override public boolean writeMinMax() {
+        return !ignoreWritingMinMax;
+      }
+
     };
   }
 }

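A minimal standalone sketch of the collector behaviour introduced above: once any value in the page exceeds the configured byte limit, min/max collection is switched off for that page, empty arrays are reported, and writeMinMax() returns false. Class, method names and the hard-coded limit are illustrative only; in the commit the limit comes from CarbonProperties/CARBON_MINMAX_ALLOWED_BYTE_COUNT and comparisons go through ByteUtil.UnsafeComparer.

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class MinMaxLimitSketch {

      // assumed default limit; the commit reads this from CARBON_MINMAX_ALLOWED_BYTE_COUNT
      private static final int ALLOWED_MIN_MAX_BYTE_LIMIT = 200;

      private byte[] min;
      private byte[] max;
      private boolean ignoreWritingMinMax;

      public void update(byte[] value) {
        // once a single oversized value is seen, stop maintaining min/max for this page
        if (!ignoreWritingMinMax && value != null && value.length > ALLOWED_MIN_MAX_BYTE_LIMIT) {
          ignoreWritingMinMax = true;
        }
        if (ignoreWritingMinMax || value == null) {
          return;
        }
        if (min == null || compare(value, min) < 0) {
          min = value;
        }
        if (max == null || compare(value, max) > 0) {
          max = value;
        }
      }

      public boolean writeMinMax() {
        return !ignoreWritingMinMax;
      }

      public byte[] getMin() {
        // empty array is reported when min/max is ignored, mirroring getPageStats()
        return (min == null || ignoreWritingMinMax) ? new byte[0] : min;
      }

      public byte[] getMax() {
        return (max == null || ignoreWritingMinMax) ? new byte[0] : max;
      }

      private static int compare(byte[] a, byte[] b) {
        // unsigned lexicographic comparison of byte arrays
        int len = Math.min(a.length, b.length);
        for (int i = 0; i < len; i++) {
          int diff = (a[i] & 0xFF) - (b[i] & 0xFF);
          if (diff != 0) {
            return diff;
          }
        }
        return a.length - b.length;
      }

      public static void main(String[] args) {
        MinMaxLimitSketch collector = new MinMaxLimitSketch();
        collector.update("apple".getBytes(StandardCharsets.UTF_8));
        collector.update("zebra".getBytes(StandardCharsets.UTF_8));
        System.out.println(collector.writeMinMax());                 // true
        collector.update(new byte[ALLOWED_MIN_MAX_BYTE_LIMIT + 1]);  // oversized value
        System.out.println(collector.writeMinMax());                 // false
        System.out.println(Arrays.toString(collector.getMin()));     // []
      }
    }
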
http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java
index 3d70d45..4af5b14 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java
@@ -349,4 +349,8 @@ public class PrimitivePageStatsCollector implements ColumnPageStatsCollector, Si
     return dataType;
   }
 
+  @Override public boolean writeMinMax() {
+    return true;
+  }
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java
index 0e1f650..e7b4265 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java
@@ -29,4 +29,6 @@ public interface SimpleStatsResult {
 
   DataType getDataType();
 
+  boolean writeMinMax();
+
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/TablePageStatistics.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/TablePageStatistics.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/TablePageStatistics.java
index 46ad09c..665860b 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/TablePageStatistics.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/TablePageStatistics.java
@@ -35,6 +35,13 @@ public class TablePageStatistics {
   // max of each measure column
   private byte[][] measureMaxValue;
 
+  /**
+   * array for storing the flag which says whether min/max is stored for each dimension or not.
+   * Note: Currently this array is used only for dimensions. It can be extended further to store
+   * the flag for measures also
+   */
+  private boolean[] writeMinMaxForDimensions;
+
   public TablePageStatistics(EncodedColumnPage[] dimensions,
       EncodedColumnPage[] measures) {
     int numDimensionsExpanded = dimensions.length;
@@ -43,6 +50,7 @@ public class TablePageStatistics {
     this.dimensionMaxValue = new byte[numDimensionsExpanded][];
     this.measureMinValue = new byte[numMeasures][];
     this.measureMaxValue = new byte[numMeasures][];
+    this.writeMinMaxForDimensions = new boolean[numDimensionsExpanded];
     updateDimensionMinMax(dimensions);
     updateMeasureMinMax(measures);
   }
@@ -52,6 +60,7 @@ public class TablePageStatistics {
       SimpleStatsResult stats = dimensions[i].getStats();
       dimensionMaxValue[i] = CarbonUtil.getValueAsBytes(stats.getDataType(), stats.getMax());
       dimensionMinValue[i] = CarbonUtil.getValueAsBytes(stats.getDataType(), stats.getMin());
+      writeMinMaxForDimensions[i] = stats.writeMinMax();
     }
   }
 
@@ -79,4 +88,8 @@ public class TablePageStatistics {
     return measureMaxValue;
   }
 
+  public boolean[] getWriteMinMaxForDimensions() {
+    return writeMinMaxForDimensions;
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/indexstore/UnsafeMemoryDMStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/UnsafeMemoryDMStore.java b/core/src/main/java/org/apache/carbondata/core/indexstore/UnsafeMemoryDMStore.java
index 9296c99..cdf6c56 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/UnsafeMemoryDMStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/UnsafeMemoryDMStore.java
@@ -108,6 +108,11 @@ public class UnsafeMemoryDMStore extends AbstractMemoryDMStore {
               .putByte(memoryBlock.getBaseObject(), memoryBlock.getBaseOffset() + runningLength,
                   row.getByte(index));
           runningLength += row.getSizeInBytes(index);
+        } else if (dataType == DataTypes.BOOLEAN) {
+          getUnsafe()
+              .putBoolean(memoryBlock.getBaseObject(), memoryBlock.getBaseOffset() + runningLength,
+                  row.getBoolean(index));
+          runningLength += row.getSizeInBytes(index);
         } else if (dataType == DataTypes.SHORT) {
           getUnsafe()
               .putShort(memoryBlock.getBaseObject(), memoryBlock.getBaseOffset() + runningLength,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockDataMap.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockDataMap.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockDataMap.java
index d6e5784..8a1538e 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockDataMap.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockDataMap.java
@@ -210,6 +210,8 @@ public class BlockDataMap extends CoarseGrainDataMap
       List<DataFileFooter> indexInfo) throws IOException, MemoryException {
     DataMapRowImpl summaryRow = null;
     CarbonRowSchema[] schema = getFileFooterEntrySchema();
+    boolean[] minMaxFlag = new boolean[segmentProperties.getDimensions().size()];
+    Arrays.fill(minMaxFlag, true);
     for (DataFileFooter fileFooter : indexInfo) {
       TableBlockInfo blockInfo = fileFooter.getBlockInfo().getTableBlockInfo();
       BlockMetaInfo blockMetaInfo =
@@ -233,12 +235,21 @@ public class BlockDataMap extends CoarseGrainDataMap
             CarbonUtil.updateMinMaxValues(fileFooter, maxValues, minValues, false);
         summaryRow = loadToUnsafeBlock(schema, taskSummarySchema, fileFooter, segmentProperties,
             getMinMaxCacheColumns(), blockInfo.getFilePath(), summaryRow,
-            blockMetaInfo, updatedMinValues, updatedMaxValues);
+            blockMetaInfo, updatedMinValues, updatedMaxValues, minMaxFlag);
       }
     }
+    setMinMaxFlagForTaskSummary(summaryRow, taskSummarySchema, segmentProperties, minMaxFlag);
     return summaryRow;
   }
 
+  protected void setMinMaxFlagForTaskSummary(DataMapRow summaryRow,
+      CarbonRowSchema[] taskSummarySchema, SegmentProperties segmentProperties,
+      boolean[] minMaxFlag) {
+    // add min max flag for all the dimension columns
+    addMinMaxFlagValues(summaryRow, taskSummarySchema[TASK_MIN_MAX_FLAG], minMaxFlag,
+        TASK_MIN_MAX_FLAG, segmentProperties.getDimensions().size());
+  }
+
   /**
    * Method to load block metadata information
    *
@@ -260,6 +271,12 @@ public class BlockDataMap extends CoarseGrainDataMap
     short totalBlockletsInOneBlock = 0;
     boolean isLastFileFooterEntryNeedToBeAdded = false;
     CarbonRowSchema[] schema = getFileFooterEntrySchema();
+    // flag for each block entry
+    boolean[] minMaxFlag = new boolean[segmentProperties.getDimensions().size()];
+    Arrays.fill(minMaxFlag, true);
+    // min max flag for task summary
+    boolean[] taskSummaryMinMaxFlag = new boolean[segmentProperties.getDimensions().size()];
+    Arrays.fill(taskSummaryMinMaxFlag, true);
     for (DataFileFooter fileFooter : indexInfo) {
       TableBlockInfo blockInfo = fileFooter.getBlockInfo().getTableBlockInfo();
       BlockMetaInfo blockMetaInfo =
@@ -273,6 +290,8 @@ public class BlockDataMap extends CoarseGrainDataMap
           // 1st time assign the min and max values from the current file footer
           blockMinValues = fileFooter.getBlockletIndex().getMinMaxIndex().getMinValues();
           blockMaxValues = fileFooter.getBlockletIndex().getMinMaxIndex().getMaxValues();
+          updateMinMaxFlag(fileFooter, minMaxFlag);
+          updateMinMaxFlag(fileFooter, taskSummaryMinMaxFlag);
           previousDataFileFooter = fileFooter;
           totalBlockletsInOneBlock++;
         } else if (blockInfo.getFilePath().equals(tempFilePath)) {
@@ -285,6 +304,8 @@ public class BlockDataMap extends CoarseGrainDataMap
           blockMaxValues =
               compareAndUpdateMinMax(currentFooterMinMaxIndex.getMaxValues(), blockMaxValues,
                   false);
+          updateMinMaxFlag(fileFooter, minMaxFlag);
+          updateMinMaxFlag(fileFooter, taskSummaryMinMaxFlag);
           totalBlockletsInOneBlock++;
         }
         // as one task contains entries for all the blocklets we need iterate and load only the
@@ -297,7 +318,9 @@ public class BlockDataMap extends CoarseGrainDataMap
               segmentProperties, getMinMaxCacheColumns(), previousBlockInfo.getFilePath(),
               summaryRow,
               blockletDataMapInfo.getBlockMetaInfoMap().get(previousBlockInfo.getFilePath()),
-              blockMinValues, blockMaxValues);
+              blockMinValues, blockMaxValues, minMaxFlag);
+          minMaxFlag = new boolean[segmentProperties.getDimensions().size()];
+          Arrays.fill(minMaxFlag, true);
           // flag to check whether last file footer entry is different from previous entry.
           // If yes then it need to be added at last
           isLastFileFooterEntryNeedToBeAdded =
@@ -307,6 +330,8 @@ public class BlockDataMap extends CoarseGrainDataMap
           tempFilePath = blockInfo.getFilePath();
           blockMinValues = fileFooter.getBlockletIndex().getMinMaxIndex().getMinValues();
           blockMaxValues = fileFooter.getBlockletIndex().getMinMaxIndex().getMaxValues();
+          updateMinMaxFlag(fileFooter, minMaxFlag);
+          updateMinMaxFlag(fileFooter, taskSummaryMinMaxFlag);
           previousDataFileFooter = fileFooter;
           blockletCountInEachBlock.add(totalBlockletsInOneBlock);
           // for next block count will start from 1 because a row is created whenever a new file
@@ -323,15 +348,22 @@ public class BlockDataMap extends CoarseGrainDataMap
               previousDataFileFooter.getBlockInfo().getTableBlockInfo().getFilePath(), summaryRow,
               blockletDataMapInfo.getBlockMetaInfoMap()
                   .get(previousDataFileFooter.getBlockInfo().getTableBlockInfo().getFilePath()),
-              blockMinValues, blockMaxValues);
+              blockMinValues, blockMaxValues, minMaxFlag);
       blockletCountInEachBlock.add(totalBlockletsInOneBlock);
     }
     byte[] blockletCount = convertRowCountFromShortToByteArray(blockletCountInEachBlock);
     // blocklet count index is the last index
     summaryRow.setByteArray(blockletCount, taskSummarySchema.length - 1);
+    setMinMaxFlagForTaskSummary(summaryRow, taskSummarySchema, segmentProperties,
+        taskSummaryMinMaxFlag);
     return summaryRow;
   }
 
+  protected void updateMinMaxFlag(DataFileFooter fileFooter, boolean[] minMaxFlag) {
+    BlockletDataMapUtil
+        .updateMinMaxFlag(fileFooter.getBlockletIndex().getMinMaxIndex(), minMaxFlag);
+  }
+
   private byte[] convertRowCountFromShortToByteArray(List<Short> blockletCountInEachBlock) {
     int bufferSize = blockletCountInEachBlock.size() * 2;
     ByteBuffer byteBuffer = ByteBuffer.allocate(bufferSize);
@@ -358,7 +390,7 @@ public class BlockDataMap extends CoarseGrainDataMap
       CarbonRowSchema[] taskSummarySchema, DataFileFooter fileFooter,
       SegmentProperties segmentProperties, List<CarbonColumn> minMaxCacheColumns, String filePath,
       DataMapRowImpl summaryRow, BlockMetaInfo blockMetaInfo, byte[][] minValues,
-      byte[][] maxValues) {
+      byte[][] maxValues, boolean[] minMaxFlag) {
     // Add one row to maintain task level min max for segment pruning
     if (summaryRow == null) {
       summaryRow = new DataMapRowImpl(taskSummarySchema);
@@ -397,7 +429,10 @@ public class BlockDataMap extends CoarseGrainDataMap
     try {
       setLocations(blockMetaInfo.getLocationInfo(), row, ordinal++);
       // store block size
-      row.setLong(blockMetaInfo.getSize(), ordinal);
+      row.setLong(blockMetaInfo.getSize(), ordinal++);
+      // add min max flag for all the dimension columns
+      addMinMaxFlagValues(row, schema[ordinal], minMaxFlag, ordinal,
+          segmentProperties.getDimensions().size());
       memoryDMStore.addIndexRow(schema, row);
     } catch (Exception e) {
       throw new RuntimeException(e);
@@ -405,6 +440,19 @@ public class BlockDataMap extends CoarseGrainDataMap
     return summaryRow;
   }
 
+  protected void addMinMaxFlagValues(DataMapRow row, CarbonRowSchema carbonRowSchema,
+      boolean[] minMaxFlag, int ordinal, int dimensionCount) {
+    CarbonRowSchema[] minMaxFlagSchema =
+        ((CarbonRowSchema.StructCarbonRowSchema) carbonRowSchema).getChildSchemas();
+    DataMapRow minMaxFlagRow = new DataMapRowImpl(minMaxFlagSchema);
+    int flagOrdinal = 0;
+    // add min max flag for each dimension column
+    for (int i = 0; i < dimensionCount; i++) {
+      minMaxFlagRow.setBoolean(minMaxFlag[i], flagOrdinal++);
+    }
+    row.setRow(minMaxFlagRow, ordinal);
+  }
+
   protected String getFileNameFromPath(String filePath) {
     return CarbonTablePath.getCarbonDataFileName(filePath);
   }
@@ -540,7 +588,8 @@ public class BlockDataMap extends CoarseGrainDataMap
         .getDataMapRow(getTaskSummarySchema(), taskSummaryDMStore.getRowCount() - 1);
     boolean isScanRequired = FilterExpressionProcessor
         .isScanRequired(filterExecuter, getMinMaxValue(unsafeRow, TASK_MAX_VALUES_INDEX),
-            getMinMaxValue(unsafeRow, TASK_MIN_VALUES_INDEX));
+            getMinMaxValue(unsafeRow, TASK_MIN_VALUES_INDEX),
+            getMinMaxFlag(unsafeRow, TASK_MIN_MAX_FLAG));
     if (isScanRequired) {
       return true;
     }
@@ -619,11 +668,12 @@ public class BlockDataMap extends CoarseGrainDataMap
       // min and max for executor pruning
       while (entryIndex < numEntries) {
         DataMapRow safeRow = memoryDMStore.getDataMapRow(schema, entryIndex).convertToSafeRow();
+        boolean[] minMaxFlag = getMinMaxFlag(safeRow, BLOCK_MIN_MAX_FLAG);
         String fileName = getFileNameWithFilePath(safeRow, filePath);
         short blockletId = getBlockletId(safeRow);
         boolean isValid =
             addBlockBasedOnMinMaxValue(filterExecuter, getMinMaxValue(safeRow, MAX_VALUES_INDEX),
-                getMinMaxValue(safeRow, MIN_VALUES_INDEX), fileName, blockletId);
+                getMinMaxValue(safeRow, MIN_VALUES_INDEX), minMaxFlag, fileName, blockletId);
         if (isValid) {
           blocklets.add(createBlocklet(safeRow, fileName, blockletId, useMinMaxForPruning));
           hitBlocklets += getBlockletNumOfEntry(entryIndex);
@@ -643,13 +693,8 @@ public class BlockDataMap extends CoarseGrainDataMap
     return blocklets;
   }
 
-  private boolean useMinMaxForExecutorPruning(FilterResolverIntf filterResolverIntf) {
-    boolean useMinMaxForPruning = false;
-    if (!isLegacyStore && this instanceof BlockletDataMap) {
-      useMinMaxForPruning = BlockletDataMapUtil
-          .useMinMaxForBlockletPruning(filterResolverIntf, getMinMaxCacheColumns());
-    }
-    return useMinMaxForPruning;
+  protected boolean useMinMaxForExecutorPruning(FilterResolverIntf filterResolverIntf) {
+    return false;
   }
 
   @Override
@@ -729,12 +774,13 @@ public class BlockDataMap extends CoarseGrainDataMap
    * @param filterExecuter
    * @param maxValue
    * @param minValue
+   * @param minMaxFlag
    * @param filePath
    * @param blockletId
    * @return
    */
   private boolean addBlockBasedOnMinMaxValue(FilterExecuter filterExecuter, byte[][] maxValue,
-      byte[][] minValue, String filePath, int blockletId) {
+      byte[][] minValue, boolean[] minMaxFlag, String filePath, int blockletId) {
     BitSet bitSet = null;
     if (filterExecuter instanceof ImplicitColumnFilterExecutor) {
       String uniqueBlockPath = filePath.substring(filePath.lastIndexOf("/Part") + 1);
@@ -744,9 +790,9 @@ public class BlockDataMap extends CoarseGrainDataMap
         uniqueBlockPath = uniqueBlockPath + CarbonCommonConstants.FILE_SEPARATOR + blockletId;
       }
       bitSet = ((ImplicitColumnFilterExecutor) filterExecuter)
-          .isFilterValuesPresentInBlockOrBlocklet(maxValue, minValue, uniqueBlockPath);
+          .isFilterValuesPresentInBlockOrBlocklet(maxValue, minValue, uniqueBlockPath, minMaxFlag);
     } else {
-      bitSet = filterExecuter.isScanRequired(maxValue, minValue);
+      bitSet = filterExecuter.isScanRequired(maxValue, minValue, minMaxFlag);
     }
     if (!bitSet.isEmpty()) {
       return true;
@@ -836,6 +882,15 @@ public class BlockDataMap extends CoarseGrainDataMap
     return minMax;
   }
 
+  private boolean[] getMinMaxFlag(DataMapRow row, int index) {
+    DataMapRow minMaxFlagRow = row.getRow(index);
+    boolean[] minMaxFlag = new boolean[minMaxFlagRow.getColumnCount()];
+    for (int i = 0; i < minMaxFlag.length; i++) {
+      minMaxFlag[i] = minMaxFlagRow.getBoolean(i);
+    }
+    return minMaxFlag;
+  }
+
   protected short getBlockletId(DataMapRow dataMapRow) {
     return BLOCK_DEFAULT_BLOCKLET_ID;
   }

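A hedged sketch of the flag-merging rule the block and task level handling above relies on: the per-dimension flags start as true and are ANDed with each blocklet's flags, so a single blocklet without stored min/max disables min/max based pruning for that column at the coarser level, and a missing flag array (older stores) is treated as "min/max present". The helper name and exact semantics are an assumption for illustration; the commit delegates this to BlockletDataMapUtil.updateMinMaxFlag.

    import java.util.Arrays;

    public class MinMaxFlagMergeSketch {

      // merge one blocklet's per-dimension flags into the running block/task level flags:
      // a column keeps min/max based pruning only if every contributing blocklet wrote min/max
      static void mergeMinMaxFlag(boolean[] blockletFlag, boolean[] mergedFlag) {
        if (blockletFlag == null) {
          // stores written before this change carry no flags; treat min/max as present
          return;
        }
        for (int i = 0; i < mergedFlag.length; i++) {
          mergedFlag[i] = mergedFlag[i] && blockletFlag[i];
        }
      }

      public static void main(String[] args) {
        boolean[] taskLevelFlag = new boolean[3];
        Arrays.fill(taskLevelFlag, true);
        mergeMinMaxFlag(new boolean[] {true, true, true}, taskLevelFlag);
        mergeMinMaxFlag(new boolean[] {true, false, true}, taskLevelFlag);
        // second dimension skipped min/max in some blocklet, so pruning falls back to scanning
        System.out.println(Arrays.toString(taskLevelFlag)); // [true, false, true]
      }
    }
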
http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java
index ed6ecdc..a3ca1cd 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java
@@ -21,6 +21,7 @@ import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.Serializable;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -39,6 +40,7 @@ import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
 import org.apache.carbondata.core.metadata.blocklet.DataFileFooter;
 import org.apache.carbondata.core.metadata.blocklet.index.BlockletMinMaxIndex;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
+import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
 import org.apache.carbondata.core.util.BlockletDataMapUtil;
 
 /**
@@ -114,9 +116,13 @@ public class BlockletDataMap extends BlockDataMap implements Serializable {
     String tempFilePath = null;
     DataMapRowImpl summaryRow = null;
     CarbonRowSchema[] schema = getFileFooterEntrySchema();
+    boolean[] summaryRowMinMaxFlag = new boolean[segmentProperties.getDimensions().size()];
+    Arrays.fill(summaryRowMinMaxFlag, true);
     // Relative blocklet ID is the id assigned to a blocklet within a part file
     int relativeBlockletId = 0;
     for (DataFileFooter fileFooter : indexInfo) {
+      // update the min max flag for summary row
+      updateMinMaxFlag(fileFooter, summaryRowMinMaxFlag);
       TableBlockInfo blockInfo = fileFooter.getBlockInfo().getTableBlockInfo();
       BlockMetaInfo blockMetaInfo =
           blockletDataMapInfo.getBlockMetaInfoMap().get(blockInfo.getFilePath());
@@ -140,6 +146,8 @@ public class BlockletDataMap extends BlockDataMap implements Serializable {
         relativeBlockletId += fileFooter.getBlockletList().size();
       }
     }
+    setMinMaxFlagForTaskSummary(summaryRow, taskSummarySchema, segmentProperties,
+        summaryRowMinMaxFlag);
     return summaryRow;
   }
 
@@ -192,6 +200,11 @@ public class BlockletDataMap extends BlockDataMap implements Serializable {
         setLocations(blockMetaInfo.getLocationInfo(), row, ordinal++);
         // Store block size
         row.setLong(blockMetaInfo.getSize(), ordinal++);
+        // add min max flag for all the dimension columns
+        addMinMaxFlagValues(row, schema[ordinal],
+            fileFooter.getBlockletIndex().getMinMaxIndex().getIsMinMaxSet(), ordinal,
+            segmentProperties.getDimensions().size());
+        ordinal++;
         // add blocklet info
         ByteArrayOutputStream stream = new ByteArrayOutputStream();
         DataOutput dataOutput = new DataOutputStream(stream);
@@ -232,6 +245,14 @@ public class BlockletDataMap extends BlockDataMap implements Serializable {
     return dataMapRow.getShort(BLOCKLET_ID_INDEX);
   }
 
+  protected boolean useMinMaxForExecutorPruning(FilterResolverIntf filterResolverIntf) {
+    if (isLegacyStore) {
+      return super.useMinMaxForExecutorPruning(filterResolverIntf);
+    }
+    return BlockletDataMapUtil
+        .useMinMaxForBlockletPruning(filterResolverIntf, getMinMaxCacheColumns());
+  }
+
   @Override
   protected ExtendedBlocklet createBlocklet(DataMapRow row, String fileName, short blockletId,
       boolean useMinMaxForPruning) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapRowIndexes.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapRowIndexes.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapRowIndexes.java
index 7f61d77..085fb7d 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapRowIndexes.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapRowIndexes.java
@@ -40,12 +40,14 @@ public interface BlockletDataMapRowIndexes {
 
   int BLOCK_LENGTH = 8;
 
+  int BLOCK_MIN_MAX_FLAG = 9;
+
   // below variables are specific for blockletDataMap
-  int BLOCKLET_INFO_INDEX = 9;
+  int BLOCKLET_INFO_INDEX = 10;
 
-  int BLOCKLET_PAGE_COUNT_INDEX = 10;
+  int BLOCKLET_PAGE_COUNT_INDEX = 11;
 
-  int BLOCKLET_ID_INDEX = 11;
+  int BLOCKLET_ID_INDEX = 12;
 
   // Summary dataMap row indexes
   int TASK_MIN_VALUES_INDEX = 0;
@@ -56,5 +58,7 @@ public interface BlockletDataMapRowIndexes {
 
   int SUMMARY_SEGMENTID = 3;
 
-  int SUMMARY_INDEX_PATH = 4;
+  int TASK_MIN_MAX_FLAG = 4;
+
+  int SUMMARY_INDEX_PATH = 5;
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNode.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNode.java
index 5681528..9046ade 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNode.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNode.java
@@ -134,6 +134,17 @@ public class BlockletDataRefNode implements DataRefNode {
   }
 
   @Override
+  public boolean[] minMaxFlagArray() {
+    BlockletIndex blockletIndex =
+        blockInfos.get(index).getDetailInfo().getBlockletInfo().getBlockletIndex();
+    boolean[] isMinMaxSet = null;
+    if (null != blockletIndex) {
+      isMinMaxSet = blockletIndex.getMinMaxIndex().getIsMinMaxSet();
+    }
+    return isMinMaxSet;
+  }
+
+  @Override
   public DimensionRawColumnChunk[] readDimensionChunks(FileReader fileReader, int[][] blockIndexes)
       throws IOException {
     DimensionColumnChunkReader dimensionChunksReader = getDimensionColumnChunkReader(fileReader);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/indexstore/row/DataMapRow.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/row/DataMapRow.java b/core/src/main/java/org/apache/carbondata/core/indexstore/row/DataMapRow.java
index d9467c6..c0ea0a0 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/row/DataMapRow.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/row/DataMapRow.java
@@ -69,6 +69,10 @@ public abstract class DataMapRow implements Serializable {
 
   public abstract int getLengthInBytes(int ordinal);
 
+  public abstract void setBoolean(boolean value, int ordinal);
+
+  public abstract boolean getBoolean(int ordinal);
+
   public int getTotalSizeInBytes() {
     int len = 0;
     for (int i = 0; i < schemas.length; i++) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/indexstore/row/DataMapRowImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/row/DataMapRowImpl.java b/core/src/main/java/org/apache/carbondata/core/indexstore/row/DataMapRowImpl.java
index 39536f5..5068fa9 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/row/DataMapRowImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/row/DataMapRowImpl.java
@@ -41,6 +41,15 @@ public class DataMapRowImpl extends DataMapRow {
     return ((byte[]) data[ordinal]).length;
   }
 
+  @Override public void setBoolean(boolean value, int ordinal) {
+    assert (schemas[ordinal].getDataType() == DataTypes.BOOLEAN);
+    data[ordinal] = value;
+  }
+
+  @Override public boolean getBoolean(int ordinal) {
+    return (boolean) data[ordinal];
+  }
+
   @Override public DataMapRow getRow(int ordinal) {
     return (DataMapRow) data[ordinal];
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/indexstore/row/UnsafeDataMapRow.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/row/UnsafeDataMapRow.java b/core/src/main/java/org/apache/carbondata/core/indexstore/row/UnsafeDataMapRow.java
index a245bc0..70f0e0d 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/row/UnsafeDataMapRow.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/row/UnsafeDataMapRow.java
@@ -86,6 +86,15 @@ public class UnsafeDataMapRow extends DataMapRow {
     return length;
   }
 
+  @Override public void setBoolean(boolean value, int ordinal) {
+    throw new UnsupportedOperationException("Not supported to set on unsafe row");
+  }
+
+  @Override public boolean getBoolean(int ordinal) {
+    return getUnsafe()
+        .getBoolean(block.getBaseObject(), block.getBaseOffset() + pointer + getPosition(ordinal));
+  }
+
   private int getLengthInBytes(int ordinal, int position) {
     int length;
     switch (schemas[ordinal].getSchemaType()) {
@@ -191,6 +200,13 @@ public class UnsafeDataMapRow extends DataMapRow {
                     block.getBaseOffset() + pointer + runningLength),
                 i);
             runningLength += schema.getLength();
+          } else if (dataType == DataTypes.BOOLEAN) {
+            row.setBoolean(
+                getUnsafe().getBoolean(
+                    block.getBaseObject(),
+                    block.getBaseOffset() + pointer + runningLength),
+                i);
+            runningLength += schema.getLength();
           } else if (dataType == DataTypes.SHORT) {
             row.setShort(
                 getUnsafe().getShort(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/indexstore/schema/SchemaGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/schema/SchemaGenerator.java b/core/src/main/java/org/apache/carbondata/core/indexstore/schema/SchemaGenerator.java
index f5df4bb..f3016a4 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/schema/SchemaGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/schema/SchemaGenerator.java
@@ -56,6 +56,9 @@ public class SchemaGenerator {
     indexSchemas.add(new CarbonRowSchema.VariableCarbonRowSchema(DataTypes.BYTE_ARRAY));
     // for storing block length.
     indexSchemas.add(new CarbonRowSchema.FixedCarbonRowSchema(DataTypes.LONG));
+    // for storing min max flag for each column which reflects whether min max for a column is
+    // written in the metadata or not.
+    addMinMaxFlagSchema(indexSchemas, segmentProperties);
     CarbonRowSchema[] schema = indexSchemas.toArray(new CarbonRowSchema[indexSchemas.size()]);
     return schema;
   }
@@ -85,6 +88,9 @@ public class SchemaGenerator {
     indexSchemas.add(new CarbonRowSchema.VariableCarbonRowSchema(DataTypes.BYTE_ARRAY));
     // for storing block length.
     indexSchemas.add(new CarbonRowSchema.FixedCarbonRowSchema(DataTypes.LONG));
+    // for storing min max flag for each column which reflects whether min max for a column is
+    // written in the metadata or not.
+    addMinMaxFlagSchema(indexSchemas, segmentProperties);
     //for blocklet info
     indexSchemas.add(new CarbonRowSchema.VariableCarbonRowSchema(DataTypes.BYTE_ARRAY));
     // for number of pages.
@@ -115,6 +121,9 @@ public class SchemaGenerator {
     // for storing segmentid
     taskMinMaxSchemas
         .add(new CarbonRowSchema.VariableCarbonRowSchema(DataTypes.BYTE_ARRAY));
+    // for storing min max flag for each column which reflects whether min max for a column is
+    // written in the metadata or not.
+    addMinMaxFlagSchema(taskMinMaxSchemas, segmentProperties);
     // store path only in case of partition table or non transactional table
     if (filePathToBeStored) {
       // for storing file path
@@ -169,6 +178,25 @@ public class SchemaGenerator {
   }
 
   /**
+   * Method to add min max flag schema for all the dimensions
+   *
+   * @param indexSchemas
+   * @param segmentProperties
+   */
+  private static void addMinMaxFlagSchema(List<CarbonRowSchema> indexSchemas,
+      SegmentProperties segmentProperties) {
+    int totalDimensions = segmentProperties.getDimensions().size();
+    CarbonRowSchema[] minMaxFlagSchemas = new CarbonRowSchema[totalDimensions];
+    for (int i = 0; i < totalDimensions; i++) {
+      minMaxFlagSchemas[i] = new CarbonRowSchema.FixedCarbonRowSchema(DataTypes.BOOLEAN);
+    }
+    CarbonRowSchema structMinMaxFlagSchema =
+        new CarbonRowSchema.StructCarbonRowSchema(DataTypes.createDefaultStructType(),
+            minMaxFlagSchemas);
+    indexSchemas.add(structMinMaxFlagSchema);
+  }
+
+  /**
    * Method to get the min max length of each column. It will return the length of only column
    * which will be cached
    *

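For illustration, the flag column added by addMinMaxFlagSchema is a struct holding one BOOLEAN child per dimension; addMinMaxFlagValues fills it when the index row is built and getMinMaxFlag reads it back during pruning. The sketch below models that struct as a plain boolean[] slot inside an Object[] row instead of CarbonRowSchema/DataMapRow objects, and the ordinal used is only an assumption for the example.

    import java.util.Arrays;

    public class MinMaxFlagStructSketch {

      // write side: analogous to addMinMaxFlagValues, one boolean per dimension packed into a
      // single struct-typed ordinal of the index row (modelled here as a boolean[] element)
      static void addMinMaxFlagValues(Object[] row, int ordinal, boolean[] minMaxFlag) {
        row[ordinal] = Arrays.copyOf(minMaxFlag, minMaxFlag.length);
      }

      // read side: analogous to getMinMaxFlag, which rebuilds a boolean[] from the nested row
      static boolean[] getMinMaxFlag(Object[] row, int ordinal) {
        return (boolean[]) row[ordinal];
      }

      public static void main(String[] args) {
        int dimensionCount = 3;
        int minMaxFlagOrdinal = 9; // assumed position, mirroring BLOCK_MIN_MAX_FLAG
        Object[] indexRow = new Object[13];

        boolean[] flags = new boolean[dimensionCount];
        Arrays.fill(flags, true);
        flags[2] = false; // third dimension holds values longer than the configured limit

        addMinMaxFlagValues(indexRow, minMaxFlagOrdinal, flags);
        System.out.println(Arrays.toString(getMinMaxFlag(indexRow, minMaxFlagOrdinal)));
      }
    }
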
http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/index/BlockletMinMaxIndex.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/index/BlockletMinMaxIndex.java b/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/index/BlockletMinMaxIndex.java
index 5c0272d..4b6a361 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/index/BlockletMinMaxIndex.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/index/BlockletMinMaxIndex.java
@@ -21,6 +21,8 @@ import java.io.Serializable;
 import java.nio.ByteBuffer;
 import java.util.List;
 
+import org.apache.commons.lang3.ArrayUtils;
+
 /**
  * Below class holds the information of max and min value of all the columns in a blocklet
  */
@@ -41,16 +43,23 @@ public class BlockletMinMaxIndex implements Serializable {
    */
   private byte[][] maxValues;
 
+  /**
+   * flag to check whether min max is written for a column or not
+   */
+  private boolean[] isMinMaxSet;
+
   public BlockletMinMaxIndex() {
   }
 
-  public BlockletMinMaxIndex(List<ByteBuffer> minValues, List<ByteBuffer> maxValues) {
+  public BlockletMinMaxIndex(List<ByteBuffer> minValues, List<ByteBuffer> maxValues,
+      List<Boolean> isMinMaxSet) {
     this.minValues = new byte[minValues.size()][];
     this.maxValues = new byte[maxValues.size()][];
     for (int i = 0; i < minValues.size(); i++) {
       this.minValues[i] = minValues.get(i).array();
       this.maxValues[i] = maxValues.get(i).array();
     }
+    this.isMinMaxSet = ArrayUtils.toPrimitive(isMinMaxSet.toArray(new Boolean[isMinMaxSet.size()]));
   }
 
   /**
@@ -81,4 +90,12 @@ public class BlockletMinMaxIndex implements Serializable {
     this.maxValues = maxValues;
   }
 
+  public boolean[] getIsMinMaxSet() {
+    return isMinMaxSet;
+  }
+
+  public void setIsMinMaxSet(boolean[] isMinMaxSet) {
+    this.isMinMaxSet = isMinMaxSet;
+  }
+
 }

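The new constructor above converts the thrift-level List<Boolean> into a primitive boolean[] through commons-lang3. A small hedged sketch of that conversion and of the "no flags in older files" path follows; everything except ArrayUtils.toPrimitive is illustrative, and commons-lang3 is assumed to be on the classpath as it is for the module itself.

    import java.util.Arrays;
    import java.util.List;

    import org.apache.commons.lang3.ArrayUtils;

    public class IsMinMaxSetConversionSketch {

      // mirrors the conversion done in the new BlockletMinMaxIndex constructor
      static boolean[] toPrimitiveFlags(List<Boolean> isMinMaxSet) {
        if (isMinMaxSet == null) {
          // older stores carry no min/max presence list; callers treat a null array
          // as "min/max present for every column"
          return null;
        }
        return ArrayUtils.toPrimitive(isMinMaxSet.toArray(new Boolean[isMinMaxSet.size()]));
      }

      public static void main(String[] args) {
        System.out.println(Arrays.toString(toPrimitiveFlags(Arrays.asList(true, false, true))));
        System.out.println(toPrimitiveFlags(null)); // null for legacy stores
      }
    }
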
http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
index 74c9ae2..fcb374f 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
@@ -206,13 +206,14 @@ public class FilterExpressionProcessor implements FilterProcessor {
    * @param dataRefNode
    */
   private void addBlockBasedOnMinMaxValue(FilterExecuter filterExecuter,
-      List<DataRefNode> listOfDataBlocksToScan, DataRefNode dataRefNode) {
+      List<DataRefNode> listOfDataBlocksToScan, DataRefNode dataRefNode, boolean[] isMinMaxSet) {
     if (null == dataRefNode.getColumnsMinValue() || null == dataRefNode.getColumnsMaxValue()) {
       listOfDataBlocksToScan.add(dataRefNode);
       return;
     }
     BitSet bitSet = filterExecuter
-        .isScanRequired(dataRefNode.getColumnsMaxValue(), dataRefNode.getColumnsMinValue());
+        .isScanRequired(dataRefNode.getColumnsMaxValue(), dataRefNode.getColumnsMinValue(),
+            isMinMaxSet);
     if (!bitSet.isEmpty()) {
       listOfDataBlocksToScan.add(dataRefNode);
     }
@@ -472,13 +473,13 @@ public class FilterExpressionProcessor implements FilterProcessor {
   }
 
   public static boolean isScanRequired(FilterExecuter filterExecuter, byte[][] maxValue,
-      byte[][] minValue) {
+      byte[][] minValue, boolean[] isMinMaxSet) {
     if (filterExecuter instanceof ImplicitColumnFilterExecutor) {
       return ((ImplicitColumnFilterExecutor) filterExecuter)
-          .isFilterValuesPresentInAbstractIndex(maxValue, minValue);
+          .isFilterValuesPresentInAbstractIndex(maxValue, minValue, isMinMaxSet);
     } else {
       // otherwise decide based on min/max value
-      BitSet bitSet = filterExecuter.isScanRequired(maxValue, minValue);
+      BitSet bitSet = filterExecuter.isScanRequired(maxValue, minValue, isMinMaxSet);
       return !bitSet.isEmpty();
     }
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/AndFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/AndFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/AndFilterExecuterImpl.java
index e309451..d743151 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/AndFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/AndFilterExecuterImpl.java
@@ -56,12 +56,13 @@ public class AndFilterExecuterImpl implements FilterExecuter, ImplicitColumnFilt
         rightExecuter.applyFilter(value, dimOrdinalMax);
   }
 
-  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
-    BitSet leftFilters = leftExecuter.isScanRequired(blockMaxValue, blockMinValue);
+  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
+      boolean[] isMinMaxSet) {
+    BitSet leftFilters = leftExecuter.isScanRequired(blockMaxValue, blockMinValue, isMinMaxSet);
     if (leftFilters.isEmpty()) {
       return leftFilters;
     }
-    BitSet rightFilter = rightExecuter.isScanRequired(blockMaxValue, blockMinValue);
+    BitSet rightFilter = rightExecuter.isScanRequired(blockMaxValue, blockMinValue, isMinMaxSet);
     if (rightFilter.isEmpty()) {
       return rightFilter;
     }
@@ -77,14 +78,14 @@ public class AndFilterExecuterImpl implements FilterExecuter, ImplicitColumnFilt
 
   @Override
   public BitSet isFilterValuesPresentInBlockOrBlocklet(byte[][] maxValue, byte[][] minValue,
-      String uniqueBlockPath) {
+      String uniqueBlockPath, boolean[] isMinMaxSet) {
     BitSet leftFilters = null;
     if (leftExecuter instanceof ImplicitColumnFilterExecutor) {
       leftFilters = ((ImplicitColumnFilterExecutor) leftExecuter)
-          .isFilterValuesPresentInBlockOrBlocklet(maxValue, minValue,uniqueBlockPath);
+          .isFilterValuesPresentInBlockOrBlocklet(maxValue, minValue,uniqueBlockPath, isMinMaxSet);
     } else {
       leftFilters = leftExecuter
-          .isScanRequired(maxValue, minValue);
+          .isScanRequired(maxValue, minValue, isMinMaxSet);
     }
     if (leftFilters.isEmpty()) {
       return leftFilters;
@@ -92,9 +93,9 @@ public class AndFilterExecuterImpl implements FilterExecuter, ImplicitColumnFilt
     BitSet rightFilter = null;
     if (rightExecuter instanceof ImplicitColumnFilterExecutor) {
       rightFilter = ((ImplicitColumnFilterExecutor) rightExecuter)
-          .isFilterValuesPresentInBlockOrBlocklet(maxValue, minValue, uniqueBlockPath);
+          .isFilterValuesPresentInBlockOrBlocklet(maxValue, minValue, uniqueBlockPath, isMinMaxSet);
     } else {
-      rightFilter = rightExecuter.isScanRequired(maxValue, minValue);
+      rightFilter = rightExecuter.isScanRequired(maxValue, minValue, isMinMaxSet);
     }
     if (rightFilter.isEmpty()) {
       return rightFilter;
@@ -104,15 +105,16 @@ public class AndFilterExecuterImpl implements FilterExecuter, ImplicitColumnFilt
   }
 
   @Override
-  public Boolean isFilterValuesPresentInAbstractIndex(byte[][] maxValue, byte[][] minValue) {
+  public Boolean isFilterValuesPresentInAbstractIndex(byte[][] maxValue, byte[][] minValue,
+      boolean[] isMinMaxSet) {
     Boolean leftRes;
     BitSet tempFilter;
     if (leftExecuter instanceof ImplicitColumnFilterExecutor) {
       leftRes = ((ImplicitColumnFilterExecutor) leftExecuter)
-          .isFilterValuesPresentInAbstractIndex(maxValue, minValue);
+          .isFilterValuesPresentInAbstractIndex(maxValue, minValue, isMinMaxSet);
     } else {
       tempFilter = leftExecuter
-          .isScanRequired(maxValue, minValue);
+          .isScanRequired(maxValue, minValue, isMinMaxSet);
       leftRes = !tempFilter.isEmpty();
     }
     if (!leftRes) {
@@ -122,10 +124,10 @@ public class AndFilterExecuterImpl implements FilterExecuter, ImplicitColumnFilt
     Boolean rightRes = null;
     if (rightExecuter instanceof ImplicitColumnFilterExecutor) {
       rightRes = ((ImplicitColumnFilterExecutor) rightExecuter)
-          .isFilterValuesPresentInAbstractIndex(maxValue, minValue);
+          .isFilterValuesPresentInAbstractIndex(maxValue, minValue, isMinMaxSet);
     } else {
       tempFilter = rightExecuter
-          .isScanRequired(maxValue, minValue);
+          .isScanRequired(maxValue, minValue, isMinMaxSet);
       rightRes = !tempFilter.isEmpty();
     }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeFilterExecuterImpl.java
index 71646c9..7017f21 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeFilterExecuterImpl.java
@@ -434,7 +434,8 @@ public class ExcludeFilterExecuterImpl implements FilterExecuter {
   }
 
   @Override
-  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
+  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
+      boolean[] isMinMaxSet) {
     BitSet bitSet = new BitSet(1);
     bitSet.flip(0, 1);
     return bitSet;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FalseFilterExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FalseFilterExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FalseFilterExecutor.java
index ee3b0fc..c2fa1ef 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FalseFilterExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FalseFilterExecutor.java
@@ -51,7 +51,8 @@ public class FalseFilterExecutor implements FilterExecuter {
   }
 
   @Override
-  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
+  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
+      boolean[] isMinMaxSet) {
     return new BitSet();
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FilterExecuter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FilterExecuter.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FilterExecuter.java
index a64341e..edf88b1 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FilterExecuter.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FilterExecuter.java
@@ -48,13 +48,15 @@ public interface FilterExecuter {
    *
    * @param blockMaxValue maximum value of each column in the block/blocklet
    * @param blockMinValue minimum value of each column in the block/blocklet
+   * @param isMinMaxSet  flag to specify whether min max for the filter dimension is written or not
    * @return BitSet
    */
-  BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue);
+  BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue, boolean[] isMinMaxSet);
 
   /**
    * It just reads necessary block for filter executor, it does not uncompress the data.
+   *
    * @param rawBlockletColumnChunks
    */
-  void readColumnChunks(RawBlockletColumnChunks rawBlockletColumnChunks)throws IOException;
+  void readColumnChunks(RawBlockletColumnChunks rawBlockletColumnChunks) throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitColumnFilterExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitColumnFilterExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitColumnFilterExecutor.java
index 8f2ad57..8cdb0f1 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitColumnFilterExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitColumnFilterExecutor.java
@@ -34,7 +34,7 @@ public interface ImplicitColumnFilterExecutor {
    * @return
    */
   BitSet isFilterValuesPresentInBlockOrBlocklet(byte[][] maxValue, byte[][] minValue,
-      String uniqueBlockPath);
+      String uniqueBlockPath, boolean[] isMinMaxSet);
 
   /**
    * This method will validate the abstract index
@@ -43,5 +43,6 @@ public interface ImplicitColumnFilterExecutor {
    *
    * @return
    */
-  Boolean isFilterValuesPresentInAbstractIndex(byte[][] maxValue, byte[][] minValue);
+  Boolean isFilterValuesPresentInAbstractIndex(byte[][] maxValue, byte[][] minValue,
+      boolean[] isMinMaxSet);
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitIncludeFilterExecutorImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitIncludeFilterExecutorImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitIncludeFilterExecutorImpl.java
index b683fd6..bb30d71 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitIncludeFilterExecutorImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitIncludeFilterExecutorImpl.java
@@ -59,8 +59,8 @@ public class ImplicitIncludeFilterExecutorImpl
     return false;
   }
 
-  @Override
-  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
+  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
+      boolean[] isMinMaxSet) {
     return null;
   }
 
@@ -71,7 +71,7 @@ public class ImplicitIncludeFilterExecutorImpl
 
   @Override
   public BitSet isFilterValuesPresentInBlockOrBlocklet(byte[][] maxValue, byte[][] minValue,
-      String uniqueBlockPath) {
+      String uniqueBlockPath, boolean[] isMinMaxSet) {
     BitSet bitSet = new BitSet(1);
     boolean isScanRequired = false;
     String shortBlockId = CarbonTablePath.getShortBlockId(uniqueBlockPath);
@@ -104,7 +104,8 @@ public class ImplicitIncludeFilterExecutorImpl
   }
 
   @Override
-  public Boolean isFilterValuesPresentInAbstractIndex(byte[][] maxValue, byte[][] minValue) {
+  public Boolean isFilterValuesPresentInAbstractIndex(byte[][] maxValue, byte[][] minValue,
+      boolean[] isMinMaxSet) {
     return true;
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeFilterExecuterImpl.java
index 974830d..b9df60c 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeFilterExecuterImpl.java
@@ -111,7 +111,8 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
       for (int i = 0; i < dimensionRawColumnChunk.getPagesCount(); i++) {
         if (dimensionRawColumnChunk.getMaxValues() != null) {
           if (isScanRequired(dimensionRawColumnChunk.getMaxValues()[i],
-              dimensionRawColumnChunk.getMinValues()[i], dimColumnExecuterInfo.getFilterKeys())) {
+              dimensionRawColumnChunk.getMinValues()[i], dimColumnExecuterInfo.getFilterKeys(),
+              dimensionRawColumnChunk.getMinMaxFlagArray()[i])) {
             DimensionColumnPage dimensionColumnPage = dimensionRawColumnChunk.decodeColumnPage(i);
             if (!isDecoded) {
               filterValues =  FilterUtil
@@ -449,7 +450,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
   }
 
   @Override
-  public BitSet isScanRequired(byte[][] blkMaxVal, byte[][] blkMinVal) {
+  public BitSet isScanRequired(byte[][] blkMaxVal, byte[][] blkMinVal, boolean[] isMinMaxSet) {
     BitSet bitSet = new BitSet(1);
     byte[][] filterValues;
     int chunkIndex = 0;
@@ -458,7 +459,8 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
     if (isDimensionPresentInCurrentBlock) {
       filterValues = dimColumnExecuterInfo.getFilterKeys();
       chunkIndex = dimColumnEvaluatorInfo.getColumnIndexInMinMaxByteArray();
-      isScanRequired = isScanRequired(blkMaxVal[chunkIndex], blkMinVal[chunkIndex], filterValues);
+      isScanRequired = isScanRequired(blkMaxVal[chunkIndex], blkMinVal[chunkIndex], filterValues,
+          isMinMaxSet[chunkIndex]);
     } else if (isMeasurePresentInCurrentBlock) {
       chunkIndex = msrColumnEvaluatorInfo.getColumnIndexInMinMaxByteArray();
       isScanRequired = isScanRequired(blkMaxVal[chunkIndex], blkMinVal[chunkIndex],
@@ -472,7 +474,12 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
     return bitSet;
   }
 
-  private boolean isScanRequired(byte[] blkMaxVal, byte[] blkMinVal, byte[][] filterValues) {
+  private boolean isScanRequired(byte[] blkMaxVal, byte[] blkMinVal, byte[][] filterValues,
+      boolean isMinMaxSet) {
+    if (!isMinMaxSet) {
+      // scan complete data if min max is not written for a given column
+      return true;
+    }
     boolean isScanRequired = false;
     for (int k = 0; k < filterValues.length; k++) {
       // filter value should be in range of max and min value i.e

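A minimal sketch of the pruning decision the filter executors now make: when the flag says min/max was not written for the filter column, the block/blocklet must be scanned regardless of the (empty) min/max bytes; otherwise the usual range check applies. The comparator and names below are illustrative, not the FilterExecuter API.

    public class MinMaxPruningSketch {

      // returns true when the block/blocklet has to be scanned for an include filter value
      static boolean isScanRequired(byte[] blockMax, byte[] blockMin, byte[] filterValue,
          boolean isMinMaxSet) {
        if (!isMinMaxSet) {
          // min/max was skipped for this column (for example, values longer than the
          // configured byte limit), so pruning is impossible and the data must be read
          return true;
        }
        // the filter value must lie within [blockMin, blockMax] for the block to contain it
        return compare(filterValue, blockMin) >= 0 && compare(filterValue, blockMax) <= 0;
      }

      private static int compare(byte[] a, byte[] b) {
        // unsigned lexicographic comparison of byte arrays
        int len = Math.min(a.length, b.length);
        for (int i = 0; i < len; i++) {
          int diff = (a[i] & 0xFF) - (b[i] & 0xFF);
          if (diff != 0) {
            return diff;
          }
        }
        return a.length - b.length;
      }

      public static void main(String[] args) {
        byte[] min = {10}, max = {50}, value = {60};
        System.out.println(isScanRequired(max, min, value, true));  // false: 60 outside [10, 50]
        System.out.println(isScanRequired(max, min, value, false)); // true: no min/max, must scan
      }
    }
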
http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/OrFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/OrFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/OrFilterExecuterImpl.java
index 9f0afb5..4e7dec2 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/OrFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/OrFilterExecuterImpl.java
@@ -52,9 +52,10 @@ public class OrFilterExecuterImpl implements FilterExecuter {
   }
 
   @Override
-  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
-    BitSet leftFilters = leftExecuter.isScanRequired(blockMaxValue, blockMinValue);
-    BitSet rightFilters = rightExecuter.isScanRequired(blockMaxValue, blockMinValue);
+  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
+      boolean[] isMinMaxSet) {
+    BitSet leftFilters = leftExecuter.isScanRequired(blockMaxValue, blockMinValue, isMinMaxSet);
+    BitSet rightFilters = rightExecuter.isScanRequired(blockMaxValue, blockMinValue, isMinMaxSet);
     leftFilters.or(rightFilters);
     return leftFilters;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
index 9718dcf..7a0a386 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
@@ -236,7 +236,12 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
    * @param filterValues
    * @return
    */
-  public boolean isScanRequired(byte[] blockMinValue, byte[] blockMaxValue, byte[][] filterValues) {
+  public boolean isScanRequired(byte[] blockMinValue, byte[] blockMaxValue, byte[][] filterValues,
+      boolean isMinMaxSet) {
+    if (!isMinMaxSet) {
+      // scan complete data if min max is not written for a given column
+      return true;
+    }
     boolean isScanRequired = true;
     isRangeFullyCoverBlock = false;
     startBlockMinIsDefaultStart = false;
@@ -314,12 +319,14 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
    * @return
    */
   @Override
-  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
+  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
+      boolean[] isMinMaxSet) {
     BitSet bitSet = new BitSet(1);
     byte[][] filterValues = this.filterRangesValues;
     int columnIndex = this.dimColEvaluatorInfo.getColumnIndexInMinMaxByteArray();
-    boolean isScanRequired = columnIndex >= blockMinValue.length ||
-        isScanRequired(blockMinValue[columnIndex], blockMaxValue[columnIndex], filterValues);
+    boolean isScanRequired =
+        columnIndex >= blockMinValue.length || isScanRequired(blockMinValue[columnIndex],
+            blockMaxValue[columnIndex], filterValues, isMinMaxSet[columnIndex]);
     if (isScanRequired) {
       bitSet.set(0);
     }
@@ -361,7 +368,7 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
     for (int i = 0; i < rawColumnChunk.getPagesCount(); i++) {
       if (rawColumnChunk.getMaxValues() != null) {
         if (isScanRequired(rawColumnChunk.getMinValues()[i], rawColumnChunk.getMaxValues()[i],
-            this.filterRangesValues)) {
+            this.filterRangesValues, rawColumnChunk.getMinMaxFlagArray()[i])) {
           if (isRangeFullyCoverBlock) {
             // Set all the bits in this case as filter Min Max values cover the whole block.
             BitSet bitSet = new BitSet(rawColumnChunk.getRowCount()[i]);
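Note: the early return added above is the convention every executer in this patch follows: when min/max was not written for the filtered column (for example because the value exceeded the configurable byte count), pruning has to assume the block may match and request a full scan. A stand-alone illustration of that decision, with hypothetical names and a plain unsigned byte[] comparator standing in for ByteUtil.UnsafeComparer:

    import java.util.Comparator;

    final class RangePruningSketch {
      // Returns true when the block must be scanned. Only when min/max is
      // available AND the filter range lies completely outside [min, max]
      // can the block be skipped.
      static boolean needsScan(byte[] blockMin, byte[] blockMax,
          byte[] filterLow, byte[] filterHigh, boolean isMinMaxSet,
          Comparator<byte[]> unsignedCmp) {
        if (!isMinMaxSet) {
          // min/max not stored for this column: never prune on it
          return true;
        }
        // overlap test: filterLow <= blockMax && filterHigh >= blockMin
        return unsignedCmp.compare(filterLow, blockMax) <= 0
            && unsignedCmp.compare(filterHigh, blockMin) >= 0;
      }
    }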

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureExcludeFilterExecutorImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureExcludeFilterExecutorImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureExcludeFilterExecutorImpl.java
index d7bec7e..a57c730 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureExcludeFilterExecutorImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureExcludeFilterExecutorImpl.java
@@ -63,7 +63,8 @@ public class RestructureExcludeFilterExecutorImpl extends RestructureEvaluatorIm
   }
 
   @Override
-  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
+  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
+      boolean[] isMinMaxSet) {
     BitSet bitSet = new BitSet(1);
     bitSet.flip(0, 1);
     return bitSet;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureIncludeFilterExecutorImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureIncludeFilterExecutorImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureIncludeFilterExecutorImpl.java
index c874fc5..fc6f9c3 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureIncludeFilterExecutorImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureIncludeFilterExecutorImpl.java
@@ -61,7 +61,7 @@ public class RestructureIncludeFilterExecutorImpl extends RestructureEvaluatorIm
     throw new FilterUnsupportedException("Unsupported RestructureIncludeFilterExecutorImpl on row");
   }
 
-  public BitSet isScanRequired(byte[][] blkMaxVal, byte[][] blkMinVal) {
+  public BitSet isScanRequired(byte[][] blkMaxVal, byte[][] blkMinVal, boolean[] isMinMaxSet) {
     BitSet bitSet = new BitSet(1);
     bitSet.set(0, isDefaultValuePresentInFilterValues);
     return bitSet;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
index 7e5826c..d5f4ee2 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
@@ -597,7 +597,8 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
   }
 
   @Override
-  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
+  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
+      boolean[] isMinMaxSet) {
     BitSet bitSet = new BitSet(1);
     bitSet.set(0);
     return bitSet;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
index e561f96..31ab42e 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
@@ -114,8 +114,8 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
     }
   }
 
-  @Override
-  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
+  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
+      boolean[] isMinMaxSet) {
     BitSet bitSet = new BitSet(1);
     boolean isScanRequired = false;
     byte[] maxValue = null;
@@ -126,7 +126,8 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
             isScanRequired(maxValue, msrFilterRangeValues, msrColEvalutorInfoList.get(0).getType());
       } else {
         maxValue = blockMaxValue[dimensionChunkIndex[0]];
-        isScanRequired = isScanRequired(maxValue, filterRangeValues);
+        isScanRequired =
+            isScanRequired(maxValue, filterRangeValues, isMinMaxSet[dimensionChunkIndex[0]]);
       }
     } else {
       isScanRequired = isDefaultValuePresentInFilter;
@@ -139,7 +140,11 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
   }
 
 
-  private boolean isScanRequired(byte[] blockMaxValue, byte[][] filterValues) {
+  private boolean isScanRequired(byte[] blockMaxValue, byte[][] filterValues, boolean isMinMaxSet) {
+    if (!isMinMaxSet) {
+      // scan complete data if min max is not written for a given column
+      return true;
+    }
     boolean isScanRequired = false;
     for (int k = 0; k < filterValues.length; k++) {
       // filter value should be in range of max and min value i.e
@@ -196,7 +201,8 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
       boolean isExclude = false;
       for (int i = 0; i < rawColumnChunk.getPagesCount(); i++) {
         if (rawColumnChunk.getMaxValues() != null) {
-          if (isScanRequired(rawColumnChunk.getMaxValues()[i], this.filterRangeValues)) {
+          if (isScanRequired(rawColumnChunk.getMaxValues()[i],
+              this.filterRangeValues, rawColumnChunk.getMinMaxFlagArray()[i])) {
             int compare = ByteUtil.UnsafeComparer.INSTANCE
                 .compareTo(filterRangeValues[0], rawColumnChunk.getMinValues()[i]);
             if (compare < 0) {
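Note: at page level the same flag is consulted per page rather than per block, with rawColumnChunk.getMinMaxFlagArray()[i] guarding the comparison of the page max against the filter values. A simplified sketch of that loop for the greater-than case (the chunk type is a stand-in; only the accessors used in the hunk are assumed):

    import java.util.BitSet;
    import java.util.Comparator;

    final class GreaterThanPageFilterSketch {
      interface PageChunk {
        int getPagesCount();
        byte[][] getMaxValues();        // one max per page
        boolean[] getMinMaxFlagArray(); // one flag per page
      }

      // Marks pages that may contain rows greater than the filter value.
      static BitSet pagesToScan(PageChunk chunk, byte[] filterValue,
          Comparator<byte[]> unsignedCmp) {
        BitSet pages = new BitSet(chunk.getPagesCount());
        for (int i = 0; i < chunk.getPagesCount(); i++) {
          boolean minMaxSet = chunk.getMinMaxFlagArray()[i];
          // no stored min/max, or filter < page max: the page may qualify
          if (!minMaxSet
              || unsignedCmp.compare(filterValue, chunk.getMaxValues()[i]) < 0) {
            pages.set(i);
          }
        }
        return pages;
      }
    }

The less-than and less-than-or-equal executers in the following hunks mirror this with the page min values.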

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
index 53c2d76..8944a51 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
@@ -112,7 +112,8 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
     }
   }
 
-  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
+  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
+      boolean[] isMinMaxSet) {
     BitSet bitSet = new BitSet(1);
     boolean isScanRequired = false;
     byte[] maxValue = null;
@@ -123,7 +124,8 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
             isScanRequired(maxValue, msrFilterRangeValues, msrColEvalutorInfoList.get(0).getType());
       } else {
         maxValue = blockMaxValue[dimensionChunkIndex[0]];
-        isScanRequired = isScanRequired(maxValue, filterRangeValues);
+        isScanRequired =
+            isScanRequired(maxValue, filterRangeValues, isMinMaxSet[dimensionChunkIndex[0]]);
       }
     } else {
       isScanRequired = isDefaultValuePresentInFilter;
@@ -135,7 +137,11 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
     return bitSet;
   }
 
-  private boolean isScanRequired(byte[] blockMaxValue, byte[][] filterValues) {
+  private boolean isScanRequired(byte[] blockMaxValue, byte[][] filterValues, boolean isMinMaxSet) {
+    if (!isMinMaxSet) {
+      // scan complete data if min max is not written for a given column
+      return true;
+    }
     boolean isScanRequired = false;
     for (int k = 0; k < filterValues.length; k++) {
       // filter value should be in range of max and min value i.e
@@ -193,7 +199,8 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
       boolean isExclude = false;
       for (int i = 0; i < rawColumnChunk.getPagesCount(); i++) {
         if (rawColumnChunk.getMaxValues() != null) {
-          if (isScanRequired(rawColumnChunk.getMaxValues()[i], this.filterRangeValues)) {
+          if (isScanRequired(rawColumnChunk.getMaxValues()[i], this.filterRangeValues,
+              rawColumnChunk.getMinMaxFlagArray()[i])) {
             int compare = ByteUtil.UnsafeComparer.INSTANCE
                 .compareTo(filterRangeValues[0], rawColumnChunk.getMinValues()[i]);
             if (compare <= 0) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
index f410322..038d50b 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
@@ -115,7 +115,8 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
     }
   }
 
-  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
+  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
+      boolean[] isMinMaxSet) {
     BitSet bitSet = new BitSet(1);
     byte[] minValue = null;
     boolean isScanRequired = false;
@@ -126,7 +127,8 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
             isScanRequired(minValue, msrFilterRangeValues, msrColEvalutorInfoList.get(0).getType());
       } else {
         minValue = blockMinValue[dimensionChunkIndex[0]];
-        isScanRequired = isScanRequired(minValue, filterRangeValues);
+        isScanRequired =
+            isScanRequired(minValue, filterRangeValues, isMinMaxSet[dimensionChunkIndex[0]]);
       }
     } else {
       isScanRequired = isDefaultValuePresentInFilter;
@@ -137,7 +139,11 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
     return bitSet;
   }
 
-  private boolean isScanRequired(byte[] blockMinValue, byte[][] filterValues) {
+  private boolean isScanRequired(byte[] blockMinValue, byte[][] filterValues, boolean isMinMaxSet) {
+    if (!isMinMaxSet) {
+      // scan complete data if min max is not written for a given column
+      return true;
+    }
     boolean isScanRequired = false;
     for (int k = 0; k < filterValues.length; k++) {
       // and filter-min should be positive
@@ -195,7 +201,8 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
       boolean isExclude = false;
       for (int i = 0; i < rawColumnChunk.getPagesCount(); i++) {
         if (rawColumnChunk.getMinValues() != null) {
-          if (isScanRequired(rawColumnChunk.getMinValues()[i], this.filterRangeValues)) {
+          if (isScanRequired(rawColumnChunk.getMinValues()[i], this.filterRangeValues,
+              rawColumnChunk.getMinMaxFlagArray()[i])) {
             BitSet bitSet;
             DimensionColumnPage dimensionColumnPage = rawColumnChunk.decodeColumnPage(i);
             if (null != rawColumnChunk.getLocalDictionary()) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
index 1f3b9a4..fddb4ff 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
@@ -115,7 +115,8 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
     }
   }
 
-  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
+  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
+      boolean[] isMinMaxSet) {
     BitSet bitSet = new BitSet(1);
     byte[] minValue = null;
     boolean isScanRequired = false;
@@ -126,7 +127,8 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
             isScanRequired(minValue, msrFilterRangeValues, msrColEvalutorInfoList.get(0).getType());
       } else {
         minValue = blockMinValue[dimensionChunkIndex[0]];
-        isScanRequired = isScanRequired(minValue, filterRangeValues);
+        isScanRequired =
+            isScanRequired(minValue, filterRangeValues, isMinMaxSet[dimensionChunkIndex[0]]);
       }
     } else {
       isScanRequired = isDefaultValuePresentInFilter;
@@ -138,7 +140,11 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
   }
 
 
-  private boolean isScanRequired(byte[] blockMinValue, byte[][] filterValues) {
+  private boolean isScanRequired(byte[] blockMinValue, byte[][] filterValues, boolean isMinMaxSet) {
+    if (!isMinMaxSet) {
+      // scan complete data if min max is not written for a given column
+      return true;
+    }
     boolean isScanRequired = false;
     for (int k = 0; k < filterValues.length; k++) {
       // and filter-min should be positive
@@ -195,7 +201,8 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
       boolean isExclude = false;
       for (int i = 0; i < rawColumnChunk.getPagesCount(); i++) {
         if (rawColumnChunk.getMinValues() != null) {
-          if (isScanRequired(rawColumnChunk.getMinValues()[i], this.filterRangeValues)) {
+          if (isScanRequired(rawColumnChunk.getMinValues()[i], this.filterRangeValues,
+              rawColumnChunk.getMinMaxFlagArray()[i])) {
             BitSet bitSet;
             DimensionColumnPage dimensionColumnPage = rawColumnChunk.decodeColumnPage(i);
             if (null != rawColumnChunk.getLocalDictionary()) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/TrueFilterExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/TrueFilterExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/TrueFilterExecutor.java
index 2cbb234..08831a4 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/TrueFilterExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/TrueFilterExecutor.java
@@ -58,7 +58,8 @@ public class TrueFilterExecutor implements FilterExecuter {
    * @param blockMinValue
    * @return BitSet
    */
-  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
+  public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
+      boolean[] isMinMaxSet) {
     BitSet bitSet = new BitSet(1);
     bitSet.flip(0, 1);
     return bitSet;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04084c73/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java b/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
index 0100c8b..57849e2 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
@@ -122,11 +122,11 @@ public class BlockletFilterScanner extends BlockletFullScanner {
         bitSet = ((ImplicitColumnFilterExecutor) filterExecuter)
             .isFilterValuesPresentInBlockOrBlocklet(
                 dataBlock.getColumnsMaxValue(),
-                dataBlock.getColumnsMinValue(), blockletId);
+                dataBlock.getColumnsMinValue(), blockletId, dataBlock.minMaxFlagArray());
       } else {
         bitSet = this.filterExecuter
             .isScanRequired(dataBlock.getColumnsMaxValue(),
-                dataBlock.getColumnsMinValue());
+                dataBlock.getColumnsMinValue(), dataBlock.minMaxFlagArray());
       }
       return !bitSet.isEmpty();
     }
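Note: on the consuming side, the scanner now threads the block-level flag array (dataBlock.minMaxFlagArray()) into the executer and only proceeds when at least one bit survives pruning. A minimal usage sketch under the same assumptions as above (simplified stand-in types; only the accessors visible in this hunk are assumed):

    import java.util.BitSet;

    final class BlockletPruningSketch {
      interface DataBlock {
        byte[][] getColumnsMaxValue();
        byte[][] getColumnsMinValue();
        boolean[] minMaxFlagArray();
      }

      interface Executer {
        BitSet isScanRequired(byte[][] max, byte[][] min, boolean[] isMinMaxSet);
      }

      // Returns true when the blocklet has to be read and filtered row-wise.
      static boolean scanRequired(Executer executer, DataBlock dataBlock) {
        BitSet bitSet = executer.isScanRequired(
            dataBlock.getColumnsMaxValue(),
            dataBlock.getColumnsMinValue(),
            dataBlock.minMaxFlagArray());
        return !bitSet.isEmpty();
      }
    }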