Posted to commits@carbondata.apache.org by gv...@apache.org on 2017/03/16 09:28:47 UTC

[04/14] incubator-carbondata git commit: Following changes are done as part of this commit.

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
index 27377c2..f188a67 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
@@ -26,6 +26,7 @@ import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
 import org.apache.carbondata.core.datastore.chunk.impl.FixedLengthDimensionDataChunk;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.scan.expression.Expression;
 import org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedException;
 import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
@@ -39,6 +40,11 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
 
   protected byte[][] filterRangeValues;
 
+  /**
+   * flag to check whether the default value is present in the filter value list
+   */
+  private boolean isDefaultValuePresentInFilter;
+
   public RowLevelRangeGrtrThanEquaToFilterExecuterImpl(
       List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
       List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList, Expression exp,
@@ -47,44 +53,71 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
     super(dimColEvaluatorInfoList, msrColEvalutorInfoList, exp, tableIdentifier, segmentProperties,
         null);
     this.filterRangeValues = filterRangeValues;
+    checkIfDefaultValueIsPresentInFilterList();
+  }
+
+  /**
+   * This method will check whether default value is present in the given filter values
+   */
+  private void checkIfDefaultValueIsPresentInFilterList() {
+    if (!this.isDimensionPresentInCurrentBlock[0]) {
+      CarbonDimension dimension = this.dimColEvaluatorInfoList.get(0).getDimension();
+      byte[] defaultValue = dimension.getDefaultValue();
+      if (null != defaultValue) {
+        for (int k = 0; k < filterRangeValues.length; k++) {
+          int maxCompare =
+              ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterRangeValues[k], defaultValue);
+          if (maxCompare <= 0) {
+            isDefaultValuePresentInFilter = true;
+            break;
+          }
+        }
+      }
+    }
   }
 
   @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
     BitSet bitSet = new BitSet(1);
-    byte[][] filterValues = this.filterRangeValues;
-    int columnIndex = this.dimColEvaluatorInfoList.get(0).getColumnIndex();
-    boolean isScanRequired = isScanRequired(blockMaxValue[columnIndex], filterValues);
+    boolean isScanRequired =
+        isScanRequired(blockMaxValue[dimensionBlocksIndex[0]], filterRangeValues);
     if (isScanRequired) {
       bitSet.set(0);
     }
     return bitSet;
-
   }
 
   private boolean isScanRequired(byte[] blockMaxValue, byte[][] filterValues) {
     boolean isScanRequired = false;
-    for (int k = 0; k < filterValues.length; k++) {
-      // filter value should be in range of max and min value i.e
-      // max>filtervalue>min
-      // so filter-max should be negative
-      int maxCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMaxValue);
-      // if any filter value is in range than this block needs to be
-      // scanned less than equal to max range.
-      if (maxCompare <= 0) {
-        isScanRequired = true;
-        break;
+    if (isDimensionPresentInCurrentBlock[0]) {
+      for (int k = 0; k < filterValues.length; k++) {
+        // the filter value should be within the block's min/max range, i.e.
+        // max >= filterValue >= min
+        // so filterValue - max should be less than or equal to zero
+        int maxCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMaxValue);
+        // if any filter value is within this range then the block needs to
+        // be scanned, i.e. some filter value is less than or equal to the block max
+        if (maxCompare <= 0) {
+          isScanRequired = true;
+          break;
+        }
       }
+    } else {
+      isScanRequired = isDefaultValuePresentInFilter;
     }
     return isScanRequired;
   }
 
   @Override public BitSetGroup applyFilter(BlocksChunkHolder blockChunkHolder)
       throws FilterUnsupportedException, IOException {
+    // select all rows if the dimension does not exist in the current block
+    if (!isDimensionPresentInCurrentBlock[0]) {
+      return getDefaultBitSetGroup(blockChunkHolder.getDataBlock().nodeSize());
+    }
     if (!dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DICTIONARY)) {
       return super.applyFilter(blockChunkHolder);
     }
     int blockIndex = segmentProperties.getDimensionOrdinalToBlockMapping()
-        .get(dimColEvaluatorInfoList.get(0).getColumnIndex());
+        .get(dimensionBlocksIndex[0]);
     if (null == blockChunkHolder.getDimensionRawDataChunk()[blockIndex]) {
       blockChunkHolder.getDimensionRawDataChunk()[blockIndex] = blockChunkHolder.getDataBlock()
           .getDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
@@ -225,14 +258,15 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
   }
 
   @Override public void readBlocks(BlocksChunkHolder blockChunkHolder) throws IOException {
-    if (!dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DICTIONARY)) {
-      super.readBlocks(blockChunkHolder);
-    }
-    int blockIndex = segmentProperties.getDimensionOrdinalToBlockMapping()
-        .get(dimColEvaluatorInfoList.get(0).getColumnIndex());
-    if (null == blockChunkHolder.getDimensionRawDataChunk()[blockIndex]) {
-      blockChunkHolder.getDimensionRawDataChunk()[blockIndex] = blockChunkHolder.getDataBlock()
-          .getDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
+    if (isDimensionPresentInCurrentBlock[0]) {
+      if (!dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DICTIONARY)) {
+        super.readBlocks(blockChunkHolder);
+      }
+      int blockIndex = dimensionBlocksIndex[0];
+      if (null == blockChunkHolder.getDimensionRawDataChunk()[blockIndex]) {
+        blockChunkHolder.getDimensionRawDataChunk()[blockIndex] = blockChunkHolder.getDataBlock()
+            .getDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
+      }
     }
   }
 }
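
Note (illustrative, not part of the patch): the new checkIfDefaultValueIsPresentInFilterList/isScanRequired path covers restructured tables, i.e. blocks written before the filter column was added. Such a block has no physical data for the column, so every row logically carries the column's default value, and the block only needs scanning if that default satisfies the range predicate. Below is a minimal, self-contained sketch of the greater-than-or-equal-to case; the class and method names are made up, and the byte comparison is a plain stand-in for ByteUtil.UnsafeComparer.INSTANCE.compareTo(...).

final class DefaultValueRangeCheckSketch {

  // unsigned lexicographic byte comparison, standing in for
  // ByteUtil.UnsafeComparer.INSTANCE.compareTo(a, b)
  private static int compareBytes(byte[] a, byte[] b) {
    int minLength = Math.min(a.length, b.length);
    for (int i = 0; i < minLength; i++) {
      int diff = (a[i] & 0xFF) - (b[i] & 0xFF);
      if (diff != 0) {
        return diff;
      }
    }
    return a.length - b.length;
  }

  // mirrors the >= executer: a block that only contains the default value must
  // still be scanned if some filter value is <= that default, because then the
  // default value itself satisfies "column >= filterValue"
  static boolean defaultSatisfiesGreaterThanEqual(byte[][] filterValues, byte[] defaultValue) {
    for (byte[] filterValue : filterValues) {
      if (compareBytes(filterValue, defaultValue) <= 0) {
        return true;
      }
    }
    return false;
  }
}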

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
index c7e2acc..f455acf 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
@@ -28,6 +28,7 @@ import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionary
 import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.scan.expression.Expression;
 import org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedException;
 import org.apache.carbondata.core.scan.filter.FilterUtil;
@@ -41,6 +42,11 @@ import org.apache.carbondata.core.util.CarbonUtil;
 public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilterExecuterImpl {
   protected byte[][] filterRangeValues;
 
+  /**
+   * flag to check whether the default value is present in the filter value list
+   */
+  private boolean isDefaultValuePresentInFilter;
+
   public RowLevelRangeLessThanEqualFilterExecuterImpl(
       List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
       List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList, Expression exp,
@@ -49,13 +55,33 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
     super(dimColEvaluatorInfoList, msrColEvalutorInfoList, exp, tableIdentifier, segmentProperties,
         null);
     this.filterRangeValues = filterRangeValues;
+    checkIfDefaultValueIsPresentInFilterList();
+  }
+
+  /**
+   * This method will check whether default value is present in the given filter values
+   */
+  private void checkIfDefaultValueIsPresentInFilterList() {
+    if (!this.isDimensionPresentInCurrentBlock[0]) {
+      CarbonDimension dimension = this.dimColEvaluatorInfoList.get(0).getDimension();
+      byte[] defaultValue = dimension.getDefaultValue();
+      if (null != defaultValue) {
+        for (int k = 0; k < filterRangeValues.length; k++) {
+          int maxCompare =
+              ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterRangeValues[k], defaultValue);
+          if (maxCompare >= 0) {
+            isDefaultValuePresentInFilter = true;
+            break;
+          }
+        }
+      }
+    }
   }
 
   @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
     BitSet bitSet = new BitSet(1);
-    byte[][] filterValues = this.filterRangeValues;
-    int columnIndex = this.dimColEvaluatorInfoList.get(0).getColumnIndex();
-    boolean isScanRequired = isScanRequired(blockMinValue[columnIndex], filterValues);
+    boolean isScanRequired =
+        isScanRequired(blockMinValue[dimensionBlocksIndex[0]], filterRangeValues);
     if (isScanRequired) {
       bitSet.set(0);
     }
@@ -64,28 +90,36 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
 
   private boolean isScanRequired(byte[] blockMinValue, byte[][] filterValues) {
     boolean isScanRequired = false;
-    for (int k = 0; k < filterValues.length; k++) {
-      // and filter-min should be positive
-      int minCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMinValue);
+    if (isDimensionPresentInCurrentBlock[0]) {
+      for (int k = 0; k < filterValues.length; k++) {
+        // and filter-min should be positive
+        int minCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMinValue);
 
-      // if any filter applied is not in range of min and max of block
-      // then since its a less than equal to fiter validate whether the block
-      // min range is less than equal to applied filter member
-      if (minCompare >= 0) {
-        isScanRequired = true;
-        break;
+        // since this is a less-than-or-equal-to filter, the block needs to be
+        // scanned if the block min is less than or equal to any applied
+        // filter member
+        if (minCompare >= 0) {
+          isScanRequired = true;
+          break;
+        }
       }
+    } else {
+      isScanRequired = isDefaultValuePresentInFilter;
     }
     return isScanRequired;
   }
 
   @Override public BitSetGroup applyFilter(BlocksChunkHolder blockChunkHolder)
       throws FilterUnsupportedException, IOException {
+    // select all rows if the dimension does not exist in the current block
+    if (!isDimensionPresentInCurrentBlock[0]) {
+      return getDefaultBitSetGroup(blockChunkHolder.getDataBlock().nodeSize());
+    }
     if (!dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DICTIONARY)) {
       return super.applyFilter(blockChunkHolder);
     }
     int blockIndex = segmentProperties.getDimensionOrdinalToBlockMapping()
-        .get(dimColEvaluatorInfoList.get(0).getColumnIndex());
+        .get(dimensionBlocksIndex[0]);
     if (null == blockChunkHolder.getDimensionRawDataChunk()[blockIndex]) {
       blockChunkHolder.getDimensionRawDataChunk()[blockIndex] = blockChunkHolder.getDataBlock()
           .getDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
@@ -117,7 +151,9 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
           .getDirectDictionaryGenerator(
               dimColEvaluatorInfoList.get(0).getDimension().getDataType());
       int key = directDictionaryGenerator.generateDirectSurrogateKey(null) + 1;
-      defaultValue = FilterUtil.getMaskKey(key, dimColEvaluatorInfoList.get(0).getDimension(),
+      CarbonDimension currentBlockDimension =
+          segmentProperties.getDimensions().get(dimensionBlocksIndex[0]);
+      defaultValue = FilterUtil.getMaskKey(key, currentBlockDimension,
           this.segmentProperties.getDimensionKeyGenerator());
     }
     if (dimensionColumnDataChunk.isExplicitSorted()
@@ -264,14 +300,15 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
   }
 
   @Override public void readBlocks(BlocksChunkHolder blockChunkHolder) throws IOException {
-    if (!dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DICTIONARY)) {
-      super.readBlocks(blockChunkHolder);
-    }
-    int blockIndex = segmentProperties.getDimensionOrdinalToBlockMapping()
-        .get(dimColEvaluatorInfoList.get(0).getColumnIndex());
-    if (null == blockChunkHolder.getDimensionRawDataChunk()[blockIndex]) {
-      blockChunkHolder.getDimensionRawDataChunk()[blockIndex] = blockChunkHolder.getDataBlock()
-          .getDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
+    if (isDimensionPresentInCurrentBlock[0]) {
+      if (!dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DICTIONARY)) {
+        super.readBlocks(blockChunkHolder);
+      }
+      int blockIndex = dimensionBlocksIndex[0];
+      if (null == blockChunkHolder.getDimensionRawDataChunk()[blockIndex]) {
+        blockChunkHolder.getDimensionRawDataChunk()[blockIndex] = blockChunkHolder.getDataBlock()
+            .getDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
+      }
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java
index d9795eb..69c0538 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java
@@ -28,6 +28,7 @@ import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionary
 import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.scan.expression.Expression;
 import org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedException;
 import org.apache.carbondata.core.scan.filter.FilterUtil;
@@ -41,6 +42,11 @@ import org.apache.carbondata.core.util.CarbonUtil;
 public class RowLevelRangeLessThanFiterExecuterImpl extends RowLevelFilterExecuterImpl {
   private byte[][] filterRangeValues;
 
+  /**
+   * flag to check whether the default value is present in the filter value list
+   */
+  private boolean isDefaultValuePresentInFilter;
+
   public RowLevelRangeLessThanFiterExecuterImpl(
       List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
       List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList, Expression exp,
@@ -49,13 +55,33 @@ public class RowLevelRangeLessThanFiterExecuterImpl extends RowLevelFilterExecut
     super(dimColEvaluatorInfoList, msrColEvalutorInfoList, exp, tableIdentifier, segmentProperties,
         null);
     this.filterRangeValues = filterRangeValues;
+    checkIfDefaultValueIsPresentInFilterList();
+  }
+
+  /**
+   * This method will check whether default value is present in the given filter values
+   */
+  private void checkIfDefaultValueIsPresentInFilterList() {
+    if (!this.isDimensionPresentInCurrentBlock[0]) {
+      CarbonDimension dimension = this.dimColEvaluatorInfoList.get(0).getDimension();
+      byte[] defaultValue = dimension.getDefaultValue();
+      if (null != defaultValue) {
+        for (int k = 0; k < filterRangeValues.length; k++) {
+          int maxCompare =
+              ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterRangeValues[k], defaultValue);
+          if (maxCompare > 0) {
+            isDefaultValuePresentInFilter = true;
+            break;
+          }
+        }
+      }
+    }
   }
 
   @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
     BitSet bitSet = new BitSet(1);
-    byte[][] filterValues = this.filterRangeValues;
-    int columnIndex = this.dimColEvaluatorInfoList.get(0).getColumnIndex();
-    boolean isScanRequired = isScanRequired(blockMinValue[columnIndex], filterValues);
+    boolean isScanRequired =
+        isScanRequired(blockMinValue[dimensionBlocksIndex[0]], filterRangeValues);
     if (isScanRequired) {
       bitSet.set(0);
     }
@@ -64,28 +90,36 @@ public class RowLevelRangeLessThanFiterExecuterImpl extends RowLevelFilterExecut
 
   private boolean isScanRequired(byte[] blockMinValue, byte[][] filterValues) {
     boolean isScanRequired = false;
-    for (int k = 0; k < filterValues.length; k++) {
-      // and filter-min should be positive
-      int minCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMinValue);
+    if (isDimensionPresentInCurrentBlock[0]) {
+      for (int k = 0; k < filterValues.length; k++) {
+        // and filter-min should be positive
+        int minCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMinValue);
 
-      // if any filter applied is not in range of min and max of block
-      // then since its a less than fiter validate whether the block
-      // min range is less  than applied filter member
-      if (minCompare > 0) {
-        isScanRequired = true;
-        break;
+        // since this is a less-than filter, the block needs to be scanned if
+        // the block min is strictly less than any applied filter member,
+        // i.e. some filter value compares greater than the block min
+        if (minCompare > 0) {
+          isScanRequired = true;
+          break;
+        }
       }
+    } else {
+      isScanRequired = isDefaultValuePresentInFilter;
     }
     return isScanRequired;
   }
 
   @Override public BitSetGroup applyFilter(BlocksChunkHolder blockChunkHolder)
       throws FilterUnsupportedException, IOException {
+    // select all rows if the dimension does not exist in the current block
+    if (!isDimensionPresentInCurrentBlock[0]) {
+      return getDefaultBitSetGroup(blockChunkHolder.getDataBlock().nodeSize());
+    }
     if (!dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DICTIONARY)) {
       return super.applyFilter(blockChunkHolder);
     }
     int blockIndex = segmentProperties.getDimensionOrdinalToBlockMapping()
-        .get(dimColEvaluatorInfoList.get(0).getColumnIndex());
+        .get(dimensionBlocksIndex[0]);
     if (null == blockChunkHolder.getDimensionRawDataChunk()[blockIndex]) {
       blockChunkHolder.getDimensionRawDataChunk()[blockIndex] = blockChunkHolder.getDataBlock()
           .getDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
@@ -117,7 +151,9 @@ public class RowLevelRangeLessThanFiterExecuterImpl extends RowLevelFilterExecut
           .getDirectDictionaryGenerator(
               dimColEvaluatorInfoList.get(0).getDimension().getDataType());
       int key = directDictionaryGenerator.generateDirectSurrogateKey(null) + 1;
-      defaultValue = FilterUtil.getMaskKey(key, dimColEvaluatorInfoList.get(0).getDimension(),
+      CarbonDimension currentBlockDimension =
+          segmentProperties.getDimensions().get(dimensionBlocksIndex[0]);
+      defaultValue = FilterUtil.getMaskKey(key, currentBlockDimension,
           this.segmentProperties.getDimensionKeyGenerator());
     }
     if (dimensionColumnDataChunk.isExplicitSorted()
@@ -272,14 +308,15 @@ public class RowLevelRangeLessThanFiterExecuterImpl extends RowLevelFilterExecut
   }
 
   @Override public void readBlocks(BlocksChunkHolder blockChunkHolder) throws IOException {
-    if (!dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DICTIONARY)) {
-      super.readBlocks(blockChunkHolder);
-    }
-    int blockIndex = segmentProperties.getDimensionOrdinalToBlockMapping()
-        .get(dimColEvaluatorInfoList.get(0).getColumnIndex());
-    if (null == blockChunkHolder.getDimensionRawDataChunk()[blockIndex]) {
-      blockChunkHolder.getDimensionRawDataChunk()[blockIndex] = blockChunkHolder.getDataBlock()
-          .getDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
+    if (isDimensionPresentInCurrentBlock[0]) {
+      if (!dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DICTIONARY)) {
+        super.readBlocks(blockChunkHolder);
+      }
+      int blockIndex = dimensionBlocksIndex[0];
+      if (null == blockChunkHolder.getDimensionRawDataChunk()[blockIndex]) {
+        blockChunkHolder.getDimensionRawDataChunk()[blockIndex] = blockChunkHolder.getDataBlock()
+            .getDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
+      }
     }
   }
 }
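
Note (illustrative, not part of the patch): the three range executers above apply the same default-value rule and differ only in the comparison direction. A hypothetical helper summarizing the added checks, where cmp is compareTo(filterValue, defaultValue):

enum RangeOp { GREATER_THAN_EQUAL, LESS_THAN_EQUAL, LESS_THAN }

static boolean defaultValueMatches(RangeOp op, int cmp) {
  switch (op) {
    case GREATER_THAN_EQUAL:
      return cmp <= 0;  // some filter value <= default value
    case LESS_THAN_EQUAL:
      return cmp >= 0;  // some filter value >= default value
    case LESS_THAN:
      return cmp > 0;   // some filter value > default value
    default:
      return false;
  }
}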

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
index 549dcfd..69560c3 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
@@ -185,12 +185,12 @@ public class ConditionalFilterResolverImpl implements FilterResolverIntf {
   /**
    * method will calculates the start key based on the filter surrogates
    */
-  public void getStartKey(long[] startKey,
+  public void getStartKey(SegmentProperties segmentProperties, long[] startKey,
       SortedMap<Integer, byte[]> setOfStartKeyByteArray, List<long[]> startKeyList) {
     FilterUtil.getStartKey(dimColResolvedFilterInfo.getDimensionResolvedFilterInstance(),
-        startKey, startKeyList);
+        segmentProperties, startKey, startKeyList);
     FilterUtil.getStartKeyForNoDictionaryDimension(dimColResolvedFilterInfo,
-        setOfStartKeyByteArray);
+        segmentProperties, setOfStartKeyByteArray);
   }
 
   /**
@@ -200,7 +200,7 @@ public class ConditionalFilterResolverImpl implements FilterResolverIntf {
       SortedMap<Integer, byte[]> setOfEndKeyByteArray, List<long[]> endKeyList) {
     FilterUtil.getEndKey(dimColResolvedFilterInfo.getDimensionResolvedFilterInstance(), endKeys,
         segmentProperties, endKeyList);
-    FilterUtil.getEndKeyForNoDictionaryDimension(dimColResolvedFilterInfo,
+    FilterUtil.getEndKeyForNoDictionaryDimension(dimColResolvedFilterInfo, segmentProperties,
         setOfEndKeyByteArray);
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/FilterResolverIntf.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/FilterResolverIntf.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/FilterResolverIntf.java
index 72237cb..746b96d 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/FilterResolverIntf.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/FilterResolverIntf.java
@@ -68,11 +68,13 @@ public interface FilterResolverIntf extends Serializable {
   /**
    * API will get the start key based on the filter applied based on the key generator
    *
+   * @param segmentProperties
    * @param startKey
    * @param setOfStartKeyByteArray
+   * @param startKeyList
    */
-  void getStartKey(long[] startKey, SortedMap<Integer, byte[]> setOfStartKeyByteArray,
-      List<long[]> startKeyList);
+  void getStartKey(SegmentProperties segmentProperties, long[] startKey,
+      SortedMap<Integer, byte[]> setOfStartKeyByteArray, List<long[]> startKeyList);
 
   /**
    * API will read the end key based on the max surrogate of

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/LogicalFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/LogicalFilterResolverImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/LogicalFilterResolverImpl.java
index 5b3221a..db35823 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/LogicalFilterResolverImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/LogicalFilterResolverImpl.java
@@ -84,9 +84,8 @@ public class LogicalFilterResolverImpl implements FilterResolverIntf {
     return null;
   }
 
-  @Override
-  public void getStartKey(long[] startKey, SortedMap<Integer, byte[]> setOfStartKeyByteArray,
-      List<long[]> startKeyList) {
+  @Override public void getStartKey(SegmentProperties segmentProperties, long[] startKey,
+      SortedMap<Integer, byte[]> setOfStartKeyByteArray, List<long[]> startKeyList) {
 
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelFilterResolverImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelFilterResolverImpl.java
index 0e2ed22..acb3f8b 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelFilterResolverImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelFilterResolverImpl.java
@@ -68,6 +68,7 @@ public class RowLevelFilterResolverImpl extends ConditionalFilterResolverImpl {
           dimColEvaluatorInfoList.add(dimColumnEvaluatorInfo);
         } else {
           msrColumnEvalutorInfo = new MeasureColumnResolvedFilterInfo();
+          msrColumnEvalutorInfo.setCarbonColumn(columnExpression.getCarbonColumn());
           msrColumnEvalutorInfo.setRowIndex(index++);
           msrColumnEvalutorInfo
               .setColumnIndex(columnExpression.getCarbonColumn().getOrdinal());

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
index af5568e..8a91499 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
@@ -29,6 +29,7 @@ import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionary
 import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.scan.expression.ColumnExpression;
 import org.apache.carbondata.core.scan.expression.Expression;
 import org.apache.carbondata.core.scan.expression.ExpressionResult;
@@ -42,6 +43,7 @@ import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
 import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
 import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
 import org.apache.carbondata.core.util.ByteUtil;
+import org.apache.carbondata.core.util.CarbonUtil;
 
 public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverImpl {
 
@@ -78,9 +80,13 @@ public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverIm
       return noDictFilterValuesList.toArray((new byte[noDictFilterValuesList.size()][]));
     } else if (null != dimColEvaluatorInfoList.get(0).getFilterValues() && dimColEvaluatorInfoList
         .get(0).getDimension().hasEncoding(Encoding.DIRECT_DICTIONARY)) {
-      return FilterUtil.getKeyArray(this.dimColEvaluatorInfoList.get(0).getFilterValues(),
-          this.dimColEvaluatorInfoList.get(0).getDimension(),
-          segmentProperties);
+      CarbonDimension dimensionFromCurrentBlock = CarbonUtil
+          .getDimensionFromCurrentBlock(segmentProperties.getDimensions(),
+              this.dimColEvaluatorInfoList.get(0).getDimension());
+      if (null != dimensionFromCurrentBlock) {
+        return FilterUtil.getKeyArray(this.dimColEvaluatorInfoList.get(0).getFilterValues(),
+            dimensionFromCurrentBlock, segmentProperties);
+      }
     }
     return null;
 
@@ -91,15 +97,16 @@ public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverIm
    *
    * @return start IndexKey
    */
-  public void getStartKey(long[] startKey, SortedMap<Integer, byte[]> noDictStartKeys,
-      List<long[]> startKeyList) {
+  public void getStartKey(SegmentProperties segmentProperties, long[] startKey,
+      SortedMap<Integer, byte[]> noDictStartKeys, List<long[]> startKeyList) {
     switch (exp.getFilterExpressionType()) {
       case GREATERTHAN:
       case GREATERTHAN_EQUALTO:
         FilterUtil.getStartKey(dimColEvaluatorInfoList.get(0).getDimensionResolvedFilterInstance(),
-            startKey, startKeyList);
+            segmentProperties, startKey, startKeyList);
         FilterUtil
-            .getStartKeyForNoDictionaryDimension(dimColEvaluatorInfoList.get(0), noDictStartKeys);
+            .getStartKeyForNoDictionaryDimension(dimColEvaluatorInfoList.get(0), segmentProperties,
+                noDictStartKeys);
         break;
       default:
         //do nothing
@@ -119,7 +126,9 @@ public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverIm
         FilterUtil
             .getEndKey(dimColEvaluatorInfoList.get(0).getDimensionResolvedFilterInstance(), endKeys,
                 segmentProperties, endKeyList);
-        FilterUtil.getEndKeyForNoDictionaryDimension(dimColEvaluatorInfoList.get(0), noDicEndKeys);
+        FilterUtil
+            .getEndKeyForNoDictionaryDimension(dimColEvaluatorInfoList.get(0), segmentProperties,
+                noDicEndKeys);
         break;
       default:
         //do nothing
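
Note (hypothetical, not part of the patch): getStartKey now receives the SegmentProperties of the block or segment being pruned, presumably so that dictionary and no-dictionary start keys are built against the dimensions actually present in that block rather than the table-level schema. A sketch of what a caller might look like with the new signature; the method name, variable names and array sizing are illustrative only, and java.util plus the carbondata types shown in the diff are assumed to be imported:

private void computeStartKeys(FilterResolverIntf filterResolver,
    SegmentProperties segmentProperties) {
  // sizing is illustrative; a real caller would size this from the key generator
  long[] startKey = new long[segmentProperties.getDimensions().size()];
  SortedMap<Integer, byte[]> noDictStartKeys = new TreeMap<>();
  List<long[]> startKeyList = new ArrayList<>();
  // signature introduced by this commit
  filterResolver.getStartKey(segmentProperties, startKey, noDictStartKeys, startKeyList);
}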

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
index abd4652..fee15a4 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
@@ -48,8 +48,6 @@ public class DimColumnResolvedFilterInfo implements Serializable {
 
   private boolean isDimensionExistsInCurrentSilce = true;
 
-  private String defaultValue;
-
   private CarbonDimension dimension;
 
   /**
@@ -119,14 +117,6 @@ public class DimColumnResolvedFilterInfo implements Serializable {
     this.isDimensionExistsInCurrentSilce = isDimensionExistsInCurrentSilce;
   }
 
-  public String getDefaultValue() {
-    return defaultValue;
-  }
-
-  public void setDefaultValue(String defaultValue) {
-    this.defaultValue = defaultValue;
-  }
-
   public void populateFilterInfoBasedOnColumnType(ResolvedFilterInfoVisitorIntf visitor,
       FilterResolverMetadata metadata) throws FilterUnsupportedException, IOException {
     if (null != visitor) {
@@ -138,4 +128,18 @@ public class DimColumnResolvedFilterInfo implements Serializable {
     }
 
   }
+
+  /**
+   * This method will clone the current object
+   *
+   * @return
+   */
+  public DimColumnResolvedFilterInfo getCopyObject() {
+    DimColumnResolvedFilterInfo dimColumnResolvedFilterInfo = new DimColumnResolvedFilterInfo();
+    dimColumnResolvedFilterInfo.resolvedFilterValueObj = this.resolvedFilterValueObj;
+    dimColumnResolvedFilterInfo.rowIndex = this.rowIndex;
+    dimColumnResolvedFilterInfo.dimensionResolvedFilter = this.dimensionResolvedFilter;
+    dimColumnResolvedFilterInfo.isDimensionExistsInCurrentSilce = isDimensionExistsInCurrentSilce;
+    return dimColumnResolvedFilterInfo;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/MeasureColumnResolvedFilterInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/MeasureColumnResolvedFilterInfo.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/MeasureColumnResolvedFilterInfo.java
index 3d69553..4c50825 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/MeasureColumnResolvedFilterInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/MeasureColumnResolvedFilterInfo.java
@@ -19,6 +19,8 @@ package org.apache.carbondata.core.scan.filter.resolver.resolverinfo;
 
 import java.io.Serializable;
 
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
+
 public class MeasureColumnResolvedFilterInfo implements Serializable {
   /**
    *
@@ -31,6 +33,8 @@ public class MeasureColumnResolvedFilterInfo implements Serializable {
 
   private Object defaultValue;
 
+  private CarbonColumn carbonColumn;
+
   private org.apache.carbondata.core.metadata.datatype.DataType type;
 
   public int getColumnIndex() {
@@ -65,4 +69,11 @@ public class MeasureColumnResolvedFilterInfo implements Serializable {
     return defaultValue;
   }
 
+  public CarbonColumn getCarbonColumn() {
+    return carbonColumn;
+  }
+
+  public void setCarbonColumn(CarbonColumn carbonColumn) {
+    this.carbonColumn = carbonColumn;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java
index bc1def7..c51133d 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java
@@ -16,9 +16,11 @@
  */
 package org.apache.carbondata.core.scan.filter.resolver.resolverinfo.visitor;
 
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.scan.expression.ExpressionResult;
 import org.apache.carbondata.core.scan.expression.exception.FilterIllegalMemberException;
 import org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedException;
 import org.apache.carbondata.core.scan.filter.DimColumnFilterInfo;
@@ -45,7 +47,15 @@ public class NoDictionaryTypeVisitor implements ResolvedFilterInfoVisitorIntf {
     DimColumnFilterInfo resolvedFilterObject = null;
     List<String> evaluateResultListFinal;
     try {
-      evaluateResultListFinal = metadata.getExpression().evaluate(null).getListAsString();
+      ExpressionResult result = metadata.getExpression().evaluate(null);
+      Boolean booleanResult = result.getBoolean();
+      // handling for 'is null' filter scenarios
+      if (null != booleanResult && booleanResult == metadata.isIncludeFilter()) {
+        evaluateResultListFinal = new ArrayList<>(1);
+        evaluateResultListFinal.add(CarbonCommonConstants.MEMBER_DEFAULT_VAL);
+      } else {
+        evaluateResultListFinal = result.getListAsString();
+      }
       // Adding default  null member inorder to not display the same while
       // displaying the report as per hive compatibility.
       if (!metadata.isIncludeFilter() && !evaluateResultListFinal
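
Note (illustrative, not part of the patch): the visitor change above handles expressions such as IS NULL / IS NOT NULL on a no-dictionary column, where evaluating the expression against a null row yields a Boolean instead of a member list. When that Boolean matches the include/exclude nature of the filter, the default "null member" string becomes the single filter value. A plain-Java restatement of that decision (method and parameter names are made up; java.util.List and ArrayList are assumed to be imported):

static List<String> resolveNoDictionaryFilterValues(Boolean booleanResult,
    boolean isIncludeFilter, List<String> listResult, String memberDefaultValue) {
  // e.g. an IS NULL include filter evaluated on a null row yields true, which
  // equals isIncludeFilter: keep only the default null member as the filter value
  if (booleanResult != null && booleanResult == isIncludeFilter) {
    List<String> values = new ArrayList<>(1);
    values.add(memberDefaultValue);
    return values;
  }
  // otherwise fall back to the normal member-list evaluation result
  return listResult;
}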

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java b/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java
index 210ee11..9162ff8 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java
@@ -172,7 +172,7 @@ public class QueryModel implements Serializable {
 
   private static CarbonMeasure getCarbonMetadataMeasure(String name, List<CarbonMeasure> measures) {
     for (CarbonMeasure measure : measures) {
-      if (measure.getColName().equalsIgnoreCase(name)) {
+      if (!measure.isInvisible() && measure.getColName().equalsIgnoreCase(name)) {
         return measure;
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/scan/processor/AbstractDataBlockIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/processor/AbstractDataBlockIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/processor/AbstractDataBlockIterator.java
index f0cebf4..e4314da 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/processor/AbstractDataBlockIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/processor/AbstractDataBlockIterator.java
@@ -28,10 +28,8 @@ import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datastore.DataRefNode;
 import org.apache.carbondata.core.datastore.FileHolder;
+import org.apache.carbondata.core.scan.collector.ResultCollectorFactory;
 import org.apache.carbondata.core.scan.collector.ScannedResultCollector;
-import org.apache.carbondata.core.scan.collector.impl.DictionaryBasedResultCollector;
-import org.apache.carbondata.core.scan.collector.impl.DictionaryBasedVectorResultCollector;
-import org.apache.carbondata.core.scan.collector.impl.RawBasedResultCollector;
 import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;
 import org.apache.carbondata.core.scan.result.AbstractScannedResult;
 import org.apache.carbondata.core.scan.result.vector.CarbonColumnarBatch;
@@ -98,19 +96,8 @@ public abstract class AbstractDataBlockIterator extends CarbonIterator<List<Obje
     } else {
       blockletScanner = new NonFilterScanner(blockExecutionInfo, queryStatisticsModel);
     }
-    if (blockExecutionInfo.isRawRecordDetailQuery()) {
-      LOGGER.info("Row based raw collector is used to scan and collect the data");
-      this.scannerResultAggregator =
-          new RawBasedResultCollector(blockExecutionInfo);
-    } else if (blockExecutionInfo.isVectorBatchCollector()) {
-      LOGGER.info("Vector based dictionary collector is used to scan and collect the data");
-      this.scannerResultAggregator =
-          new DictionaryBasedVectorResultCollector(blockExecutionInfo);
-    } else {
-      LOGGER.info("Row based dictionary collector is used to scan and collect the data");
-      this.scannerResultAggregator =
-          new DictionaryBasedResultCollector(blockExecutionInfo);
-    }
+    this.scannerResultAggregator =
+        ResultCollectorFactory.getScannedResultCollector(blockExecutionInfo);
     this.batchSize = batchSize;
     this.executorService = executorService;
     this.nextBlock = new AtomicBoolean(false);
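
Note (assumed shape, not shown in this hunk): the three-way collector selection is moved behind ResultCollectorFactory, whose source is introduced elsewhere in this commit. The sketch below only reconstructs a plausible shape from the branches removed above and may differ from the actual class:

import org.apache.carbondata.core.scan.collector.ScannedResultCollector;
import org.apache.carbondata.core.scan.collector.impl.DictionaryBasedResultCollector;
import org.apache.carbondata.core.scan.collector.impl.DictionaryBasedVectorResultCollector;
import org.apache.carbondata.core.scan.collector.impl.RawBasedResultCollector;
import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;

public final class ResultCollectorFactory {

  private ResultCollectorFactory() {
  }

  public static ScannedResultCollector getScannedResultCollector(
      BlockExecutionInfo blockExecutionInfo) {
    if (blockExecutionInfo.isRawRecordDetailQuery()) {
      // raw, row based collector
      return new RawBasedResultCollector(blockExecutionInfo);
    } else if (blockExecutionInfo.isVectorBatchCollector()) {
      // columnar vector batch collector
      return new DictionaryBasedVectorResultCollector(blockExecutionInfo);
    } else {
      // default row based dictionary collector
      return new DictionaryBasedResultCollector(blockExecutionInfo);
    }
  }
}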

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 36e75f3..33d9aab 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -39,6 +39,9 @@ import java.util.Set;
 
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.core.cache.Cache;
+import org.apache.carbondata.core.cache.CacheProvider;
+import org.apache.carbondata.core.cache.CacheType;
 import org.apache.carbondata.core.cache.dictionary.Dictionary;
 import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -53,13 +56,18 @@ import org.apache.carbondata.core.datastore.columnar.UnBlockIndexer;
 import org.apache.carbondata.core.datastore.compression.MeasureMetaDataModel;
 import org.apache.carbondata.core.datastore.compression.WriterCompressModel;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastore.filesystem.CarbonFileFilter;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.keygenerator.mdkey.NumberCompressor;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
+import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
+import org.apache.carbondata.core.metadata.ColumnIdentifier;
 import org.apache.carbondata.core.metadata.ValueEncoderMeta;
 import org.apache.carbondata.core.metadata.blocklet.DataFileFooter;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
+import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
@@ -1060,20 +1068,22 @@ public final class CarbonUtil {
     List<Boolean> isDictionaryDimensions = new ArrayList<Boolean>();
     Set<Integer> processedColumnGroup = new HashSet<Integer>();
     for (CarbonDimension carbonDimension : tableDimensionList) {
-      List<CarbonDimension> childs = carbonDimension.getListOfChildDimensions();
-      //assuming complex dimensions will always be atlast
-      if (null != childs && childs.size() > 0) {
-        break;
-      }
-      if (carbonDimension.isColumnar() && hasEncoding(carbonDimension.getEncoder(),
-          Encoding.DICTIONARY)) {
-        isDictionaryDimensions.add(true);
-      } else if (!carbonDimension.isColumnar()) {
-        if (processedColumnGroup.add(carbonDimension.columnGroupId())) {
+      if (!carbonDimension.isInvisible()) {
+        List<CarbonDimension> childs = carbonDimension.getListOfChildDimensions();
+        // assuming complex dimensions will always come last
+        if (null != childs && childs.size() > 0) {
+          break;
+        }
+        if (carbonDimension.isColumnar() && hasEncoding(carbonDimension.getEncoder(),
+            Encoding.DICTIONARY)) {
           isDictionaryDimensions.add(true);
+        } else if (!carbonDimension.isColumnar()) {
+          if (processedColumnGroup.add(carbonDimension.columnGroupId())) {
+            isDictionaryDimensions.add(true);
+          }
+        } else {
+          isDictionaryDimensions.add(false);
         }
-      } else {
-        isDictionaryDimensions.add(false);
       }
     }
     boolean[] primitive = ArrayUtils
@@ -1147,7 +1157,7 @@ public final class CarbonUtil {
   public static CarbonDimension findDimension(List<CarbonDimension> dimensions, String carbonDim) {
     CarbonDimension findDim = null;
     for (CarbonDimension dimension : dimensions) {
-      if (dimension.getColName().equalsIgnoreCase(carbonDim)) {
+      if (!dimension.isInvisible() && dimension.getColName().equalsIgnoreCase(carbonDim)) {
         findDim = dimension;
         break;
       }
@@ -1156,6 +1166,44 @@ public final class CarbonUtil {
   }
 
   /**
+   * This method will search for a given dimension in the current block dimensions list
+   *
+   * @param blockDimensions
+   * @param dimensionToBeSearched
+   * @return
+   */
+  public static CarbonDimension getDimensionFromCurrentBlock(
+      List<CarbonDimension> blockDimensions, CarbonDimension dimensionToBeSearched) {
+    CarbonDimension currentBlockDimension = null;
+    for (CarbonDimension blockDimension : blockDimensions) {
+      if (dimensionToBeSearched.getColumnId().equals(blockDimension.getColumnId())) {
+        currentBlockDimension = blockDimension;
+        break;
+      }
+    }
+    return currentBlockDimension;
+  }
+
+  /**
+   * This method will search for a given measure in the current block measures list
+   *
+   * @param blockMeasures
+   * @param columnId
+   * @return
+   */
+  public static CarbonMeasure getMeasureFromCurrentBlock(List<CarbonMeasure> blockMeasures,
+      String columnId) {
+    CarbonMeasure currentBlockMeasure = null;
+    for (CarbonMeasure blockMeasure : blockMeasures) {
+      if (columnId.equals(blockMeasure.getColumnId())) {
+        currentBlockMeasure = blockMeasure;
+        break;
+      }
+    }
+    return currentBlockMeasure;
+  }
+
+  /**
    * This method will be used to clear the dictionary cache after its usage is complete
    * so that if memory threshold is reached it can evicted from LRU cache
    *
@@ -1195,7 +1243,9 @@ public final class CarbonUtil {
     List<ColumnSchema> wrapperColumnSchemaList = new ArrayList<ColumnSchema>();
     fillCollumnSchemaListForComplexDims(carbonDimensionsList, wrapperColumnSchemaList);
     for (CarbonMeasure carbonMeasure : carbonMeasureList) {
-      wrapperColumnSchemaList.add(carbonMeasure.getColumnSchema());
+      if (!carbonMeasure.isInvisible()) {
+        wrapperColumnSchemaList.add(carbonMeasure.getColumnSchema());
+      }
     }
     return wrapperColumnSchemaList;
   }
@@ -1203,10 +1253,12 @@ public final class CarbonUtil {
   private static void fillCollumnSchemaListForComplexDims(
       List<CarbonDimension> carbonDimensionsList, List<ColumnSchema> wrapperColumnSchemaList) {
     for (CarbonDimension carbonDimension : carbonDimensionsList) {
-      wrapperColumnSchemaList.add(carbonDimension.getColumnSchema());
-      List<CarbonDimension> childDims = carbonDimension.getListOfChildDimensions();
-      if (null != childDims && childDims.size() > 0) {
-        fillCollumnSchemaListForComplexDims(childDims, wrapperColumnSchemaList);
+      if (!carbonDimension.isInvisible()) {
+        wrapperColumnSchemaList.add(carbonDimension.getColumnSchema());
+        List<CarbonDimension> childDims = carbonDimension.getListOfChildDimensions();
+        if (null != childDims && childDims.size() > 0) {
+          fillCollumnSchemaListForComplexDims(childDims, wrapperColumnSchemaList);
+        }
       }
     }
   }
@@ -1614,6 +1666,122 @@ public final class CarbonUtil {
   }
 
   /**
+   * This method will delete the dictionary files for the given column IDs and
+   * clear the dictionary cache
+   *
+   * @param dictionaryColumns
+   * @param carbonTable
+   */
+  public static void deleteDictionaryFileAndCache(List<CarbonColumn> dictionaryColumns,
+      CarbonTable carbonTable) {
+    if (!dictionaryColumns.isEmpty()) {
+      CarbonTableIdentifier carbonTableIdentifier = carbonTable.getCarbonTableIdentifier();
+      CarbonTablePath carbonTablePath =
+          CarbonStorePath.getCarbonTablePath(carbonTable.getStorePath(), carbonTableIdentifier);
+      String metadataDirectoryPath = carbonTablePath.getMetadataDirectoryPath();
+      CarbonFile metadataDir = FileFactory
+          .getCarbonFile(metadataDirectoryPath, FileFactory.getFileType(metadataDirectoryPath));
+      for (final CarbonColumn column : dictionaryColumns) {
+        // sort index file is created with dictionary size appended to it. So all the files
+        // with a given column ID need to be listed
+        CarbonFile[] listFiles = metadataDir.listFiles(new CarbonFileFilter() {
+          @Override public boolean accept(CarbonFile path) {
+            if (path.getName().startsWith(column.getColumnId())) {
+              return true;
+            }
+            return false;
+          }
+        });
+        for (CarbonFile file : listFiles) {
+          // try catch is inside for loop because even if one deletion fails, other files
+          // still need to be deleted
+          try {
+            FileFactory.deleteFile(file.getCanonicalPath(),
+                FileFactory.getFileType(file.getCanonicalPath()));
+          } catch (IOException e) {
+            LOGGER.error(
+                "Failed to delete dictionary or sortIndex file for column " + column.getColName()
+                    + " with column ID " + column.getColumnId());
+          }
+        }
+        // remove dictionary cache
+        removeDictionaryColumnFromCache(carbonTable, column.getColumnId());
+      }
+    }
+  }
+
+  /**
+   * This method will remove dictionary cache from driver for both reverse and forward dictionary
+   *
+   * @param carbonTable
+   * @param columnId
+   */
+  public static void removeDictionaryColumnFromCache(CarbonTable carbonTable, String columnId) {
+    Cache<DictionaryColumnUniqueIdentifier, Dictionary> dictCache = CacheProvider.getInstance()
+        .createCache(CacheType.REVERSE_DICTIONARY, carbonTable.getStorePath());
+    DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier =
+        new DictionaryColumnUniqueIdentifier(carbonTable.getCarbonTableIdentifier(),
+            new ColumnIdentifier(columnId, null, null));
+    dictCache.invalidate(dictionaryColumnUniqueIdentifier);
+    dictCache = CacheProvider.getInstance()
+        .createCache(CacheType.FORWARD_DICTIONARY, carbonTable.getStorePath());
+    dictCache.invalidate(dictionaryColumnUniqueIdentifier);
+  }
+
+  /**
+   * This method will prepare the cardinality of dictionary columns based on the latest schema
+   *
+   * @param lookUpDimensions         dimensions list where a given dimension will be searched to get
+   *                                 the index for getting the cardinality of that column
+   * @param masterSchemaDimensions   latest schema dimensions
+   * @param mappingColumnCardinality cardinality of columns in the given carbondata file
+   * @return
+   */
+  public static int[] getUpdatedColumnCardinalities(List<ColumnSchema> lookUpDimensions,
+      List<CarbonDimension> masterSchemaDimensions, int[] mappingColumnCardinality) {
+    List<Integer> updatedDictionaryColumnCardinalities =
+        new ArrayList<>(masterSchemaDimensions.size());
+    for (CarbonDimension masterDimension : masterSchemaDimensions) {
+      // dimension should be visible and should be a dictionary column
+      if (!masterDimension.isInvisible() && hasEncoding(masterDimension.getEncoder(),
+          Encoding.DICTIONARY)) {
+        int destinationDimensionIndex = 0;
+        boolean isDimensionFoundInDestinationSegment = false;
+        for (ColumnSchema destinationDimension : lookUpDimensions) {
+          if (masterDimension.getColumnId().equals(destinationDimension.getColumnUniqueId())) {
+            isDimensionFoundInDestinationSegment = true;
+            break;
+          }
+          destinationDimensionIndex++;
+        }
+        if (!isDimensionFoundInDestinationSegment) {
+          if (hasEncoding(masterDimension.getEncoder(), Encoding.DIRECT_DICTIONARY)) {
+            updatedDictionaryColumnCardinalities.add(Integer.MAX_VALUE);
+          } else {
+            if (null != masterDimension.getDefaultValue()) {
+              // added +1 because if default value is provided then the cardinality of
+              // column will be 2. 1 for member default value and 1 for the value
+              // provided by the user
+              updatedDictionaryColumnCardinalities
+                  .add(CarbonCommonConstants.DICTIONARY_DEFAULT_CARDINALITY + 1);
+            } else {
+              updatedDictionaryColumnCardinalities
+                  .add(CarbonCommonConstants.DICTIONARY_DEFAULT_CARDINALITY);
+            }
+          }
+        } else {
+          // add the cardinality of the existing column in the schema
+          updatedDictionaryColumnCardinalities
+              .add(mappingColumnCardinality[destinationDimensionIndex]);
+        }
+      }
+    }
+    int[] updatedCardinalities = ArrayUtils.toPrimitive(updatedDictionaryColumnCardinalities
+        .toArray(new Integer[updatedDictionaryColumnCardinalities.size()]));
+    return updatedCardinalities;
+  }
+
+  /**
    * Below method will be used to convert byte data to surrogate key based
    * column value size
    *

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
index 1d983e0..1ba8942 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
@@ -20,6 +20,7 @@ package org.apache.carbondata.core.util;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.math.RoundingMode;
+import java.nio.charset.Charset;
 import java.text.DateFormat;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
@@ -31,9 +32,12 @@ import java.util.Map;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
+import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 
 import org.apache.spark.unsafe.types.UTF8String;
 
@@ -47,19 +51,17 @@ public final class DataTypeUtil {
   private static final Map<String, String> dataTypeDisplayNames;
 
   private static final ThreadLocal<DateFormat> timeStampformatter = new ThreadLocal<DateFormat>() {
-    @Override
-    protected DateFormat initialValue() {
-      return new SimpleDateFormat(
-          CarbonProperties.getInstance().getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+    @Override protected DateFormat initialValue() {
+      return new SimpleDateFormat(CarbonProperties.getInstance()
+          .getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
               CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
     }
   };
 
   private static final ThreadLocal<DateFormat> dateformatter = new ThreadLocal<DateFormat>() {
-    @Override
-    protected DateFormat initialValue() {
-      return new SimpleDateFormat(
-          CarbonProperties.getInstance().getProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
+    @Override protected DateFormat initialValue() {
+      return new SimpleDateFormat(CarbonProperties.getInstance()
+          .getProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
               CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT));
     }
   };
@@ -198,6 +200,7 @@ public final class DataTypeUtil {
     BigInteger sig = new BigInteger(unscale);
     return new BigDecimal(sig, scale);
   }
+
   /**
    * returns the SqlStatement.Type of corresponding string value
    *
@@ -245,6 +248,36 @@ public final class DataTypeUtil {
   }
 
   /**
+   * This method will convert the data according to its data type, with
+   * special handling for decimal data types
+   *
+   * @param dataInBytes data stored as a byte array
+   * @param dimension   dimension whose data type drives the conversion
+   * @return converted value, or null for carbon null values
+   */
+  public static Object getDataBasedOnDataType(byte[] dataInBytes, CarbonDimension dimension) {
+    if (null == dataInBytes || Arrays
+        .equals(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, dataInBytes)) {
+      return null;
+    }
+    switch (dimension.getDataType()) {
+      case DECIMAL:
+        String data = new String(dataInBytes, CarbonCommonConstants.DEFAULT_CHARSET_CLASS);
+        if (data.isEmpty()) {
+          return null;
+        }
+        java.math.BigDecimal javaDecVal = new java.math.BigDecimal(data);
+        if (dimension.getColumnSchema().getScale() > javaDecVal.scale()) {
+          javaDecVal =
+              javaDecVal.setScale(dimension.getColumnSchema().getScale(), RoundingMode.HALF_UP);
+        }
+        return org.apache.spark.sql.types.Decimal.apply(javaDecVal);
+      default:
+        return getDataBasedOnDataType(dataInBytes, dimension.getDataType());
+    }
+  }
+
+  /**
    * Below method will be used to convert the data passed to its actual data
    * type
    *
@@ -538,6 +571,7 @@ public final class DataTypeUtil {
     }
     return null;
   }
+
   /**
    * This method will compare double values it will preserve
    * the -0.0 and 0.0 equality as per == ,also preserve NaN equality check as per
@@ -555,4 +589,99 @@ public final class DataTypeUtil {
     }
     return 1;
   }
+
+  /**
+   * Below method will be used to convert the data into byte[]
+   *
+   * @param data           data to convert
+   * @param actualDataType actual data type
+   * @return actual data in byte[], or null if the data cannot be parsed
+   */
+  public static byte[] convertDataToBytesBasedOnDataType(String data, DataType actualDataType) {
+    if (null == data) {
+      return null;
+    } else if (CarbonCommonConstants.MEMBER_DEFAULT_VAL.equals(data)) {
+      LOGGER.error("Default value should not be carbon specific null value : " + data);
+      return null;
+    }
+    try {
+      long parsedIntVal = 0;
+      switch (actualDataType) {
+        case INT:
+          parsedIntVal = (long) Integer.parseInt(data);
+          return String.valueOf(parsedIntVal)
+              .getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
+        case SHORT:
+          parsedIntVal = (long) Short.parseShort(data);
+          return String.valueOf(parsedIntVal)
+              .getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
+        case DOUBLE:
+          return String.valueOf(Double.parseDouble(data))
+              .getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
+        case LONG:
+          return String.valueOf(Long.parseLong(data))
+              .getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
+        case DATE:
+        case TIMESTAMP:
+          DirectDictionaryGenerator directDictionaryGenerator =
+              DirectDictionaryKeyGeneratorFactory.getDirectDictionaryGenerator(actualDataType);
+          int value = directDictionaryGenerator.generateDirectSurrogateKey(data);
+          return String.valueOf(value)
+              .getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
+        case DECIMAL:
+          java.math.BigDecimal javaDecVal = new java.math.BigDecimal(data);
+          return bigDecimalToByte(javaDecVal);
+        default:
+          return UTF8String.fromString(data).getBytes();
+      }
+    } catch (NumberFormatException ex) {
+      LOGGER.error("Problem while converting data type" + data);
+      return null;
+    }
+  }
+
+  /**
+   * This method will parse a given string value according to its data type
+   *
+   * @param value        value to parse
+   * @param columnSchema column schema to get the data type, and the precision and scale in
+   *                     case of decimal data type
+   * @return parsed value as a string, or null if the value cannot be parsed
+   */
+  public static String normalizeColumnValueForItsDataType(String value, ColumnSchema columnSchema) {
+    try {
+      Object parsedValue = null;
+      switch (columnSchema.getDataType()) {
+        case DECIMAL:
+          return parseStringToBigDecimal(value, columnSchema);
+        case SHORT:
+        case INT:
+        case LONG:
+          parsedValue = normalizeIntAndLongValues(value, columnSchema.getDataType());
+          break;
+        case DOUBLE:
+          parsedValue = Double.parseDouble(value);
+          break;
+        default:
+          return value;
+      }
+      if (null != parsedValue) {
+        return value;
+      }
+      return null;
+    } catch (Exception e) {
+      return null;
+    }
+  }
+
+  private static String parseStringToBigDecimal(String value, ColumnSchema columnSchema) {
+    BigDecimal bigDecimal = new BigDecimal(value)
+        .setScale(columnSchema.getScale(), RoundingMode.HALF_UP);
+    BigDecimal normalizedValue =
+        normalizeDecimalValue(bigDecimal, columnSchema.getPrecision());
+    if (null != normalizedValue) {
+      return normalizedValue.toString();
+    }
+    return null;
+  }
 }
\ No newline at end of file

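The decimal path in normalizeColumnValueForItsDataType and parseStringToBigDecimal can be exercised on its own. The sketch below mirrors that logic under the assumption that normalizeDecimalValue, as its name suggests, rejects values whose precision exceeds the column's allowed precision; treat it as an illustration rather than the library code.

import java.math.BigDecimal;
import java.math.RoundingMode;

// Standalone sketch of the decimal normalization path: round the value to the column's
// scale and drop it when the resulting precision exceeds the column's precision.
final class DecimalNormalizeSketch {

  static String normalizeDecimal(String value, int precision, int scale) {
    try {
      BigDecimal rounded = new BigDecimal(value).setScale(scale, RoundingMode.HALF_UP);
      // precision() counts all digits; values that no longer fit the column become null
      return rounded.precision() > precision ? null : rounded.toString();
    } catch (NumberFormatException e) {
      return null;                                   // non-numeric input normalizes to null
    }
  }

  public static void main(String[] args) {
    System.out.println(normalizeDecimal("12.345", 5, 2));   // prints 12.35 (rounded up)
    System.out.println(normalizeDecimal("12345.6", 5, 2));  // prints null (precision overflow)
  }
}
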
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java b/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
index 1827fb0..eae99f2 100644
--- a/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
@@ -370,7 +370,7 @@ public class CarbonDictionaryWriterImpl implements CarbonDictionaryWriter {
   /**
    * close dictionary thrift writer
    */
-  private void closeThriftWriter() {
+  private void closeThriftWriter() throws IOException {
     if (null != dictionaryThriftWriter) {
       dictionaryThriftWriter.close();
     }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileWriter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileWriter.java b/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileWriter.java
index 5238894..d17bcf7 100644
--- a/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileWriter.java
+++ b/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileWriter.java
@@ -55,7 +55,7 @@ public class CarbonIndexFileWriter {
   /**
    * Below method will be used to close the thrift object
    */
-  public void close() {
+  public void close() throws IOException {
     thriftWriter.close();
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/main/java/org/apache/carbondata/core/writer/ThriftWriter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/writer/ThriftWriter.java b/core/src/main/java/org/apache/carbondata/core/writer/ThriftWriter.java
index 92bae42..9bf549d 100644
--- a/core/src/main/java/org/apache/carbondata/core/writer/ThriftWriter.java
+++ b/core/src/main/java/org/apache/carbondata/core/writer/ThriftWriter.java
@@ -21,6 +21,9 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 
 import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.fileoperations.AtomicFileOperations;
+import org.apache.carbondata.core.fileoperations.AtomicFileOperationsImpl;
+import org.apache.carbondata.core.fileoperations.FileWriteOperation;
 import org.apache.carbondata.core.util.CarbonUtil;
 
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -56,6 +59,11 @@ public class ThriftWriter {
   private TProtocol binaryOut;
 
   /**
+   * Writer used for performing atomic file operations
+   */
+  private AtomicFileOperations atomicFileOperationsWriter;
+
+  /**
    * flag to append to existing file
    */
   private boolean append;
@@ -78,6 +86,19 @@ public class ThriftWriter {
   }
 
   /**
+   * Open the file for writing through the atomic file operations writer
+   *
+   * @param fileWriteOperation type of file write operation to perform
+   * @throws IOException if the output stream cannot be opened
+   */
+  public void open(FileWriteOperation fileWriteOperation) throws IOException {
+    FileFactory.FileType fileType = FileFactory.getFileType(fileName);
+    atomicFileOperationsWriter = new AtomicFileOperationsImpl(fileName, fileType);
+    dataOutputStream = atomicFileOperationsWriter.openForWrite(fileWriteOperation);
+    binaryOut = new TCompactProtocol(new TIOStreamTransport(dataOutputStream));
+  }
+
+  /**
    * This will check whether stream and binary out is open or not.
    * @return
    */
@@ -113,11 +134,25 @@ public class ThriftWriter {
   /**
    * Close the file stream.
    */
-  public void close() {
+  public void close() throws IOException {
+    closeAtomicFileWriter();
     CarbonUtil.closeStreams(dataOutputStream);
   }
 
   /**
+   * This method will close the atomic file operations writer
+   *
+   * @throws IOException
+   */
+  private void closeAtomicFileWriter() throws IOException {
+    if (null != atomicFileOperationsWriter) {
+      atomicFileOperationsWriter.close();
+      // set output stream to null as atomic writer will close the data output stream internally
+      dataOutputStream = null;
+    }
+  }
+
+  /**
    * Flush data to HDFS file
    */
   public void sync() throws IOException {

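The new open(FileWriteOperation) overload routes writes through AtomicFileOperationsImpl, which is also why close() now declares IOException: finalizing the write can fail. A minimal standalone sketch of the usual write-to-temp-then-rename pattern behind such a writer follows; it is not CarbonData's implementation, just the general shape.

import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

// Minimal sketch of a write-to-temp-then-rename writer; the rename is deferred to close(),
// so close() is the step that can fail with an IOException.
final class AtomicWriteSketch {

  private final Path target;
  private final Path temp;
  private OutputStream out;

  AtomicWriteSketch(String targetFile) {
    this.target = Paths.get(targetFile);
    this.temp = Paths.get(targetFile + ".tmp");
  }

  OutputStream openForWrite() throws IOException {
    out = Files.newOutputStream(temp);
    return out;
  }

  void close() throws IOException {
    out.close();
    // publish the fully written file in one step; readers never observe a partial file
    Files.move(temp, target, StandardCopyOption.REPLACE_EXISTING);
  }
}
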
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/test/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollectorTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollectorTest.java b/core/src/test/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollectorTest.java
index 66f4830..8f902b1 100644
--- a/core/src/test/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollectorTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollectorTest.java
@@ -26,13 +26,13 @@ public class DictionaryBasedResultCollectorTest {
 //    blockExecutionInfo = new BlockExecutionInfo();
 //    KeyStructureInfo keyStructureInfo = new KeyStructureInfo();
 //    blockExecutionInfo.setKeyStructureInfo(keyStructureInfo);
-//    AggregatorInfo aggregatorInfo = new AggregatorInfo();
+//    MeasureInfo aggregatorInfo = new MeasureInfo();
 //    aggregatorInfo.setMeasureOrdinals(new int[] { 10, 20, 30, 40 });
 //    aggregatorInfo.setMeasureExists(new boolean[] { true, false, false, false });
 //    aggregatorInfo.setDefaultValues(new Object[] { 1, 2, 3, 4 });
 //    aggregatorInfo.setMeasureDataTypes(
 //        new DataType[] { DataType.INT, DataType.TIMESTAMP, DataType.INT, DataType.INT });
-//    blockExecutionInfo.setAggregatorInfo(aggregatorInfo);
+//    blockExecutionInfo.setMeasureInfo(aggregatorInfo);
 //    QueryDimension queryDimension1 = new QueryDimension("QDCol1");
 //    queryDimension1.setQueryOrder(1);
 //    ColumnSchema columnSchema = new ColumnSchema();

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/test/java/org/apache/carbondata/core/scan/collector/impl/RawBasedResultCollectorTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/scan/collector/impl/RawBasedResultCollectorTest.java b/core/src/test/java/org/apache/carbondata/core/scan/collector/impl/RawBasedResultCollectorTest.java
index d3b3a49..2a8f78e 100644
--- a/core/src/test/java/org/apache/carbondata/core/scan/collector/impl/RawBasedResultCollectorTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/scan/collector/impl/RawBasedResultCollectorTest.java
@@ -44,7 +44,7 @@ public class RawBasedResultCollectorTest {
 //    keyStructureInfo.setMaskedBytes(new int[] { 1, 2 });
 //    keyStructureInfo.setMaskByteRanges(new int[] { 1, 2 });
 //    blockExecutionInfo.setKeyStructureInfo(keyStructureInfo);
-//    AggregatorInfo aggregatorInfo = new AggregatorInfo();
+//    MeasureInfo aggregatorInfo = new MeasureInfo();
 //    aggregatorInfo.setMeasureOrdinals(new int[] { 10, 20, 30, 40 });
 //    aggregatorInfo.setMeasureExists(new boolean[] { true, false, false, false });
 //    aggregatorInfo.setDefaultValues(new Object[] { 1, 2, 3, 4 });
@@ -77,7 +77,7 @@ public class RawBasedResultCollectorTest {
 //    blockExecutionInfo.setQueryMeasures(
 //        new QueryMeasure[] { queryMeasure1, queryMeasure2, queryMeasure3, queryMeasure4 });
 //    blockExecutionInfo.setFixedKeyUpdateRequired(true);
-//    blockExecutionInfo.setAggregatorInfo(aggregatorInfo);
+//    blockExecutionInfo.setMeasureInfo(aggregatorInfo);
 //    blockExecutionInfo.setMaskedByteForBlock(new int[] { 1, 2 });
 //    blockExecutionInfo.setBlockKeyGenerator(keyGenerator);
 //    rawBasedResultCollector = new RawBasedResultCollector(blockExecutionInfo);

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/core/src/test/java/org/apache/carbondata/core/scan/executor/util/RestructureUtilTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/scan/executor/util/RestructureUtilTest.java b/core/src/test/java/org/apache/carbondata/core/scan/executor/util/RestructureUtilTest.java
index 7bfaa3e..634a301 100644
--- a/core/src/test/java/org/apache/carbondata/core/scan/executor/util/RestructureUtilTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/scan/executor/util/RestructureUtilTest.java
@@ -20,7 +20,9 @@ import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
-import org.apache.carbondata.core.scan.executor.infos.AggregatorInfo;
+import org.apache.carbondata.core.scan.executor.exception.QueryExecutionException;
+import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;
+import org.apache.carbondata.core.scan.executor.infos.MeasureInfo;
 import org.apache.carbondata.core.scan.model.QueryDimension;
 import org.apache.carbondata.core.scan.model.QueryMeasure;
 
@@ -37,6 +39,7 @@ import java.util.List;
 public class RestructureUtilTest {
 
   @Test public void testToGetUpdatedQueryDimension() {
+    BlockExecutionInfo blockExecutionInfo = new BlockExecutionInfo();
     List<Encoding> encodingList = new ArrayList<Encoding>();
     encodingList.add(Encoding.DICTIONARY);
     ColumnSchema columnSchema1 = new ColumnSchema();
@@ -75,8 +78,10 @@ public class RestructureUtilTest {
     List<QueryDimension> queryDimensions =
         Arrays.asList(queryDimension1, queryDimension2, queryDimension3);
 
-    List<QueryDimension> result = RestructureUtil
-        .getUpdatedQueryDimension(queryDimensions, tableBlockDimensions, tableComplexDimensions);
+    List<QueryDimension> result = null;
+    result = RestructureUtil
+        .createDimensionInfoAndGetUpdatedQueryDimension(blockExecutionInfo, queryDimensions, tableBlockDimensions,
+            tableComplexDimensions);
 
     assertThat(result, is(equalTo(Arrays.asList(queryDimension1, queryDimension2))));
   }
@@ -92,7 +97,7 @@ public class RestructureUtilTest {
     CarbonMeasure carbonMeasure1 = new CarbonMeasure(columnSchema1, 1);
     CarbonMeasure carbonMeasure2 = new CarbonMeasure(columnSchema2, 2);
     CarbonMeasure carbonMeasure3 = new CarbonMeasure(columnSchema3, 3);
-    carbonMeasure3.setDefaultValue("3".getBytes());
+    carbonMeasure3.getColumnSchema().setDefaultValue("3".getBytes());
     List<CarbonMeasure> currentBlockMeasures = Arrays.asList(carbonMeasure1, carbonMeasure2);
 
     QueryMeasure queryMeasure1 = new QueryMeasure("Id");
@@ -102,12 +107,13 @@ public class RestructureUtilTest {
     QueryMeasure queryMeasure3 = new QueryMeasure("Age");
     queryMeasure3.setMeasure(carbonMeasure3);
     List<QueryMeasure> queryMeasures = Arrays.asList(queryMeasure1, queryMeasure2, queryMeasure3);
-
-    AggregatorInfo aggregatorInfo =
-        RestructureUtil.getAggregatorInfos(queryMeasures, currentBlockMeasures);
+    BlockExecutionInfo blockExecutionInfo = new BlockExecutionInfo();
+    RestructureUtil.createMeasureInfoAndGetUpdatedQueryMeasures(blockExecutionInfo, queryMeasures,
+        currentBlockMeasures);
+    MeasureInfo measureInfo = blockExecutionInfo.getMeasureInfo();
     boolean[] measuresExist = { true, true, false };
-    assertThat(aggregatorInfo.getMeasureExists(), is(equalTo(measuresExist)));
+    assertThat(measureInfo.getMeasureExists(), is(equalTo(measuresExist)));
     Object[] defaultValues = { null, null, "3".getBytes() };
-    assertThat(aggregatorInfo.getDefaultValues(), is(equalTo(defaultValues)));
+    assertThat(measureInfo.getDefaultValues(), is(equalTo(defaultValues)));
   }
 }

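The updated test asserts which query measures exist in the current block and which fall back to their schema default values. Conceptually, projecting a restructured row then looks like the standalone sketch below (a hypothetical helper, not the RestructureUtil API itself).

// Standalone sketch of the restructure idea the test exercises: measures missing from an
// older block are answered from their schema default values instead of block data.
final class MeasureRestructureSketch {

  static Object[] projectRow(Object[] blockMeasureValues, boolean[] measureExists,
      Object[] defaultValues) {
    Object[] result = new Object[measureExists.length];
    int blockIndex = 0;
    for (int i = 0; i < measureExists.length; i++) {
      // existing measures are read from the block, missing ones from their defaults
      result[i] = measureExists[i] ? blockMeasureValues[blockIndex++] : defaultValues[i];
    }
    return result;
  }
}
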
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/processing/src/main/java/org/apache/carbondata/processing/newflow/CarbonDataLoadConfiguration.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/newflow/CarbonDataLoadConfiguration.java b/processing/src/main/java/org/apache/carbondata/processing/newflow/CarbonDataLoadConfiguration.java
index 2c01746..cd58a00 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/newflow/CarbonDataLoadConfiguration.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/newflow/CarbonDataLoadConfiguration.java
@@ -17,7 +17,9 @@
 
 package org.apache.carbondata.processing.newflow;
 
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
@@ -27,6 +29,10 @@ public class CarbonDataLoadConfiguration {
 
   private DataField[] dataFields;
 
+  private DataField[] dimensionFields;
+
+  private DataField[] measureFields;
+
   private AbsoluteTableIdentifier tableIdentifier;
 
   private String[] header;
@@ -58,17 +64,43 @@ public class CarbonDataLoadConfiguration {
 
   private boolean preFetch;
 
+  private int dimensionCount;
+
+  private int measureCount;
+
   public CarbonDataLoadConfiguration() {
   }
 
-  public int getDimensionCount() {
-    int dimCount = 0;
+  private void initDimensionFields() {
+    List<Integer> dimensionIndexes = new ArrayList<>(dataFields.length);
     for (int i = 0; i < dataFields.length; i++) {
       if (dataFields[i].getColumn().isDimesion()) {
-        dimCount++;
+        dimensionIndexes.add(i);
+        dimensionCount++;
       }
     }
-    return dimCount;
+    dimensionFields = new DataField[dimensionCount];
+    for (int i = 0; i < dimensionCount; i++) {
+      dimensionFields[i] = dataFields[dimensionIndexes.get(i)];
+    }
+  }
+
+  private void initMeasureFields() {
+    List<Integer> measureIndexes = new ArrayList<>(dataFields.length);
+    for (int i = 0; i < dataFields.length; i++) {
+      if (!dataFields[i].getColumn().isDimesion()) {
+        measureIndexes.add(i);
+        measureCount++;
+      }
+    }
+    measureFields = new DataField[measureCount];
+    for (int i = 0; i < measureCount; i++) {
+      measureFields[i] = dataFields[measureIndexes.get(i)];
+    }
+  }
+
+  public int getDimensionCount() {
+    return dimensionCount;
   }
 
   public int getNoDictionaryCount() {
@@ -92,13 +124,7 @@ public class CarbonDataLoadConfiguration {
   }
 
   public int getMeasureCount() {
-    int msrCount = 0;
-    for (int i = 0; i < dataFields.length; i++) {
-      if (!dataFields[i].getColumn().isDimesion()) {
-        msrCount++;
-      }
-    }
-    return msrCount;
+    return measureCount;
   }
 
   public DataField[] getDataFields() {
@@ -107,6 +133,8 @@ public class CarbonDataLoadConfiguration {
 
   public void setDataFields(DataField[] dataFields) {
     this.dataFields = dataFields;
+    initDimensionFields();
+    initMeasureFields();
   }
 
   public String[] getHeader() {
@@ -196,4 +224,12 @@ public class CarbonDataLoadConfiguration {
   public void setPreFetch(boolean preFetch) {
     this.preFetch = preFetch;
   }
+
+  public DataField[] getDimensionFields() {
+    return dimensionFields;
+  }
+
+  public DataField[] getMeasureFields() {
+    return measureFields;
+  }
 }

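Splitting the data fields once in setDataFields, instead of re-scanning the array on every getDimensionCount/getMeasureCount call, is a small caching trade-off. A generic standalone sketch of that pattern, with hypothetical names, is shown below.

import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;

// Generic sketch: partition a field list once when it is set, so later count and
// field lookups are O(1) instead of re-scanning the whole array.
final class FieldPartitionSketch<T> {

  private List<T> dimensionLike = new ArrayList<>();
  private List<T> measureLike = new ArrayList<>();

  void setFields(List<T> fields, Predicate<T> isDimension) {
    dimensionLike = new ArrayList<>();
    measureLike = new ArrayList<>();
    for (T field : fields) {
      (isDimension.test(field) ? dimensionLike : measureLike).add(field);
    }
  }

  int getDimensionCount() { return dimensionLike.size(); }

  int getMeasureCount() { return measureLike.size(); }

  List<T> getMeasureFields() { return measureLike; }
}
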
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java b/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java
index b89e283..5e01abe 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java
@@ -146,18 +146,20 @@ public final class DataLoadProcessBuilder {
     // First add dictionary and non dictionary dimensions because these are part of mdk key.
     // And then add complex data types and measures.
     for (CarbonColumn column : dimensions) {
-      DataField dataField = new DataField(column);
-      dataField.setDateFormat(dateFormatMap.get(column.getColName()));
-      if (column.isComplex()) {
-        complexDataFields.add(dataField);
-      } else {
-        dataFields.add(dataField);
+      if (!column.isInvisible()) {
+        DataField dataField = new DataField(column);
+        dataField.setDateFormat(dateFormatMap.get(column.getColName()));
+        if (column.isComplex()) {
+          complexDataFields.add(dataField);
+        } else {
+          dataFields.add(dataField);
+        }
       }
     }
     dataFields.addAll(complexDataFields);
     for (CarbonColumn column : measures) {
       // This dummy measure is added when no measure was present. We do not need to load it.
-      if (!(column.getColName().equals("default_dummy_measure"))) {
+      if (!column.isInvisible() && !(column.getColName().equals("default_dummy_measure"))) {
         dataFields.add(new DataField(column));
       }
     }

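The builder change above simply skips invisible columns (and the placeholder dummy measure) when assembling data fields. An equivalent standalone filter, written against a hypothetical Column interface, is:

import java.util.List;
import java.util.stream.Collectors;

// Stream-based equivalent of the invisible-column filtering above, using a
// hypothetical Column type with isInvisible()/getName().
final class VisibleColumnFilterSketch {

  interface Column {
    boolean isInvisible();
    String getName();
  }

  static List<Column> visibleMeasures(List<Column> measures) {
    return measures.stream()
        .filter(c -> !c.isInvisible())
        .filter(c -> !"default_dummy_measure".equals(c.getName()))
        .collect(Collectors.toList());
  }
}
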
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortParameters.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortParameters.java b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortParameters.java
index f618965..a52ebb2 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortParameters.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortParameters.java
@@ -430,8 +430,7 @@ public class SortParameters {
         CarbonCommonConstants.CARBON_PREFETCH_BUFFERSIZE_DEFAULT)));
 
     char[] aggType = CarbonDataProcessorUtil
-        .getAggType(parameters.getMeasureColCount(), parameters.getDatabaseName(),
-            parameters.getTableName());
+        .getAggType(configuration.getMeasureCount(), configuration.getMeasureFields());
     parameters.setAggType(aggType);
     parameters.setUseKettle(false);
     return parameters;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
index cae22a4..594f0e2 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
@@ -269,7 +269,7 @@ public class CarbonFactDataHandlerModel {
     carbonFactDataHandlerModel.setColCardinality(colCardinality);
     carbonFactDataHandlerModel.setDataWritingRequest(true);
     carbonFactDataHandlerModel.setAggType(CarbonDataProcessorUtil
-        .getAggType(measureCount, identifier.getDatabaseName(), identifier.getTableName()));
+        .getAggType(configuration.getMeasureCount(), configuration.getMeasureFields()));
     carbonFactDataHandlerModel.setFactDimLens(dimLens);
     carbonFactDataHandlerModel.setWrapperColumnSchema(wrapperColumnSchema);
     carbonFactDataHandlerModel.setPrimitiveDimLens(simpleDimsLen);

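Both call sites above now derive the aggregation types from the measure fields in the load configuration rather than from a schema lookup by database and table name. The per-measure mapping itself boils down to the sketch below, assuming the usual CarbonData markers ('l' for long, 'b' for decimal, 'n' for everything else); treat the characters as illustrative.

import java.util.Arrays;

// Sketch: derive one aggregation-type character per measure from its data type.
final class AggTypeSketch {

  enum DataType { SHORT, INT, LONG, DOUBLE, DECIMAL }

  static char[] aggTypes(DataType[] measureTypes) {
    char[] aggType = new char[measureTypes.length];
    Arrays.fill(aggType, 'n');                       // default: double aggregation
    for (int i = 0; i < measureTypes.length; i++) {
      if (measureTypes[i] == DataType.LONG) {
        aggType[i] = 'l';                            // big-int measure
      } else if (measureTypes[i] == DataType.DECIMAL) {
        aggType[i] = 'b';                            // big-decimal measure
      }
    }
    return aggType;
  }
}
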
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/70256e77/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
index a56e211..eff5fca 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
@@ -397,34 +397,15 @@ public final class CarbonDataProcessorUtil {
    */
   public static Set<String> getSchemaColumnNames(CarbonDataLoadSchema schema, String tableName) {
     Set<String> columnNames = new HashSet<String>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
-
     String factTableName = schema.getCarbonTable().getFactTableName();
     if (tableName.equals(factTableName)) {
-
       List<CarbonDimension> dimensions =
           schema.getCarbonTable().getDimensionByTableName(factTableName);
-
       for (CarbonDimension dimension : dimensions) {
-
-        String foreignKey = null;
-        for (CarbonDataLoadSchema.DimensionRelation dimRel : schema.getDimensionRelationList()) {
-          for (String field : dimRel.getColumns()) {
-            if (dimension.getColName().equals(field)) {
-              foreignKey = dimRel.getRelation().getFactForeignKeyColumn();
-              break;
-            }
-          }
-          if (null != foreignKey) {
-            break;
-          }
-        }
-        if (null == foreignKey) {
+        if (!dimension.isInvisible()) {
           columnNames.add(dimension.getColName());
-        } else {
-          columnNames.add(foreignKey);
         }
       }
-
       List<CarbonMeasure> measures = schema.getCarbonTable().getMeasureByTableName(factTableName);
       for (CarbonMeasure msr : measures) {
         if (!msr.getColumnSchema().isInvisible()) {
@@ -436,15 +417,12 @@ public final class CarbonDataProcessorUtil {
       for (CarbonDimension dimension : dimensions) {
         columnNames.add(dimension.getColName());
       }
-
       List<CarbonMeasure> measures = schema.getCarbonTable().getMeasureByTableName(tableName);
       for (CarbonMeasure msr : measures) {
         columnNames.add(msr.getColName());
       }
     }
-
     return columnNames;
-
   }
 
   /**
@@ -481,6 +459,18 @@ public final class CarbonDataProcessorUtil {
   }
 
   /**
+   * get the aggregation type character for each measure field
+   */
+  public static char[] getAggType(int measureCount, DataField[] measureFields) {
+    char[] aggType = new char[measureCount];
+    Arrays.fill(aggType, 'n');
+    for (int i = 0; i < measureFields.length; i++) {
+      aggType[i] = DataTypeUtil.getAggType(measureFields[i].getColumn().getDataType());
+    }
+    return aggType;
+  }
+
+  /**
    * Creates map for columns which dateformats mentioned while loading the data.
    * @param dataFormatString
    * @return