Posted to commits@carbondata.apache.org by ch...@apache.org on 2016/07/20 10:14:11 UTC

[43/50] [abbrv] incubator-carbondata git commit: Merge remote-tracking branch 'carbon_master/master' into apache/master

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
index 0f2eda1,0000000..5c22566
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
+++ b/core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
@@@ -1,208 -1,0 +1,209 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.carbondata.scan.filter.executer;
 +
 +import java.util.BitSet;
 +import java.util.List;
 +
 +import org.carbondata.core.carbon.AbsoluteTableIdentifier;
 +import org.carbondata.core.carbon.datastore.block.SegmentProperties;
 +import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
 +import org.carbondata.core.carbon.datastore.chunk.impl.FixedLengthDimensionDataChunk;
 +import org.carbondata.core.carbon.metadata.encoder.Encoding;
 +import org.carbondata.core.util.ByteUtil;
 +import org.carbondata.core.util.CarbonUtil;
 +import org.carbondata.scan.expression.Expression;
 +import org.carbondata.scan.expression.exception.FilterUnsupportedException;
 +import org.carbondata.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
 +import org.carbondata.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
 +import org.carbondata.scan.processor.BlocksChunkHolder;
 +
 +public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecuterImpl {
 +  private byte[][] filterRangeValues;
 +
 +  public RowLevelRangeGrtThanFiterExecuterImpl(
 +      List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
 +      List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList, Expression exp,
 +      AbsoluteTableIdentifier tableIdentifier, byte[][] filterRangeValues,
 +      SegmentProperties segmentProperties) {
-     super(dimColEvaluatorInfoList, msrColEvalutorInfoList, exp, tableIdentifier, segmentProperties);
++    super(dimColEvaluatorInfoList, msrColEvalutorInfoList, exp, tableIdentifier, segmentProperties,
++        null);
 +    this.filterRangeValues = filterRangeValues;
 +  }
 +
 +  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
 +    BitSet bitSet = new BitSet(1);
 +    byte[][] filterValues = this.filterRangeValues;
 +    int columnIndex = this.dimColEvaluatorInfoList.get(0).getColumnIndex();
 +    boolean isScanRequired = false;
 +    for (int k = 0; k < filterValues.length; k++) {
 +      // filter value should be in the range of the max and min value, i.e.
 +      // max > filtervalue > min,
 +      // so filter minus max should be negative
 +      int maxCompare =
 +          ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMaxValue[columnIndex]);
 +      // if any filter value is in range then this block needs to be
 +      // scanned, i.e. the filter value is strictly less than the block max value.
 +      if (maxCompare < 0) {
 +        isScanRequired = true;
 +        break;
 +      }
 +    }
 +    if (isScanRequired) {
 +      bitSet.set(0);
 +    }
 +    return bitSet;
 +
 +  }
 +
 +  @Override public BitSet applyFilter(BlocksChunkHolder blockChunkHolder)
 +      throws FilterUnsupportedException {
 +    if (!dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DICTIONARY)) {
 +      return super.applyFilter(blockChunkHolder);
 +    }
 +    int blockIndex = segmentProperties.getDimensionOrdinalToBlockMapping()
 +        .get(dimColEvaluatorInfoList.get(0).getColumnIndex());
 +    if (null == blockChunkHolder.getDimensionDataChunk()[blockIndex]) {
 +      blockChunkHolder.getDimensionDataChunk()[blockIndex] = blockChunkHolder.getDataBlock()
 +          .getDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
 +    }
 +    return getFilteredIndexes(blockChunkHolder.getDimensionDataChunk()[blockIndex],
 +        blockChunkHolder.getDataBlock().nodeSize());
 +  }
 +
 +  private BitSet getFilteredIndexes(DimensionColumnDataChunk dimensionColumnDataChunk,
 +      int numerOfRows) {
 +    if (null != dimensionColumnDataChunk.getAttributes().getInvertedIndexes()
 +        && dimensionColumnDataChunk instanceof FixedLengthDimensionDataChunk) {
 +      return setFilterdIndexToBitSetWithColumnIndex(
 +          (FixedLengthDimensionDataChunk) dimensionColumnDataChunk, numerOfRows);
 +    }
 +    return setFilterdIndexToBitSet(dimensionColumnDataChunk, numerOfRows);
 +  }
 +
 +  /**
 +   * Method will scan the block and find the range start index from which all members
 +   * will be considered for applying range filters. This method will be called if the
 +   * column is not sorted by default, so a column index mapping will be present for
 +   * accessing the members from the block.
 +   *
 +   * @param dimensionColumnDataChunk
 +   * @param numerOfRows
 +   * @return BitSet.
 +   */
 +  private BitSet setFilterdIndexToBitSetWithColumnIndex(
 +      FixedLengthDimensionDataChunk dimensionColumnDataChunk, int numerOfRows) {
 +    BitSet bitSet = new BitSet(numerOfRows);
 +    int[] columnIndex = dimensionColumnDataChunk.getAttributes().getInvertedIndexes();
 +    int start = 0;
 +    int last = 0;
 +    int startIndex = 0;
 +    byte[][] filterValues = this.filterRangeValues;
 +    for (int i = 0; i < filterValues.length; i++) {
 +      start = CarbonUtil
 +          .getFirstIndexUsingBinarySearch(dimensionColumnDataChunk, startIndex, numerOfRows - 1,
 +              filterValues[i], true);
 +      if (start >= 0) {
 +        start = CarbonUtil.nextGreaterValueToTarget(start,
 +            (FixedLengthDimensionDataChunk) dimensionColumnDataChunk, filterValues[i], numerOfRows);
 +      }
 +      // Logic will handle the case where the range filter member is not present in the block.
 +      // In this case the binary search will return the index from which the bits will be
 +      // set in order to apply the filter. This is a greater-than filter, so the range will be
 +      // taken from the next element which is greater than the filter member.
 +      if (start < 0) {
 +        start = -(start + 1);
 +        if (start == numerOfRows) {
 +          start = start - 1;
 +        }
 +        // Method will compare the tentative index value after binary search; this tentative
 +        // index needs to be compared with the filter member. If it is > the filter value,
 +        // then from that index the bitset will be considered for the filtering process.
 +        if (ByteUtil
 +            .compare(filterValues[i], dimensionColumnDataChunk.getChunkData(columnIndex[start]))
 +            > 0) {
 +          start = start + 1;
 +        }
 +      }
 +
 +      last = start;
 +      for (int j = start; j < numerOfRows; j++) {
 +        bitSet.set(columnIndex[j]);
 +        last++;
 +      }
 +      startIndex = last;
 +      if (startIndex >= numerOfRows) {
 +        break;
 +      }
 +    }
 +
 +    return bitSet;
 +  }
 +
 +  /**
 +   * Method will scan the block and find the range start index from which all
 +   * members will be considered for applying range filters. This method will
 +   * be called if the column is sorted by default, so no column index
 +   * mapping will be present for accessing the members from the block.
 +   *
 +   * @param dimensionColumnDataChunk
 +   * @param numerOfRows
 +   * @return BitSet.
 +   */
 +  private BitSet setFilterdIndexToBitSet(DimensionColumnDataChunk dimensionColumnDataChunk,
 +      int numerOfRows) {
 +    BitSet bitSet = new BitSet(numerOfRows);
 +    if (dimensionColumnDataChunk instanceof FixedLengthDimensionDataChunk) {
 +      int start = 0;
 +      int last = 0;
 +      int startIndex = 0;
 +      byte[][] filterValues = this.filterRangeValues;
 +      for (int k = 0; k < filterValues.length; k++) {
 +        start = CarbonUtil.getFirstIndexUsingBinarySearch(
 +            (FixedLengthDimensionDataChunk) dimensionColumnDataChunk, startIndex, numerOfRows - 1,
 +            filterValues[k], true);
 +        start = CarbonUtil.nextGreaterValueToTarget(start,
 +            (FixedLengthDimensionDataChunk) dimensionColumnDataChunk, filterValues[k], numerOfRows);
 +        if (start < 0) {
 +          start = -(start + 1);
 +          if (start == numerOfRows) {
 +            start = start - 1;
 +          }
 +          // Method will compare the tentative index value after binary search; this tentative
 +          // index needs to be compared with the filter member. If it is > the filter value,
 +          // then from that index the bitset will be considered for the filtering process.
 +          if (ByteUtil.compare(filterValues[k], dimensionColumnDataChunk.getChunkData(start)) > 0) {
 +            start = start + 1;
 +          }
 +        }
 +        last = start;
 +        for (int j = start; j < numerOfRows; j++) {
 +          bitSet.set(j);
 +          last++;
 +        }
 +        startIndex = last;
 +        if (startIndex >= numerOfRows) {
 +          break;
 +        }
 +      }
 +    }
 +    return bitSet;
 +  }
 +
 +}
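
The isScanRequired override above is the block-pruning step of this greater-than executor: a
block is scanned only when at least one filter value is strictly smaller than the block's max
value for the filtered column. Below is a minimal standalone sketch of that check, assuming a
lexicographic unsigned byte comparison like ByteUtil.UnsafeComparer; the names
GreaterThanPruneSketch, blockNeedsScan and compareUnsigned are hypothetical, so this illustrates
the idea rather than the committed implementation.

    public final class GreaterThanPruneSketch {

      // Returns true if at least one filter value is strictly smaller than the block max,
      // i.e. some row in the block could still satisfy "column > filterValue".
      static boolean blockNeedsScan(byte[][] filterValues, byte[] blockMax) {
        for (byte[] filterValue : filterValues) {
          if (compareUnsigned(filterValue, blockMax) < 0) {
            return true;
          }
        }
        return false;
      }

      // Lexicographic unsigned byte comparison, a simplified stand-in for ByteUtil.UnsafeComparer.
      static int compareUnsigned(byte[] a, byte[] b) {
        int len = Math.min(a.length, b.length);
        for (int i = 0; i < len; i++) {
          int diff = (a[i] & 0xFF) - (b[i] & 0xFF);
          if (diff != 0) {
            return diff;
          }
        }
        return a.length - b.length;
      }

      public static void main(String[] args) {
        byte[] blockMax = {0x05};
        System.out.println(blockNeedsScan(new byte[][] {{0x03}}, blockMax)); // true: 3 < 5, block must be scanned
        System.out.println(blockNeedsScan(new byte[][] {{0x07}}, blockMax)); // false: no row can be > 7, block is pruned
      }
    }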

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
index e715261,0000000..0d857d5
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
+++ b/core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
@@@ -1,199 -1,0 +1,200 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.carbondata.scan.filter.executer;
 +
 +import java.util.BitSet;
 +import java.util.List;
 +
 +import org.carbondata.core.carbon.AbsoluteTableIdentifier;
 +import org.carbondata.core.carbon.datastore.block.SegmentProperties;
 +import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
 +import org.carbondata.core.carbon.datastore.chunk.impl.FixedLengthDimensionDataChunk;
 +import org.carbondata.core.carbon.metadata.encoder.Encoding;
 +import org.carbondata.core.util.ByteUtil;
 +import org.carbondata.core.util.CarbonUtil;
 +import org.carbondata.scan.expression.Expression;
 +import org.carbondata.scan.expression.exception.FilterUnsupportedException;
 +import org.carbondata.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
 +import org.carbondata.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
 +import org.carbondata.scan.processor.BlocksChunkHolder;
 +
 +public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilterExecuterImpl {
 +
 +  protected byte[][] filterRangeValues;
 +
 +  public RowLevelRangeGrtrThanEquaToFilterExecuterImpl(
 +      List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
 +      List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList, Expression exp,
 +      AbsoluteTableIdentifier tableIdentifier, byte[][] filterRangeValues,
 +      SegmentProperties segmentProperties) {
-     super(dimColEvaluatorInfoList, msrColEvalutorInfoList, exp, tableIdentifier, segmentProperties);
++    super(dimColEvaluatorInfoList, msrColEvalutorInfoList, exp, tableIdentifier, segmentProperties,
++        null);
 +    this.filterRangeValues = filterRangeValues;
 +  }
 +
 +  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
 +    BitSet bitSet = new BitSet(1);
 +    byte[][] filterValues = this.filterRangeValues;
 +    int columnIndex = this.dimColEvaluatorInfoList.get(0).getColumnIndex();
 +    boolean isScanRequired = false;
 +    for (int k = 0; k < filterValues.length; k++) {
 +      // filter value should be in the range of the max and min value, i.e.
 +      // max > filtervalue > min,
 +      // so filter minus max should not be positive
 +      int maxCompare =
 +          ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMaxValue[columnIndex]);
 +      // if any filter value is in range then this block needs to be
 +      // scanned, i.e. the filter value is less than or equal to the block max value.
 +      if (maxCompare <= 0) {
 +        isScanRequired = true;
 +        break;
 +      }
 +    }
 +    if (isScanRequired) {
 +      bitSet.set(0);
 +    }
 +    return bitSet;
 +
 +  }
 +
 +  @Override public BitSet applyFilter(BlocksChunkHolder blockChunkHolder)
 +      throws FilterUnsupportedException {
 +    if (!dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DICTIONARY)) {
 +      return super.applyFilter(blockChunkHolder);
 +    }
 +    int blockIndex = segmentProperties.getDimensionOrdinalToBlockMapping()
 +        .get(dimColEvaluatorInfoList.get(0).getColumnIndex());
 +    if (null == blockChunkHolder.getDimensionDataChunk()[blockIndex]) {
 +      blockChunkHolder.getDimensionDataChunk()[blockIndex] = blockChunkHolder.getDataBlock()
 +          .getDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
 +    }
 +    return getFilteredIndexes(blockChunkHolder.getDimensionDataChunk()[blockIndex],
 +        blockChunkHolder.getDataBlock().nodeSize());
 +  }
 +
 +  private BitSet getFilteredIndexes(DimensionColumnDataChunk dimensionColumnDataChunk,
 +      int numerOfRows) {
 +    if (null != dimensionColumnDataChunk.getAttributes().getInvertedIndexes()
 +        && dimensionColumnDataChunk instanceof FixedLengthDimensionDataChunk) {
 +      return setFilterdIndexToBitSetWithColumnIndex(
 +          (FixedLengthDimensionDataChunk) dimensionColumnDataChunk, numerOfRows);
 +    }
 +    return setFilterdIndexToBitSet(dimensionColumnDataChunk, numerOfRows);
 +  }
 +
 +  /**
 +   * Method will scan the block and find the range start index from which all members
 +   * will be considered for applying range filters. This method will be called if the
 +   * column is not sorted by default, so a column index mapping will be present for
 +   * accessing the members from the block.
 +   *
 +   * @param dimensionColumnDataChunk
 +   * @param numerOfRows
 +   * @return BitSet.
 +   */
 +  private BitSet setFilterdIndexToBitSetWithColumnIndex(
 +      FixedLengthDimensionDataChunk dimensionColumnDataChunk, int numerOfRows) {
 +    BitSet bitSet = new BitSet(numerOfRows);
 +    int[] columnIndex = dimensionColumnDataChunk.getAttributes().getInvertedIndexes();
 +    int start = 0;
 +    int last = 0;
 +    int startIndex = 0;
 +    byte[][] filterValues = this.filterRangeValues;
 +    for (int i = 0; i < filterValues.length; i++) {
 +      start = CarbonUtil
 +          .getFirstIndexUsingBinarySearch(dimensionColumnDataChunk, startIndex, numerOfRows - 1,
 +              filterValues[i], false);
 +      if (start < 0) {
 +        start = -(start + 1);
 +        if (start == numerOfRows) {
 +          start = start - 1;
 +        }
 +        // Method will compare the tentative index value after binary search; this tentative
 +        // index needs to be compared with the filter member. If it is >= the filter value,
 +        // then from that index the bitset will be considered for the filtering process.
 +        if (ByteUtil
 +            .compare(filterValues[i], dimensionColumnDataChunk.getChunkData(columnIndex[start]))
 +            >= 0) {
 +          start = start + 1;
 +        }
 +      }
 +      last = start;
 +      for (int j = start; j < numerOfRows; j++) {
 +
 +        bitSet.set(columnIndex[j]);
 +        last++;
 +      }
 +      startIndex = last;
 +      if (startIndex >= numerOfRows) {
 +        break;
 +      }
 +    }
 +    return bitSet;
 +  }
 +
 +  /**
 +   * Method will scan the block and find the range start index from which all
 +   * members will be considered for applying range filters. This method will
 +   * be called if the column is sorted by default, so no column index
 +   * mapping will be present for accessing the members from the block.
 +   *
 +   * @param dimensionColumnDataChunk
 +   * @param numerOfRows
 +   * @return BitSet.
 +   */
 +  private BitSet setFilterdIndexToBitSet(DimensionColumnDataChunk dimensionColumnDataChunk,
 +      int numerOfRows) {
 +    BitSet bitSet = new BitSet(numerOfRows);
 +    if (dimensionColumnDataChunk instanceof FixedLengthDimensionDataChunk) {
 +      int start = 0;
 +      int last = 0;
 +      int startIndex = 0;
 +      byte[][] filterValues = this.filterRangeValues;
 +      for (int k = 0; k < filterValues.length; k++) {
 +        start = CarbonUtil.getFirstIndexUsingBinarySearch(
 +            (FixedLengthDimensionDataChunk) dimensionColumnDataChunk, startIndex, numerOfRows - 1,
 +            filterValues[k], false);
 +        if (start < 0) {
 +          start = -(start + 1);
 +          if (start == numerOfRows) {
 +            start = start - 1;
 +          }
 +          // Method will compare the tentative index value after binary search; this tentative
 +          // index needs to be compared with the filter member. If it is >= the filter value,
 +          // then from that index the bitset will be considered for the filtering process.
 +          if (ByteUtil.compare(filterValues[k], dimensionColumnDataChunk.getChunkData(start))
 +              >= 0) {
 +            start = start + 1;
 +          }
 +        }
 +
 +        last = start;
 +        for (int j = start; j < numerOfRows; j++) {
 +          bitSet.set(j);
 +          last++;
 +        }
 +        startIndex = last;
 +        if (startIndex >= numerOfRows) {
 +          break;
 +        }
 +      }
 +    }
 +    return bitSet;
 +  }
 +}
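
The >= executor above, like the > executor before it, recovers a start position from
CarbonUtil.getFirstIndexUsingBinarySearch and, when the exact filter member is absent, converts
the negative return value with start = -(start + 1). The sketch below shows that convention on a
plain sorted int[] using java.util.Arrays.binarySearch, assuming CarbonUtil follows the usual
-(insertionPoint + 1) encoding that the handling in the diff implies; the class and method names
are hypothetical and this is not the CarbonData code path.

    import java.util.Arrays;
    import java.util.BitSet;

    public final class GreaterThanEqualStartSketch {

      // Finds the first index whose value is >= filter in a sorted column and sets all bits
      // from that index to the end, mirroring the idea of setFilterdIndexToBitSet for a >= filter.
      static BitSet rowsGreaterThanOrEqual(int[] sortedColumn, int filter) {
        BitSet bits = new BitSet(sortedColumn.length);
        int start = Arrays.binarySearch(sortedColumn, filter);
        if (start < 0) {
          // Absent member: binarySearch returns -(insertionPoint + 1); recover the insertion point.
          start = -(start + 1);
        } else {
          // Present member: step back to the first occurrence so duplicates are included.
          while (start > 0 && sortedColumn[start - 1] == filter) {
            start--;
          }
        }
        bits.set(start, sortedColumn.length);
        return bits;
      }

      public static void main(String[] args) {
        int[] column = {2, 4, 4, 7, 9};
        System.out.println(rowsGreaterThanOrEqual(column, 4)); // {1, 2, 3, 4}
        System.out.println(rowsGreaterThanOrEqual(column, 5)); // {3, 4}
      }
    }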

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
index 92efb0a,0000000..b1ebf0a
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
+++ b/core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
@@@ -1,247 -1,0 +1,248 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.carbondata.scan.filter.executer;
 +
 +import java.util.BitSet;
 +import java.util.List;
 +
 +import org.carbondata.core.carbon.AbsoluteTableIdentifier;
 +import org.carbondata.core.carbon.datastore.block.SegmentProperties;
 +import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
 +import org.carbondata.core.carbon.datastore.chunk.impl.FixedLengthDimensionDataChunk;
 +import org.carbondata.core.carbon.metadata.encoder.Encoding;
 +import org.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
 +import org.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 +import org.carbondata.core.util.ByteUtil;
 +import org.carbondata.core.util.CarbonUtil;
 +import org.carbondata.scan.expression.Expression;
 +import org.carbondata.scan.expression.exception.FilterUnsupportedException;
 +import org.carbondata.scan.filter.FilterUtil;
 +import org.carbondata.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
 +import org.carbondata.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
 +import org.carbondata.scan.processor.BlocksChunkHolder;
 +
 +public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilterExecuterImpl {
 +  protected byte[][] filterRangeValues;
 +
 +  public RowLevelRangeLessThanEqualFilterExecuterImpl(
 +      List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
 +      List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList, Expression exp,
 +      AbsoluteTableIdentifier tableIdentifier, byte[][] filterRangeValues,
 +      SegmentProperties segmentProperties) {
-     super(dimColEvaluatorInfoList, msrColEvalutorInfoList, exp, tableIdentifier, segmentProperties);
++    super(dimColEvaluatorInfoList, msrColEvalutorInfoList, exp, tableIdentifier, segmentProperties,
++        null);
 +    this.filterRangeValues = filterRangeValues;
 +  }
 +
 +  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
 +    BitSet bitSet = new BitSet(1);
 +    byte[][] filterValues = this.filterRangeValues;
 +    int columnIndex = this.dimColEvaluatorInfoList.get(0).getColumnIndex();
 +    boolean isScanRequired = false;
 +    for (int k = 0; k < filterValues.length; k++) {
 +      // and filter-min should be positive
 +      int minCompare =
 +          ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMinValue[columnIndex]);
 +
 +      // if any filter applied is not in range of the min and max of the block,
 +      // then since it is a less-than-or-equal-to filter, validate whether the block
 +      // min value is less than or equal to the applied filter member
 +      if (minCompare >= 0) {
 +        isScanRequired = true;
 +        break;
 +      }
 +    }
 +    if (isScanRequired) {
 +      bitSet.set(0);
 +    }
 +    return bitSet;
 +
 +  }
 +
 +  @Override public BitSet applyFilter(BlocksChunkHolder blockChunkHolder)
 +      throws FilterUnsupportedException {
 +    if (!dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DICTIONARY)) {
 +      return super.applyFilter(blockChunkHolder);
 +    }
 +    int blockIndex = segmentProperties.getDimensionOrdinalToBlockMapping()
 +        .get(dimColEvaluatorInfoList.get(0).getColumnIndex());
 +    if (null == blockChunkHolder.getDimensionDataChunk()[blockIndex]) {
 +      blockChunkHolder.getDimensionDataChunk()[blockIndex] = blockChunkHolder.getDataBlock()
 +          .getDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
 +    }
 +    return getFilteredIndexes(blockChunkHolder.getDimensionDataChunk()[blockIndex],
 +        blockChunkHolder.getDataBlock().nodeSize());
 +  }
 +
 +  private BitSet getFilteredIndexes(DimensionColumnDataChunk dimensionColumnDataChunk,
 +      int numerOfRows) {
 +    byte[] defaultValue = null;
 +    if (dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DIRECT_DICTIONARY)) {
 +      DirectDictionaryGenerator directDictionaryGenerator = DirectDictionaryKeyGeneratorFactory
 +          .getDirectDictionaryGenerator(
 +              dimColEvaluatorInfoList.get(0).getDimension().getDataType());
 +      int key = directDictionaryGenerator.generateDirectSurrogateKey(null) + 1;
 +      defaultValue = FilterUtil.getMaskKey(key, dimColEvaluatorInfoList.get(0).getDimension(),
 +          this.segmentProperties.getDimensionKeyGenerator());
 +    }
 +    if (null != dimensionColumnDataChunk.getAttributes().getInvertedIndexes()
 +        && dimensionColumnDataChunk instanceof FixedLengthDimensionDataChunk) {
 +
 +      return setFilterdIndexToBitSetWithColumnIndex(
 +          (FixedLengthDimensionDataChunk) dimensionColumnDataChunk, numerOfRows, defaultValue);
 +
 +    }
 +    return setFilterdIndexToBitSet(dimensionColumnDataChunk, numerOfRows, defaultValue);
 +  }
 +
 +  /**
 +   * Method will scan the block and find the range start index from which all members
 +   * will be considered for applying range filters. This method will be called if the
 +   * column is not sorted by default, so a column index mapping will be present for
 +   * accessing the members from the block.
 +   *
 +   * @param dimensionColumnDataChunk
 +   * @param numerOfRows
 +   * @return BitSet.
 +   */
 +  private BitSet setFilterdIndexToBitSetWithColumnIndex(
 +      FixedLengthDimensionDataChunk dimensionColumnDataChunk, int numerOfRows,
 +      byte[] defaultValue) {
 +    BitSet bitSet = new BitSet(numerOfRows);
 +    int[] columnIndex = dimensionColumnDataChunk.getAttributes().getInvertedIndexes();
 +    int start = 0;
 +    int last = 0;
 +    int skip = 0;
 +    int startIndex = 0;
 +    byte[][] filterValues = this.filterRangeValues;
 +    //find the number of default values to skip the null value in case of direct dictionary
 +    if (null != defaultValue) {
 +      start = CarbonUtil
 +          .getFirstIndexUsingBinarySearch(dimensionColumnDataChunk, startIndex, numerOfRows - 1,
 +              defaultValue, true);
 +      if (start < 0) {
 +        skip = -(start + 1);
 +        // end of block
 +        if (skip == numerOfRows) {
 +          return bitSet;
 +        }
 +      } else {
 +        skip = start;
 +      }
 +      startIndex = skip;
 +    }
 +    for (int i = 0; i < filterValues.length; i++) {
 +      start = CarbonUtil
 +          .getFirstIndexUsingBinarySearch(dimensionColumnDataChunk, startIndex, numerOfRows - 1,
 +              filterValues[i], true);
 +      if (start < 0) {
 +        start = -(start + 1);
 +        if (start == numerOfRows) {
 +          start = start - 1;
 +        }
 +        // Method will compare the tentative index value after binary search; this tentative
 +        // index needs to be compared with the filter member. If it is >= the filter value,
 +        // then from that index the bitset will be considered for the filtering process.
 +        if (ByteUtil
 +            .compare(filterValues[i], dimensionColumnDataChunk.getChunkData(columnIndex[start]))
 +            <= 0) {
 +          start = start - 1;
 +        }
 +      }
 +      last = start;
 +      for (int j = start; j >= skip; j--) {
 +        bitSet.set(columnIndex[j]);
 +        last--;
 +      }
 +      startIndex = last;
 +      if (startIndex <= 0) {
 +        break;
 +      }
 +    }
 +    return bitSet;
 +  }
 +
 +  /**
 +   * Method will scan the block and find the range start index from which all
 +   * members will be considered for applying range filters. This method will
 +   * be called if the column is sorted by default, so no column index
 +   * mapping will be present for accessing the members from the block.
 +   *
 +   * @param dimensionColumnDataChunk
 +   * @param numerOfRows
 +   * @param defaultValue
 +   * @return BitSet.
 +   */
 +  private BitSet setFilterdIndexToBitSet(DimensionColumnDataChunk dimensionColumnDataChunk,
 +      int numerOfRows, byte[] defaultValue) {
 +    BitSet bitSet = new BitSet(numerOfRows);
 +    if (dimensionColumnDataChunk instanceof FixedLengthDimensionDataChunk) {
 +      int start = 0;
 +      int last = 0;
 +      int startIndex = 0;
 +      byte[][] filterValues = this.filterRangeValues;
 +      int skip = 0;
 +      //find the number of default values to skip the null value in case of direct dictionary
 +      if (null != defaultValue) {
 +        start = CarbonUtil.getFirstIndexUsingBinarySearch(
 +            (FixedLengthDimensionDataChunk) dimensionColumnDataChunk, startIndex, numerOfRows - 1,
 +            defaultValue, true);
 +        if (start < 0) {
 +          skip = -(start + 1);
 +          // end of block
 +          if (skip == numerOfRows) {
 +            return bitSet;
 +          }
 +        } else {
 +          skip = start;
 +        }
 +        startIndex = skip;
 +      }
 +      for (int k = 0; k < filterValues.length; k++) {
 +        start = CarbonUtil.getFirstIndexUsingBinarySearch(
 +            (FixedLengthDimensionDataChunk) dimensionColumnDataChunk, startIndex, numerOfRows - 1,
 +            filterValues[k], true);
 +        if (start < 0) {
 +          start = -(start + 1);
 +          if (start == numerOfRows) {
 +            start = start - 1;
 +          }
 +          // Method will compare the tentative index value after binary search; this tentative
 +          // index needs to be compared with the filter member. If it is <= the filter value,
 +          // then from that index the bitset will be considered for the filtering process.
 +          if (ByteUtil.compare(filterValues[k], dimensionColumnDataChunk.getChunkData(start))
 +              <= 0) {
 +            start = start - 1;
 +          }
 +        }
 +        last = start;
 +        for (int j = start; j >= skip; j--) {
 +          bitSet.set(j);
 +          last--;
 +        }
 +        startIndex = last;
 +        if (startIndex <= 0) {
 +          break;
 +        }
 +      }
 +    }
 +    return bitSet;
 +  }
 +
 +}
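
For direct-dictionary columns this less-than-equal executor first computes a skip offset so that
null members, which sort first under the generated default value, are never matched by the range
filter; bits are then set backwards from the upper boundary down to skip. Below is a simplified
sketch of that flow over a sorted array of surrogate keys; treating surrogate 1 as the null
member, and the names LessThanEqualNullSkipSketch and rowsLessThanOrEqual, are assumptions made
for the illustration rather than details taken from the commit.

    import java.util.Arrays;
    import java.util.BitSet;

    public final class LessThanEqualNullSkipSketch {

      // Assumption: surrogate 1 encodes the null member of a direct-dictionary column, so it
      // sorts before every real member value.
      static final int NULL_SURROGATE = 1;

      static BitSet rowsLessThanOrEqual(int[] sortedSurrogates, int filterSurrogate) {
        BitSet bits = new BitSet(sortedSurrogates.length);
        // "skip" = index of the first non-null row; nulls must never match a range filter.
        int skip = 0;
        while (skip < sortedSurrogates.length && sortedSurrogates[skip] == NULL_SURROGATE) {
          skip++;
        }
        if (skip == sortedSurrogates.length) {
          return bits; // the block contains only nulls
        }
        // Find the last index still satisfying "value <= filter".
        int end = Arrays.binarySearch(sortedSurrogates, skip, sortedSurrogates.length, filterSurrogate);
        if (end < 0) {
          end = -(end + 1) - 1; // insertion point minus one = last smaller element
        } else {
          while (end + 1 < sortedSurrogates.length && sortedSurrogates[end + 1] == filterSurrogate) {
            end++; // include trailing duplicates of the boundary value
          }
        }
        for (int j = end; j >= skip; j--) {
          bits.set(j);
        }
        return bits;
      }

      public static void main(String[] args) {
        int[] surrogates = {1, 1, 2, 3, 5, 8};                  // two leading null rows
        System.out.println(rowsLessThanOrEqual(surrogates, 5)); // {2, 3, 4}
        System.out.println(rowsLessThanOrEqual(surrogates, 4)); // {2, 3}
      }
    }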

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java
index 46e7d1b,0000000..00f927b
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java
+++ b/core/src/main/java/org/carbondata/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java
@@@ -1,251 -1,0 +1,252 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.carbondata.scan.filter.executer;
 +
 +import java.util.BitSet;
 +import java.util.List;
 +
 +import org.carbondata.core.carbon.AbsoluteTableIdentifier;
 +import org.carbondata.core.carbon.datastore.block.SegmentProperties;
 +import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
 +import org.carbondata.core.carbon.datastore.chunk.impl.FixedLengthDimensionDataChunk;
 +import org.carbondata.core.carbon.metadata.encoder.Encoding;
 +import org.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
 +import org.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 +import org.carbondata.core.util.ByteUtil;
 +import org.carbondata.core.util.CarbonUtil;
 +import org.carbondata.scan.expression.Expression;
 +import org.carbondata.scan.expression.exception.FilterUnsupportedException;
 +import org.carbondata.scan.filter.FilterUtil;
 +import org.carbondata.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
 +import org.carbondata.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
 +import org.carbondata.scan.processor.BlocksChunkHolder;
 +
 +public class RowLevelRangeLessThanFiterExecuterImpl extends RowLevelFilterExecuterImpl {
 +  private byte[][] filterRangeValues;
 +
 +  public RowLevelRangeLessThanFiterExecuterImpl(
 +      List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
 +      List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList, Expression exp,
 +      AbsoluteTableIdentifier tableIdentifier, byte[][] filterRangeValues,
 +      SegmentProperties segmentProperties) {
-     super(dimColEvaluatorInfoList, msrColEvalutorInfoList, exp, tableIdentifier, segmentProperties);
++    super(dimColEvaluatorInfoList, msrColEvalutorInfoList, exp, tableIdentifier, segmentProperties,
++        null);
 +    this.filterRangeValues = filterRangeValues;
 +  }
 +
 +  @Override public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue) {
 +    BitSet bitSet = new BitSet(1);
 +    byte[][] filterValues = this.filterRangeValues;
 +    int columnIndex = this.dimColEvaluatorInfoList.get(0).getColumnIndex();
 +    boolean isScanRequired = false;
 +    for (int k = 0; k < filterValues.length; k++) {
 +      // and filter-min should be positive
 +      int minCompare =
 +          ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMinValue[columnIndex]);
 +
 +      // if any filter applied is not in range of the min and max of the block,
 +      // then since it is a less-than filter, validate whether the block
 +      // min value is less than the applied filter member
 +      if (minCompare > 0) {
 +        isScanRequired = true;
 +        break;
 +      }
 +    }
 +    if (isScanRequired) {
 +      bitSet.set(0);
 +    }
 +    return bitSet;
 +
 +  }
 +
 +  @Override public BitSet applyFilter(BlocksChunkHolder blockChunkHolder)
 +      throws FilterUnsupportedException {
 +    if (!dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DICTIONARY)) {
 +      return super.applyFilter(blockChunkHolder);
 +    }
 +    int blockIndex = segmentProperties.getDimensionOrdinalToBlockMapping()
 +        .get(dimColEvaluatorInfoList.get(0).getColumnIndex());
 +    if (null == blockChunkHolder.getDimensionDataChunk()[blockIndex]) {
 +      blockChunkHolder.getDimensionDataChunk()[blockIndex] = blockChunkHolder.getDataBlock()
 +          .getDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
 +    }
 +    return getFilteredIndexes(blockChunkHolder.getDimensionDataChunk()[blockIndex],
 +        blockChunkHolder.getDataBlock().nodeSize());
 +  }
 +
 +  private BitSet getFilteredIndexes(DimensionColumnDataChunk dimensionColumnDataChunk,
 +      int numerOfRows) {
 +    byte[] defaultValue = null;
 +    if (dimColEvaluatorInfoList.get(0).getDimension().hasEncoding(Encoding.DIRECT_DICTIONARY)) {
 +      DirectDictionaryGenerator directDictionaryGenerator = DirectDictionaryKeyGeneratorFactory
 +          .getDirectDictionaryGenerator(
 +              dimColEvaluatorInfoList.get(0).getDimension().getDataType());
 +      int key = directDictionaryGenerator.generateDirectSurrogateKey(null) + 1;
 +      defaultValue = FilterUtil.getMaskKey(key, dimColEvaluatorInfoList.get(0).getDimension(),
 +          this.segmentProperties.getDimensionKeyGenerator());
 +    }
 +    if (null != dimensionColumnDataChunk.getAttributes().getInvertedIndexes()
 +        && dimensionColumnDataChunk instanceof FixedLengthDimensionDataChunk) {
 +      return setFilterdIndexToBitSetWithColumnIndex(
 +          (FixedLengthDimensionDataChunk) dimensionColumnDataChunk, numerOfRows, defaultValue);
 +    }
 +    return setFilterdIndexToBitSet(dimensionColumnDataChunk, numerOfRows, defaultValue);
 +  }
 +
 +  /**
 +   * Method will scan the block and find the range start index from which all members
 +   * will be considered for applying range filters. This method will be called if the
 +   * column is not sorted by default, so a column index mapping will be present for
 +   * accessing the members from the block.
 +   *
 +   * @param dimensionColumnDataChunk
 +   * @param numerOfRows
 +   * @return BitSet.
 +   */
 +  private BitSet setFilterdIndexToBitSetWithColumnIndex(
 +      FixedLengthDimensionDataChunk dimensionColumnDataChunk, int numerOfRows,
 +      byte[] defaultValue) {
 +    BitSet bitSet = new BitSet(numerOfRows);
 +    int[] columnIndex = dimensionColumnDataChunk.getAttributes().getInvertedIndexes();
 +    int start = 0;
 +    int last = 0;
 +    int startIndex = 0;
 +    int skip = 0;
 +    byte[][] filterValues = this.filterRangeValues;
 +
 +    //find the number of default values to skip the null value in case of direct dictionary
 +    if (null != defaultValue) {
 +      start = CarbonUtil
 +          .getFirstIndexUsingBinarySearch(dimensionColumnDataChunk, startIndex, numerOfRows - 1,
 +              defaultValue, false);
 +      if (start < 0) {
 +        skip = -(start + 1);
 +        // end of block
 +        if (skip == numerOfRows) {
 +          return bitSet;
 +        }
 +      } else {
 +        skip = start;
 +      }
 +      startIndex = skip;
 +    }
 +
 +    for (int i = 0; i < filterValues.length; i++) {
 +      start = CarbonUtil
 +          .getFirstIndexUsingBinarySearch(dimensionColumnDataChunk, startIndex, numerOfRows - 1,
 +              filterValues[i], false);
 +      // Logic will handle the case where the range filter member is not present in the block.
 +      // In this case the binary search will return the index from which the bits will be
 +      // set in order to apply the filter. This is a less-than filter, so the range will be
 +      // taken from the previous element which is less than the filter member.
 +      start = CarbonUtil.nextLesserValueToTarget(start, dimensionColumnDataChunk, filterValues[i]);
 +      if (start < 0) {
 +        start = -(start + 1);
 +        if (start == numerOfRows) {
 +          start = start - 1;
 +        }
 +        // Method will compare the tentative index value after binary search; this tentative
 +        // index needs to be compared with the filter member. If it is < the filter value,
 +        // then from that index the bitset will be considered for the filtering process.
 +        if (ByteUtil
 +            .compare(filterValues[i], dimensionColumnDataChunk.getChunkData(columnIndex[start]))
 +            < 0) {
 +          start = start - 1;
 +        }
 +      }
 +      last = start;
 +      for (int j = start; j >= skip; j--) {
 +        bitSet.set(columnIndex[j]);
 +        last--;
 +      }
 +      startIndex = last;
 +      if (startIndex >= 0) {
 +        break;
 +      }
 +    }
 +    return bitSet;
 +  }
 +
 +  /**
 +   * Method will scan the block and find the range start index from which all
 +   * members will be considered for applying range filters. This method will
 +   * be called if the column is sorted by default, so no column index
 +   * mapping will be present for accessing the members from the block.
 +   *
 +   * @param dimensionColumnDataChunk
 +   * @param numerOfRows
 +   * @return BitSet.
 +   */
 +  private BitSet setFilterdIndexToBitSet(DimensionColumnDataChunk dimensionColumnDataChunk,
 +      int numerOfRows, byte[] defaultValue) {
 +    BitSet bitSet = new BitSet(numerOfRows);
 +    if (dimensionColumnDataChunk instanceof FixedLengthDimensionDataChunk) {
 +      int start = 0;
 +      int last = 0;
 +      int startIndex = 0;
 +      int skip = 0;
 +      byte[][] filterValues = this.filterRangeValues;
 +      //find the number of default values to skip the null value in case of direct dictionary
 +      if (null != defaultValue) {
 +        start = CarbonUtil.getFirstIndexUsingBinarySearch(
 +            (FixedLengthDimensionDataChunk) dimensionColumnDataChunk, startIndex, numerOfRows - 1,
 +            defaultValue, false);
 +        if (start < 0) {
 +          skip = -(start + 1);
 +          // end of block
 +          if (skip == numerOfRows) {
 +            return bitSet;
 +          }
 +        } else {
 +          skip = start;
 +        }
 +        startIndex = skip;
 +      }
 +      for (int k = 0; k < filterValues.length; k++) {
 +        start = CarbonUtil.getFirstIndexUsingBinarySearch(
 +            (FixedLengthDimensionDataChunk) dimensionColumnDataChunk, startIndex, numerOfRows - 1,
 +            filterValues[k], false);
 +        start = CarbonUtil.nextLesserValueToTarget(start,
 +            (FixedLengthDimensionDataChunk) dimensionColumnDataChunk, filterValues[k]);
 +        if (start < 0) {
 +          start = -(start + 1);
 +          if (start >= numerOfRows) {
 +            start = numerOfRows - 1;
 +          }
 +          // Method will compare the tentative index value after binary search; this tentative
 +          // index needs to be compared with the filter member. If it is < the filter value,
 +          // then from that index the bitset will be considered for the filtering process.
 +          if (ByteUtil.compare(filterValues[k], dimensionColumnDataChunk.getChunkData(start)) < 0) {
 +            start = start - 1;
 +          }
 +        }
 +        last = start;
 +        for (int j = start; j >= skip; j--) {
 +          bitSet.set(j);
 +          last--;
 +        }
 +        startIndex = last;
 +        if (startIndex <= 0) {
 +          break;
 +        }
 +      }
 +    }
 +    return bitSet;
 +  }
 +}
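
The strict less-than executor above additionally calls CarbonUtil.nextLesserValueToTarget so that
rows equal to the filter member are excluded from the result. The sketch below shows the
equivalent boundary adjustment on a sorted int[]; lastIndexStrictlyLessThan is a hypothetical,
simplified stand-in for that helper, not the CarbonUtil implementation.

    import java.util.Arrays;
    import java.util.BitSet;

    public final class StrictLessThanBoundarySketch {

      // Returns the last index whose value is strictly smaller than the target, or -1 if none.
      static int lastIndexStrictlyLessThan(int[] sortedColumn, int target) {
        int pos = Arrays.binarySearch(sortedColumn, target);
        if (pos < 0) {
          return -(pos + 1) - 1;  // insertion point minus one = last smaller element (or -1)
        }
        while (pos >= 0 && sortedColumn[pos] == target) {
          pos--;                  // step over duplicates of the boundary value
        }
        return pos;
      }

      static BitSet rowsLessThan(int[] sortedColumn, int filter) {
        BitSet bits = new BitSet(sortedColumn.length);
        int end = lastIndexStrictlyLessThan(sortedColumn, filter);
        if (end >= 0) {
          bits.set(0, end + 1);
        }
        return bits;
      }

      public static void main(String[] args) {
        int[] column = {2, 4, 4, 7, 9};
        System.out.println(rowsLessThan(column, 4)); // {0}: only 2 is strictly less than 4
        System.out.println(rowsLessThan(column, 8)); // {0, 1, 2, 3}
      }
    }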

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
index ad7942a,0000000..0856846
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
@@@ -1,206 -1,0 +1,195 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.carbondata.scan.filter.resolver.resolverinfo;
 +
 +import java.io.Serializable;
 +import java.util.ArrayList;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +
 +import org.carbondata.core.carbon.datastore.IndexKey;
 +import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension;
 +import org.carbondata.scan.expression.exception.FilterUnsupportedException;
 +import org.carbondata.scan.filter.DimColumnFilterInfo;
 +import org.carbondata.scan.filter.GenericQueryType;
 +import org.carbondata.scan.filter.resolver.metadata.FilterResolverMetadata;
 +import org.carbondata.scan.filter.resolver.resolverinfo.visitable.ResolvedFilterInfoVisitable;
 +import org.carbondata.scan.filter.resolver.resolverinfo.visitor.ResolvedFilterInfoVisitorIntf;
 +
 +public class DimColumnResolvedFilterInfo implements Serializable, ResolvedFilterInfoVisitable {
 +  /**
 +   *
 +   */
 +  private static final long serialVersionUID = 3428115141211084114L;
 +
 +  /**
 +   * column index in file
 +   */
 +  private int columnIndex = -1;
 +
 +  /**
 +   * need compressed data from file
 +   */
 +  private boolean needCompressedData;
 +
 +  /**
 +   * rowIndex
 +   */
 +  private int rowIndex = -1;
 +
 +  private boolean isDimensionExistsInCurrentSilce = true;
 +
 +  private int rsSurrogates;
 +
 +  private String defaultValue;
 +
-   private transient Map<Integer, GenericQueryType> complexTypesWithBlockStartIndex;
- 
 +  private CarbonDimension dimension;
 +
 +  /**
 +   * start index key of the block based on the keygenerator
 +   */
 +  private transient IndexKey starIndexKey;
 +
 +  /**
 +   * end index key which is formed considering the max surrogate values
 +   * from the dictionary cache
 +   */
 +  private transient IndexKey endIndexKey;
 +
 +  /**
 +   * resolved filter object of a particular filter Expression.
 +   */
 +  private DimColumnFilterInfo resolvedFilterValueObj;
 +
 +  private Map<CarbonDimension, List<DimColumnFilterInfo>> dimensionResolvedFilter;
 +
 +  public DimColumnResolvedFilterInfo() {
 +    dimensionResolvedFilter = new HashMap<CarbonDimension, List<DimColumnFilterInfo>>(20);
 +  }
 +
 +  public IndexKey getStarIndexKey() {
 +    return starIndexKey;
 +  }
 +
 +  public void setStarIndexKey(IndexKey starIndexKey) {
 +    this.starIndexKey = starIndexKey;
 +  }
 +
 +  public IndexKey getEndIndexKey() {
 +    return endIndexKey;
 +  }
 +
 +  public void setEndIndexKey(IndexKey endIndexKey) {
 +    this.endIndexKey = endIndexKey;
 +  }
 +
 +  public void addDimensionResolvedFilterInstance(CarbonDimension dimension,
 +      DimColumnFilterInfo filterResolvedObj) {
 +    List<DimColumnFilterInfo> currentVals = dimensionResolvedFilter.get(dimension);
 +    if (null == currentVals) {
 +      currentVals = new ArrayList<DimColumnFilterInfo>(20);
 +      currentVals.add(filterResolvedObj);
 +      dimensionResolvedFilter.put(dimension, currentVals);
 +    } else {
 +      currentVals.add(filterResolvedObj);
 +    }
 +  }
 +
 +  public Map<CarbonDimension, List<DimColumnFilterInfo>> getDimensionResolvedFilterInstance() {
 +    return dimensionResolvedFilter;
 +  }
 +
-   public Map<Integer, GenericQueryType> getComplexTypesWithBlockStartIndex() {
-     return complexTypesWithBlockStartIndex;
-   }
- 
-   public void setComplexTypesWithBlockStartIndex(
-       Map<Integer, GenericQueryType> complexTypesWithBlockStartIndex) {
-     this.complexTypesWithBlockStartIndex = complexTypesWithBlockStartIndex;
-   }
- 
 +  public CarbonDimension getDimension() {
 +    return dimension;
 +  }
 +
 +  public void setDimension(CarbonDimension dimension) {
 +    this.dimension = dimension;
 +  }
 +
 +  public int getColumnIndex() {
 +    return columnIndex;
 +  }
 +
 +  public void setColumnIndex(int columnIndex) {
 +    this.columnIndex = columnIndex;
 +  }
 +
 +  public boolean isNeedCompressedData() {
 +    return needCompressedData;
 +  }
 +
 +  public void setNeedCompressedData(boolean needCompressedData) {
 +    this.needCompressedData = needCompressedData;
 +  }
 +
 +  public DimColumnFilterInfo getFilterValues() {
 +    return resolvedFilterValueObj;
 +  }
 +
 +  public void setFilterValues(final DimColumnFilterInfo resolvedFilterValueObj) {
 +    this.resolvedFilterValueObj = resolvedFilterValueObj;
 +  }
 +
 +  public int getRowIndex() {
 +    return rowIndex;
 +  }
 +
 +  public void setRowIndex(int rowIndex) {
 +    this.rowIndex = rowIndex;
 +  }
 +
 +  public boolean isDimensionExistsInCurrentSilce() {
 +    return isDimensionExistsInCurrentSilce;
 +  }
 +
 +  public void setDimensionExistsInCurrentSilce(boolean isDimensionExistsInCurrentSilce) {
 +    this.isDimensionExistsInCurrentSilce = isDimensionExistsInCurrentSilce;
 +  }
 +
 +  public int getRsSurrogates() {
 +    return rsSurrogates;
 +  }
 +
 +  public void setRsSurrogates(int rsSurrogates) {
 +    this.rsSurrogates = rsSurrogates;
 +  }
 +
 +  public String getDefaultValue() {
 +    return defaultValue;
 +  }
 +
 +  public void setDefaultValue(String defaultValue) {
 +    this.defaultValue = defaultValue;
 +  }
 +
 +  @Override public void populateFilterInfoBasedOnColumnType(ResolvedFilterInfoVisitorIntf visitor,
 +      FilterResolverMetadata metadata) throws FilterUnsupportedException {
 +    if (null != visitor) {
 +      visitor.populateFilterResolvedInfo(this, metadata);
 +      this.addDimensionResolvedFilterInstance(metadata.getColumnExpression().getDimension(),
 +          this.getFilterValues());
 +      this.setDimension(metadata.getColumnExpression().getDimension());
 +      this.setColumnIndex(metadata.getColumnExpression().getDimension().getOrdinal());
 +    }
 +
 +  }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java
index 5131cca,0000000..9fc08c6
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java
@@@ -1,88 -1,0 +1,101 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.carbondata.scan.filter.resolver.resolverinfo.visitor;
 +
 +import java.util.ArrayList;
 +import java.util.Collections;
 +import java.util.List;
 +
 +import org.carbondata.common.logging.LogService;
 +import org.carbondata.common.logging.LogServiceFactory;
 +import org.carbondata.core.carbon.AbsoluteTableIdentifier;
 +import org.carbondata.core.constants.CarbonCommonConstants;
 +import org.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
 +import org.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 +import org.carbondata.scan.expression.ColumnExpression;
 +import org.carbondata.scan.expression.exception.FilterIllegalMemberException;
 +import org.carbondata.scan.expression.exception.FilterUnsupportedException;
 +import org.carbondata.scan.filter.DimColumnFilterInfo;
++import org.carbondata.scan.filter.FilterUtil;
 +import org.carbondata.scan.filter.resolver.metadata.FilterResolverMetadata;
 +import org.carbondata.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
 +
 +public class CustomTypeDictionaryVisitor implements ResolvedFilterInfoVisitorIntf {
 +  private static final LogService LOGGER =
 +      LogServiceFactory.getLogService(CustomTypeDictionaryVisitor.class.getName());
 +
 +  /**
 +   * This Visitor method is used to resolve or populate the filter details
 +   * by using the custom type dictionary value; the filter members will be resolved using
 +   * a custom type function which will generate the dictionary for the direct column type filter members
 +   *
 +   * @param visitableObj
 +   * @param metadata
 +   * @throws FilterUnsupportedException if an exception occurs while evaluating
 +   * filter models.
 +   */
 +  public void populateFilterResolvedInfo(DimColumnResolvedFilterInfo visitableObj,
 +      FilterResolverMetadata metadata) throws FilterUnsupportedException {
 +    DimColumnFilterInfo resolvedFilterObject = null;
 +
 +    List<String> evaluateResultListFinal;
 +    try {
 +      evaluateResultListFinal = metadata.getExpression().evaluate(null).getListAsString();
 +    } catch (FilterIllegalMemberException e) {
 +      throw new FilterUnsupportedException(e);
 +    }
++    boolean isNotTimestampType = FilterUtil.checkIfDataTypeNotTimeStamp(metadata.getExpression());
 +    resolvedFilterObject = getDirectDictionaryValKeyMemberForFilter(metadata.getTableIdentifier(),
-         metadata.getColumnExpression(), evaluateResultListFinal, metadata.isIncludeFilter());
++        metadata.getColumnExpression(), evaluateResultListFinal, metadata.isIncludeFilter(),
++        isNotTimestampType);
 +    visitableObj.setFilterValues(resolvedFilterObject);
 +  }
 +
 +  private DimColumnFilterInfo getDirectDictionaryValKeyMemberForFilter(
 +      AbsoluteTableIdentifier tableIdentifier, ColumnExpression columnExpression,
-       List<String> evaluateResultListFinal, boolean isIncludeFilter) {
++      List<String> evaluateResultListFinal, boolean isIncludeFilter, boolean isNotTimestampType) {
 +    List<Integer> surrogates = new ArrayList<Integer>(20);
 +    DirectDictionaryGenerator directDictionaryGenerator = DirectDictionaryKeyGeneratorFactory
 +        .getDirectDictionaryGenerator(columnExpression.getDimension().getDataType());
 +    // Reading the dictionary value direct
-     for (String filterMember : evaluateResultListFinal) {
-       surrogates.add(directDictionaryGenerator.generateDirectSurrogateKey(filterMember,
-           CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
-     }
++    getSurrogateValuesForDictionary(evaluateResultListFinal, surrogates, isNotTimestampType,
++        directDictionaryGenerator);
++
 +    Collections.sort(surrogates);
 +    DimColumnFilterInfo columnFilterInfo = null;
 +    if (surrogates.size() > 0) {
 +      columnFilterInfo = new DimColumnFilterInfo();
 +      columnFilterInfo.setIncludeFilter(isIncludeFilter);
 +      columnFilterInfo.setFilterList(surrogates);
 +    }
 +    return columnFilterInfo;
 +  }
 +
++  private void getSurrogateValuesForDictionary(List<String> evaluateResultListFinal,
++      List<Integer> surrogates, boolean isNotTimestampType,
++      DirectDictionaryGenerator directDictionaryGenerator) {
++    String timeFormat = CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT;
++    if (isNotTimestampType) {
++      timeFormat = null;
++    }
++    for (String filterMember : evaluateResultListFinal) {
++      surrogates
++          .add(directDictionaryGenerator.generateDirectSurrogateKey(filterMember, timeFormat));
++    }
++  }
 +}
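
The change above routes the parse format through a flag: when the direct-dictionary column is not
a timestamp type, a null format is passed to generateDirectSurrogateKey, otherwise
CARBON_TIMESTAMP_DEFAULT_FORMAT is used. The sketch below mirrors only that selection; the
surrogate encoding (seconds since epoch plus 2, with 1 reserved for the null member) and the
format string "yyyy-MM-dd HH:mm:ss" are assumptions for illustration, not CarbonData's actual
generator.

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public final class DirectDictionarySurrogateSketch {

      // Stand-in for a direct dictionary generator: a member becomes "seconds since epoch + 2",
      // keeping surrogate 1 reserved for the null member. Illustrative encoding only.
      static int generateSurrogate(String member, String timeFormat) {
        try {
          long millis = timeFormat == null
              ? Long.parseLong(member)                                   // non-timestamp member, already numeric
              : new SimpleDateFormat(timeFormat).parse(member).getTime();
          return (int) (millis / 1000L) + 2;
        } catch (ParseException | NumberFormatException e) {
          return 1; // unparsable member maps to the null surrogate
        }
      }

      // Mirrors getSurrogateValuesForDictionary: the time format is only applied for timestamp columns.
      static List<Integer> resolveSurrogates(List<String> members, boolean isNotTimestampType) {
        String timeFormat = isNotTimestampType ? null : "yyyy-MM-dd HH:mm:ss";
        List<Integer> surrogates = new ArrayList<Integer>();
        for (String member : members) {
          surrogates.add(generateSurrogate(member, timeFormat));
        }
        Collections.sort(surrogates);
        return surrogates;
      }

      public static void main(String[] args) {
        System.out.println(resolveSurrogates(Arrays.asList("2016-07-20 10:14:11"), false));
        System.out.println(resolveSurrogates(Arrays.asList("86400000"), true));
      }
    }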

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/DictionaryColumnVisitor.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/DictionaryColumnVisitor.java
index f531474,0000000..c133d3d
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/DictionaryColumnVisitor.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/DictionaryColumnVisitor.java
@@@ -1,65 -1,0 +1,72 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.carbondata.scan.filter.resolver.resolverinfo.visitor;
 +
++import java.util.Collections;
 +import java.util.List;
 +
 +import org.carbondata.common.logging.LogService;
 +import org.carbondata.common.logging.LogServiceFactory;
 +import org.carbondata.scan.executor.exception.QueryExecutionException;
 +import org.carbondata.scan.expression.exception.FilterIllegalMemberException;
 +import org.carbondata.scan.expression.exception.FilterUnsupportedException;
 +import org.carbondata.scan.filter.DimColumnFilterInfo;
 +import org.carbondata.scan.filter.FilterUtil;
 +import org.carbondata.scan.filter.resolver.metadata.FilterResolverMetadata;
 +import org.carbondata.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
 +
 +public class DictionaryColumnVisitor implements ResolvedFilterInfoVisitorIntf {
 +  private static final LogService LOGGER =
 +      LogServiceFactory.getLogService(DictionaryColumnVisitor.class.getName());
 +
 +  /**
 +   * This visitor method populates the visitableObj with dictionary filter details,
 +   * where the filter values are resolved using the dictionary cache.
 +   *
 +   * @param visitableObj
 +   * @param metadata
 +   * @throws FilterUnsupportedException if an exception occurs while evaluating the
 +   * filter models.
 +   * @throws QueryExecutionException
 +   */
 +  public void populateFilterResolvedInfo(DimColumnResolvedFilterInfo visitableObj,
 +      FilterResolverMetadata metadata) throws FilterUnsupportedException {
 +    DimColumnFilterInfo resolvedFilterObject = null;
 +    List<String> evaluateResultListFinal;
 +    try {
 +      evaluateResultListFinal = metadata.getExpression().evaluate(null).getListAsString();
 +    } catch (FilterIllegalMemberException e) {
 +      throw new FilterUnsupportedException(e);
 +    }
 +    try {
 +      resolvedFilterObject = FilterUtil
 +          .getFilterValues(metadata.getTableIdentifier(), metadata.getColumnExpression(),
 +              evaluateResultListFinal, metadata.isIncludeFilter());
++      if (!metadata.isIncludeFilter() && null != resolvedFilterObject) {
++        // Add the default surrogate key (1) of the null member so that null values
++        // are also excluded from the result, as per Hive compatibility.
++        resolvedFilterObject.getFilterList().add(1);
++        Collections.sort(resolvedFilterObject.getFilterList());
++      }
 +    } catch (QueryExecutionException e) {
 +      throw new FilterUnsupportedException(e);
 +    }
 +    visitableObj.setFilterValues(resolvedFilterObject);
 +  }
 +}
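
The block added above appends the default surrogate key of the null member to exclude filters so that null rows are filtered out as well, matching Hive semantics. A self-contained illustration of just that adjustment (the surrogate value 1 comes from the code above; the rest of the names and values are illustrative):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class ExcludeNullSurrogateSketch {
  public static void main(String[] args) {
    boolean isIncludeFilter = false;               // exclude (NOT IN) filter
    List<Integer> surrogates = new ArrayList<Integer>();
    surrogates.add(5);                             // surrogates resolved from the dictionary cache
    surrogates.add(3);
    if (!isIncludeFilter) {
      surrogates.add(1);                           // default surrogate key of the null member
      Collections.sort(surrogates);
    }
    System.out.println(surrogates);                // [1, 3, 5]
  }
}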

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java
index 6958d61,0000000..3078027
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java
@@@ -1,62 -1,0 +1,68 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
- package org.carbondata.scan.filter.resolver.resolverinfo.visitor;
++package org.carbondata.scan.filter.resolver.resolverinfo.visitor;
 +
 +import java.util.List;
 +
 +import org.carbondata.common.logging.LogService;
 +import org.carbondata.common.logging.LogServiceFactory;
++import org.carbondata.core.constants.CarbonCommonConstants;
 +import org.carbondata.scan.expression.exception.FilterIllegalMemberException;
 +import org.carbondata.scan.expression.exception.FilterUnsupportedException;
 +import org.carbondata.scan.filter.DimColumnFilterInfo;
 +import org.carbondata.scan.filter.FilterUtil;
 +import org.carbondata.scan.filter.resolver.metadata.FilterResolverMetadata;
 +import org.carbondata.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
 +
 +public class NoDictionaryTypeVisitor implements ResolvedFilterInfoVisitorIntf {
 +  private static final LogService LOGGER =
 +      LogServiceFactory.getLogService(NoDictionaryTypeVisitor.class.getName());
 +
 +  /**
 +   * This visitor method updates the filter related details in visitableObj. For no-dictionary
 +   * type columns the filter members are resolved directly; there is no need to look them up
 +   * in the dictionary since they are not part of it, and the actual data can be converted to
 +   * byte[] and set directly. This type of encoding is effective when the column has very high
 +   * cardinality.
 +   *
 +   * @param visitableObj
 +   * @param metadata
 +   * @throws FilterUnsupportedException if an exception occurs while evaluating the
 +   * filter models.
 +   */
 +  public void populateFilterResolvedInfo(DimColumnResolvedFilterInfo visitableObj,
 +      FilterResolverMetadata metadata) throws FilterUnsupportedException {
 +    DimColumnFilterInfo resolvedFilterObject = null;
 +    List<String> evaluateResultListFinal;
 +    try {
 +      evaluateResultListFinal = metadata.getExpression().evaluate(null).getListAsString();
++      // Add the default null member so that null values are also excluded
++      // from the result, as per Hive compatibility.
++      if (!metadata.isIncludeFilter() && !evaluateResultListFinal
++          .contains(CarbonCommonConstants.MEMBER_DEFAULT_VAL)) {
++        evaluateResultListFinal.add(CarbonCommonConstants.MEMBER_DEFAULT_VAL);
++      }
 +    } catch (FilterIllegalMemberException e) {
 +      throw new FilterUnsupportedException(e);
 +    }
 +    resolvedFilterObject = FilterUtil
 +        .getNoDictionaryValKeyMemberForFilter(metadata.getTableIdentifier(),
 +            metadata.getColumnExpression(), evaluateResultListFinal, metadata.isIncludeFilter());
 +    visitableObj.setFilterValues(resolvedFilterObject);
 +  }
 +}
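
For no-dictionary columns the filter members are used directly as byte[] keys, and the block added above also appends CarbonCommonConstants.MEMBER_DEFAULT_VAL to exclude filters so that null values drop out of the result. A self-contained sketch of that idea (the "@NU#LL$!" literal and the unsigned byte ordering are assumptions for illustration; FilterUtil.getNoDictionaryValKeyMemberForFilter is the real entry point):

import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class NoDictionaryFilterKeySketch {
  public static void main(String[] args) {
    boolean isIncludeFilter = false;                 // exclude (NOT IN) filter
    String memberDefaultVal = "@NU#LL$!";            // assumed value of MEMBER_DEFAULT_VAL
    List<String> members = new ArrayList<String>();
    members.add("india");
    members.add("china");
    if (!isIncludeFilter && !members.contains(memberDefaultVal)) {
      members.add(memberDefaultVal);                 // also filter out the null member
    }
    List<byte[]> filterKeys = new ArrayList<byte[]>();
    for (String member : members) {
      filterKeys.add(member.getBytes(Charset.forName("UTF-8")));
    }
    // Keep the keys sorted by unsigned byte comparison so they can be searched efficiently later.
    Collections.sort(filterKeys, new Comparator<byte[]>() {
      public int compare(byte[] a, byte[] b) {
        int len = Math.min(a.length, b.length);
        for (int i = 0; i < len; i++) {
          int cmp = (a[i] & 0xFF) - (b[i] & 0xFF);
          if (cmp != 0) {
            return cmp;
          }
        }
        return a.length - b.length;
      }
    });
    for (byte[] key : filterKeys) {
      System.out.println(new String(key, Charset.forName("UTF-8")));
    }
  }
}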

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/model/QueryModel.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/model/QueryModel.java
index c979b6d,0000000..82a6221
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/model/QueryModel.java
+++ b/core/src/main/java/org/carbondata/scan/model/QueryModel.java
@@@ -1,516 -1,0 +1,522 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.carbondata.scan.model;
 +
 +import java.io.Serializable;
 +import java.util.ArrayList;
 +import java.util.List;
 +import java.util.Map;
 +
 +import org.carbondata.core.cache.dictionary.Dictionary;
 +import org.carbondata.core.carbon.AbsoluteTableIdentifier;
 +import org.carbondata.core.carbon.datastore.block.TableBlockInfo;
 +import org.carbondata.core.carbon.metadata.schema.table.CarbonTable;
 +import org.carbondata.core.carbon.metadata.schema.table.column.CarbonColumn;
 +import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension;
 +import org.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
++import org.carbondata.core.carbon.querystatistics.QueryStatisticsRecorder;
++import org.carbondata.core.constants.CarbonCommonConstants;
 +import org.carbondata.core.util.CarbonUtil;
 +import org.carbondata.scan.expression.ColumnExpression;
 +import org.carbondata.scan.expression.Expression;
 +import org.carbondata.scan.expression.UnknownExpression;
 +import org.carbondata.scan.expression.conditional.ConditionalExpression;
 +import org.carbondata.scan.filter.resolver.FilterResolverIntf;
 +
 +/**
 + * Query model which holds all the details
 + * about the query. This will be sent from the driver to the executor
 + * and referred to while executing the query.
 + */
 +public class QueryModel implements Serializable {
 +
 +  /**
 +   * serialization version
 +   */
 +  private static final long serialVersionUID = -4674677234007089052L;
- 
++  /**
++   * this will hold the mapping from dictionary dimension columns
++   * to their loaded dictionaries
++   */
++  public transient Map<String, Dictionary> columnToDictionaryMapping;
++  /**
++   * Number of records to keep in memory.
++   */
++  public int inMemoryRecordSize;
 +  /**
 +   * list of dimensions selected in the query
 +   */
 +  private List<QueryDimension> queryDimension;
- 
 +  /**
 +   * list of dimensions on which sorting is applied
 +   */
 +  private List<QueryDimension> sortDimension;
- 
 +  /**
 +   * list of measures selected in the query
 +   */
 +  private List<QueryMeasure> queryMeasures;
- 
 +  /**
 +   * query id
 +   */
 +  private String queryId;
- 
 +  /**
 +   * to check if it is an aggregate table
 +   */
 +  private boolean isAggTable;
- 
 +  /**
 +   * filter tree
 +   */
 +  private FilterResolverIntf filterExpressionResolverTree;
- 
 +  /**
 +   * in case of a limit query we need to know how many
 +   * records will be passed from the executor
 +   */
 +  private int limit;
 +
 +  /**
 +   * to check if it is a count star query, since processing will be different
 +   */
 +  private boolean isCountStarQuery;
- 
 +  /**
 +   * to check whether aggregation is required during query execution
 +   */
 +  private boolean detailQuery;
- 
 +  /**
 +   * table block information in which query will be executed
 +   */
 +  private List<TableBlockInfo> tableBlockInfos;
- 
 +  /**
 +   * order in which the dimensions will be sorted
 +   */
 +  private byte[] sortOrder;
- 
 +  /**
 +   * absolute table identifier
 +   */
 +  private AbsoluteTableIdentifier absoluteTableIdentifier;
 +  /**
 +   * in case of a detail query with sort we spill to disk;
 +   * this location will be used to write the temp files
 +   */
 +  private String queryTempLocation;
- 
 +  /**
 +   * To let query engines like Spark and Hive handle most of the computation, carbon should
 +   * give raw detailed records to them.
 +   */
 +  private boolean forcedDetailRawQuery;
- 
 +  /**
 +   * partition column list
 +   */
 +  private List<String> paritionColumns;
- 
-   /**
-    * this will hold the information about the dictionary dimension
-    * which to
-    */
-   public transient Map<String, Dictionary> columnToDictionaryMapping;
- 
 +  /**
 +   * table on which query will be executed
 +   * TODO need to remove this and pass only the path
 +   * and carbon metadata will load the table from metadata file
 +   */
 +  private CarbonTable table;
 +
 +  /**
 +   * This is used only when [forcedDetailRawQuery = true]. By default forcedDetailRawQuery
 +   * returns dictionary values; if the user wants the raw bytes instead, this field is set
 +   * to true.
 +   */
 +  private boolean rawBytesDetailQuery;
 +
++  private QueryStatisticsRecorder statisticsRecorder;
++
 +  public QueryModel() {
 +    tableBlockInfos = new ArrayList<TableBlockInfo>();
 +    queryDimension = new ArrayList<QueryDimension>();
 +    queryMeasures = new ArrayList<QueryMeasure>();
 +    sortDimension = new ArrayList<QueryDimension>();
 +    sortOrder = new byte[0];
 +    paritionColumns = new ArrayList<String>();
- 
 +  }
 +
 +  public static QueryModel createModel(AbsoluteTableIdentifier absoluteTableIdentifier,
 +      CarbonQueryPlan queryPlan, CarbonTable carbonTable) {
 +    QueryModel queryModel = new QueryModel();
 +    String factTableName = carbonTable.getFactTableName();
 +    queryModel.setAbsoluteTableIdentifier(absoluteTableIdentifier);
 +
 +    fillQueryModel(queryPlan, carbonTable, queryModel, factTableName);
 +
 +    queryModel.setLimit(queryPlan.getLimit());
 +    queryModel.setDetailQuery(queryPlan.isDetailQuery());
 +    queryModel.setForcedDetailRawQuery(queryPlan.isRawDetailQuery());
 +    queryModel.setQueryId(queryPlan.getQueryId());
 +    queryModel.setQueryTempLocation(queryPlan.getOutLocationPath());
 +    return queryModel;
 +  }
 +
 +  private static void fillQueryModel(CarbonQueryPlan queryPlan, CarbonTable carbonTable,
 +      QueryModel queryModel, String factTableName) {
 +    queryModel.setAbsoluteTableIdentifier(carbonTable.getAbsoluteTableIdentifier());
 +    queryModel.setQueryDimension(queryPlan.getDimensions());
 +    fillSortInfoInModel(queryModel, queryPlan.getSortedDimemsions());
-     queryModel.setQueryMeasures(
-         queryPlan.getMeasures());
++    queryModel.setQueryMeasures(queryPlan.getMeasures());
 +    if (null != queryPlan.getFilterExpression()) {
 +      processFilterExpression(queryPlan.getFilterExpression(),
 +          carbonTable.getDimensionByTableName(factTableName),
 +          carbonTable.getMeasureByTableName(factTableName));
 +    }
 +    queryModel.setCountStarQuery(queryPlan.isCountStarQuery());
 +    //TODO need to remove this code, and executor will load the table
 +    // from file metadata
 +    queryModel.setTable(carbonTable);
 +  }
 +
 +  private static void fillSortInfoInModel(QueryModel executorModel,
 +      List<QueryDimension> sortedDims) {
 +    if (null != sortedDims) {
 +      byte[] sortOrderByteArray = new byte[sortedDims.size()];
 +      int i = 0;
 +      for (QueryColumn mdim : sortedDims) {
 +        sortOrderByteArray[i++] = (byte) mdim.getSortOrder().ordinal();
 +      }
 +      executorModel.setSortOrder(sortOrderByteArray);
 +      executorModel.setSortDimension(sortedDims);
 +    } else {
 +      executorModel.setSortOrder(new byte[0]);
 +      executorModel.setSortDimension(new ArrayList<QueryDimension>(0));
 +    }
 +
 +  }
 +
 +  public static void processFilterExpression(
 +      Expression filterExpression, List<CarbonDimension> dimensions, List<CarbonMeasure> measures) {
 +    if (null != filterExpression) {
 +      if (null != filterExpression.getChildren() && filterExpression.getChildren().size() == 0) {
 +        if (filterExpression instanceof ConditionalExpression) {
 +          List<ColumnExpression> listOfCol =
 +              ((ConditionalExpression) filterExpression).getColumnList();
 +          for (ColumnExpression expression : listOfCol) {
 +            setDimAndMsrColumnNode(dimensions, measures, (ColumnExpression) expression);
 +          }
 +
 +        }
 +      }
 +      for (Expression expression : filterExpression.getChildren()) {
 +
 +        if (expression instanceof ColumnExpression) {
 +          setDimAndMsrColumnNode(dimensions, measures, (ColumnExpression) expression);
 +        } else if (expression instanceof UnknownExpression) {
 +          UnknownExpression exp = ((UnknownExpression) expression);
 +          List<ColumnExpression> listOfColExpression = exp.getAllColumnList();
 +          for (ColumnExpression col : listOfColExpression) {
 +            setDimAndMsrColumnNode(dimensions, measures, col);
 +          }
 +        } else {
 +          processFilterExpression(expression, dimensions, measures);
 +        }
 +      }
 +    }
 +
 +  }
 +
 +  private static CarbonMeasure getCarbonMetadataMeasure(String name, List<CarbonMeasure> measures) {
 +    for (CarbonMeasure measure : measures) {
 +      if (measure.getColName().equalsIgnoreCase(name)) {
 +        return measure;
 +      }
 +    }
 +    return null;
 +  }
 +
 +  private static void setDimAndMsrColumnNode(List<CarbonDimension> dimensions,
 +      List<CarbonMeasure> measures, ColumnExpression col) {
 +    CarbonDimension dim;
 +    CarbonMeasure msr;
 +    String columnName;
 +    columnName = col.getColumnName();
 +    dim = CarbonUtil.findDimension(dimensions, columnName);
 +    col.setCarbonColumn(dim);
 +    col.setDimension(dim);
 +    col.setDimension(true);
 +    if (null == dim) {
 +      msr = getCarbonMetadataMeasure(columnName, measures);
 +      col.setCarbonColumn(msr);
 +      col.setDimension(false);
 +    }
 +  }
 +
 +  /**
 +   * It gets the projection columns
 +   */
 +  public CarbonColumn[] getProjectionColumns() {
 +    CarbonColumn[] carbonColumns =
-         new CarbonColumn[getQueryDimension().size() + getQueryMeasures()
-             .size()];
++        new CarbonColumn[getQueryDimension().size() + getQueryMeasures().size()];
 +    for (QueryDimension dimension : getQueryDimension()) {
 +      carbonColumns[dimension.getQueryOrder()] = dimension.getDimension();
 +    }
 +    for (QueryMeasure msr : getQueryMeasures()) {
 +      carbonColumns[msr.getQueryOrder()] = msr.getMeasure();
 +    }
 +    return carbonColumns;
 +  }
 +
 +  /**
 +   * @return the queryDimension
 +   */
 +  public List<QueryDimension> getQueryDimension() {
 +    return queryDimension;
 +  }
 +
 +  /**
 +   * @param queryDimension the queryDimension to set
 +   */
 +  public void setQueryDimension(List<QueryDimension> queryDimension) {
 +    this.queryDimension = queryDimension;
 +  }
 +
 +  /**
 +   * @return the queryMeasures
 +   */
 +  public List<QueryMeasure> getQueryMeasures() {
 +    return queryMeasures;
 +  }
 +
 +  /**
 +   * @param queryMeasures the queryMeasures to set
 +   */
 +  public void setQueryMeasures(List<QueryMeasure> queryMeasures) {
 +    this.queryMeasures = queryMeasures;
 +  }
 +
 +  /**
 +   * @return the queryId
 +   */
 +  public String getQueryId() {
 +    return queryId;
 +  }
 +
 +  /**
 +   * @param queryId the queryId to set
 +   */
 +  public void setQueryId(String queryId) {
 +    this.queryId = queryId;
 +  }
 +
 +  /**
 +   * @return the isAggTable
 +   */
 +  public boolean isAggTable() {
 +    return isAggTable;
 +  }
 +
 +  /**
 +   * @param isAggTable the isAggTable to set
 +   */
 +  public void setAggTable(boolean isAggTable) {
 +    this.isAggTable = isAggTable;
 +  }
 +
 +  /**
 +   * @return the limit
 +   */
 +  public int getLimit() {
 +    return limit;
 +  }
 +
 +  /**
 +   * @param limit the limit to set
 +   */
 +  public void setLimit(int limit) {
 +    this.limit = limit;
 +  }
 +
 +  /**
 +   * @return the isCountStarQuery
 +   */
 +  public boolean isCountStarQuery() {
 +    return isCountStarQuery;
 +  }
 +
 +  /**
 +   * @param isCountStarQuery the isCountStarQuery to set
 +   */
 +  public void setCountStarQuery(boolean isCountStarQuery) {
 +    this.isCountStarQuery = isCountStarQuery;
 +  }
 +
 +  /**
 +   * @return the detailQuery
 +   */
 +  public boolean isDetailQuery() {
 +    return detailQuery;
 +  }
 +
 +  public void setDetailQuery(boolean detailQuery) {
 +    this.detailQuery = detailQuery;
 +  }
 +
 +  /**
 +   * @return the tableBlockInfos
 +   */
 +  public List<TableBlockInfo> getTableBlockInfos() {
 +    return tableBlockInfos;
 +  }
 +
 +  /**
 +   * @param tableBlockInfos the tableBlockInfos to set
 +   */
 +  public void setTableBlockInfos(List<TableBlockInfo> tableBlockInfos) {
 +    this.tableBlockInfos = tableBlockInfos;
 +  }
 +
 +  /**
 +   * @return the queryTempLocation
 +   */
 +  public String getQueryTempLocation() {
 +    return queryTempLocation;
 +  }
 +
 +  /**
 +   * @param queryTempLocation the queryTempLocation to set
 +   */
 +  public void setQueryTempLocation(String queryTempLocation) {
 +    this.queryTempLocation = queryTempLocation;
 +  }
 +
 +  /**
 +   * @return the sortOrder
 +   */
 +  public byte[] getSortOrder() {
 +    return sortOrder;
 +  }
 +
 +  /**
 +   * @param sortOrder the sortOrder to set
 +   */
 +  public void setSortOrder(byte[] sortOrder) {
 +    this.sortOrder = sortOrder;
 +  }
 +
 +  /**
 +   * @return the sortDimension
 +   */
 +  public List<QueryDimension> getSortDimension() {
 +    return sortDimension;
 +  }
 +
 +  /**
 +   * @param sortDimension the sortDimension to set
 +   */
 +  public void setSortDimension(List<QueryDimension> sortDimension) {
 +    this.sortDimension = sortDimension;
 +  }
 +
 +  /**
 +   * @return the filterEvaluatorTree
 +   */
 +  public FilterResolverIntf getFilterExpressionResolverTree() {
 +    return filterExpressionResolverTree;
 +  }
 +
 +  public void setFilterExpressionResolverTree(FilterResolverIntf filterExpressionResolverTree) {
 +    this.filterExpressionResolverTree = filterExpressionResolverTree;
 +  }
 +
 +  /**
 +   * @return the absoluteTableIdentifier
 +   */
 +  public AbsoluteTableIdentifier getAbsoluteTableIdentifier() {
 +    return absoluteTableIdentifier;
 +  }
 +
 +  /**
 +   * @param absoluteTableIdentifier the absoluteTableIdentifier to set
 +   */
 +  public void setAbsoluteTableIdentifier(AbsoluteTableIdentifier absoluteTableIdentifier) {
 +    this.absoluteTableIdentifier = absoluteTableIdentifier;
 +  }
 +
 +  /**
 +   * @return the paritionColumns
 +   */
 +  public List<String> getParitionColumns() {
 +    return paritionColumns;
 +  }
 +
 +  /**
 +   * @param paritionColumns the paritionColumns to set
 +   */
 +  public void setParitionColumns(List<String> paritionColumns) {
 +    this.paritionColumns = paritionColumns;
 +  }
 +
 +  /**
 +   * @return the table
 +   */
 +  public CarbonTable getTable() {
 +    return table;
 +  }
 +
 +  /**
 +   * @param table the table to set
 +   */
 +  public void setTable(CarbonTable table) {
 +    this.table = table;
 +  }
 +
 +  public boolean isForcedDetailRawQuery() {
 +    return forcedDetailRawQuery;
 +  }
 +
 +  public void setForcedDetailRawQuery(boolean forcedDetailRawQuery) {
 +    this.forcedDetailRawQuery = forcedDetailRawQuery;
 +  }
 +
 +  /**
 +   * @return
 +   */
 +  public Map<String, Dictionary> getColumnToDictionaryMapping() {
 +    return columnToDictionaryMapping;
 +  }
 +
 +  /**
 +   * @param columnToDictionaryMapping
 +   */
 +  public void setColumnToDictionaryMapping(Map<String, Dictionary> columnToDictionaryMapping) {
 +    this.columnToDictionaryMapping = columnToDictionaryMapping;
 +  }
 +
 +  public boolean isRawBytesDetailQuery() {
 +    return rawBytesDetailQuery;
 +  }
 +
 +  public void setRawBytesDetailQuery(boolean rawBytesDetailQuery) {
 +    this.rawBytesDetailQuery = rawBytesDetailQuery;
 +  }
++
++  public int getInMemoryRecordSize() {
++    return inMemoryRecordSize;
++  }
++
++  public void setInMemoryRecordSize(int inMemoryRecordSize) {
++    this.inMemoryRecordSize = inMemoryRecordSize;
++  }
++
++  public QueryStatisticsRecorder getStatisticsRecorder() {
++    return statisticsRecorder;
++  }
++
++  public void setStatisticsRecorder(QueryStatisticsRecorder statisticsRecorder) {
++    this.statisticsRecorder = statisticsRecorder;
++  }
 +}
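
One detail worth noting in getProjectionColumns() above: every projected dimension and measure is written into the output array at its query order, so the column layout of the result follows the SELECT list rather than the table schema. A self-contained illustration with plain strings (the names below are illustrative, not CarbonData classes):

public class ProjectionOrderSketch {
  public static void main(String[] args) {
    String[] dims = {"country", "name"};
    int[] dimQueryOrder = {0, 2};     // query order of each projected dimension
    String[] msrs = {"salary"};
    int[] msrQueryOrder = {1};        // query order of each projected measure

    String[] projection = new String[dims.length + msrs.length];
    for (int i = 0; i < dims.length; i++) {
      projection[dimQueryOrder[i]] = dims[i];
    }
    for (int i = 0; i < msrs.length; i++) {
      projection[msrQueryOrder[i]] = msrs[i];
    }
    // Prints: country, salary, name
    System.out.println(projection[0] + ", " + projection[1] + ", " + projection[2]);
  }
}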