You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by jb...@apache.org on 2016/06/23 14:16:13 UTC

[25/56] [abbrv] incubator-carbondata git commit: Refactor org.carbondata.query package (#692)

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/scan/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/util/DataTypeUtil.java b/core/src/main/java/org/carbondata/scan/util/DataTypeUtil.java
new file mode 100644
index 0000000..8f301ca
--- /dev/null
+++ b/core/src/main/java/org/carbondata/scan/util/DataTypeUtil.java
@@ -0,0 +1,175 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.carbondata.scan.util;
+
+import java.math.BigDecimal;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.carbondata.common.logging.LogService;
+import org.carbondata.common.logging.LogServiceFactory;
+import org.carbondata.core.carbon.metadata.datatype.DataType;
+import org.carbondata.core.constants.CarbonCommonConstants;
+import org.carbondata.core.util.CarbonProperties;
+
+import org.apache.spark.unsafe.types.UTF8String;
+
+/**
+ * Utility for data type
+ */
+public class DataTypeUtil {
+
+  /**
+   * LOGGER
+   */
+  private static final LogService LOGGER =
+      LogServiceFactory.getLogService(DataTypeUtil.class.getName());
+
+  /**
+   * Below method will be used to convert the data passed to its actual data
+   * type
+   *
+   * @param data           data
+   * @param actualDataType actual data type
+   * @return actual data after conversion
+   */
+  public static Object getDataBasedOnDataType(String data, DataType actualDataType) {
+
+    if (null == data) {
+      return null;
+    }
+    try {
+      switch (actualDataType) {
+        case INT:
+          if (data.isEmpty()) {
+            return null;
+          }
+          return Integer.parseInt(data);
+        case DOUBLE:
+          if (data.isEmpty()) {
+            return null;
+          }
+          return Double.parseDouble(data);
+        case LONG:
+          if (data.isEmpty()) {
+            return null;
+          }
+          return Long.parseLong(data);
+        case TIMESTAMP:
+          if (data.isEmpty()) {
+            return null;
+          }
+          SimpleDateFormat parser = new SimpleDateFormat(CarbonProperties.getInstance()
+              .getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+                  CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
+          Date dateToStr = null;
+          try {
+            dateToStr = parser.parse(data);
+            return dateToStr.getTime() * 1000;
+          } catch (ParseException e) {
+            LOGGER.error("Cannot convert" + data + " to Time/Long type value" + e.getMessage());
+            return null;
+          }
+        case DECIMAL:
+          if (data.isEmpty()) {
+            return null;
+          }
+          java.math.BigDecimal javaDecVal = new java.math.BigDecimal(data);
+          scala.math.BigDecimal scalaDecVal = new scala.math.BigDecimal(javaDecVal);
+          org.apache.spark.sql.types.Decimal decConverter =
+              new org.apache.spark.sql.types.Decimal();
+          return decConverter.set(scalaDecVal);
+        default:
+          return UTF8String.fromString(data);
+      }
+    } catch (NumberFormatException ex) {
+      LOGGER.error("Problem while converting data type" + data);
+      return null;
+    }
+
+  }
+
+  public static Object getMeasureDataBasedOnDataType(Object data, DataType dataType) {
+
+    if (null == data) {
+      return null;
+    }
+    try {
+      switch (dataType) {
+        case DOUBLE:
+
+          return (Double) data;
+        case LONG:
+
+          return (Long) data;
+
+        case DECIMAL:
+
+          java.math.BigDecimal javaDecVal = new java.math.BigDecimal(data.toString());
+          scala.math.BigDecimal scalaDecVal = new scala.math.BigDecimal(javaDecVal);
+          org.apache.spark.sql.types.Decimal decConverter =
+              new org.apache.spark.sql.types.Decimal();
+          return decConverter.set(scalaDecVal);
+        default:
+
+          return data;
+      }
+    } catch (NumberFormatException ex) {
+      LOGGER.error("Problem while converting data type" + data);
+      return null;
+    }
+
+  }
+
+  public static int compareBasedOnDatatYpe(Object data1, Object data2, DataType dataType) {
+    switch (dataType) {
+      case INT:
+        return ((Integer) data1).compareTo((Integer) data2);
+      case LONG:
+      case TIMESTAMP:
+        return ((Long) data1).compareTo((Long) data2);
+      case DOUBLE:
+        return ((Double) data1).compareTo((Double) data2);
+      case DECIMAL:
+        return ((BigDecimal) data1).compareTo((BigDecimal) data2);
+      default:
+        return ((String) data1).compareTo((String) data2);
+    }
+  }
+
+  /**
+   * below method is to check whether data type is of numeric type or not
+   *
+   * @param dataType data type
+   * @return true if numeric data type
+   */
+  public boolean isNumericDatatype(DataType dataType) {
+    switch (dataType) {
+      case INT:
+      case LONG:
+      case DOUBLE:
+      case DECIMAL:
+        return true;
+      default:
+        return false;
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/scan/wrappers/ByteArrayWrapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/wrappers/ByteArrayWrapper.java b/core/src/main/java/org/carbondata/scan/wrappers/ByteArrayWrapper.java
new file mode 100644
index 0000000..d36ef7f
--- /dev/null
+++ b/core/src/main/java/org/carbondata/scan/wrappers/ByteArrayWrapper.java
@@ -0,0 +1,202 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.carbondata.scan.wrappers;
+
+import org.carbondata.core.util.ByteUtil.UnsafeComparer;
+
+/**
+ * This class will store the dimension column data when query is executed
+ * This can be used as a key for aggregation
+ */
+public class ByteArrayWrapper implements Comparable<ByteArrayWrapper> {
+
+  /**
+   * to store key which is generated using
+   * key generator
+   */
+  protected byte[] dictionaryKey;
+
+  /**
+   * to store no dictionary column data
+   */
+  protected byte[][] complexTypesKeys;
+
+  /**
+   * to store no dictionary column data
+   */
+  protected byte[][] noDictionaryKeys;
+
+  public ByteArrayWrapper() {
+  }
+
+  /**
+   * @return the dictionaryKey
+   */
+  public byte[] getDictionaryKey() {
+    return dictionaryKey;
+  }
+
+  /**
+   * @param dictionaryKey the dictionaryKey to set
+   */
+  public void setDictionaryKey(byte[] dictionaryKey) {
+    this.dictionaryKey = dictionaryKey;
+  }
+
+  /**
+   * @param noDictionaryKeys the noDictionaryKeys to set
+   */
+  public void setNoDictionaryKeys(byte[][] noDictionaryKeys) {
+    this.noDictionaryKeys = noDictionaryKeys;
+  }
+
+  /**
+   * to get the no dictionary column data
+   *
+   * @param index of the no dictionary key
+   * @return no dictionary key for the index
+   */
+  public byte[] getNoDictionaryKeyByIndex(int index) {
+    return this.noDictionaryKeys[index];
+  }
+
+  /**
+   * to get the no dictionary column data
+   *
+   * @param index of the no dictionary key
+   * @return no dictionary key for the index
+   */
+  public byte[] getComplexTypeByIndex(int index) {
+    return this.complexTypesKeys[index];
+  }
+
+  /**
+   * to generate the hash code
+   */
+  @Override public int hashCode() {
+    // first generate the has code of the dictionary column
+    int len = dictionaryKey.length;
+    int result = 1;
+    for (int j = 0; j < len; j++) {
+      result = 31 * result + dictionaryKey[j];
+    }
+    // then no dictionary column
+    for (byte[] directSurrogateValue : noDictionaryKeys) {
+      for (int i = 0; i < directSurrogateValue.length; i++) {
+        result = 31 * result + directSurrogateValue[i];
+      }
+    }
+    // then for complex type
+    for (byte[] complexTypeKey : complexTypesKeys) {
+      for (int i = 0; i < complexTypeKey.length; i++) {
+        result = 31 * result + complexTypeKey[i];
+      }
+    }
+    return result;
+  }
+
+  /**
+   * to validate the two
+   *
+   * @param other object
+   */
+  @Override public boolean equals(Object other) {
+    if (null == other || !(other instanceof ByteArrayWrapper)) {
+      return false;
+    }
+    boolean result = false;
+    // Comparison will be as follows
+    // first compare the no dictionary column
+    // if it is not equal then return false
+    // if it is equal then compare the complex column
+    // if it is also equal then compare dictionary column
+    byte[][] noDictionaryKeysOther = ((ByteArrayWrapper) other).noDictionaryKeys;
+    if (noDictionaryKeysOther.length != noDictionaryKeys.length) {
+      return false;
+    } else {
+      for (int i = 0; i < noDictionaryKeys.length; i++) {
+        result = UnsafeComparer.INSTANCE.equals(noDictionaryKeys[i], noDictionaryKeysOther[i]);
+        if (!result) {
+          return false;
+        }
+      }
+    }
+
+    byte[][] complexTypesKeysOther = ((ByteArrayWrapper) other).complexTypesKeys;
+    if (complexTypesKeysOther.length != complexTypesKeys.length) {
+      return false;
+    } else {
+      for (int i = 0; i < complexTypesKeys.length; i++) {
+        result = UnsafeComparer.INSTANCE.equals(complexTypesKeys[i], complexTypesKeysOther[i]);
+        if (!result) {
+          return false;
+        }
+      }
+    }
+
+    return UnsafeComparer.INSTANCE.equals(dictionaryKey, ((ByteArrayWrapper) other).dictionaryKey);
+  }
+
+  /**
+   * Compare method for ByteArrayWrapper class this will used to compare Two
+   * ByteArrayWrapper data object, basically it will compare two byte array
+   *
+   * @param other ArrayWrapper Object
+   */
+  @Override public int compareTo(ByteArrayWrapper other) {
+    // compare will be as follows
+    //compare dictionary column
+    // then no dictionary column
+    // then complex type column data
+    int compareTo = UnsafeComparer.INSTANCE.compareTo(dictionaryKey, other.dictionaryKey);
+    if (compareTo == 0) {
+      for (int i = 0; i < noDictionaryKeys.length; i++) {
+        compareTo =
+            UnsafeComparer.INSTANCE.compareTo(noDictionaryKeys[i], other.noDictionaryKeys[i]);
+        if (compareTo != 0) {
+          return compareTo;
+        }
+      }
+    }
+    if (compareTo == 0) {
+      for (int i = 0; i < complexTypesKeys.length; i++) {
+        compareTo =
+            UnsafeComparer.INSTANCE.compareTo(complexTypesKeys[i], other.complexTypesKeys[i]);
+        if (compareTo != 0) {
+          return compareTo;
+        }
+      }
+    }
+    return compareTo;
+  }
+
+  /**
+   * @return the complexTypesKeys
+   */
+  public byte[][] getComplexTypesKeys() {
+    return complexTypesKeys;
+  }
+
+  /**
+   * @param complexTypesKeys the complexTypesKeys to set
+   */
+  public void setComplexTypesKeys(byte[][] complexTypesKeys) {
+    this.complexTypesKeys = complexTypesKeys;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/test/java/org/carbondata/query/QueryExecutor_UT.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/carbondata/query/QueryExecutor_UT.java b/core/src/test/java/org/carbondata/query/QueryExecutor_UT.java
deleted file mode 100644
index a273b33..0000000
--- a/core/src/test/java/org/carbondata/query/QueryExecutor_UT.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Copyright Notice
- * =====================================
- * This file contains proprietary information of
- * Huawei Technologies India Pvt Ltd.
- * Copying or reproduction without prior written approval is prohibited.
- * Copyright (c) 2012
- * =====================================
- */
-package org.carbondata.query;
-
-import junit.framework.TestCase;
-
-public class QueryExecutor_UT extends TestCase {
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/test/java/org/carbondata/query/carbon/executor/util/QueryUtilTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/carbondata/query/carbon/executor/util/QueryUtilTest.java b/core/src/test/java/org/carbondata/query/carbon/executor/util/QueryUtilTest.java
deleted file mode 100644
index d765013..0000000
--- a/core/src/test/java/org/carbondata/query/carbon/executor/util/QueryUtilTest.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.query.carbon.executor.util;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import junit.framework.TestCase;
-
-import org.carbondata.core.carbon.datastore.block.SegmentProperties;
-import org.carbondata.core.carbon.datastore.block.SegmentPropertiesTestUtil;
-import org.carbondata.core.keygenerator.KeyGenException;
-import org.carbondata.query.carbon.model.QueryDimension;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-public class QueryUtilTest extends TestCase {
-
-  private SegmentProperties segmentProperties;
-
-  @BeforeClass public void setUp() {
-    segmentProperties = SegmentPropertiesTestUtil.getSegmentProperties();
-  }
-
-  @Test public void testGetMaskedByteRangeGivingProperMaksedByteRange() {
-	  
-	QueryDimension dimension = new QueryDimension(segmentProperties.getDimensions().get(0).getColName());
-	dimension.setDimension(segmentProperties.getDimensions().get(0));  
-    int[] maskedByteRange = QueryUtil
-        .getMaskedByteRange(Arrays.asList(dimension),
-            segmentProperties.getDimensionKeyGenerator());
-    int[] expectedMaskedByteRange = { 0 };
-    for (int i = 0; i < maskedByteRange.length; i++) {
-      assertEquals(expectedMaskedByteRange[i], maskedByteRange[i]);
-    }
-  }
-
-  @Test public void testGetMaskedByteRangeGivingProperMaksedByteRangeOnlyForDictionaryKey() {
-    List<QueryDimension> dimensions = new ArrayList<QueryDimension>();
-    for (int i = 0; i < 2; i++) {
-      QueryDimension dimension = new QueryDimension(segmentProperties.getDimensions().get(i).getColName());
-      dimension.setDimension(segmentProperties.getDimensions().get(i));
-      dimensions.add(dimension);
-    }
-    int[] maskedByteRange =
-        QueryUtil.getMaskedByteRange(dimensions, segmentProperties.getDimensionKeyGenerator());
-    int[] expectedMaskedByteRange = { 0 };
-    for (int i = 0; i < maskedByteRange.length; i++) {
-      assertEquals(expectedMaskedByteRange[i], maskedByteRange[i]);
-    }
-  }
-
-  @Test public void testGetMaskedByteRangeBasedOrdinalGivingProperMaskedByte() {
-    List<Integer> dimensionOrdinal = new ArrayList<Integer>();
-    dimensionOrdinal.add(0);
-    int[] maskedByteRange = QueryUtil.getMaskedByteRangeBasedOrdinal(dimensionOrdinal,
-        segmentProperties.getDimensionKeyGenerator());
-    int[] expectedMaskedByteRange = { 0 };
-    for (int i = 0; i < maskedByteRange.length; i++) {
-      assertEquals(expectedMaskedByteRange[i], maskedByteRange[i]);
-    }
-  }
-
-  @Test public void testGetMaxKeyBasedOnDimensions() {
-	  List<QueryDimension> dimensions = new ArrayList<QueryDimension>();
-	    for (int i = 0; i < 2; i++) {
-	      QueryDimension dimension = new QueryDimension(segmentProperties.getDimensions().get(i).getColName());
-	      dimension.setDimension(segmentProperties.getDimensions().get(i));
-	      dimensions.add(dimension);
-	    }
-    byte[] maxKeyBasedOnDimensions = null;
-    try {
-      maxKeyBasedOnDimensions = QueryUtil
-          .getMaxKeyBasedOnDimensions(dimensions, segmentProperties.getDimensionKeyGenerator());
-    } catch (KeyGenException e) {
-      assertTrue(false);
-    }
-    byte[] expectedMaxKeyBasedOnDimensions = { -1, 0, 0, 0, 0, 0 };
-    for (int i = 0; i < expectedMaxKeyBasedOnDimensions.length; i++) {
-      if (expectedMaxKeyBasedOnDimensions[i] != maxKeyBasedOnDimensions[i]) {
-        assertTrue(false);
-      }
-    }
-    long[] expectedKeyArray = { 255, 0, 0, 0, 0, 0 };
-    long[] keyArray =
-        segmentProperties.getDimensionKeyGenerator().getKeyArray(maxKeyBasedOnDimensions);
-    for (int i = 0; i < keyArray.length; i++) {
-      if (expectedKeyArray[i] != keyArray[i]) {
-        assertTrue(false);
-      }
-    }
-  }
-
-  @Test public void testGetMaksedByte() {
-	  QueryDimension dimension = new QueryDimension(segmentProperties.getDimensions().get(0).getColName());
-		dimension.setDimension(segmentProperties.getDimensions().get(0)); 
-		dimension.setDimension(segmentProperties.getDimensions().get(0));
-    int[] maskedByteRange = QueryUtil
-        .getMaskedByteRange(Arrays.asList(dimension),
-            segmentProperties.getDimensionKeyGenerator());
-    int[] maskedByte = QueryUtil
-        .getMaskedByte(segmentProperties.getDimensionKeyGenerator().getDimCount(), maskedByteRange);
-    int[] expectedMaskedByte = { 0, -1, -1, -1, -1, -1 };
-
-    for (int i = 0; i < expectedMaskedByte.length; i++) {
-      if (expectedMaskedByte[i] != maskedByte[i]) {
-        assertTrue(false);
-      }
-    }
-  }
-
-  @AfterClass public void tearDown() {
-    segmentProperties = null;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/test/java/org/carbondata/scan/QueryExecutor_UT.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/carbondata/scan/QueryExecutor_UT.java b/core/src/test/java/org/carbondata/scan/QueryExecutor_UT.java
new file mode 100644
index 0000000..a04e9e9
--- /dev/null
+++ b/core/src/test/java/org/carbondata/scan/QueryExecutor_UT.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Copyright Notice
+ * =====================================
+ * This file contains proprietary information of
+ * Huawei Technologies India Pvt Ltd.
+ * Copying or reproduction without prior written approval is prohibited.
+ * Copyright (c) 2012
+ * =====================================
+ */
+package org.carbondata.scan;
+
+import junit.framework.TestCase;
+
+public class QueryExecutor_UT extends TestCase {
+  // NOTE(review): empty test case — presumably a placeholder for future
+  // org.carbondata.scan tests; confirm whether it can be removed.
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/test/java/org/carbondata/scan/executor/util/QueryUtilTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/carbondata/scan/executor/util/QueryUtilTest.java b/core/src/test/java/org/carbondata/scan/executor/util/QueryUtilTest.java
new file mode 100644
index 0000000..0671c61
--- /dev/null
+++ b/core/src/test/java/org/carbondata/scan/executor/util/QueryUtilTest.java
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.carbondata.scan.executor.util;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.carbondata.core.carbon.datastore.block.SegmentProperties;
+import org.carbondata.core.carbon.datastore.block.SegmentPropertiesTestUtil;
+import org.carbondata.core.keygenerator.KeyGenException;
+import org.carbondata.scan.model.QueryDimension;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class QueryUtilTest extends TestCase {
+
+  private SegmentProperties segmentProperties;
+
+  @BeforeClass public void setUp() {
+    segmentProperties = SegmentPropertiesTestUtil.getSegmentProperties();
+  }
+
+  /**
+   * Builds a QueryDimension wrapping the segment dimension at the given
+   * ordinal (shared by all tests to avoid duplicated setup code).
+   */
+  private QueryDimension buildDimension(int ordinal) {
+    QueryDimension dimension =
+        new QueryDimension(segmentProperties.getDimensions().get(ordinal).getColName());
+    dimension.setDimension(segmentProperties.getDimensions().get(ordinal));
+    return dimension;
+  }
+
+  @Test public void testGetMaskedByteRangeGivingProperMaksedByteRange() {
+    int[] maskedByteRange = QueryUtil
+        .getMaskedByteRange(Arrays.asList(buildDimension(0)),
+            segmentProperties.getDimensionKeyGenerator());
+    int[] expectedMaskedByteRange = { 0 };
+    for (int i = 0; i < maskedByteRange.length; i++) {
+      assertEquals(expectedMaskedByteRange[i], maskedByteRange[i]);
+    }
+  }
+
+  @Test public void testGetMaskedByteRangeGivingProperMaksedByteRangeOnlyForDictionaryKey() {
+    List<QueryDimension> dimensions = new ArrayList<QueryDimension>();
+    for (int i = 0; i < 2; i++) {
+      dimensions.add(buildDimension(i));
+    }
+    int[] maskedByteRange =
+        QueryUtil.getMaskedByteRange(dimensions, segmentProperties.getDimensionKeyGenerator());
+    int[] expectedMaskedByteRange = { 0 };
+    for (int i = 0; i < maskedByteRange.length; i++) {
+      assertEquals(expectedMaskedByteRange[i], maskedByteRange[i]);
+    }
+  }
+
+  @Test public void testGetMaskedByteRangeBasedOrdinalGivingProperMaskedByte() {
+    List<Integer> dimensionOrdinal = new ArrayList<Integer>();
+    dimensionOrdinal.add(0);
+    int[] maskedByteRange = QueryUtil.getMaskedByteRangeBasedOrdinal(dimensionOrdinal,
+        segmentProperties.getDimensionKeyGenerator());
+    int[] expectedMaskedByteRange = { 0 };
+    for (int i = 0; i < maskedByteRange.length; i++) {
+      assertEquals(expectedMaskedByteRange[i], maskedByteRange[i]);
+    }
+  }
+
+  @Test public void testGetMaxKeyBasedOnDimensions() {
+    List<QueryDimension> dimensions = new ArrayList<QueryDimension>();
+    for (int i = 0; i < 2; i++) {
+      dimensions.add(buildDimension(i));
+    }
+    byte[] maxKeyBasedOnDimensions = null;
+    try {
+      maxKeyBasedOnDimensions = QueryUtil
+          .getMaxKeyBasedOnDimensions(dimensions, segmentProperties.getDimensionKeyGenerator());
+    } catch (KeyGenException e) {
+      // fail with the cause instead of the opaque assertTrue(false)
+      fail(e.getMessage());
+    }
+    byte[] expectedMaxKeyBasedOnDimensions = { -1, 0, 0, 0, 0, 0 };
+    for (int i = 0; i < expectedMaxKeyBasedOnDimensions.length; i++) {
+      assertEquals(expectedMaxKeyBasedOnDimensions[i], maxKeyBasedOnDimensions[i]);
+    }
+    long[] expectedKeyArray = { 255, 0, 0, 0, 0, 0 };
+    long[] keyArray =
+        segmentProperties.getDimensionKeyGenerator().getKeyArray(maxKeyBasedOnDimensions);
+    for (int i = 0; i < keyArray.length; i++) {
+      assertEquals(expectedKeyArray[i], keyArray[i]);
+    }
+  }
+
+  @Test public void testGetMaksedByte() {
+    int[] maskedByteRange = QueryUtil
+        .getMaskedByteRange(Arrays.asList(buildDimension(0)),
+            segmentProperties.getDimensionKeyGenerator());
+    int[] maskedByte = QueryUtil
+        .getMaskedByte(segmentProperties.getDimensionKeyGenerator().getDimCount(), maskedByteRange);
+    int[] expectedMaskedByte = { 0, -1, -1, -1, -1, -1 };
+    for (int i = 0; i < expectedMaskedByte.length; i++) {
+      assertEquals(expectedMaskedByte[i], maskedByte[i]);
+    }
+  }
+
+  @AfterClass public void tearDown() {
+    segmentProperties = null;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/dev/findbugs-exclude.xml
----------------------------------------------------------------------
diff --git a/dev/findbugs-exclude.xml b/dev/findbugs-exclude.xml
index 3c0880c..4e1ca05 100644
--- a/dev/findbugs-exclude.xml
+++ b/dev/findbugs-exclude.xml
@@ -34,12 +34,12 @@
   </Match>
 
   <Match>
-    <Class name="org.carbondata.query.aggregator.impl.BitSet"/>
+    <Class name="org.carbondata.scan.aggregator.impl.BitSet"/>
     <Bug pattern="SE_TRANSIENT_FIELD_NOT_RESTORED"/>
   </Match>
 
   <Match>
-    <Class name="org.carbondata.query.expression.ExpressionResult"/>
+    <Class name="org.carbondata.scan.expression.ExpressionResult"/>
     <Or>
       <Method name="getBoolean"/>
     </Or>

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/hadoop/src/main/java/org/carbondata/hadoop/CarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/carbondata/hadoop/CarbonInputFormat.java b/hadoop/src/main/java/org/carbondata/hadoop/CarbonInputFormat.java
index 7f794e9..e9ec001 100644
--- a/hadoop/src/main/java/org/carbondata/hadoop/CarbonInputFormat.java
+++ b/hadoop/src/main/java/org/carbondata/hadoop/CarbonInputFormat.java
@@ -56,14 +56,14 @@ import org.carbondata.hadoop.util.CarbonInputFormatUtil;
 import org.carbondata.hadoop.util.ObjectSerializationUtil;
 import org.carbondata.hadoop.util.SchemaReader;
 import org.carbondata.lcm.status.SegmentStatusManager;
-import org.carbondata.query.carbon.executor.exception.QueryExecutionException;
-import org.carbondata.query.carbon.model.CarbonQueryPlan;
-import org.carbondata.query.carbon.model.QueryModel;
-import org.carbondata.query.expression.Expression;
-import org.carbondata.query.expression.exception.FilterUnsupportedException;
-import org.carbondata.query.filter.resolver.FilterResolverIntf;
-import org.carbondata.query.filters.FilterExpressionProcessor;
-import org.carbondata.query.filters.measurefilter.util.FilterUtil;
+import org.carbondata.scan.executor.exception.QueryExecutionException;
+import org.carbondata.scan.expression.Expression;
+import org.carbondata.scan.expression.exception.FilterUnsupportedException;
+import org.carbondata.scan.filter.FilterExpressionProcessor;
+import org.carbondata.scan.filter.FilterUtil;
+import org.carbondata.scan.filter.resolver.FilterResolverIntf;
+import org.carbondata.scan.model.CarbonQueryPlan;
+import org.carbondata.scan.model.QueryModel;
 
 import static org.carbondata.core.constants.CarbonCommonConstants.INVALID_SEGMENT_ID;
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/hadoop/src/main/java/org/carbondata/hadoop/CarbonRecordReader.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/carbondata/hadoop/CarbonRecordReader.java b/hadoop/src/main/java/org/carbondata/hadoop/CarbonRecordReader.java
index 3d54d96..774ad14 100644
--- a/hadoop/src/main/java/org/carbondata/hadoop/CarbonRecordReader.java
+++ b/hadoop/src/main/java/org/carbondata/hadoop/CarbonRecordReader.java
@@ -10,11 +10,11 @@ import org.carbondata.core.carbon.datastore.block.TableBlockInfo;
 import org.carbondata.core.iterator.CarbonIterator;
 import org.carbondata.core.util.CarbonUtil;
 import org.carbondata.hadoop.readsupport.CarbonReadSupport;
-import org.carbondata.query.carbon.executor.QueryExecutorFactory;
-import org.carbondata.query.carbon.executor.exception.QueryExecutionException;
-import org.carbondata.query.carbon.model.QueryModel;
-import org.carbondata.query.carbon.result.BatchResult;
-import org.carbondata.query.carbon.result.iterator.ChunkRowIterator;
+import org.carbondata.scan.executor.QueryExecutorFactory;
+import org.carbondata.scan.executor.exception.QueryExecutionException;
+import org.carbondata.scan.model.QueryModel;
+import org.carbondata.scan.result.BatchResult;
+import org.carbondata.scan.result.iterator.ChunkRowIterator;
 
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.RecordReader;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/hadoop/src/main/java/org/carbondata/hadoop/util/CarbonInputFormatUtil.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/carbondata/hadoop/util/CarbonInputFormatUtil.java b/hadoop/src/main/java/org/carbondata/hadoop/util/CarbonInputFormatUtil.java
index 7384636..279c8ab 100644
--- a/hadoop/src/main/java/org/carbondata/hadoop/util/CarbonInputFormatUtil.java
+++ b/hadoop/src/main/java/org/carbondata/hadoop/util/CarbonInputFormatUtil.java
@@ -25,13 +25,13 @@ import org.carbondata.core.carbon.AbsoluteTableIdentifier;
 import org.carbondata.core.carbon.metadata.schema.table.CarbonTable;
 import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension;
 import org.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
-import org.carbondata.query.carbon.model.CarbonQueryPlan;
-import org.carbondata.query.carbon.model.QueryDimension;
-import org.carbondata.query.carbon.model.QueryMeasure;
-import org.carbondata.query.carbon.model.QueryModel;
-import org.carbondata.query.expression.Expression;
-import org.carbondata.query.filter.resolver.FilterResolverIntf;
-import org.carbondata.query.filters.FilterExpressionProcessor;
+import org.carbondata.scan.expression.Expression;
+import org.carbondata.scan.filter.FilterExpressionProcessor;
+import org.carbondata.scan.filter.resolver.FilterResolverIntf;
+import org.carbondata.scan.model.CarbonQueryPlan;
+import org.carbondata.scan.model.QueryDimension;
+import org.carbondata.scan.model.QueryMeasure;
+import org.carbondata.scan.model.QueryModel;
 
 /**
  * Utility class

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/hadoop/src/test/java/org/carbondata/hadoop/ft/CarbonInputFormat_FT.java
----------------------------------------------------------------------
diff --git a/hadoop/src/test/java/org/carbondata/hadoop/ft/CarbonInputFormat_FT.java b/hadoop/src/test/java/org/carbondata/hadoop/ft/CarbonInputFormat_FT.java
index 1cf4a87..bf5624b 100644
--- a/hadoop/src/test/java/org/carbondata/hadoop/ft/CarbonInputFormat_FT.java
+++ b/hadoop/src/test/java/org/carbondata/hadoop/ft/CarbonInputFormat_FT.java
@@ -19,17 +19,16 @@
 
 package org.carbondata.hadoop.ft;
 
-import java.util.Arrays;
 import java.util.List;
 import java.util.UUID;
 
 import org.carbondata.core.carbon.CarbonTableIdentifier;
 import org.carbondata.hadoop.CarbonInputFormat;
-import org.carbondata.query.expression.ColumnExpression;
-import org.carbondata.query.expression.DataType;
-import org.carbondata.query.expression.Expression;
-import org.carbondata.query.expression.LiteralExpression;
-import org.carbondata.query.expression.conditional.EqualToExpression;
+import org.carbondata.scan.expression.ColumnExpression;
+import org.carbondata.scan.expression.DataType;
+import org.carbondata.scan.expression.Expression;
+import org.carbondata.scan.expression.LiteralExpression;
+import org.carbondata.scan.expression.conditional.EqualToExpression;
 
 import junit.framework.TestCase;
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/hadoop/src/test/java/org/carbondata/hadoop/ft/CarbonInputMapperTest.java
----------------------------------------------------------------------
diff --git a/hadoop/src/test/java/org/carbondata/hadoop/ft/CarbonInputMapperTest.java b/hadoop/src/test/java/org/carbondata/hadoop/ft/CarbonInputMapperTest.java
index 5fa8169..a20f957 100644
--- a/hadoop/src/test/java/org/carbondata/hadoop/ft/CarbonInputMapperTest.java
+++ b/hadoop/src/test/java/org/carbondata/hadoop/ft/CarbonInputMapperTest.java
@@ -12,11 +12,11 @@ import org.carbondata.core.util.CarbonUtil;
 import org.carbondata.hadoop.CarbonInputFormat;
 import org.carbondata.hadoop.CarbonProjection;
 import org.carbondata.hadoop.test.util.StoreCreator;
-import org.carbondata.query.expression.ColumnExpression;
-import org.carbondata.query.expression.DataType;
-import org.carbondata.query.expression.Expression;
-import org.carbondata.query.expression.LiteralExpression;
-import org.carbondata.query.expression.conditional.EqualToExpression;
+import org.carbondata.scan.expression.ColumnExpression;
+import org.carbondata.scan.expression.DataType;
+import org.carbondata.scan.expression.Expression;
+import org.carbondata.scan.expression.LiteralExpression;
+import org.carbondata.scan.expression.conditional.EqualToExpression;
 
 import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/hadoop/src/test/java/org/carbondata/hadoop/test/util/ObjectSerializationUtilTest.java
----------------------------------------------------------------------
diff --git a/hadoop/src/test/java/org/carbondata/hadoop/test/util/ObjectSerializationUtilTest.java b/hadoop/src/test/java/org/carbondata/hadoop/test/util/ObjectSerializationUtilTest.java
index 031ea6b..e5e3fb6 100644
--- a/hadoop/src/test/java/org/carbondata/hadoop/test/util/ObjectSerializationUtilTest.java
+++ b/hadoop/src/test/java/org/carbondata/hadoop/test/util/ObjectSerializationUtilTest.java
@@ -20,11 +20,11 @@
 package org.carbondata.hadoop.test.util;
 
 import org.carbondata.hadoop.util.ObjectSerializationUtil;
-import org.carbondata.query.expression.ColumnExpression;
-import org.carbondata.query.expression.DataType;
-import org.carbondata.query.expression.Expression;
-import org.carbondata.query.expression.LiteralExpression;
-import org.carbondata.query.expression.conditional.EqualToExpression;
+import org.carbondata.scan.expression.ColumnExpression;
+import org.carbondata.scan.expression.DataType;
+import org.carbondata.scan.expression.Expression;
+import org.carbondata.scan.expression.LiteralExpression;
+import org.carbondata.scan.expression.conditional.EqualToExpression;
 
 import junit.framework.TestCase;
 import org.junit.Assert;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/java/org/carbondata/integration/spark/merger/CarbonCompactionExecutor.java
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/java/org/carbondata/integration/spark/merger/CarbonCompactionExecutor.java b/integration/spark/src/main/java/org/carbondata/integration/spark/merger/CarbonCompactionExecutor.java
index ce0f656..89803f0 100644
--- a/integration/spark/src/main/java/org/carbondata/integration/spark/merger/CarbonCompactionExecutor.java
+++ b/integration/spark/src/main/java/org/carbondata/integration/spark/merger/CarbonCompactionExecutor.java
@@ -37,14 +37,14 @@ import org.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
 import org.carbondata.core.constants.CarbonCommonConstants;
 import org.carbondata.core.iterator.CarbonIterator;
 import org.carbondata.core.util.CarbonUtil;
-import org.carbondata.query.carbon.executor.QueryExecutor;
-import org.carbondata.query.carbon.executor.QueryExecutorFactory;
-import org.carbondata.query.carbon.executor.exception.QueryExecutionException;
-import org.carbondata.query.carbon.model.QueryDimension;
-import org.carbondata.query.carbon.model.QueryMeasure;
-import org.carbondata.query.carbon.model.QueryModel;
-import org.carbondata.query.carbon.result.BatchRawResult;
-import org.carbondata.query.carbon.result.iterator.RawResultIterator;
+import org.carbondata.scan.executor.QueryExecutor;
+import org.carbondata.scan.executor.QueryExecutorFactory;
+import org.carbondata.scan.executor.exception.QueryExecutionException;
+import org.carbondata.scan.model.QueryDimension;
+import org.carbondata.scan.model.QueryMeasure;
+import org.carbondata.scan.model.QueryModel;
+import org.carbondata.scan.result.BatchRawResult;
+import org.carbondata.scan.result.iterator.RawResultIterator;
 
 /**
  * Executor class for executing the query on the selected segments to be merged.

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/java/org/carbondata/integration/spark/merger/RowResultMerger.java
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/java/org/carbondata/integration/spark/merger/RowResultMerger.java b/integration/spark/src/main/java/org/carbondata/integration/spark/merger/RowResultMerger.java
index 66ec1d4..53d32b7 100644
--- a/integration/spark/src/main/java/org/carbondata/integration/spark/merger/RowResultMerger.java
+++ b/integration/spark/src/main/java/org/carbondata/integration/spark/merger/RowResultMerger.java
@@ -50,8 +50,8 @@ import org.carbondata.processing.store.CarbonFactDataHandlerColumnar;
 import org.carbondata.processing.store.CarbonFactDataHandlerModel;
 import org.carbondata.processing.store.CarbonFactHandler;
 import org.carbondata.processing.store.writer.exception.CarbonDataWriterException;
-import org.carbondata.query.carbon.result.iterator.RawResultIterator;
-import org.carbondata.query.carbon.wrappers.ByteArrayWrapper;
+import org.carbondata.scan.result.iterator.RawResultIterator;
+import org.carbondata.scan.wrappers.ByteArrayWrapper;
 import org.carbondata.spark.load.CarbonLoadModel;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/java/org/carbondata/integration/spark/merger/TupleConversionAdapter.java
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/java/org/carbondata/integration/spark/merger/TupleConversionAdapter.java b/integration/spark/src/main/java/org/carbondata/integration/spark/merger/TupleConversionAdapter.java
index 593afc6..d5c4d6a 100644
--- a/integration/spark/src/main/java/org/carbondata/integration/spark/merger/TupleConversionAdapter.java
+++ b/integration/spark/src/main/java/org/carbondata/integration/spark/merger/TupleConversionAdapter.java
@@ -24,7 +24,7 @@ import java.util.List;
 import org.carbondata.core.carbon.datastore.block.SegmentProperties;
 import org.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
 import org.carbondata.processing.util.RemoveDictionaryUtil;
-import org.carbondata.query.carbon.wrappers.ByteArrayWrapper;
+import org.carbondata.scan.wrappers.ByteArrayWrapper;
 
 /**
  * This class will be used to convert the Result into the format used in data writer.

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/java/org/carbondata/spark/partition/api/DataPartitioner.java
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/java/org/carbondata/spark/partition/api/DataPartitioner.java b/integration/spark/src/main/java/org/carbondata/spark/partition/api/DataPartitioner.java
index a5d8bf4..1e87ac8 100644
--- a/integration/spark/src/main/java/org/carbondata/spark/partition/api/DataPartitioner.java
+++ b/integration/spark/src/main/java/org/carbondata/spark/partition/api/DataPartitioner.java
@@ -30,7 +30,7 @@ package org.carbondata.spark.partition.api;
 
 import java.util.List;
 
-import org.carbondata.query.carbon.model.CarbonQueryPlan;
+import org.carbondata.scan.model.CarbonQueryPlan;
 
 import org.apache.spark.sql.execution.command.Partitioner;
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/java/org/carbondata/spark/partition/api/impl/QueryPartitionHelper.java
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/java/org/carbondata/spark/partition/api/impl/QueryPartitionHelper.java b/integration/spark/src/main/java/org/carbondata/spark/partition/api/impl/QueryPartitionHelper.java
index cb59d91..c14515b 100644
--- a/integration/spark/src/main/java/org/carbondata/spark/partition/api/impl/QueryPartitionHelper.java
+++ b/integration/spark/src/main/java/org/carbondata/spark/partition/api/impl/QueryPartitionHelper.java
@@ -40,7 +40,7 @@ import java.util.Properties;
 import org.carbondata.common.logging.LogService;
 import org.carbondata.common.logging.LogServiceFactory;
 import org.carbondata.core.constants.CarbonCommonConstants;
-import org.carbondata.query.carbon.model.CarbonQueryPlan;
+import org.carbondata.scan.model.CarbonQueryPlan;
 import org.carbondata.spark.partition.api.DataPartitioner;
 import org.carbondata.spark.partition.api.Partition;
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/java/org/carbondata/spark/partition/api/impl/SampleDataPartitionerImpl.java
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/java/org/carbondata/spark/partition/api/impl/SampleDataPartitionerImpl.java b/integration/spark/src/main/java/org/carbondata/spark/partition/api/impl/SampleDataPartitionerImpl.java
index cae2f28..11ad382 100644
--- a/integration/spark/src/main/java/org/carbondata/spark/partition/api/impl/SampleDataPartitionerImpl.java
+++ b/integration/spark/src/main/java/org/carbondata/spark/partition/api/impl/SampleDataPartitionerImpl.java
@@ -26,7 +26,7 @@ import java.util.List;
 import org.carbondata.common.logging.LogService;
 import org.carbondata.common.logging.LogServiceFactory;
 import org.carbondata.core.constants.CarbonCommonConstants;
-import org.carbondata.query.carbon.model.CarbonQueryPlan;
+import org.carbondata.scan.model.CarbonQueryPlan;
 import org.carbondata.spark.partition.api.DataPartitioner;
 import org.carbondata.spark.partition.api.Partition;
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/java/org/carbondata/spark/readsupport/SparkRowReadSupportImpl.java
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/java/org/carbondata/spark/readsupport/SparkRowReadSupportImpl.java b/integration/spark/src/main/java/org/carbondata/spark/readsupport/SparkRowReadSupportImpl.java
index 1e4d877..85a3418 100644
--- a/integration/spark/src/main/java/org/carbondata/spark/readsupport/SparkRowReadSupportImpl.java
+++ b/integration/spark/src/main/java/org/carbondata/spark/readsupport/SparkRowReadSupportImpl.java
@@ -5,7 +5,7 @@ import java.sql.Timestamp;
 import org.carbondata.core.carbon.AbsoluteTableIdentifier;
 import org.carbondata.core.carbon.metadata.schema.table.column.CarbonColumn;
 import org.carbondata.hadoop.readsupport.impl.AbstractDictionaryDecodedReadSupport;
-import org.carbondata.query.carbon.util.DataTypeUtil;
+import org.carbondata.scan.util.DataTypeUtil;
 
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.catalyst.expressions.GenericRow;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/java/org/carbondata/spark/util/CarbonQueryUtil.java
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/java/org/carbondata/spark/util/CarbonQueryUtil.java b/integration/spark/src/main/java/org/carbondata/spark/util/CarbonQueryUtil.java
index 4b997b5..b9d5fce 100644
--- a/integration/spark/src/main/java/org/carbondata/spark/util/CarbonQueryUtil.java
+++ b/integration/spark/src/main/java/org/carbondata/spark/util/CarbonQueryUtil.java
@@ -34,7 +34,7 @@ import org.carbondata.core.datastorage.store.impl.FileFactory;
 import org.carbondata.core.datastorage.store.impl.FileFactory.FileType;
 import org.carbondata.core.load.LoadMetadataDetails;
 import org.carbondata.core.util.CarbonUtil;
-import org.carbondata.query.carbon.model.CarbonQueryPlan;
+import org.carbondata.scan.model.CarbonQueryPlan;
 import org.carbondata.spark.partition.api.Partition;
 import org.carbondata.spark.partition.api.impl.DefaultLoadBalancer;
 import org.carbondata.spark.partition.api.impl.PartitionMultiFileImpl;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonBoundReference.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonBoundReference.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonBoundReference.scala
index 2dd569b..3b52897 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonBoundReference.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonBoundReference.scala
@@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.expressions.{Attribute, ExprId, LeafExpress
 import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
 import org.apache.spark.sql.types.DataType
 
-import org.carbondata.query.expression.ColumnExpression
+import org.carbondata.scan.expression.ColumnExpression
 
 
 case class CarbonBoundReference(colExp: ColumnExpression, dataType: DataType, nullable: Boolean)

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
index da5ecf1..06f05d3 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
@@ -39,7 +39,7 @@ import org.apache.spark.util.SerializableConfiguration
 
 import org.carbondata.core.carbon.CarbonTableIdentifier
 import org.carbondata.hadoop.{CarbonInputFormat, CarbonInputSplit, CarbonProjection}
-import org.carbondata.query.expression.logical.AndExpression
+import org.carbondata.scan.expression.logical.AndExpression
 import org.carbondata.spark.{CarbonFilters, CarbonOption}
 import org.carbondata.spark.readsupport.SparkRowReadSupportImpl
 import org.carbondata.spark.util.CarbonScalaUtil.CarbonSparkUtil

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
index 5359009..c2e1564 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
@@ -33,7 +33,7 @@ import org.carbondata.core.carbon.{AbsoluteTableIdentifier, CarbonTableIdentifie
 import org.carbondata.core.carbon.metadata.datatype.DataType
 import org.carbondata.core.carbon.metadata.encoder.Encoding
 import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension
-import org.carbondata.query.carbon.util.DataTypeUtil
+import org.carbondata.scan.util.DataTypeUtil
 
 /**
  * It decodes the data.

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonOperators.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonOperators.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonOperators.scala
index 22fa4fb..6b2bdc2 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonOperators.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonOperators.scala
@@ -32,7 +32,7 @@ import org.apache.spark.unsafe.types.UTF8String
 
 import org.carbondata.core.constants.CarbonCommonConstants
 import org.carbondata.core.util.CarbonProperties
-import org.carbondata.query.carbon.model._
+import org.carbondata.scan.model._
 import org.carbondata.spark.{CarbonFilters, RawValue, RawValueImpl}
 import org.carbondata.spark.rdd.CarbonScanRDD
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala b/integration/spark/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala
index 35e1035..e8f6e11 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala
@@ -24,10 +24,10 @@ import scala.collection.JavaConverters._
 import org.apache.spark.sql.catalyst.expressions.{Expression => SparkExpression, GenericMutableRow}
 
 import org.carbondata.core.carbon.metadata.encoder.Encoding
-import org.carbondata.query.carbonfilterinterface.{ExpressionType, RowIntf}
-import org.carbondata.query.expression.{ColumnExpression, ExpressionResult, UnknownExpression}
-import org.carbondata.query.expression.conditional.ConditionalExpression
-import org.carbondata.query.expression.exception.FilterUnsupportedException
+import org.carbondata.scan.expression.{ColumnExpression, ExpressionResult, UnknownExpression}
+import org.carbondata.scan.expression.conditional.ConditionalExpression
+import org.carbondata.scan.expression.exception.FilterUnsupportedException
+import org.carbondata.scan.filter.intf.{ExpressionType, RowIntf}
 import org.carbondata.spark.util.CarbonScalaUtil
 
 class SparkUnknownExpression(sparkExp: SparkExpression)

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/scala/org/carbondata/spark/CarbonFilters.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/CarbonFilters.scala b/integration/spark/src/main/scala/org/carbondata/spark/CarbonFilters.scala
index cf31a7b..e02bc7f 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/CarbonFilters.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/CarbonFilters.scala
@@ -30,9 +30,9 @@ import org.apache.spark.sql.types.StructType
 import org.carbondata.core.carbon.metadata.datatype.DataType
 import org.carbondata.core.carbon.metadata.schema.table.CarbonTable
 import org.carbondata.core.carbon.metadata.schema.table.column.CarbonColumn
-import org.carbondata.query.expression.{ColumnExpression => CarbonColumnExpression, Expression => CarbonExpression, LiteralExpression => CarbonLiteralExpression}
-import org.carbondata.query.expression.conditional._
-import org.carbondata.query.expression.logical.{AndExpression, OrExpression}
+import org.carbondata.scan.expression.{ColumnExpression => CarbonColumnExpression, Expression => CarbonExpression, LiteralExpression => CarbonLiteralExpression}
+import org.carbondata.scan.expression.conditional._
+import org.carbondata.scan.expression.logical.{AndExpression, OrExpression}
 import org.carbondata.spark.util.CarbonScalaUtil
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonMergerRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonMergerRDD.scala b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonMergerRDD.scala
index dd2a10a..e18b64d 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonMergerRDD.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonMergerRDD.scala
@@ -36,7 +36,7 @@ import org.carbondata.core.util.CarbonProperties
 import org.carbondata.hadoop.{CarbonInputFormat, CarbonInputSplit}
 import org.carbondata.integration.spark.merger.{CarbonCompactionExecutor, CarbonCompactionUtil,
 RowResultMerger}
-import org.carbondata.query.carbon.result.iterator.RawResultIterator
+import org.carbondata.scan.result.iterator.RawResultIterator
 import org.carbondata.spark.MergeResult
 import org.carbondata.spark.load.{CarbonLoaderUtil, CarbonLoadModel}
 import org.carbondata.spark.merger.CarbonDataMergerUtil

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonScanRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonScanRDD.scala b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonScanRDD.scala
index 6693108..84a362b 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonScanRDD.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonScanRDD.scala
@@ -15,7 +15,6 @@
  * limitations under the License.
  */
 
-
 package org.carbondata.spark.rdd
 
 import java.util
@@ -32,11 +31,11 @@ import org.carbondata.common.logging.LogServiceFactory
 import org.carbondata.core.carbon.datastore.block.TableBlockInfo
 import org.carbondata.core.iterator.CarbonIterator
 import org.carbondata.hadoop.{CarbonInputFormat, CarbonInputSplit}
-import org.carbondata.query.carbon.executor.QueryExecutorFactory
-import org.carbondata.query.carbon.model.QueryModel
-import org.carbondata.query.carbon.result.BatchResult
-import org.carbondata.query.carbon.result.iterator.ChunkRowIterator
-import org.carbondata.query.expression.Expression
+import org.carbondata.scan.executor.QueryExecutorFactory
+import org.carbondata.scan.expression.Expression
+import org.carbondata.scan.model.QueryModel
+import org.carbondata.scan.result.BatchResult
+import org.carbondata.scan.result.iterator.ChunkRowIterator
 import org.carbondata.spark.RawValue
 import org.carbondata.spark.load.CarbonLoaderUtil
 import org.carbondata.spark.util.QueryPlanUtil

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/integration/spark/src/main/scala/org/carbondata/spark/util/CarbonScalaUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/util/CarbonScalaUtil.scala b/integration/spark/src/main/scala/org/carbondata/spark/util/CarbonScalaUtil.scala
index 9bc1a64..593aadb 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/util/CarbonScalaUtil.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/util/CarbonScalaUtil.scala
@@ -28,7 +28,7 @@ import org.carbondata.core.carbon.metadata.datatype.DataType
 import org.carbondata.core.carbon.metadata.encoder.Encoding
 import org.carbondata.core.carbon.metadata.schema.table.CarbonTable
 import org.carbondata.core.constants.CarbonCommonConstants
-import org.carbondata.query.expression.{DataType => CarbonDataType}
+import org.carbondata.scan.expression.{DataType => CarbonDataType}
 
 object CarbonScalaUtil {
   def convertSparkToCarbonDataType(