You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by gv...@apache.org on 2017/09/18 17:05:43 UTC

[1/2] carbondata git commit: [CARBONDATA-1450] Support timestamp, int and Long as Dictionary Exclude

Repository: carbondata
Updated Branches:
  refs/heads/master 8b83f5885 -> 36ceb59f0


http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnExcudeDictTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnExcudeDictTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnExcudeDictTestCase.scala
new file mode 100644
index 0000000..6f9df82
--- /dev/null
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnExcudeDictTestCase.scala
@@ -0,0 +1,433 @@
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.cluster.sdv.generated
+
+import org.apache.spark.sql.common.util._
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
+/**
+ * Test Class for SortColumnExcudeDictTestCase to verify all scenarios
+ */
+
+class SortColumnExcudeDictTestCase extends QueryTest with BeforeAndAfterAll {
+
+
+  //create table with no dictionary sort_columns
+  test("Sortcolumn-001_TC001", Include) {
+    sql(s"""drop table if exists sorttable1""").collect
+    sql(s"""CREATE TABLE sorttable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno','sort_columns'='empno')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select empno from sorttable1""").collect
+
+    sql(s"""drop table if exists sorttable1""").collect
+  }
+
+
+  //create table with direct-dictionary sort_columns
+  test("Sortcolumn-001_TC003", Include) {
+    sql(s"""CREATE TABLE sorttable3 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_include'='doj','sort_columns'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable3 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable3""").collect
+
+    sql(s"""drop table if exists sorttable3""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with offheap safe
+  test("Sortcolumn-001_TC004", Include) {
+    sql(s"""CREATE TABLE sorttable4_offheap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='workgroupcategory','sort_columns'='workgroupcategory, empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable4_offheap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select workgroupcategory, empname from sorttable4_offheap_safe""").collect
+
+    sql(s"""drop table if exists sorttable4_offheap_safe""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with offheap and unsafe sort
+  test("Sortcolumn-001_TC005", Include) {
+    sql(s"""CREATE TABLE sorttable4_offheap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='workgroupcategory','sort_columns'='workgroupcategory, empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable4_offheap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select workgroupcategory, empname from sorttable4_offheap_unsafe""").collect
+
+    sql(s"""drop table if exists sorttable4_offheap_unsafe""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with offheap and inmemory sort
+  test("Sortcolumn-001_TC006", Include) {
+    sql(s"""CREATE TABLE sorttable4_offheap_inmemory (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='workgroupcategory','sort_columns'='workgroupcategory, empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable4_offheap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select workgroupcategory, empname from sorttable4_offheap_inmemory""").collect
+
+    sql(s"""drop table if exists sorttable4_offheap_inmemory""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with heap
+  test("Sortcolumn-001_TC007", Include) {
+    sql(s"""CREATE TABLE sorttable4_heap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='workgroupcategory','sort_columns'='workgroupcategory, empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable4_heap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select workgroupcategory, empname from sorttable4_heap_safe""").collect
+
+    sql(s"""drop table if exists sorttable4_heap_safe""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with heap and unsafe sort
+  test("Sortcolumn-001_TC008", Include) {
+    sql(s"""CREATE TABLE sorttable4_heap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='workgroupcategory','sort_columns'='workgroupcategory, empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable4_heap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select workgroupcategory, empname from sorttable4_heap_unsafe""").collect
+
+    sql(s"""drop table if exists sorttable4_heap_unsafe""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with heap and inmemory sort
+  test("Sortcolumn-001_TC009", Include) {
+    sql(s"""CREATE TABLE sorttable4_heap_inmemory (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='workgroupcategory','sort_columns'='workgroupcategory, empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable4_heap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select workgroupcategory, empname from sorttable4_heap_inmemory""").collect
+
+    sql(s"""drop table if exists sorttable4_heap_inmemory""").collect
+  }
+
+
+  //create table with no-dictionary sort_columns and verify data after minor compaction
+  test("Sortcolumn-001_TC010", Include) {
+    sql(s"""drop table if exists origintable2""").collect
+    sql(s"""drop table if exists sorttable5""").collect
+    sql(s"""CREATE TABLE origintable2 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE origintable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE origintable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE origintable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE origintable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table origintable2 compact 'minor'""").collect
+    sql(s"""CREATE TABLE sorttable5 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno','sort_columns'='empno')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table sorttable5 compact 'minor'""").collect
+    sql(s"""select empno from sorttable5""").collect
+
+    sql(s"""drop table if exists sorttable5""").collect
+  }
+
+
+  //filter on sort_columns include no-dictionary
+  test("Sortcolumn-001_TC011", Include) {
+    sql(s"""drop table if exists sorttable6""").collect
+    sql(s"""CREATE TABLE sorttable6 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='workgroupcategory','dictionary_include'='doj','sort_columns'='workgroupcategory, doj, empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable6 where workgroupcategory = 1""").collect
+
+    sql(s"""drop table if exists sorttable6""").collect
+  }
+
+
+  //filter on sort_columns include direct-dictionary
+  test("Sortcolumn-001_TC012", Include) {
+    sql(s"""CREATE TABLE sorttable6 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='workgroupcategory','dictionary_include'='doj','sort_columns'='workgroupcategory, doj, empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable6 where doj = '2007-01-17 00:00:00'""").collect
+
+    sql(s"""drop table if exists sorttable6""").collect
+  }
+
+
+  //filter on sort_columns include dictionary
+  test("Sortcolumn-001_TC013", Include) {
+    sql(s"""drop table if exists sorttable6""").collect
+    sql(s"""CREATE TABLE sorttable6 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='workgroupcategory','dictionary_include'='doj','sort_columns'='workgroupcategory, doj, empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable6 where empname = 'madhan'""").collect
+
+    sql(s"""drop table if exists sorttable6""").collect
+  }
+
+
+  //unsorted table creation, query data loading with heap and safe sort config
+  test("Sortcolumn-001_TC014", Include) {
+    sql(s"""CREATE TABLE unsortedtable_heap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_heap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_heap_safe where empno = 11""").collect
+
+    sql(s"""drop table if exists unsortedtable_heap_safe""").collect
+  }
+
+
+  //unsorted table creation, query data loading with heap and safe sort config with order by
+  test("Sortcolumn-001_TC015", Include) {
+    sql(s"""drop table if exists unsortedtable_heap_safe""").collect
+    sql(s"""CREATE TABLE unsortedtable_heap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_heap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_heap_safe order by empno""").collect
+
+    sql(s"""drop table if exists unsortedtable_heap_safe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with heap and unsafe sort config
+  test("Sortcolumn-001_TC016", Include) {
+    sql(s"""CREATE TABLE unsortedtable_heap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_heap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_heap_unsafe where empno = 11""").collect
+
+    sql(s"""drop table if exists unsortedtable_heap_unsafe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with heap and unsafe sort config with order by
+  test("Sortcolumn-001_TC017", Include) {
+    sql(s"""drop table if exists unsortedtable_heap_unsafe""").collect
+    sql(s"""CREATE TABLE unsortedtable_heap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_heap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_heap_unsafe order by empno""").collect
+
+    sql(s"""drop table if exists unsortedtable_heap_unsafe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and safe sort config
+  test("Sortcolumn-001_TC018", Include) {
+    sql(s"""CREATE TABLE unsortedtable_offheap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_offheap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_safe where empno = 11""").collect
+
+    sql(s"""drop table if exists unsortedtable_offheap_safe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and safe sort config with order by
+  test("Sortcolumn-001_TC019", Include) {
+    sql(s"""CREATE TABLE unsortedtable_offheap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_offheap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_safe order by empno""").collect
+
+    sql(s"""drop table if exists unsortedtable_offheap_safe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and unsafe sort config
+  test("Sortcolumn-001_TC020", Include) {
+    sql(s"""CREATE TABLE unsortedtable_offheap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_offheap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_unsafe where empno = 11""").collect
+
+    sql(s"""drop table if exists unsortedtable_offheap_unsafe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and unsafe sort config with order by
+  test("Sortcolumn-001_TC021", Include) {
+    sql(s"""CREATE TABLE unsortedtable_offheap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_offheap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_unsafe order by empno""").collect
+
+    sql(s"""drop table if exists unsortedtable_offheap_unsafe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and inmemory sort config
+  test("Sortcolumn-001_TC022", Include) {
+    sql(s"""CREATE TABLE unsortedtable_offheap_inmemory (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_offheap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_inmemory where empno = 11""").collect
+
+    sql(s"""drop table if exists unsortedtable_offheap_inmemory""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and inmemory sort config with order by
+  test("Sortcolumn-001_TC023", Include) {
+    sql(s"""CREATE TABLE unsortedtable_offheap_inmemory (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','sort_columns'='')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_offheap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_inmemory order by empno""").collect
+
+    sql(s"""drop table if exists unsortedtable_offheap_inmemory""").collect
+  }
+
+
+  //create table with dictionary_exclude sort_columns
+  test("Sortcolumn-001_TC024", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno,deptno,workgroupcategory,projectcode,attendance,utilization,salary','dictionary_include'='doj','dictionary_exclude'='empname','sort_columns'='empname')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with dictionary_exclude, sort_columns
+  test("Sortcolumn-001_TC025", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='doj','sort_columns'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with dictionary_include, dictionary_exclude sort_columns
+  test("Sortcolumn-001_TC026", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empname,doj','sort_columns'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with alter table and sort_columns with dimension
+  test("Sortcolumn-001_TC027", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_include'='doj','sort_columns'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table sorttable add columns(newField String) tblproperties('dictionary_include'='newField')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/dataString.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with alter table and sort_columns with measure
+  test("Sortcolumn-001_TC028", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_include'='doj','sort_columns'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table sorttable add columns(newField Int) tblproperties('dictionary_include'='newField')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/dataInt.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with dictionary_include ,no_inverted_index and sort_columns
+  test("Sortcolumn-001_TC030", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_include'='doj','sort_columns'='doj','no_inverted_index'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with dictionary_exclude, no_inverted_index and sort_columns with measure
+  test("Sortcolumn-001_TC031", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno','sort_columns'='empno','no_inverted_index'='empno')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //test sort_column for different order of column name
+  test("Sortcolumn-001_TC032", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno,workgroupcategory','dictionary_include'='doj','sort_columns'='empname,empno,workgroupcategory,doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //default behavior if sort_column not provided
+  test("Sortcolumn-001_TC033", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //test sort_column for alter table
+  test("Sortcolumn-001_TC035", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_include'='doj','sort_columns'='doj')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table sorttable drop columns(doj)""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/dataDrop.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable""").collect
+
+    sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //test sort_column for float data_type with alter query
+  test("Sortcolumn-001_TC037", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""drop table if exists sorttable1""").collect
+    sql(s"""CREATE TABLE sorttable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno','sort_columns'='empno')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table sorttable1 add columns(newField Float) tblproperties('DICTIONARY_INCLUDE'='newField')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/dataFloat.csv' INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable1""").collect
+
+    sql(s"""drop table if exists sorttable1""").collect
+  }
+
+
+  //test sort_column for decimal data_type with alter query
+  test("Sortcolumn-001_TC038", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""drop table if exists sorttable1""").collect
+    sql(s"""CREATE TABLE sorttable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empno','sort_columns'='empno')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""alter table sorttable1 add columns(newField decimal) tblproperties('dictionary_include'='newField')""").collect
+    sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/dataDecimal.csv' INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable1""").collect
+
+    sql(s"""drop table if exists sorttable1""").collect
+  }
+
+
+  val prop = CarbonProperties.getInstance()
+  val p1 = prop.getProperty("carbon.load.sort.scope", CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
+  val p2 = prop.getProperty("enable.unsafe.sort", CarbonCommonConstants.ENABLE_UNSAFE_SORT_DEFAULT)
+  val p3 = prop.getProperty("enable.offheap.sort", CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT)
+
+  override protected def beforeAll() {
+    // Adding new properties
+    prop.addProperty("carbon.load.sort.scope", "batch_sort")
+    prop.addProperty("enable.unsafe.sort", "true")
+    prop.addProperty("enable.offheap.sort", "true")
+  }
+
+  override def afterAll: Unit = {
+    //Reverting to old
+    prop.addProperty("carbon.load.sort.scope", p1)
+    prop.addProperty("enable.unsafe.sort", p2)
+    prop.addProperty("enable.offheap.sort", p3)
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/resources/data_beyond68yrs.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/data_beyond68yrs.csv b/integration/spark-common-test/src/test/resources/data_beyond68yrs.csv
new file mode 100644
index 0000000..2188be6
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/data_beyond68yrs.csv
@@ -0,0 +1,11 @@
+empno,empname,designation,doj,workgroupcategory,workgroupcategoryname,deptno,deptname,projectcode,projectjoindate,projectenddate,attendance,utilization,salary
+11,arvind,SE,17-01-2007,1,developer,10,network,928478,17-02-1800,29-11-1900,96,96,5040
+12,krithin,SSE,29-05-2008,1,developer,11,protocol,928378,29-06-1802,30-12-1902,85,95,7124
+13,madhan,TPL,7/7/2009,2,tester,10,network,928478,7/8/2009,30-12-2016,88,99,9054
+14,anandh,SA,29-12-2010,3,manager,11,protocol,928278,29-01-2000,29-06-2016,77,92,11248
+15,ayushi,SSA,9/7/2011,1,developer,12,security,928375,9/12/2011,29-05-2016,99,91,13245
+16,pramod,SE,14-10-2012,1,developer,13,configManagement,928478,14-11-2038,29-12-2041,86,93,5040
+17,gawrav,PL,22-09-2013,2,tester,12,security,928778,22-10-3000,15-11-3002,78,97,9574
+18,sibi,TL,15-08-2014,2,tester,14,Learning,928176,15-09-2014,29-05-2016,84,98,7245
+19,shivani,PL,12/5/2015,1,developer,10,network,928977,12/6/2015,12/11/2016,88,91,11254
+20,bill,PM,1/12/2015,3,manager,14,Learning,928479,1/1/2016,30-11-2016,75,94,13547

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datetype/DateTypeTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datetype/DateTypeTest.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datetype/DateTypeTest.scala
index 4f29a28..a71cee1 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datetype/DateTypeTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datetype/DateTypeTest.scala
@@ -55,34 +55,29 @@ class DateTypeTest extends QueryTest {
     }
   }
   test("must throw exception for timestamp data type in dictionary_exclude") {
-    try {
-      sql(
-        "create table if not exists Carbon_automation_testtimestamp (imei string,doj timestamp," +
-        "deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId " +
-        "string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string," +
-        "series string,productionDate timestamp,bomCode string,internalModels string, " +
-        "deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, " +
-        "deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict " +
-        "string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, " +
-        "ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, " +
-        "ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId " +
-        "string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber " +
-        "string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer " +
-        "string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, " +
-        "Active_phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int, Latest_DAY" +
-        " int, Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province " +
-        "string, Latest_city string, Latest_district string, Latest_street string, " +
-        "Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, " +
-        "Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, " +
-        "Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, " +
-        "Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, " +
-        "Latest_operatorId string, gamePointDescription string, gamePointId int,contractNumber " +
-        "int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='doj')")
+    sql(
+      "create table if not exists Carbon_automation_testtimestamp (imei string,doj timestamp," +
+      "deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId " +
+      "string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string," +
+      "series string,productionDate timestamp,bomCode string,internalModels string, " +
+      "deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, " +
+      "deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict " +
+      "string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, " +
+      "ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, " +
+      "ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId " +
+      "string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber " +
+      "string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer " +
+      "string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, " +
+      "Active_phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int, Latest_DAY" +
+      " int, Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province " +
+      "string, Latest_city string, Latest_district string, Latest_street string, " +
+      "Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, " +
+      "Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, " +
+      "Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, " +
+      "Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, " +
+      "Latest_operatorId string, gamePointDescription string, gamePointId int,contractNumber " +
+      "int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='doj')")
 
-      assert(false)
-    }
-    catch {
-      case exception: MalformedCarbonCommandException => assert(true)
-    }
+    assert(true)
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterAllDataTypesTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterAllDataTypesTestCases.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterAllDataTypesTestCases.scala
index 157ae6e..ac9325d 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterAllDataTypesTestCases.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterAllDataTypesTestCases.scala
@@ -656,6 +656,7 @@ class RangeFilterMyTests extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists NO_DICTIONARY_CARBON_1")
     sql("drop table if exists NO_DICTIONARY_CARBON_2")
     sql("drop table if exists NO_DICTIONARY_HIVE_6")
+    sql("drop table if exists directdictionarytable")
     sql("drop table if exists dictionary_hive_6")
     sql("drop table if exists NO_DICTIONARY_HIVE_7")
     sql("drop table if exists NO_DICTIONARY_CARBON_6")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampNoDictionaryColumnTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampNoDictionaryColumnTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampNoDictionaryColumnTestCase.scala
new file mode 100644
index 0000000..e8a465a
--- /dev/null
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampNoDictionaryColumnTestCase.scala
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.spark.testsuite.directdictionary
+
+import java.sql.Timestamp
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.hive.HiveContext
+import org.scalatest.BeforeAndAfterAll
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.keygenerator.directdictionary.timestamp.TimeStampGranularityConstants
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.spark.sql.test.util.QueryTest
+
+/**
+ * Test Class for detailed query on timestamp datatypes
+ */
+class TimestampNoDictionaryColumnTestCase extends QueryTest with BeforeAndAfterAll {
+
+  override def beforeAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,"dd-MM-yyyy")
+
+    try {
+      sql("drop table if exists timestamp_nodictionary")
+      sql(
+        """
+         CREATE TABLE IF NOT EXISTS timestamp_nodictionary
+        (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String,
+         projectcode int, projectjoindate Timestamp, projectenddate Timestamp, attendance int,
+         utilization int, salary Int) STORED BY 'org.apache.carbondata.format'"""
+      )
+
+      val csvFilePath = s"$resourcesPath/data_beyond68yrs.csv"
+      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE timestamp_nodictionary OPTIONS"
+          + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')");
+    } catch {
+      case x: Throwable => CarbonProperties.getInstance()
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+          CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+    }
+  }
+
+  test("select projectjoindate, projectenddate from timestamp_nodictionary") {
+    checkAnswer(
+      sql("select projectjoindate, projectenddate from timestamp_nodictionary"),
+      Seq(Row(Timestamp.valueOf("2000-01-29 00:00:00.0"), Timestamp.valueOf("2016-06-29 00:00:00.0")),
+        Row(Timestamp.valueOf("1800-02-17 00:00:00.0"), Timestamp.valueOf("1900-11-29 00:00:00.0")),
+        Row(null, Timestamp.valueOf("2016-05-29 00:00:00.0")),
+        Row(null, Timestamp.valueOf("2016-11-30 00:00:00.0")),
+        Row(Timestamp.valueOf("3000-10-22 00:00:00.0"), Timestamp.valueOf("3002-11-15 00:00:00.0")),
+        Row(Timestamp.valueOf("1802-06-29 00:00:00.0"), Timestamp.valueOf("1902-12-30 00:00:00.0")),
+        Row(null, Timestamp.valueOf("2016-12-30 00:00:00.0")),
+        Row(Timestamp.valueOf("2038-11-14 00:00:00.0"), Timestamp.valueOf("2041-12-29 00:00:00.0")),
+        Row(null, null),
+        Row(Timestamp.valueOf("2014-09-15 00:00:00.0"), Timestamp.valueOf("2016-05-29 00:00:00.0"))
+      )
+    )
+  }
+
+
+  test("select projectjoindate, projectenddate from timestamp_nodictionary where in filter") {
+    checkAnswer(
+      sql("select projectjoindate, projectenddate from timestamp_nodictionary where projectjoindate in" +
+          "('1800-02-17 00:00:00','3000-10-22 00:00:00') or projectenddate in ('1900-11-29 00:00:00'," +
+          "'3002-11-15 00:00:00','2041-12-29 00:00:00')"),
+      Seq(Row(Timestamp.valueOf("1800-02-17 00:00:00.0"), Timestamp.valueOf("1900-11-29 00:00:00.0")),
+        Row(Timestamp.valueOf("3000-10-22 00:00:00.0"), Timestamp.valueOf("3002-11-15 00:00:00.0")),
+        Row(Timestamp.valueOf("2038-11-14 00:00:00.0"), Timestamp.valueOf("2041-12-29 00:00:00.0")))
+    )
+
+  }
+
+
+  override def afterAll {
+    sql("drop table timestamp_nodictionary")
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
index 8c79398..c0dba74 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
@@ -69,7 +69,7 @@ class TestDDLForPartitionTable  extends QueryTest with BeforeAndAfterAll {
         | PARTITIONED BY (doj Timestamp)
         | STORED BY 'org.apache.carbondata.format'
         | TBLPROPERTIES('PARTITION_TYPE'='RANGE',
-        |  'RANGE_INFO'='2017-06-11 00:00:02, 2017-06-13 23:59:59')
+        |  'RANGE_INFO'='2017-06-11 00:00:02, 2017-06-13 23:59:59', 'DICTIONARY_INCLUDE'='doj')
       """.stripMargin)
 
     val carbonTable = CarbonMetadata.getInstance().getCarbonTable("default_rangeTable")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTableWithDefaultProperties.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTableWithDefaultProperties.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTableWithDefaultProperties.scala
index 7359b53..1d660e8 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTableWithDefaultProperties.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTableWithDefaultProperties.scala
@@ -64,7 +64,7 @@ class TestDDLForPartitionTableWithDefaultProperties  extends QueryTest with Befo
         | PARTITIONED BY (doj Timestamp)
         | STORED BY 'org.apache.carbondata.format'
         | TBLPROPERTIES('PARTITION_TYPE'='RANGE',
-        |  'RANGE_INFO'='2017-06-11 00:00:02, 2017-06-13 23:59:59')
+        |  'RANGE_INFO'='2017-06-11 00:00:02, 2017-06-13 23:59:59','DICTIONARY_INCLUDE'='doj')
       """.stripMargin)
 
     val carbonTable = CarbonMetadata.getInstance().getCarbonTable("default_rangeTable")
@@ -92,7 +92,8 @@ class TestDDLForPartitionTableWithDefaultProperties  extends QueryTest with Befo
         | PARTITIONED BY (projectenddate Timestamp)
         | STORED BY 'org.apache.carbondata.format'
         | TBLPROPERTIES('PARTITION_TYPE'='LIST',
-        |  'LIST_INFO'='2017-06-11 00:00:02, 2017-06-13 23:59:59')
+        |  'LIST_INFO'='2017-06-11 00:00:02, 2017-06-13 23:59:59',
+        |  'DICTIONARY_INCLUDE'='projectenddate')
       """.stripMargin)
     val carbonTable = CarbonMetadata.getInstance().getCarbonTable("default_listTable")
     val partitionInfo = carbonTable.getPartitionInfo(carbonTable.getFactTableName)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
index 9e4f3b7..6347241 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
@@ -31,6 +31,42 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
     sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE origintable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
   }
 
+  test("create table sort columns dictionary include - int") {
+    sql(
+      "CREATE TABLE sortint (empno int, empname String, designation String, doj Timestamp, " +
+      "workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, " +
+      "projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int," +
+      "utilization int,salary int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+      "('dictionary_include' = 'empno', 'sort_columns'='empno')")
+  }
+
+  test("create table sort columns dictionary exclude - int") {
+    sql(
+      "CREATE TABLE sortint1 (empno int, empname String, designation String, doj Timestamp, " +
+      "workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, " +
+      "projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int," +
+      "utilization int,salary int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+      "('dictionary_exclude' = 'empno', 'sort_columns'='empno')")
+  }
+
+  test("create table sort columns dictionary include - bigint") {
+    sql(
+      "CREATE TABLE sortbigint (empno bigint, empname String, designation String, doj Timestamp, " +
+      "workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, " +
+      "projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int," +
+      "utilization int,salary int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+      "('dictionary_include' = 'empno', 'sort_columns'='empno')")
+  }
+
+  test("create table sort columns dictionary exclude - bigint") {
+    sql(
+      "CREATE TABLE sortbigint1 (empno bigint, empname String, designation String, doj Timestamp, " +
+      "workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, " +
+      "projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int," +
+      "utilization int,salary int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+      "('dictionary_exclude' = 'empno', 'sort_columns'='empno')")
+  }
+
   test("create table with no dictionary sort_columns") {
     sql("CREATE TABLE sorttable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='empno')")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
@@ -310,11 +346,18 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
   }
 
   def dropTable = {
+    sql("drop table if exists sortint")
+    sql("drop table if exists sortint1")
+    sql("drop table if exists sortlong")
+    sql("drop table if exists sortlong1")
+    sql("drop table if exists sortbigint")
+    sql("drop table if exists sortbigint1")
     sql("drop table if exists origintable1")
     sql("drop table if exists origintable2")
     sql("drop table if exists sorttable1")
     sql("drop table if exists sorttableDesc")
     sql("drop table if exists sorttable1a")
+    sql("drop table if exists sorttable1b")
     sql("drop table if exists sorttable2")
     sql("drop table if exists sorttable3")
     sql("drop table if exists sorttable4_offheap_safe")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
index d0309ba..acdec91 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
@@ -619,6 +619,10 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
     fields.foreach { field =>
       if (dictIncludeCols.exists(x => x.equalsIgnoreCase(field.column))) {
         dimFields += field
+      } else if (DataTypeUtil.getDataType(field.dataType.get.toUpperCase) == DataType.TIMESTAMP &&
+                 !dictIncludeCols.exists(x => x.equalsIgnoreCase(field.column))) {
+        noDictionaryDims :+= field.column
+        dimFields += field
       } else if (isDetectAsDimentionDatatype(field.dataType.get)) {
         dimFields += field
         // consider all String cols as noDicitonaryDims by default
@@ -626,8 +630,7 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
           noDictionaryDims :+= field.column
         }
       } else if (sortKeyDimsTmp.exists(x => x.equalsIgnoreCase(field.column)) &&
-                 (dictExcludeCols.exists(x => x.equalsIgnoreCase(field.column)) ||
-                  isDefaultMeasure(field.dataType)) &&
+                 isDefaultMeasure(field.dataType) &&
                  (!field.dataType.get.equalsIgnoreCase("STRING"))) {
         throw new MalformedCarbonCommandException(s"Illegal argument in sort_column.Check if you " +
                                                   s"have included UNSUPPORTED DataType column{${
@@ -710,7 +713,7 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
    * detects whether datatype is part of dictionary_exclude
    */
   def isDataTypeSupportedForDictionary_Exclude(columnDataType: String): Boolean = {
-    val dataTypes = Array("string")
+    val dataTypes = Array("string", "timestamp", "int", "long", "bigint")
     dataTypes.exists(x => x.equalsIgnoreCase(columnDataType))
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
index f5d69ef..fc20108 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
@@ -299,7 +299,10 @@ class AlterTableColumnSchemaGenerator(
     if (alterTableModel.highCardinalityDims.contains(colName)) {
       encoders.remove(Encoding.DICTIONARY)
     }
-    if (dataType == DataType.TIMESTAMP || dataType == DataType.DATE) {
+    if (dataType == DataType.DATE) {
+      encoders.add(Encoding.DIRECT_DICTIONARY)
+    }
+    if (dataType == DataType.TIMESTAMP && !alterTableModel.highCardinalityDims.contains(colName)) {
       encoders.add(Encoding.DIRECT_DICTIONARY)
     }
     val colPropMap = new java.util.HashMap[String, String]()
@@ -364,7 +367,10 @@ class TableNewProcessor(cm: TableModel) {
     if (highCardinalityDims.contains(colName)) {
       encoders.remove(Encoding.DICTIONARY)
     }
-    if (dataType == DataType.TIMESTAMP || dataType == DataType.DATE) {
+    if (dataType == DataType.DATE) {
+      encoders.add(Encoding.DIRECT_DICTIONARY)
+    }
+    if (dataType == DataType.TIMESTAMP && !highCardinalityDims.contains(colName)) {
       encoders.add(Encoding.DIRECT_DICTIONARY)
     }
     columnSchema.setEncodingList(encoders)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index 130f305..01cd113 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -877,8 +877,7 @@ private[sql] case class DescribeCommandFormatted(
             .append(mapper.writeValueAsString(dimension.getColumnProperties))
             .append(",")
         }
-        if (dimension.hasEncoding(Encoding.DICTIONARY) &&
-            !dimension.hasEncoding(Encoding.DIRECT_DICTIONARY)) {
+        if (dimension.hasEncoding(Encoding.DICTIONARY)) {
           "DICTIONARY, KEY COLUMN" + (dimension.hasEncoding(Encoding.INVERTED_INDEX) match {
                       case false => ",NOINVERTEDINDEX"
                       case _ => ""

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
index 62713fa..7400839 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
@@ -92,19 +92,12 @@ class TestCreateTableSyntax extends QueryTest with BeforeAndAfterAll {
       }
     }
   }
-    test("test carbon table create with int datatype as dictionary exclude") {
-    try {
-      sql("drop table if exists carbontable")
-      sql("create table carbontable(id int, name string, dept string, mobile array<string>, "+
+  test("test carbon table create with int datatype as dictionary exclude") {
+    sql("drop table if exists carbontable")
+    sql("create table carbontable(id int, name string, dept string, mobile array<string>, " +
         "country string, salary double) STORED BY 'org.apache.carbondata.format' " +
         "TBLPROPERTIES('DICTIONARY_EXCLUDE'='id')")
-      assert(false)
-    } catch {
-      case e : MalformedCarbonCommandException => {
-        assert(e.getMessage.equals("DICTIONARY_EXCLUDE is unsupported for int " +
-          "data type column: id"))
-      }
-    }
+    assert(true)
   }
 
   test("test carbon table create with decimal datatype as dictionary exclude") {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala b/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala
index 3646fad..29daac9 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala
+++ b/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala
@@ -109,7 +109,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
         | PARTITIONED BY (logdate Timestamp)
         | STORED BY 'carbondata'
         | TBLPROPERTIES('PARTITION_TYPE'='RANGE',
-        | 'RANGE_INFO'='2014/01/01, 2015/01/01, 2016/01/01')
+        | 'RANGE_INFO'='2014/01/01, 2015/01/01, 2016/01/01', 'DICTIONARY_INCLUDE'='logdate')
       """.stripMargin)
 
     /**
@@ -183,7 +183,8 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
         | PARTITIONED BY (logdate Timestamp)
         | STORED BY 'carbondata'
         | TBLPROPERTIES('PARTITION_TYPE'='RANGE',
-        | 'RANGE_INFO'='2014/01/01, 2015/01/01, 2016/01/01, 2018/01/01')
+        | 'RANGE_INFO'='2014/01/01, 2015/01/01, 2016/01/01, 2018/01/01',
+        | 'DICTIONARY_INCLUDE'='logdate')
       """.stripMargin)
 
     /**
@@ -223,6 +224,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
         | STORED BY 'carbondata'
         | TBLPROPERTIES('PARTITION_TYPE'='RANGE',
         | 'RANGE_INFO'='2014/01/01, 2015/01/01, 2016/01/01, 2018/01/01',
+        | 'DICTIONARY_INCLUDE'='logdate',
         | 'BUCKETNUMBER'='3',
         | 'BUCKETCOLUMNS'='country')
       """.stripMargin)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
index 23270e3..3dab247 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
@@ -19,6 +19,7 @@ package org.apache.spark.carbondata.restructure
 
 import java.io.File
 import java.math.{BigDecimal, RoundingMode}
+import java.sql.Timestamp
 
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.common.util.Spark2QueryTest
@@ -35,6 +36,7 @@ class AlterTableValidationTestCase extends Spark2QueryTest with BeforeAndAfterAl
         new File("./target/test/badRecords").getCanonicalPath)
 
     sql("drop table if exists restructure")
+    sql("drop table if exists table1")
     sql("drop table if exists restructure_test")
     sql("drop table if exists restructure_new")
     sql("drop table if exists restructure_bad")
@@ -83,7 +85,7 @@ class AlterTableValidationTestCase extends Spark2QueryTest with BeforeAndAfterAl
       "('DICTIONARY_EXCLUDE'='nodict', 'DEFAULT.VALUE.NoDict'= 'abcd')")
     checkAnswer(sql("select distinct(nodict) from restructure"), Row("abcd"))
   }
-  test("test add timestamp direct dictionary column") {
+  test("test add timestamp no dictionary column") {
     sql(
       "alter table restructure add columns(tmpstmp timestamp) TBLPROPERTIES ('DEFAULT.VALUE" +
       ".tmpstmp'= '17-01-2007')")
@@ -91,6 +93,27 @@ class AlterTableValidationTestCase extends Spark2QueryTest with BeforeAndAfterAl
       Row(new java.sql.Timestamp(107, 0, 17, 0, 0, 0, 0)))
     checkExistence(sql("desc restructure"), true, "tmpstmptimestamp")
   }
+
+  test("test add timestamp direct dictionary column") {
+    sql(
+      "alter table restructure add columns(tmpstmp1 timestamp) TBLPROPERTIES ('DEFAULT.VALUE" +
+      ".tmpstmp1'= '17-01-3007','DICTIONARY_INCLUDE'='tmpstmp1')")
+    checkAnswer(sql("select distinct(tmpstmp1) from restructure"),
+      Row(null))
+    checkExistence(sql("desc restructure"), true, "tmpstmptimestamp")
+  }
+
+  test("test add timestamp column and load as dictionary") {
+    sql("create table table1(name string) stored by 'carbondata'")
+    sql("insert into table1 select 'abc'")
+    sql("alter table table1 add columns(tmpstmp timestamp) TBLPROPERTIES " +
+        "('DEFAULT.VALUE.tmpstmp'='17-01-3007','DICTIONARY_INCLUDE'= 'tmpstmp')")
+    sql("insert into table1 select 'name','17-01-2007'")
+    checkAnswer(sql("select * from table1"),
+      Seq(Row("abc",null),
+        Row("name",Timestamp.valueOf("2007-01-17 00:00:00.0"))))
+  }
+
   test("test add msr column") {
     sql(
       "alter table restructure add columns(msrField decimal(5,2))TBLPROPERTIES ('DEFAULT.VALUE" +
@@ -441,6 +464,7 @@ class AlterTableValidationTestCase extends Spark2QueryTest with BeforeAndAfterAl
   }
   override def afterAll {
     sql("DROP TABLE IF EXISTS restructure")
+    sql("drop table if exists table1")
     sql("DROP TABLE IF EXISTS restructure_new")
     sql("DROP TABLE IF EXISTS restructure_test")
     sql("DROP TABLE IF EXISTS restructure_bad")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/processing/src/main/java/org/apache/carbondata/processing/newflow/converter/impl/NonDictionaryFieldConverterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/newflow/converter/impl/NonDictionaryFieldConverterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/newflow/converter/impl/NonDictionaryFieldConverterImpl.java
index 533fc87..4861d78 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/newflow/converter/impl/NonDictionaryFieldConverterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/newflow/converter/impl/NonDictionaryFieldConverterImpl.java
@@ -38,8 +38,11 @@ public class NonDictionaryFieldConverterImpl implements FieldConverter {
 
   private boolean isEmptyBadRecord;
 
+  private DataField dataField;
+
   public NonDictionaryFieldConverterImpl(DataField dataField, String nullformat, int index,
       boolean isEmptyBadRecord) {
+    this.dataField = dataField;
     this.dataType = dataField.getColumn().getDataType();
     this.column = dataField.getColumn();
     this.index = index;
@@ -49,15 +52,19 @@ public class NonDictionaryFieldConverterImpl implements FieldConverter {
 
   @Override public void convert(CarbonRow row, BadRecordLogHolder logHolder) {
     String dimensionValue = row.getString(index);
-    if (dimensionValue == null || dimensionValue.equals(nullformat)) {
-      row.update(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, index);
+    if (null == dimensionValue && column.getDataType() != DataType.STRING) {
+      logHolder.setReason(
+          CarbonDataProcessorUtil.prepareFailureReason(column.getColName(), column.getDataType()));
+      updateWithNullValue(row);
+    } else if (dimensionValue == null || dimensionValue.equals(nullformat)) {
+      updateWithNullValue(row);
     } else {
       try {
-        row.update(
-            DataTypeUtil.getBytesBasedOnDataTypeForNoDictionaryColumn(dimensionValue, dataType),
-            index);
+        row.update(DataTypeUtil
+            .getBytesBasedOnDataTypeForNoDictionaryColumn(dimensionValue, dataType,
+                dataField.getDateFormat()), index);
       } catch (Throwable ex) {
-        if (dimensionValue.length() > 0 || isEmptyBadRecord) {
+        if (dimensionValue.length() > 0 || (dimensionValue.length() == 0 && isEmptyBadRecord)) {
           String message = logHolder.getColumnMessageMap().get(column.getColName());
           if (null == message) {
             message = CarbonDataProcessorUtil
@@ -65,11 +72,19 @@ public class NonDictionaryFieldConverterImpl implements FieldConverter {
             logHolder.getColumnMessageMap().put(column.getColName(), message);
           }
           logHolder.setReason(message);
-          row.update(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, index);
+          updateWithNullValue(row);
         } else {
-          row.update(new byte[0], index);
+          updateWithNullValue(row);
         }
       }
     }
   }
+
+  private void updateWithNullValue(CarbonRow row) {
+    if (dataType == DataType.STRING) {
+      row.update(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, index);
+    } else {
+      row.update(CarbonCommonConstants.EMPTY_BYTE_ARRAY, index);
+    }
+  }
 }


[2/2] carbondata git commit: [CARBONDATA-1450] Support timestamp, int and Long as Dictionary Exclude

Posted by gv...@apache.org.
[CARBONDATA-1450] Support timestamp, int and Long as Dictionary Exclude

Previously, a timestamp column supported only a 68-year range.

This PR removes that 68-year limitation and can support any timestamp value.

To be noted,

(1) By default, a timestamp column will be a no-dictionary column, which can support any timestamp without limitation

(2) If it is enough to load only 68 years, then explicitly timestamp column can be included in DICTIONARY_INCLUDE(this will be direct_dictionary)

(3) Sort columns support for int,long, bigint

(4) int, long, bigint can be DICTIONARY_EXCLUDE columns

(5) If a timestamp column is to be partitioned, it should be a DICTIONARY_INCLUDE column. (Partitioning on a dictionary_exclude timestamp column will not throw any exception, but it is not supported)

This closes #1322


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/36ceb59f
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/36ceb59f
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/36ceb59f

Branch: refs/heads/master
Commit: 36ceb59f014f7369575f433064e88aa07a7de48e
Parents: 8b83f58
Author: dhatchayani <dh...@gmail.com>
Authored: Tue Sep 5 15:54:28 2017 +0530
Committer: Venkata Ramana G <ra...@huawei.com>
Committed: Mon Sep 18 22:34:21 2017 +0530

----------------------------------------------------------------------
 .../core/constants/CarbonCommonConstants.java   |   6 +
 .../carbondata/core/datastore/TableSpec.java    |   4 +
 ...feVariableLengthDimensionDataChunkStore.java |  11 +-
 ...afeVariableLengthDimesionDataChunkStore.java |   7 +-
 .../DictionaryBasedVectorResultCollector.java   |   3 +
 .../RestructureBasedRawResultCollector.java     |   5 +-
 .../RestructureBasedVectorResultCollector.java  |  27 +-
 .../executor/impl/AbstractQueryExecutor.java    |  18 +-
 .../scan/executor/util/RestructureUtil.java     |  21 +-
 .../scan/filter/FilterExpressionProcessor.java  |  24 +-
 .../carbondata/core/scan/filter/FilterUtil.java |  13 +-
 .../executer/RangeValueFilterExecuterImpl.java  |  10 +-
 .../executer/RestructureEvaluatorImpl.java      |   5 +-
 .../executer/RowLevelFilterExecuterImpl.java    |   2 +
 ...velRangeLessThanEqualFilterExecuterImpl.java |   2 +
 .../RowLevelRangeLessThanFiterExecuterImpl.java |   2 +
 .../RowLevelRangeFilterResolverImpl.java        |  15 +-
 .../carbondata/core/util/DataTypeUtil.java      |  43 +-
 .../sdv/generated/DataLoadingTestCase.scala     |  14 +-
 .../sdv/generated/QueriesBVATestCase.scala      |   2 +-
 .../sdv/generated/QueriesBasicTestCase.scala    |   2 +-
 .../generated/QueriesCompactionTestCase.scala   |   2 +-
 .../QueriesExcludeDictionaryTestCase.scala      |   2 +-
 .../SortColumnExcudeDictTestCase.scala          | 433 +++++++++++++++++++
 .../src/test/resources/data_beyond68yrs.csv     |  11 +
 .../spark/testsuite/datetype/DateTypeTest.scala |  51 +--
 .../RangeFilterAllDataTypesTestCases.scala      |   1 +
 .../TimestampNoDictionaryColumnTestCase.scala   |  93 ++++
 .../partition/TestDDLForPartitionTable.scala    |   2 +-
 ...ForPartitionTableWithDefaultProperties.scala |   5 +-
 .../testsuite/sortcolumns/TestSortColumns.scala |  43 ++
 .../spark/sql/catalyst/CarbonDDLSqlParser.scala |   9 +-
 .../command/carbonTableSchemaCommon.scala       |  10 +-
 .../execution/command/carbonTableSchema.scala   |   3 +-
 .../createtable/TestCreateTableSyntax.scala     |  15 +-
 .../partition/TestAlterPartitionTable.scala     |   6 +-
 .../AlterTableValidationTestCase.scala          |  26 +-
 .../impl/NonDictionaryFieldConverterImpl.java   |  31 +-
 38 files changed, 855 insertions(+), 124 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index 3bc1bcc..36d73d7 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -190,6 +190,12 @@ public final class CarbonCommonConstants {
    * Bytes for string 0, it is used in codegen in case of null values.
    */
   public static final byte[] ZERO_BYTE_ARRAY = "0".getBytes(Charset.forName(DEFAULT_CHARSET));
+
+  /**
+   * Empty byte array
+   */
+  public static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
+
   /**
    * FILE STATUS IN-PROGRESS
    */

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java b/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
index 5492f7b..2fdf82b 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
@@ -62,6 +62,10 @@ public class TableSpec {
         if (dimension.isComplex()) {
           DimensionSpec spec = new DimensionSpec(ColumnType.COMPLEX, dimension);
           dimensionSpec[dimIndex++] = spec;
+        } else if (dimension.getDataType() == DataType.TIMESTAMP && !dimension
+            .isDirectDictionaryEncoding()) {
+          DimensionSpec spec = new DimensionSpec(ColumnType.PLAIN_VALUE, dimension);
+          dimensionSpec[dimIndex++] = spec;
         } else if (dimension.isDirectDictionaryEncoding()) {
           DimensionSpec spec = new DimensionSpec(ColumnType.DIRECT_DICTIONARY, dimension);
           dimensionSpec[dimIndex++] = spec;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableLengthDimensionDataChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableLengthDimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableLengthDimensionDataChunkStore.java
index 2079811..7ce3a1d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableLengthDimensionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableLengthDimensionDataChunkStore.java
@@ -29,6 +29,7 @@ import org.apache.spark.sql.types.IntegerType;
 import org.apache.spark.sql.types.LongType;
 import org.apache.spark.sql.types.ShortType;
 import org.apache.spark.sql.types.StringType;
+import org.apache.spark.sql.types.TimestampType;
 
 /**
  * Below class is responsible to store variable length dimension data chunk in
@@ -140,11 +141,13 @@ public class SafeVariableLengthDimensionDataChunkStore extends SafeAbsractDimens
       // for last record
       length = (short) (this.data.length - currentDataOffset);
     }
-    if (ByteUtil.UnsafeComparer.INSTANCE.equals(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, 0,
-        CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY.length, data, currentDataOffset, length)) {
+    DataType dt = vector.getType();
+    if ((!(dt instanceof StringType) && length == 0) || ByteUtil.UnsafeComparer.INSTANCE
+        .equals(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, 0,
+            CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY.length, data, currentDataOffset,
+            length)) {
       vector.putNull(vectorRow);
     } else {
-      DataType dt = vector.getType();
       if (dt instanceof StringType) {
         vector.putBytes(vectorRow, currentDataOffset, length, data);
       } else if (dt instanceof BooleanType) {
@@ -155,6 +158,8 @@ public class SafeVariableLengthDimensionDataChunkStore extends SafeAbsractDimens
         vector.putInt(vectorRow, ByteUtil.toInt(data, currentDataOffset, length));
       } else if (dt instanceof LongType) {
         vector.putLong(vectorRow, ByteUtil.toLong(data, currentDataOffset, length));
+      } else if (dt instanceof TimestampType) {
+        vector.putLong(vectorRow, ByteUtil.toLong(data, currentDataOffset, length) * 1000L);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableLengthDimesionDataChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableLengthDimesionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableLengthDimesionDataChunkStore.java
index 6193804..c242752 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableLengthDimesionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableLengthDimesionDataChunkStore.java
@@ -30,6 +30,7 @@ import org.apache.spark.sql.types.IntegerType;
 import org.apache.spark.sql.types.LongType;
 import org.apache.spark.sql.types.ShortType;
 import org.apache.spark.sql.types.StringType;
+import org.apache.spark.sql.types.TimestampType;
 
 /**
  * Below class is responsible to store variable length dimension data chunk in
@@ -167,11 +168,11 @@ public class UnsafeVariableLengthDimesionDataChunkStore
 
   @Override public void fillRow(int rowId, CarbonColumnVector vector, int vectorRow) {
     byte[] value = getRow(rowId);
-    if (ByteUtil.UnsafeComparer.INSTANCE
+    DataType dt = vector.getType();
+    if ((!(dt instanceof StringType) && value.length == 0) || ByteUtil.UnsafeComparer.INSTANCE
         .equals(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, value)) {
       vector.putNull(vectorRow);
     } else {
-      DataType dt = vector.getType();
       if (dt instanceof StringType) {
         vector.putBytes(vectorRow, 0, value.length, value);
       } else if (dt instanceof BooleanType) {
@@ -182,6 +183,8 @@ public class UnsafeVariableLengthDimesionDataChunkStore
         vector.putInt(vectorRow, ByteUtil.toInt(value, 0, value.length));
       } else if (dt instanceof LongType) {
         vector.putLong(vectorRow, ByteUtil.toLong(value, 0, value.length));
+      } else if (dt instanceof TimestampType) {
+        vector.putLong(vectorRow, ByteUtil.toLong(value, 0, value.length) * 1000L);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java
index c857a47..10888fe 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java
@@ -70,6 +70,9 @@ public class DictionaryBasedVectorResultCollector extends AbstractScannedResultC
     List<ColumnVectorInfo> complexList = new ArrayList<>();
     List<ColumnVectorInfo> implictColumnList = new ArrayList<>();
     for (int i = 0; i < queryDimensions.length; i++) {
+      if (!dimensionInfo.getDimensionExists()[i]) {
+        continue;
+      }
       if (queryDimensions[i].getDimension().hasEncoding(Encoding.IMPLICIT)) {
         ColumnVectorInfo columnVectorInfo = new ColumnVectorInfo();
         implictColumnList.add(columnVectorInfo);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
index ea89ce5..45275a5 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
@@ -26,6 +26,7 @@ import org.apache.carbondata.core.datastore.block.SegmentProperties;
 import org.apache.carbondata.core.keygenerator.KeyGenException;
 import org.apache.carbondata.core.keygenerator.KeyGenerator;
 import org.apache.carbondata.core.keygenerator.mdkey.MultiDimKeyVarLengthGenerator;
+import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;
@@ -238,9 +239,11 @@ public class RestructureBasedRawResultCollector extends RawBasedResultCollector
           Object defaultValue = dimensionInfo.getDefaultValues()[i];
           if (null != defaultValue) {
             newColumnDefaultValue = ((UTF8String) defaultValue).getBytes();
-          } else {
+          } else if (actualQueryDimensions[i].getDimension().getDataType() == DataType.STRING) {
             newColumnDefaultValue =
                 UTF8String.fromString(CarbonCommonConstants.MEMBER_DEFAULT_VAL).getBytes();
+          } else {
+            newColumnDefaultValue = CarbonCommonConstants.EMPTY_BYTE_ARRAY;
           }
           noDictionaryKeyArrayWithNewlyAddedColumns[newKeyArrayIndex++] = newColumnDefaultValue;
         }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedVectorResultCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedVectorResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedVectorResultCollector.java
index 8ae0d96..65b9a17 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedVectorResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedVectorResultCollector.java
@@ -18,6 +18,7 @@ package org.apache.carbondata.core.scan.collector.impl;
 
 import java.util.List;
 
+import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
@@ -30,6 +31,7 @@ import org.apache.carbondata.core.scan.result.vector.CarbonColumnarBatch;
 import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
 
 import org.apache.spark.sql.types.Decimal;
+import org.apache.spark.unsafe.types.UTF8String;
 
 /**
  * It is not a collector it is just a scanned result holder.
@@ -57,6 +59,12 @@ public class RestructureBasedVectorResultCollector extends DictionaryBasedVector
       if (!dimensionInfo.getDimensionExists()[i]) {
         // add a dummy column vector result collector object
         ColumnVectorInfo columnVectorInfo = new ColumnVectorInfo();
+        columnVectorInfo.dimension = queryDimensions[i];
+        if (queryDimensions[i].getDimension().getDataType().equals(DataType.TIMESTAMP)
+            || queryDimensions[i].getDimension().getDataType().equals(DataType.DATE)) {
+          columnVectorInfo.directDictionaryGenerator = DirectDictionaryKeyGeneratorFactory
+              .getDirectDictionaryGenerator(queryDimensions[i].getDimension().getDataType());
+        }
         allColumnInfo[queryDimensions[i].getQueryOrder()] = columnVectorInfo;
       }
     }
@@ -71,6 +79,7 @@ public class RestructureBasedVectorResultCollector extends DictionaryBasedVector
         // add a dummy column vector result collector object
         ColumnVectorInfo columnVectorInfo = new ColumnVectorInfo();
         allColumnInfo[queryMeasures[i].getQueryOrder()] = columnVectorInfo;
+        columnVectorInfo.measure = queryMeasures[i];
         measureDefaultValues[i] = getMeasureDefaultValue(queryMeasures[i].getMeasure());
       }
     }
@@ -140,7 +149,7 @@ public class RestructureBasedVectorResultCollector extends DictionaryBasedVector
         } else {
           // fill no dictionary data
           fillNoDictionaryData(allColumnInfo[queryOrder].vector, allColumnInfo[queryOrder],
-              dimension.getDefaultValue());
+              dimensionInfo.getDefaultValues()[i]);
         }
       }
     }
@@ -186,9 +195,21 @@ public class RestructureBasedVectorResultCollector extends DictionaryBasedVector
    * @param defaultValue
    */
   private void fillNoDictionaryData(CarbonColumnVector vector, ColumnVectorInfo columnVectorInfo,
-      byte[] defaultValue) {
+      Object defaultValue) {
     if (null != defaultValue) {
-      vector.putBytes(columnVectorInfo.vectorOffset, columnVectorInfo.size, defaultValue);
+      switch (columnVectorInfo.dimension.getDimension().getDataType()) {
+        case INT:
+          vector.putInts(columnVectorInfo.vectorOffset, columnVectorInfo.size, (int) defaultValue);
+          break;
+        case LONG:
+        case TIMESTAMP:
+          vector
+              .putLongs(columnVectorInfo.vectorOffset, columnVectorInfo.size, (long) defaultValue);
+          break;
+        default:
+          vector.putBytes(columnVectorInfo.vectorOffset, columnVectorInfo.size,
+              ((UTF8String) defaultValue).getBytes());
+      }
     } else {
       vector.putNulls(columnVectorInfo.vectorOffset, columnVectorInfo.size);
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
index e8e7bfb..25c827b 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
@@ -296,18 +296,12 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
       blockExecutionInfo.setFilterExecuterTree(FilterUtil
           .getFilterExecuterTree(queryModel.getFilterExpressionResolverTree(), segmentProperties,
               blockExecutionInfo.getComlexDimensionInfoMap()));
-      List<IndexKey> listOfStartEndKeys = new ArrayList<IndexKey>(2);
-      FilterUtil.traverseResolverTreeAndGetStartAndEndKey(segmentProperties,
-          queryModel.getFilterExpressionResolverTree(), listOfStartEndKeys);
-      startIndexKey = listOfStartEndKeys.get(0);
-      endIndexKey = listOfStartEndKeys.get(1);
-    } else {
-      try {
-        startIndexKey = FilterUtil.prepareDefaultStartIndexKey(segmentProperties);
-        endIndexKey = FilterUtil.prepareDefaultEndIndexKey(segmentProperties);
-      } catch (KeyGenException e) {
-        throw new QueryExecutionException(e);
-      }
+    }
+    try {
+      startIndexKey = FilterUtil.prepareDefaultStartIndexKey(segmentProperties);
+      endIndexKey = FilterUtil.prepareDefaultEndIndexKey(segmentProperties);
+    } catch (KeyGenException e) {
+      throw new QueryExecutionException(e);
     }
     //setting the start index key of the block node
     blockExecutionInfo.setStartKey(startIndexKey);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
index aed2775..5e78741 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
@@ -35,6 +35,7 @@ import org.apache.carbondata.core.scan.executor.infos.DimensionInfo;
 import org.apache.carbondata.core.scan.executor.infos.MeasureInfo;
 import org.apache.carbondata.core.scan.model.QueryDimension;
 import org.apache.carbondata.core.scan.model.QueryMeasure;
+import org.apache.carbondata.core.util.ByteUtil;
 import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.core.util.DataTypeUtil;
 
@@ -157,7 +158,8 @@ public class RestructureUtil {
       }
     } else {
       // no dictionary
-      defaultValueToBeConsidered = getNoDictionaryDefaultValue(defaultValue);
+      defaultValueToBeConsidered =
+          getNoDictionaryDefaultValue(queryDimension.getDataType(), defaultValue);
     }
     return defaultValueToBeConsidered;
   }
@@ -211,10 +213,23 @@ public class RestructureUtil {
    * @param defaultValue
    * @return
    */
-  private static Object getNoDictionaryDefaultValue(byte[] defaultValue) {
+  private static Object getNoDictionaryDefaultValue(DataType datatype, byte[] defaultValue) {
     Object noDictionaryDefaultValue = null;
     if (!isDefaultValueNull(defaultValue)) {
-      noDictionaryDefaultValue = UTF8String.fromBytes(defaultValue);
+      switch (datatype) {
+        case INT:
+          noDictionaryDefaultValue = ByteUtil.toInt(defaultValue, 0, defaultValue.length);
+          break;
+        case LONG:
+          noDictionaryDefaultValue = ByteUtil.toLong(defaultValue, 0, defaultValue.length);
+          break;
+        case TIMESTAMP:
+          long timestampValue = ByteUtil.toLong(defaultValue, 0, defaultValue.length);
+          noDictionaryDefaultValue = timestampValue * 1000L;
+          break;
+        default:
+          noDictionaryDefaultValue = UTF8String.fromBytes(defaultValue);
+      }
     }
     return noDictionaryDefaultValue;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
index cfcf112..1290f8b 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
@@ -113,23 +113,13 @@ public class FilterExpressionProcessor implements FilterProcessor {
       LOGGER.debug("preparing the start and end key for finding"
           + "start and end block as per filter resolver");
     }
-    List<IndexKey> listOfStartEndKeys = new ArrayList<IndexKey>(2);
-    FilterUtil.traverseResolverTreeAndGetStartAndEndKey(tableSegment.getSegmentProperties(),
-        filterResolver, listOfStartEndKeys);
-    // reading the first value from list which has start key
-    IndexKey searchStartKey = listOfStartEndKeys.get(0);
-    // reading the last value from list which has end key
-    IndexKey searchEndKey = listOfStartEndKeys.get(1);
-    if (null == searchStartKey && null == searchEndKey) {
-      try {
-        // TODO need to handle for no dictionary dimensions
-        searchStartKey =
-            FilterUtil.prepareDefaultStartIndexKey(tableSegment.getSegmentProperties());
-        // TODO need to handle for no dictionary dimensions
-        searchEndKey = FilterUtil.prepareDefaultEndIndexKey(tableSegment.getSegmentProperties());
-      } catch (KeyGenException e) {
-        return listOfDataBlocksToScan;
-      }
+    IndexKey searchStartKey = null;
+    IndexKey searchEndKey = null;
+    try {
+      searchStartKey = FilterUtil.prepareDefaultStartIndexKey(tableSegment.getSegmentProperties());
+      searchEndKey = FilterUtil.prepareDefaultEndIndexKey(tableSegment.getSegmentProperties());
+    } catch (KeyGenException e) {
+      throw new RuntimeException(e);
     }
     if (LOGGER.isDebugEnabled()) {
       char delimiter = ',';

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
index 01e1cfa..497ca8c 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
@@ -414,14 +414,21 @@ public final class FilterUtil {
     String result = null;
     try {
       int length = evaluateResultListFinal.size();
+      String timeFormat = CarbonProperties.getInstance()
+          .getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+              CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
       for (int i = 0; i < length; i++) {
         result = evaluateResultListFinal.get(i);
         if (CarbonCommonConstants.MEMBER_DEFAULT_VAL.equals(result)) {
-          filterValuesList.add(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY);
+          if (dataType == DataType.STRING) {
+            filterValuesList.add(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY);
+          } else {
+            filterValuesList.add(CarbonCommonConstants.EMPTY_BYTE_ARRAY);
+          }
           continue;
         }
-        filterValuesList.add(
-              DataTypeUtil.getBytesBasedOnDataTypeForNoDictionaryColumn(result, dataType));
+        filterValuesList.add(DataTypeUtil
+            .getBytesBasedOnDataTypeForNoDictionaryColumn(result, dataType, timeFormat));
       }
     } catch (Throwable ex) {
       throw new FilterUnsupportedException("Unsupported Filter condition: " + result, ex);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
index f2d5a69..c7a0ae7 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
@@ -27,6 +27,7 @@ import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
 import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
 import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
+import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.scan.expression.Expression;
@@ -453,7 +454,8 @@ public class RangeValueFilterExecuterImpl extends ValueBasedFilterExecuterImpl {
   private void updateForNoDictionaryColumn(int start, int end, DimensionColumnDataChunk dataChunk,
       BitSet bitset) {
     for (int j = start; j <= end; j++) {
-      if (dataChunk.compareTo(j, CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY) == 0) {
+      if (dataChunk.compareTo(j, CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY) == 0
+          || dataChunk.compareTo(j, CarbonCommonConstants.EMPTY_BYTE_ARRAY) == 0) {
         bitset.flip(j);
       }
     }
@@ -562,7 +564,11 @@ public class RangeValueFilterExecuterImpl extends ValueBasedFilterExecuterImpl {
           defaultValue = ByteUtil.toBytes(key);
         }
       } else {
-        defaultValue = CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY;
+        if (dimColEvaluatorInfo.getDimension().getDataType() == DataType.STRING) {
+          defaultValue = CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY;
+        } else {
+          defaultValue = CarbonCommonConstants.EMPTY_BYTE_ARRAY;
+        }
       }
       // evaluate result for lower range value first and then perform and operation in the
       // upper range value in order to compute the final result

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
index d72b955..c570ed2 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
@@ -21,6 +21,7 @@ import java.nio.charset.Charset;
 import java.util.List;
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
@@ -53,10 +54,12 @@ public abstract class RestructureEvaluatorImpl implements FilterExecuter {
     if (!dimension.hasEncoding(Encoding.DICTIONARY)) {
       // for no dictionary cases
       // 3 cases: is NUll, is Not Null and filter on default value of newly added column
-      if (null == defaultValue) {
+      if (null == defaultValue && dimension.getDataType() == DataType.STRING) {
         // default value for case where user gives is Null condition
         defaultValue = CarbonCommonConstants.MEMBER_DEFAULT_VAL
             .getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
+      } else if (null == defaultValue) {
+        defaultValue = CarbonCommonConstants.EMPTY_BYTE_ARRAY;
       }
       List<byte[]> noDictionaryFilterValuesList = filterValues.getNoDictionaryFilterValuesList();
       for (byte[] filterValue : noDictionaryFilterValuesList) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
index 3f25d9b..b79f18d 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
@@ -269,6 +269,8 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
           if (null != memberBytes) {
             if (Arrays.equals(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, memberBytes)) {
               memberBytes = null;
+            } else if (memberBytes.length == 0) {
+              memberBytes = null;
             }
             record[dimColumnEvaluatorInfo.getRowIndex()] = DataTypeUtil
                 .getDataBasedOnDataTypeForNoDictionaryColumn(memberBytes,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
index 50231d6..f8886f9 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
@@ -274,6 +274,8 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
       } else {
         defaultValue = ByteUtil.toBytes(key);
       }
+    } else if (dimColEvaluatorInfoList.get(0).getDimension().getDataType() != DataType.STRING) {
+      defaultValue = CarbonCommonConstants.EMPTY_BYTE_ARRAY;
     }
     BitSet bitSet = null;
     if (dimensionColumnDataChunk.isExplicitSorted()) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java
index 1972f8e..580f963 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java
@@ -276,6 +276,8 @@ public class RowLevelRangeLessThanFiterExecuterImpl extends RowLevelFilterExecut
       } else {
         defaultValue = ByteUtil.toBytes(key);
       }
+    } else if (dimColEvaluatorInfoList.get(0).getDimension().getDataType() != DataType.STRING) {
+      defaultValue = CarbonCommonConstants.EMPTY_BYTE_ARRAY;
     }
     BitSet bitSet = null;
     if (dimensionColumnDataChunk.isExplicitSorted()) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
index 3e27594..c4df001 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
@@ -28,6 +28,7 @@ import org.apache.carbondata.core.datastore.block.SegmentProperties;
 import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
 import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
+import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
@@ -45,6 +46,7 @@ import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
 import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
 import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
 import org.apache.carbondata.core.util.ByteUtil;
+import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.DataTypeUtil;
 
 public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverImpl {
@@ -159,15 +161,22 @@ public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverIm
     }
     List<byte[]> filterValuesList = new ArrayList<byte[]>(20);
     boolean invalidRowsPresent = false;
+    String timeFormat = CarbonProperties.getInstance()
+        .getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+            CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
     for (ExpressionResult result : listOfExpressionResults) {
       try {
         if (result.getString() == null) {
-          filterValuesList.add(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY);
+          if (result.getDataType() == DataType.STRING) {
+            filterValuesList.add(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY);
+          } else {
+            filterValuesList.add(CarbonCommonConstants.EMPTY_BYTE_ARRAY);
+          }
           continue;
         }
         filterValuesList.add(DataTypeUtil
-            .getBytesBasedOnDataTypeForNoDictionaryColumn(result.getString(),
-                result.getDataType()));
+            .getBytesBasedOnDataTypeForNoDictionaryColumn(result.getString(), result.getDataType(),
+                timeFormat));
       } catch (FilterIllegalMemberException e) {
         // Any invalid member while evaluation shall be ignored, system will log the
         // error only once since all rows the evaluation happens so inorder to avoid

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
index 2cd7ce5..2e65983 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
@@ -37,6 +37,7 @@ import org.apache.carbondata.core.datastore.page.ColumnPage;
 import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
 import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
@@ -54,9 +55,11 @@ public final class DataTypeUtil {
 
   private static final ThreadLocal<DateFormat> timeStampformatter = new ThreadLocal<DateFormat>() {
     @Override protected DateFormat initialValue() {
-      return new SimpleDateFormat(CarbonProperties.getInstance()
+      DateFormat dateFormat = new SimpleDateFormat(CarbonProperties.getInstance()
           .getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
               CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
+      dateFormat.setLenient(false);
+      return dateFormat;
     }
   };
 
@@ -367,7 +370,7 @@ public final class DataTypeUtil {
   }
 
   public static byte[] getBytesBasedOnDataTypeForNoDictionaryColumn(String dimensionValue,
-      DataType actualDataType) {
+      DataType actualDataType, String dateFormat) {
     switch (actualDataType) {
       case STRING:
         return ByteUtil.toBytes(dimensionValue);
@@ -379,6 +382,20 @@ public final class DataTypeUtil {
         return ByteUtil.toBytes(Integer.parseInt(dimensionValue));
       case LONG:
         return ByteUtil.toBytes(Long.parseLong(dimensionValue));
+      case TIMESTAMP:
+        Date dateToStr = null;
+        DateFormat dateFormatter = null;
+        try {
+          if (null != dateFormat) {
+            dateFormatter = new SimpleDateFormat(dateFormat);
+          } else {
+            dateFormatter = timeStampformatter.get();
+          }
+          dateToStr = dateFormatter.parse(dimensionValue);
+          return ByteUtil.toBytes(dateToStr.getTime());
+        } catch (ParseException e) {
+          throw new NumberFormatException(e.getMessage());
+        }
       default:
         return ByteUtil.toBytes(dimensionValue);
     }
@@ -411,6 +428,8 @@ public final class DataTypeUtil {
           return ByteUtil.toInt(dataInBytes, 0, dataInBytes.length);
         case LONG:
           return ByteUtil.toLong(dataInBytes, 0, dataInBytes.length);
+        case TIMESTAMP:
+          return ByteUtil.toLong(dataInBytes, 0, dataInBytes.length) * 1000L;
         default:
           return ByteUtil.toString(dataInBytes, 0, dataInBytes.length);
       }
@@ -679,12 +698,30 @@ public final class DataTypeUtil {
           return String.valueOf(Long.parseLong(data))
               .getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
         case DATE:
-        case TIMESTAMP:
           DirectDictionaryGenerator directDictionaryGenerator = DirectDictionaryKeyGeneratorFactory
               .getDirectDictionaryGenerator(columnSchema.getDataType());
           int value = directDictionaryGenerator.generateDirectSurrogateKey(data);
           return String.valueOf(value)
               .getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
+        case TIMESTAMP:
+          if (columnSchema.hasEncoding(Encoding.DIRECT_DICTIONARY)) {
+            DirectDictionaryGenerator directDictionaryGenerator1 =
+                DirectDictionaryKeyGeneratorFactory
+                    .getDirectDictionaryGenerator(columnSchema.getDataType());
+            int value1 = directDictionaryGenerator1.generateDirectSurrogateKey(data);
+            return String.valueOf(value1)
+                .getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
+          } else {
+            try {
+              Date dateToStr = timeStampformatter.get().parse(data);
+              return ByteUtil.toBytes(dateToStr.getTime());
+            } catch (ParseException e) {
+              LOGGER.error(
+                  "Cannot convert value to Time/Long type value. Value is considered as null" + e
+                      .getMessage());
+              return null;
+            }
+          }
         case DECIMAL:
           String parsedValue = parseStringToBigDecimal(data, columnSchema);
           if (null == parsedValue) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
index f32ae10..c8c88e2 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
@@ -61,7 +61,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Data load--->Action--->IGNORE--->Logger-->True
   test("BadRecord_Dataload_003", Include) {
-     sql(s"""CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='DOB,DOJ')""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='IGNORE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
     checkAnswer(s"""select count(*) from uniqdata""",
       Seq(Row(2010)), "DataLoadingTestCase-BadRecord_Dataload_003")
@@ -72,7 +72,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
   //Data load--->Action--->Ignore--->Logger-->False
   test("BadRecord_Dataload_004", Include) {
     sql(s"""drop table if exists uniqdata""").collect
-     sql(s""" CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s""" CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='DOB,DOJ')""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='IGNORE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
     checkAnswer(s"""select count(*) from uniqdata""",
       Seq(Row(2010)), "DataLoadingTestCase-BadRecord_Dataload_004")
@@ -94,7 +94,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
   //Data load--->Action--->Redirect--->Logger-->False
   test("BadRecord_Dataload_006", Include) {
     sql(s"""drop table if exists uniqdata""").collect
-     sql(s""" CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s""" CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='DOB,DOJ')""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='REDIRECT','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
     checkAnswer(s"""select count(*) from uniqdata""",
       Seq(Row(2010)), "DataLoadingTestCase-BadRecord_Dataload_006")
@@ -104,7 +104,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Data load-->Dictionary_Exclude
   test("BadRecord_Dataload_007", Include) {
-     sql(s"""CREATE TABLE uniq_exclude (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='CUST_NAME,ACTIVE_EMUI_VERSION')""").collect
+     sql(s"""CREATE TABLE uniq_exclude (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='CUST_NAME,ACTIVE_EMUI_VERSION','DICTIONARY_INCLUDE'='DOB,DOJ')""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/2000_UniqData.csv' into table uniq_exclude OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='REDIRECT','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
     checkAnswer(s"""select count(*) from uniq_exclude""",
       Seq(Row(2010)), "DataLoadingTestCase-BadRecord_Dataload_007")
@@ -718,7 +718,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
   //Show loads--->Action=Fail--->Logger=True
   test("BadRecord_Dataload_024", Include) {
     dropTable("uniqdata")
-     sql(s"""CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='DOB,DOJ')""").collect
     intercept[Exception] {
       sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FAIL','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
     }
@@ -890,7 +890,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Check for the incremental load data DML without "DELIMITER" specified loading the data successfully.
   test("Incremental_Data_Load_001_001-001-TC-09_840", Include) {
-     sql(s"""create table DL_WithOutDELIMITER(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""create table DL_WithOutDELIMITER(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='Update_time')""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table DL_WithOutDELIMITER options ('QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table DL_WithOutDELIMITER options ('QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
     checkAnswer(s"""select count(*) from DL_WithOutDELIMITER""",
@@ -1406,7 +1406,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Check for the incremental load data DML without "QUOTECHAR" specified loading the data successfully.
   test("Incremental_Data_Load_001_001-001-TC-11_840", Include) {
-     sql(s"""CREATE TABLE DL_without_QUOTECHAR (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE DL_without_QUOTECHAR (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='DOB,DOJ')""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/2000_UniqData.csv' into table DL_without_QUOTECHAR OPTIONS('DELIMITER'=',' , 'BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='REDIRECT','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
     checkAnswer(s"""select count(*) from DL_without_QUOTECHAR""",
       Seq(Row(2010)), "DataLoadingTestCase_Incremental_Data_Load_001_001-001-TC-11_840")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBVATestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBVATestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBVATestCase.scala
index 1aac73b..5e00bde 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBVATestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBVATestCase.scala
@@ -33,7 +33,7 @@ class QueriesBVATestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""drop table if exists Test_Boundary""").collect
     sql(s"""drop table if exists Test_Boundary_hive""").collect
 
-    sql(s"""create table Test_Boundary (c1_int int,c2_Bigint Bigint,c3_Decimal Decimal(38,30),c4_double double,c5_string string,c6_Timestamp Timestamp,c7_Datatype_Desc string) STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""create table Test_Boundary (c1_int int,c2_Bigint Bigint,c3_Decimal Decimal(38,30),c4_double double,c5_string string,c6_Timestamp Timestamp,c7_Datatype_Desc string) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='c6_Timestamp')""").collect
 
     sql(s"""create table Test_Boundary_hive (c1_int int,c2_Bigint Bigint,c3_Decimal Decimal(38,30),c4_double double,c5_string string,c6_Timestamp Timestamp,c7_Datatype_Desc string)  ROW FORMAT DELIMITED FIELDS TERMINATED BY ','""").collect
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBasicTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBasicTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBasicTestCase.scala
index 362352b..1b525e3 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBasicTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesBasicTestCase.scala
@@ -39,7 +39,7 @@ class QueriesBasicTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""drop table if exists uniqdata""").collect
     sql(s"""drop table if exists uniqdata_hive""").collect
 
-    sql(s"""CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='DOB,DOJ')""").collect
 
     sql(s"""CREATE TABLE uniqdata_hive (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','""").collect
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesCompactionTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesCompactionTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesCompactionTestCase.scala
index 13115ff..5fdc098 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesCompactionTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesCompactionTestCase.scala
@@ -274,7 +274,7 @@ class QueriesCompactionTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""drop table if exists Comp_DICTIONARY_EXCLUDE""").collect
     sql(s"""drop table if exists Comp_DICTIONARY_EXCLUDE_hive""").collect
 
-    sql(s"""create table  Comp_DICTIONARY_EXCLUDE (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePAD
 PartitionedVersions string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription string,gamePointId double,contractNumber BigInt)  STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='imei')""").collect
+    sql(s"""create table  Comp_DICTIONARY_EXCLUDE (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePAD
 PartitionedVersions string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription string,gamePointId double,contractNumber BigInt)  STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='imei', 'DICTIONARY_INCLUDE'='productionDate')""").collect
 
     sql(s"""create table  Comp_DICTIONARY_EXCLUDE_hive (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string,deliveryTime string,channelsId string,channelsName string,deliveryAreaId string,deliveryCountry string,deliveryProvince string,deliveryCity string,deliveryDistrict string,deliveryStreet string,oxSingleNumber string,contractNumber BigInt,ActiveCheckTime string,ActiveAreaId string,ActiveCountry string,ActiveProvince string,Activecity string,ActiveDistrict string,ActiveStreet string,ActiveOperatorId string,Active_releaseId string,Active_EMUIVersion string,Active_operaSysVersion string,Active_BacVerNumber string,Active_BacFlashVer string,Active_webUIVersion string,Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string,Active_operatorsVersion string,Active_phonePAD
 PartitionedVersions string,Latest_YEAR int,Latest_MONTH int,Latest_DAY Decimal(30,10),Latest_HOUR string,Latest_areaId string,Latest_country string,Latest_province string,Latest_city string,Latest_district string,Latest_street string,Latest_releaseId string,Latest_EMUIVersion string,Latest_operaSysVersion string,Latest_BacVerNumber string,Latest_BacFlashVer string,Latest_webUIVersion string,Latest_webUITypeCarrVer string,Latest_webTypeDataVerNumber string,Latest_operatorsVersion string,Latest_phonePADPartitionedVersions string,Latest_operatorId string,gamePointId double,gamePointDescription string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','""").collect
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ceb59f/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesExcludeDictionaryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesExcludeDictionaryTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesExcludeDictionaryTestCase.scala
index 4b434a2..fcd20fd 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesExcludeDictionaryTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/QueriesExcludeDictionaryTestCase.scala
@@ -33,7 +33,7 @@ class QueriesExcludeDictionaryTestCase extends QueryTest with BeforeAndAfterAll
     sql(s"""drop table if exists TABLE_DICTIONARY_EXCLUDE""").collect
     sql(s"""drop table if exists TABLE_DICTIONARY_EXCLUDE1_hive""").collect
 
-    sql(s"""create table  TABLE_DICTIONARY_EXCLUDE (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePA
 DPartitionedVersions string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription string,gamePointId double,contractNumber BigInt)  STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='imei')""").collect
+    sql(s"""create table  TABLE_DICTIONARY_EXCLUDE (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePA
 DPartitionedVersions string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription string,gamePointId double,contractNumber BigInt)  STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='imei', 'DICTIONARY_INCLUDE'='productionDate')""").collect
 
     sql(s"""create table  TABLE_DICTIONARY_EXCLUDE1_hive (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string,deliveryTime string,channelsId string,channelsName string,deliveryAreaId string,deliveryCountry string,deliveryProvince string,deliveryCity string,deliveryDistrict string,deliveryStreet string,oxSingleNumber string,contractNumber BigInt,ActiveCheckTime string,ActiveAreaId string,ActiveCountry string,ActiveProvince string,Activecity string,ActiveDistrict string,ActiveStreet string,ActiveOperatorId string,Active_releaseId string,Active_EMUIVersion string,Active_operaSysVersion string,Active_BacVerNumber string,Active_BacFlashVer string,Active_webUIVersion string,Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string,Active_operatorsVersion string,Active_phoneP
 ADPartitionedVersions string,Latest_YEAR int,Latest_MONTH int,Latest_DAY Decimal(30,10),Latest_HOUR string,Latest_areaId string,Latest_country string,Latest_province string,Latest_city string,Latest_district string,Latest_street string,Latest_releaseId string,Latest_EMUIVersion string,Latest_operaSysVersion string,Latest_BacVerNumber string,Latest_BacFlashVer string,Latest_webUIVersion string,Latest_webUITypeCarrVer string,Latest_webTypeDataVerNumber string,Latest_operatorsVersion string,Latest_phonePADPartitionedVersions string,Latest_operatorId string,gamePointId double,gamePointDescription string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','""").collect