Posted to commits@carbondata.apache.org by ch...@apache.org on 2017/08/01 06:00:10 UTC

[03/20] carbondata git commit: [CARBONDATA-1346] SDV cluster tests

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnTestCase.scala
new file mode 100644
index 0000000..88c7306
--- /dev/null
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnTestCase.scala
@@ -0,0 +1,465 @@
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.cluster.sdv.generated
+
+import org.apache.spark.sql.common.util._
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
+/**
+ * Test Class for SortColumnTestCase to verify all scenarios
+ */
+
+class SortColumnTestCase extends QueryTest with BeforeAndAfterAll {
+         
+
+  //create table with no-dictionary sort_columns
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC001", Include) {
+    sql(s"""drop table if exists sorttable1""").collect
+     sql(s"""CREATE TABLE sorttable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='empno')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select empno from sorttable1""").collect
+
+     sql(s"""drop table if exists sorttable1""").collect
+  }
+
+
+  //create table with dictionary sort_columns
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC002", Include) {
+     sql(s"""CREATE TABLE sorttable2 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='empname')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select empname from sorttable2""").collect
+
+     sql(s"""drop table if exists sorttable2""").collect
+  }
+
+
+  //create table with direct-dictionary sort_columns
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC003", Include) {
+     sql(s"""CREATE TABLE sorttable3 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='doj')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable3 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable3""").collect
+
+     sql(s"""drop table if exists sorttable3""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with offheap safe
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC004", Include) {
+     sql(s"""CREATE TABLE sorttable4_offheap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, empname')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable4_offheap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select workgroupcategory, empname from sorttable4_offheap_safe""").collect
+
+     sql(s"""drop table if exists sorttable4_offheap_safe""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with offheap and unsafe sort
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC005", Include) {
+     sql(s"""CREATE TABLE sorttable4_offheap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, empname')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable4_offheap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select workgroupcategory, empname from sorttable4_offheap_unsafe""").collect
+
+     sql(s"""drop table if exists sorttable4_offheap_unsafe""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with offheap and inmemory sort
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC006", Include) {
+     sql(s"""CREATE TABLE sorttable4_offheap_inmemory (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, empname')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable4_offheap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select workgroupcategory, empname from sorttable4_offheap_inmemory""").collect
+
+     sql(s"""drop table if exists sorttable4_offheap_inmemory""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with heap
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC007", Include) {
+     sql(s"""CREATE TABLE sorttable4_heap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, empname')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable4_heap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select workgroupcategory, empname from sorttable4_heap_safe""").collect
+
+     sql(s"""drop table if exists sorttable4_heap_safe""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with heap and unsafe sort
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC008", Include) {
+     sql(s"""CREATE TABLE sorttable4_heap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, empname')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable4_heap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select workgroupcategory, empname from sorttable4_heap_unsafe""").collect
+
+     sql(s"""drop table if exists sorttable4_heap_unsafe""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with heap and inmemory sort
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC009", Include) {
+     sql(s"""CREATE TABLE sorttable4_heap_inmemory (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, empname')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable4_heap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select workgroupcategory, empname from sorttable4_heap_inmemory""").collect
+
+     sql(s"""drop table if exists sorttable4_heap_inmemory""").collect
+  }
+
+
+  //create table with multi-sort_columns and data loading with heap and inmemory sort
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC010", Include) {
+    sql(s"""drop table if exists origintable2""").collect
+    sql(s"""drop table if exists sorttable5""").collect
+     sql(s"""CREATE TABLE origintable2 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format'""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE origintable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE origintable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE origintable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE origintable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+   sql(s"""alter table origintable2 compact 'minor'""").collect
+   sql(s"""CREATE TABLE sorttable5 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='empno')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+   sql(s"""alter table sorttable5 compact 'minor'""").collect
+    sql(s"""select empno from sorttable5""").collect
+
+     sql(s"""drop table if exists sorttable5""").collect
+  }
+
+
+  //filter on sort_columns including a no-dictionary column
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC011", Include) {
+    sql(s"""drop table if exists sorttable6""").collect
+     sql(s"""CREATE TABLE sorttable6 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, doj, empname')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable6 where workgroupcategory = 1""").collect
+
+     sql(s"""drop table if exists sorttable6""").collect
+  }
+
+
+  //filter on sort_columns including a direct-dictionary column
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC012", Include) {
+     sql(s"""CREATE TABLE sorttable6 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, doj, empname')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable6 where doj = '2007-01-17 00:00:00'""").collect
+
+     sql(s"""drop table if exists sorttable6""").collect
+  }
+
+
+  //filter on sort_columns including a dictionary column
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC013", Include) {
+    sql(s"""drop table if exists sorttable6""").collect
+     sql(s"""CREATE TABLE sorttable6 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, doj, empname')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable6 where empname = 'madhan'""").collect
+
+     sql(s"""drop table if exists sorttable6""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with heap and safe sort config
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC014", Include) {
+     sql(s"""CREATE TABLE unsortedtable_heap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_c+C17olumns'='')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_heap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_heap_safe where empno = 11""").collect
+
+     sql(s"""drop table if exists unsortedtable_heap_safe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with heap and safe sort config with order by
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC015", Include) {
+    sql(s"""drop table if exists unsortedtable_heap_safe""").collect
+     sql(s"""CREATE TABLE unsortedtable_heap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_heap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_heap_safe order by empno""").collect
+
+     sql(s"""drop table if exists unsortedtable_heap_safe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with heap and unsafe sort config
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC016", Include) {
+     sql(s"""CREATE TABLE unsortedtable_heap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_heap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_heap_unsafe where empno = 11""").collect
+
+     sql(s"""drop table if exists unsortedtable_heap_unsafe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with heap and unsafe sort config with order by
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC017", Include) {
+    sql(s"""drop table if exists unsortedtable_heap_unsafe""").collect
+     sql(s"""CREATE TABLE unsortedtable_heap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_heap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_heap_unsafe order by empno""").collect
+
+     sql(s"""drop table if exists unsortedtable_heap_unsafe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and safe sort config
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC018", Include) {
+     sql(s"""CREATE TABLE unsortedtable_offheap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_offheap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_safe where empno = 11""").collect
+
+     sql(s"""drop table if exists unsortedtable_offheap_safe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and safe sort config with order by
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC019", Include) {
+     sql(s"""CREATE TABLE unsortedtable_offheap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_offheap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_safe order by empno""").collect
+
+     sql(s"""drop table if exists unsortedtable_offheap_safe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and unsafe sort config
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC020", Include) {
+     sql(s"""CREATE TABLE unsortedtable_offheap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_offheap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_unsafe where empno = 11""").collect
+
+     sql(s"""drop table if exists unsortedtable_offheap_unsafe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and unsafe sort config with order by
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC021", Include) {
+     sql(s"""CREATE TABLE unsortedtable_offheap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_offheap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_unsafe order by empno""").collect
+
+     sql(s"""drop table if exists unsortedtable_offheap_unsafe""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and inmemory sort config
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC022", Include) {
+     sql(s"""CREATE TABLE unsortedtable_offheap_inmemory (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_offheap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_inmemory where empno = 11""").collect
+
+     sql(s"""drop table if exists unsortedtable_offheap_inmemory""").collect
+  }
+
+
+  //unsorted table creation, query and data loading with offheap and inmemory sort config with order by
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC023", Include) {
+     sql(s"""CREATE TABLE unsortedtable_offheap_inmemory (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE unsortedtable_offheap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from unsortedtable_offheap_inmemory order by empno""").collect
+
+     sql(s"""drop table if exists unsortedtable_offheap_inmemory""").collect
+  }
+
+
+  //create table with dictionary_exclude sort_columns
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC024", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+     sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_exclude'='empname','sort_columns'='empname')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+     sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with dictionary_include and sort_columns
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC025", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+     sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_include'='doj','sort_columns'='doj')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+     sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with dictionary_include, dictionary_exclude and sort_columns
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC026", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+     sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_include'='doj','dictionary_exclude'='empname','sort_columns'='doj')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+     sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with alter table and sort_columns with dimension
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC027", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+     sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='doj')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+   sql(s"""alter table sorttable add columns(newField String) tblproperties('dictionary_include'='newField')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/dataString.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+     sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with alter table and sort_columns with measure
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC028", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+     sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='doj')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+   sql(s"""alter table sorttable add columns(newField Int) tblproperties('dictionary_include'='newField')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/dataInt.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+     sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with no_inverted_index and sort_columns
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC029", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+     sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='doj','no_inverted_index'='doj')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+     sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with dictionary_include, no_inverted_index and sort_columns
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC030", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+     sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_include'='doj','sort_columns'='doj','no_inverted_index'='doj')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+     sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //create table with dictionary_include, no_inverted_index and sort_columns with measure
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC031", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+     sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_include'='empno','sort_columns'='empno','no_inverted_index'='empno')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+     sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //test sort_columns with a different order of column names
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC032", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+     sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_include'='empno','sort_columns'='empname,empno,workgroupcategory,doj')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+     sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //default behavior if sort_columns is not provided
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC033", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+     sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('dictionary_include'='empno')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select doj from sorttable""").collect
+
+     sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //test sort_columns for alter table
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC035", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+     sql(s"""CREATE TABLE sorttable (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='doj')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+   sql(s"""alter table sorttable drop columns(doj)""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/dataDrop.csv' INTO TABLE sorttable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable""").collect
+
+     sql(s"""drop table if exists sorttable""").collect
+  }
+
+
+  //test sort_columns for float data type with alter query
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC037", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""drop table if exists sorttable1""").collect
+     sql(s"""CREATE TABLE sorttable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='empno')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+   sql(s"""alter table sorttable1 add columns(newField Float) tblproperties('DICTIONARY_INCLUDE'='newField')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/dataFloat.csv' INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable1""").collect
+
+     sql(s"""drop table if exists sorttable1""").collect
+  }
+
+
+  //test sort_columns for decimal data type with alter query
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC038", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""drop table if exists sorttable1""").collect
+     sql(s"""CREATE TABLE sorttable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='empno')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/data.csv' INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+   sql(s"""alter table sorttable1 add columns(newField decimal) tblproperties('dictionary_include'='newField')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/dataDecimal.csv' INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable1""").collect
+
+     sql(s"""drop table if exists sorttable1""").collect
+  }
+
+
+  //test sort_columns for decimal data type
+  test("AR-Develop-Feature-sortcolumn-001_PTS001_TC039", Include) {
+    sql(s"""drop table if exists sorttable""").collect
+    sql(s"""drop table if exists sorttable1""").collect
+     sql(s"""CREATE TABLE sorttable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int,newField decimal) STORED BY 'org.apache.carbondata.format' tblproperties('DICTIONARY_INCLUDE'='empno')""").collect
+   sql(s"""LOAD DATA local inpath '$resourcesPath/Data/sortcolumns/dataDecimal.csv' INTO TABLE sorttable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""").collect
+    sql(s"""select * from sorttable1""").collect
+
+     sql(s"""drop table if exists sorttable1""").collect
+  }
+
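+  // Capture the current sort-related CarbonProperties so that afterAll can
+  // restore them after beforeAll overrides them for these tests.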
+  val prop = CarbonProperties.getInstance()
+  val p1 = prop.getProperty("carbon.load.sort.scope", CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
+  val p2 = prop.getProperty("enable.unsafe.sort", CarbonCommonConstants.ENABLE_UNSAFE_SORT_DEFAULT)
+  val p3 = prop.getProperty("enable.offheap.sort", CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT)
+
+  override protected def beforeAll() {
+    // Adding new properties
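+    // These overrides are assumed to switch data loading to batch sort and to
+    // use unsafe/off-heap memory for the sort step; the original values
+    // captured above are restored in afterAll.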
+    prop.addProperty("carbon.load.sort.scope", "batch_sort")
+    prop.addProperty("enable.unsafe.sort", "true")
+    prop.addProperty("enable.offheap.sort", "true")
+  }
+
+  override def afterAll: Unit = {
+    //Reverting to old
+    prop.addProperty("carbon.load.sort.scope", p1)
+    prop.addProperty("enable.unsafe.sort", p2)
+    prop.addProperty("enable.offheap.sort", p3)
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/TimestamptypesTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/TimestamptypesTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/TimestamptypesTestCase.scala
new file mode 100644
index 0000000..4fdd490
--- /dev/null
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/TimestamptypesTestCase.scala
@@ -0,0 +1,87 @@
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.cluster.sdv.generated
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.common.util._
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
+/**
+ * Test Class for TimestamptypesTestCase to verify all scenarios
+ */
+
+class TimestamptypesTestCase extends QueryTest with BeforeAndAfterAll {
+         
+
+  //timestamp in dd.MMM.yyyy HH:mm:ss
+  test("DataSight_Carbon_TimeStampType_001", Include) {
+     sql(s""" create table if not exists ddMMMyyyy (imei string,AMSize string,channelsId string,ActiveCountry string, Activecity string,gamePointId double,deviceInformationId int,productionDate Timestamp,deliveryDate timestamp,deliverycharge decimal(10,2)) STORED BY 'org.apache.carbondata.format'""").collect
+   sql(s""" LOAD DATA INPATH '$resourcesPath/Data/vardhandaterestructddMMMyyyy.csv' INTO TABLE ddMMMyyyy OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
+    checkAnswer(s"""select count(*) from ddMMMyyyy""",
+      Seq(Row(99)), "timestamptypesTestCase_DataSight_Carbon_TimeStampType_001")
+     sql(s"""drop table ddMMMyyyy""").collect
+  }
+
+
+  //timestamp in dd.MM.yyyy HH:mm:ss
+  ignore("DataSight_Carbon_TimeStampType_002", Include) {
+     sql(s""" create table if not exists ddMMyyyy (imei string,AMSize string,channelsId string,ActiveCountry string, Activecity string,gamePointId double,deviceInformationId int,productionDate Timestamp,deliveryDate timestamp,deliverycharge decimal(10,2)) STORED BY 'org.apache.carbondata.format'""").collect
+   sql(s""" LOAD DATA INPATH '$resourcesPath/Data/vardhandaterestructddMMyyyy.csv' INTO TABLE ddMMyyyy OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
+    checkAnswer(s"""select count(*) from ddMMyyyy""",
+      Seq(Row(99)), "timestamptypesTestCase_DataSight_Carbon_TimeStampType_002")
+     sql(s"""drop table ddMMyyyy""").collect
+  }
+
+
+  //timestamp in yyyy.MM.dd HH:mm:ss
+  ignore("DataSight_Carbon_TimeStampType_003", Include) {
+     sql(s""" create table if not exists yyyyMMdd (imei string,AMSize string,channelsId string,ActiveCountry string, Activecity string,gamePointId double,deviceInformationId int,productionDate Timestamp,deliveryDate timestamp,deliverycharge decimal(10,2)) STORED BY 'org.apache.carbondata.format'""").collect
+   sql(s""" LOAD DATA INPATH '$resourcesPath/Data/vardhandaterestructyyyyMMdd.csv' INTO TABLE yyyyMMdd OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
+    checkAnswer(s"""select count(*) from yyyyMMdd""",
+      Seq(Row(99)), "timestamptypesTestCase_DataSight_Carbon_TimeStampType_003")
+     sql(s"""drop table yyyyMMdd""").collect
+  }
+
+
+  //timestamp in yyyy.MMM.dd HH:mm:ss
+  test("DataSight_Carbon_TimeStampType_004", Include) {
+     sql(s""" create table if not exists yyyyMMMdd (imei string,AMSize string,channelsId string,ActiveCountry string, Activecity string,gamePointId double,deviceInformationId int,productionDate Timestamp,deliveryDate timestamp,deliverycharge decimal(10,2)) STORED BY 'org.apache.carbondata.format'""").collect
+   sql(s""" LOAD DATA INPATH '$resourcesPath/Data/vardhandaterestructyyyyMMMdd.csv' INTO TABLE yyyyMMMdd OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
+    checkAnswer(s"""select count(*) from yyyyMMMdd""",
+      Seq(Row(99)), "timestamptypesTestCase_DataSight_Carbon_TimeStampType_004")
+     sql(s"""drop table yyyyMMMdd""").collect
+  }
+
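+  // Capture the current carbon.timestamp.format so that afterAll can restore
+  // it after beforeAll overrides it for these tests.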
+  val prop = CarbonProperties.getInstance()
+  val p1 = prop.getProperty("carbon.timestamp.format", CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+
+  override protected def beforeAll() {
+    // Adding new properties
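+    // Presumably the CSVs loaded by these tests carry timestamps in the
+    // 'yyyy.MM.dd HH:mm:ss' pattern, so the global format is overridden here
+    // and restored in afterAll.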
+    prop.addProperty("carbon.timestamp.format", "yyyy.MM.dd HH:mm:ss")
+  }
+
+  override def afterAll: Unit = {
+    //Reverting to old
+    prop.addProperty("carbon.timestamp.format", p1)
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/V3offheapvectorTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/V3offheapvectorTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/V3offheapvectorTestCase.scala
new file mode 100644
index 0000000..3c37f83
--- /dev/null
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/V3offheapvectorTestCase.scala
@@ -0,0 +1,350 @@
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.cluster.sdv.generated
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.common.util._
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.{CarbonCommonConstants, CarbonV3DataFormatConstants}
+import org.apache.carbondata.core.util.CarbonProperties
+
+/**
+ * Test Class for V3offheapvectorTestCase to verify all scenarios
+ */
+
+class V3offheapvectorTestCase extends QueryTest with BeforeAndAfterAll {
+         
+
+  //Check query response for select * query with no filters
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_033", Include) {
+     dropTable("3lakh_uniqdata")
+     sql(s"""CREATE TABLE 3lakh_uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('table_blocksize'='128','include_dictionary'='BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1,CUST_ID')""").collect
+   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/3Lakh.csv' into table 3lakh_uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
+    checkAnswer(s"""select count(*) from 3lakh_uniqdata""",
+      Seq(Row(300635)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_033")
+
+  }
+
+
+  //Check query response where the table has more than 10 columns as dimensions and all the columns are selected in the query
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_034", Include) {
+
+    checkAnswer(s"""select count(*) from (select CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1 from 3lakh_uniqdata)c""",
+      Seq(Row(300635)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_034")
+
+  }
+
+
+  //Check query response when the filter has an eq condition on the 1st column and data is selected within a page
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_035", Include) {
+
+    checkAnswer(s"""select CUST_ID from 3lakh_uniqdata where cust_id = 35000""",
+      Seq(Row(35000)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_035")
+
+  }
+
+
+  //Check query response when the filter has an in condition on the 1st column and data is selected within a page
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_036", Include) {
+
+    checkAnswer(s"""select CUST_ID from 3lakh_uniqdata where cust_id in (30000, 35000 ,37000)""",
+      Seq(Row(30000),Row(35000),Row(37000)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_036")
+
+  }
+
+
+  //Check query response when the filter has a range condition on the 1st column and data is selected within a page
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_037", Include) {
+
+    checkAnswer(s"""select count(*) from (select CUST_ID from 3lakh_uniqdata where cust_id between 59000 and 60000)c""",
+      Seq(Row(1001)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_037")
+
+  }
+
+
+  //Check query response when the filter has a range condition on the 1st column and data is selected across pages - values just at the boundary of the page upper limit - with offheap sort and vector reader
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_041", Include) {
+
+    checkAnswer(s"""select count(*) from (select CUST_ID from 3lakh_uniqdata where cust_id between 59000 and 61000)c""",
+      Seq(Row(2001)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_041")
+
+  }
+
+
+  //Check query response when the filter has an in condition on the 1st column and data is selected across multiple pages - with no offheap sort and vector reader
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_042", Include) {
+
+    checkAnswer(s"""select CUST_ID from 3lakh_uniqdata where cust_id in (30000, 35000 ,37000, 69000,101000,133000,165000,197000,229000,261000,293000, 329622)""",
+      Seq(Row(133000),Row(165000),Row(197000),Row(30000),Row(229000),Row(261000),Row(35000),Row(37000),Row(293000),Row(329622),Row(69000),Row(101000)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_042")
+
+  }
+
+
+  //Check query response when the filter has a not between condition on the 1st column and data is selected across all pages - with offheap sort and vector reader
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_043", Include) {
+
+    checkAnswer(s"""select count(*) from (select CUST_ID from 3lakh_uniqdata where cust_id not between 29001 and 329621)c""",
+      Seq(Row(3)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_043")
+
+  }
+
+
+  //Check query response when the filter is applied on the 2nd column and data is selected across all pages - with no offheap sort and vector reader
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_044", Include) {
+
+    checkAnswer(s"""select count(*) from (select CUST_ID from 3lakh_uniqdata where cust_name like 'CUST_NAME_2%')c""",
+      Seq(Row(110000)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_044")
+
+  }
+
+
+  //Check query response when the filter has a not like condition set on the 2nd column and data is selected across all pages
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_045", Include) {
+
+    checkAnswer(s"""select count(*) from (select CUST_ID from 3lakh_uniqdata where cust_name not like 'CUST_NAME_2%')c""",
+      Seq(Row(190635)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_045")
+
+  }
+
+
+  //Check query response when the filter has the > operator set on the 10th column and data is selected within a page
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_046", Include) {
+
+    checkAnswer(s"""select count(*) from (select CUST_ID from 3lakh_uniqdata where Double_COLUMN1 > 42000)b""",
+      Seq(Row(300624)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_046")
+
+  }
+
+
+  //Check query response when the filter has the like operator set on the 3rd column and data is selected across all pages - with no offheap sort and vector reader
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_047", Include) {
+
+    checkAnswer(s"""select count(*) from (select ACTIVE_EMUI_VERSION from 3lakh_uniqdata where ACTIVE_EMUI_VERSION like 'ACTIVE_EMUI_VERSION_20%')c""",
+      Seq(Row(11000)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_047")
+
+  }
+
+
+  //Check query response when the filter condition is put on all columns connected through the and operator and data is selected from 1 page
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_048", Include) {
+
+    checkAnswer(s"""select count(*) from (select * from 3lakh_uniqdata where CUST_ID = 29000 and CUST_NAME = 'CUST_NAME_20000' and ACTIVE_EMUI_VERSION = 'ACTIVE_EMUI_VERSION_20000' and  DOB = '04-10-2010 01:00' and DOJ = '04-10-2012 02:00' and BIGINT_COLUMN1 = 1.23372E+11 and BIGINT_COLUMN2 = -2.23E+11 and DECIMAL_COLUMN1 =  12345698901	 and DECIMAL_COLUMN2 = 22345698901	 and Double_COLUMN1 = 11234567490	 and Double_COLUMN2 = -11234567490 	and  INTEGER_COLUMN1 = 20001)c""",
+      Seq(Row(0)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_048")
+
+  }
+
+
+  //Check query response when the filter condition is put on all columns connected through the and operator with grouping and data is selected from 1 page
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_050", Include) {
+
+    checkAnswer(s"""select count(*) from (select * from 3lakh_uniqdata where CUST_ID = 29000 and CUST_NAME = 'CUST_NAME_20000' and (ACTIVE_EMUI_VERSION = 'ACTIVE_EMUI_VERSION_20001' or DOB = '04-10-2010 01:00') and DOJ = '04-10-2012 02:00' and BIGINT_COLUMN1 = 1.23372E+11 and BIGINT_COLUMN2 = -2.23E+11 and DECIMAL_COLUMN1 =  12345698901 and DECIMAL_COLUMN2 = 22345698901 or Double_COLUMN1 = 11234567490 and ( Double_COLUMN2 = -11234567490 or  INTEGER_COLUMN1 = 20003))c""",
+      Seq(Row(300623)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_050")
+
+  }
+
+
+  //Check query response when the filter condition is on the 1st column connected through or conditions and data is selected across multiple pages
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_051", Include) {
+
+    checkAnswer(s"""select CUST_NAME from 3lakh_uniqdata where CUST_ID = 29000 or CUST_ID = 60000 or CUST_ID = 100000 or CUST_ID = 130000""",
+      Seq(Row("CUST_NAME_121000"),Row("CUST_NAME_20000"),Row("CUST_NAME_51000"),Row("CUST_NAME_91000")), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_051")
+
+  }
+
+
+  //Check query response when the filter condition is put on all columns connected through and/or operators with a range and data is selected across multiple pages
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_052", Include) {
+
+    checkAnswer(s"""select count(*) from (select * from 3lakh_uniqdata where (CUST_ID >= 29000 and CUST_ID <= 60000) and CUST_NAME like 'CUST_NAME_20%' and ACTIVE_EMUI_VERSION = 'ACTIVE_EMUI_VERSION_20000' and  DOB = '04-10-2010 01:00' and DOJ = '04-10-2012 02:00' and BIGINT_COLUMN1 = 1.23372E+11 and BIGINT_COLUMN2 = -2.23E+11 and DECIMAL_COLUMN1 =  12345698901 or DECIMAL_COLUMN2 = 22345698901 and Double_COLUMN1 = 11234567490 and (Double_COLUMN2 = -11234567490 or  INTEGER_COLUMN1 = 20001))c""",
+      Seq(Row(1)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_052")
+
+  }
+
+
+  //Check query response when the 1st column is selected and filter is applied and data is selected from 1 page
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_054", Include) {
+
+    checkAnswer(s"""select CUST_ID from 3lakh_uniqdata limit 10""",
+      Seq(Row(8999),Row(null),Row(null),Row(null),Row(null),Row(null),Row(null),Row(null),Row(null),Row(null)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_054")
+
+  }
+
+
+  //Check query response when the 2nd column is selected with a filter applied and data is selected from 1 page
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_055", Include) {
+
+    checkAnswer(s"""select count(*) from (select CUST_NAME from 3lakh_uniqdata limit 30000)c""",
+      Seq(Row(30000)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_055")
+
+  }
+
+
+  //Check query response when the 4th column is selected with a filter applied and data is selected from 1 page
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_056", Include) {
+
+    checkAnswer(s"""select count(*) from (select DOB from 3lakh_uniqdata limit 30000)c""",
+      Seq(Row(30000)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_056")
+
+  }
+
+
+  //Check query response when the 1st column is selected with a filter applied and data is selected from 2 pages
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_057", Include) {
+
+    checkAnswer(s"""select count(*) from (select CUST_ID from 3lakh_uniqdata limit 60000)c""",
+      Seq(Row(60000)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_057")
+
+  }
+
+
+  //Check query response when the 2nd column is selected with a filter applied and data is selected from 2 pages
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_058", Include) {
+
+    checkAnswer(s"""select count(*) from (select CUST_NAME from 3lakh_uniqdata limit 60000)c""",
+      Seq(Row(60000)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_058")
+
+  }
+
+
+  //Check query response when the 4th column is selected with a filter applied and data is selected from 2 pages
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_059", Include) {
+
+    checkAnswer(s"""select count(*) from (select DOB from 3lakh_uniqdata limit 60000)c""",
+      Seq(Row(60000)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_059")
+
+  }
+
+
+  //Check query response when the 2nd column is selected with order by and data is selected from 1 page
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_060", Include) {
+
+    checkAnswer(s"""select cust_id from 3lakh_uniqdata order by CUST_NAME desc limit 10""",
+      Seq(Row(108999),Row(108998),Row(108997),Row(108996),Row(108995),Row(108994),Row(108993),Row(108992),Row(108991),Row(108990)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_060")
+
+  }
+
+
+  //Check query response when a temp table is used and multiple pages are scanned
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_061", Include) {
+
+    checkAnswer(s"""select count(*) from ( select a.cust_id from 3lakh_uniqdata a where a.cust_id in (select c.cust_id from 3lakh_uniqdata c where c.cust_name  like  'CUST_NAME_2000%') and a.cust_id between 29000 and 60000)d""",
+      Seq(Row(10)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_061")
+
+  }
+
+
+  //Check query response when aggregation is used and multiple pages are scanned
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_062", Include) {
+
+    checkAnswer(s"""select substring(CUST_NAME,1,11),count(*) from 3lakh_uniqdata group by substring(CUST_NAME,1,11) having count(*) > 1""",
+      Seq(Row("CUST_NAME_4",10000),Row("CUST_NAME_1",100000),Row("CUST_NAME_8",10000),Row("CUST_NAME_6",10000),Row("CUST_NAME_2",110000),Row("CUST_NAME_5",10000),Row("CUST_NAME_7",10000),Row("CUST_NAME_9",10000),Row("",11),Row("CUST_NAME_3",30623)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_062")
+
+  }
+
+
+  //Check query response when aggregation is used along with a filter condition and multiple pages are scanned
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_063", Include) {
+
+    checkAnswer(s"""select substring(CUST_NAME,1,11),count(*) from 3lakh_uniqdata where  cust_id between 59000 and 160000 group by substring(CUST_NAME,1,11) having count(*) > 1""",
+      Seq(Row("CUST_NAME_1",51001),Row("CUST_NAME_8",10000),Row("CUST_NAME_6",10000),Row("CUST_NAME_5",10000),Row("CUST_NAME_7",10000),Row("CUST_NAME_9",10000)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_063")
+
+  }
+
+
+  //Check query when the table has a single column so that the record count per blocklet is > 120000, where the query scan is done on a single page
+  test("PTS_TOR-Productize-New-Features-V3_01_Param_01_007", Include) {
+     sql(s"""CREATE TABLE 3lakh_uniqdata1 (CUST_NAME String) STORED BY 'carbondata' TBLPROPERTIES('table_blocksize'='128')""").collect
+   sql(s"""insert into 3lakh_uniqdata1 select cust_name from 3lakh_uniqdata""").collect
+    checkAnswer(s"""select count(*) from (select CUST_NAME from 3lakh_uniqdata where cust_name  like  'CUST_NAME_2000%')c""",
+      Seq(Row(110)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Param_01_007")
+
+  }
+
+
+  //Check query when the table has a single column so that the record count per blocklet is > 120000, where the query scan is done across the pages in the blocklet
+  test("PTS_TOR-Productize-New-Features-V3_01_Param_01_008", Include) {
+
+    checkAnswer(s"""select count(*) from (select CUST_NAME from 3lakh_uniqdata where cust_name  like  'CUST_NAME_20%')c""",
+      Seq(Row(11000)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Param_01_008")
+
+  }
+
+
+  //Check impact on load and query reading when a large value (1 lakh characters long) is present in the column
+  ignore("PTS_TOR-Productize-New-Features-V3_01_Stress_01_008", Include) {
+     sql(s"""create table t_carbn1c (name string) stored by 'carbondata' TBLPROPERTIES('table_blocksize'='128','include_dictionary'='name')""").collect
+   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/1lakh.csv' into table t_carbn1c OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='name')""").collect
+    checkAnswer(s"""select count(*) from t_carbn1c""",
+      Seq(Row(1)), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Stress_01_008")
+
+  }
+
+
+  //Check impact on load and query reading when a large value (1 lakh characters long) is present in the column and the column is a measure
+  ignore("PTS_TOR-Productize-New-Features-V3_01_Stress_01_009", Include) {
+
+    checkAnswer(s"""select substring(name,1,10) from t_carbn1c""",
+      Seq(Row("hellohowar")), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Stress_01_009")
+
+  }
+
+
+  //Check join query when the table is in V3 format
+  test("PTS_TOR-Productize-New-Features-V3_01_Query_01_064", Include) {
+    dropTable("3lakh_uniqdata2")
+     sql(s"""CREATE TABLE 3lakh_uniqdata2 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('table_blocksize'='128','include_dictionary'='BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1,CUST_ID')""").collect
+   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/3Lakh.csv' into table 3lakh_uniqdata2 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
+    checkAnswer(s"""select a.cust_id, b.cust_name from 3lakh_uniqdata a, 3lakh_uniqdata2 b where a.cust_id = b.cust_id and a.cust_name = b.cust_name and a.cust_id in (29000, 59000, 69000,15000,250000, 310000)""",
+      Seq(Row(29000,"CUST_NAME_20000"),Row(250000,"CUST_NAME_241000"),Row(310000,"CUST_NAME_301000"),Row(59000,"CUST_NAME_50000"),Row(69000,"CUST_NAME_60000")), "V3offheapvectorTestCase_PTS_TOR-Productize-New-Features-V3_01_Query_01_064")
+     sql(s"""drop table 3lakh_uniqdata""").collect
+   sql(s"""drop table if exists 3lakh_uniqdata2""").collect
+   sql(s"""drop table if exists t_carbn1c""").collect
+   sql(s"""drop table if exists 3lakh_uniqdata1""").collect
+  }
+
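+  // Capture the current property values so that afterAll() can restore the original
+  // configuration after the V3/off-heap overrides applied in beforeAll().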
+  val prop = CarbonProperties.getInstance()
+  val p1 = prop.getProperty("carbon.blockletgroup.size.in.mb", CarbonV3DataFormatConstants.BLOCKLET_SIZE_IN_MB_DEFAULT_VALUE)
+  val p2 = prop.getProperty("enable.offheap.sort", CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT)
+  val p3 = prop.getProperty("carbon.enable.vector.reader", CarbonCommonConstants.ENABLE_VECTOR_READER_DEFAULT)
+  val p4 = prop.getProperty("carbon.data.file.version", CarbonCommonConstants.CARBON_DATA_FILE_DEFAULT_VERSION)
+  val p5 = prop.getProperty("carbon.enable.auto.load.merge", CarbonCommonConstants.DEFAULT_ENABLE_AUTO_LOAD_MERGE)
+  val p6 = prop.getProperty("carbon.compaction.level.threshold", CarbonCommonConstants.DEFAULT_SEGMENT_LEVEL_THRESHOLD)
+
+  override protected def beforeAll(): Unit = {
+    // Adding new properties
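+    // These overrides force the V3 data file format with 16 MB blocklet groups, off-heap sort,
+    // the vectorized reader, auto load merge disabled, and a (2,2) compaction threshold.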
+    prop.addProperty("carbon.blockletgroup.size.in.mb", "16")
+    prop.addProperty("enable.offheap.sort", "true")
+    prop.addProperty("carbon.enable.vector.reader", "true")
+    prop.addProperty("carbon.data.file.version", "V3")
+    prop.addProperty("carbon.enable.auto.load.merge", "false")
+    prop.addProperty("carbon.compaction.level.threshold", "(2,2)")
+  }
+
+  override def afterAll(): Unit = {
+    // Revert to the original property values
+    prop.addProperty("carbon.blockletgroup.size.in.mb", p1)
+    prop.addProperty("enable.offheap.sort", p2)
+    prop.addProperty("carbon.enable.vector.reader", p3)
+    prop.addProperty("carbon.data.file.version", p4)
+    prop.addProperty("carbon.enable.auto.load.merge", p5)
+    prop.addProperty("carbon.compaction.level.threshold", p6)
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/Vector1TestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/Vector1TestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/Vector1TestCase.scala
new file mode 100644
index 0000000..fc6e590
--- /dev/null
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/Vector1TestCase.scala
@@ -0,0 +1,645 @@
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.cluster.sdv.generated
+
+import org.apache.spark.sql.common.util._
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
+/**
+ * Test Class for Vector1TestCase to verify all scenarios
+ */
+
+class Vector1TestCase extends QueryTest with BeforeAndAfterAll {
+         
+
+  //To check select all records with  vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_001", Include) {
+     sql(s"""CREATE TABLE uniqdatavector1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
+     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdatavector1 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
+
+    sql(s"""select * from uniqdatavector1 """).collect
+
+
+
+  }
+
+
+  //To check  random measure select query with  vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_002", Include) {
+
+    sql(s"""select cust_name,DOB,DOJ from uniqdatavector1 where cust_id=10999""").collect
+
+
+     sql(s"""drop table uniqdatavector1""").collect
+
+  }
+
+
+  //To check select of random columns with order by with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_003", Include) {
+     sql(s"""create table double(id double, name string) STORED BY 'org.apache.carbondata.format' """).collect
+   sql(s"""load data  inpath '$resourcesPath/Data/InsertData/maxrange_double.csv' into table double""").collect
+
+    sql(s"""select id from double order by id""").collect
+
+  }
+
+
+  //To check the executor logs with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_004", Include) {
+
+    sql(s"""select id from double order by id""").collect
+
+
+
+  }
+
+
+  //To check select of random measures with group by and having clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_005", Include) {
+
+    sql(s"""select id,count(*) from double group by id having count(*)=1""").collect
+  }
+
+
+  //To check for select count query with group by and having clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_006", Include) {
+
+    sql(s"""select id,count(id) from double group by id having count(*)=1""").collect
+
+     sql(s"""drop table double""").collect
+
+  }
+
+
+  //To apply the cast method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_007", Include) {
+     sql(s"""CREATE TABLE uniqdatavector11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
+   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdatavector11 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
+
+    sql(s"""select cast(Double_COLUMN1 as int) from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply sum method on a column with select query with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_008", Include) {
+
+    sql(s"""select sum(CUST_ID) from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply the average method on a column with select query with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_009", Include) {
+
+    sql(s"""select avg(CUST_ID) from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply the percentile_approx method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_010", Include) {
+
+    sql(s"""select percentile_approx(1, 0.5 ,500)  from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply the var_samp method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_011", Include) {
+
+    sql(s"""select var_samp(cust_id) from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply the stddev_pop method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_012", Include) {
+
+    sql(s"""select stddev_pop(cust_id) from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply the stddev_samp method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_013", Include) {
+
+    sql(s"""select stddev_samp(cust_id) from uniqdatavector11""").collect
+
+  }
+
+
+  //To apply percentile method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_014", Include) {
+
+    sql(s"""select percentile(0,1) from uniqdatavector11""").collect
+  }
+
+
+  //To apply min method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_015", Include) {
+
+    sql(s"""select min(CUST_ID) from uniqdatavector11""").collect
+  }
+
+
+  //To apply the max method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_016", Include) {
+
+    sql(s"""select max(CUST_ID) from uniqdatavector11""").collect
+  }
+
+
+  //To apply sum method with plus operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_017", Include) {
+
+    sql(s"""select sum(CUST_ID+1) from uniqdatavector11""").collect
+  }
+
+
+  //To apply sum method with minus operator with vectorized carbon reader enabled
+
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_018", Include) {
+
+    sql(s"""select sum(CUST_ID-1) from uniqdatavector11""").collect
+  }
+
+
+  //To apply count method  with distinct operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_019", Include) {
+
+    sql(s"""select count(DISTINCT CUST_ID) from uniqdatavector11""").collect
+  }
+
+
+  //To check random measure select query with  AND operator and vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_020", Include) {
+
+    sql(s"""select cust_name,DOB,DOJ from uniqdatavector11 where cust_id=10999 and INTEGER_COLUMN1=2000 """).collect
+  }
+
+
+  //To check random measure select query with  OR operator and vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_021", Include) {
+
+    sql(s"""select cust_name,DOB,DOJ from uniqdatavector11 where cust_id=10999 or INTEGER_COLUMN1=2000 """).collect
+  }
+
+
+  //To apply count method with if operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_022", Include) {
+
+    sql(s"""select count(if(CUST_ID<1999,NULL,CUST_NAME)) from uniqdatavector11""").collect
+  }
+
+
+  //To apply in operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_023", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector11 where CUST_ID IN(1,22)""").collect
+  }
+
+
+  //To apply not in operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_024", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector11 where CUST_ID NOT IN(1,22)""").collect
+  }
+
+
+  //To apply between operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_025", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector11 where CUST_ID BETWEEN 1 AND 11000""").collect
+  }
+
+
+  //To apply not between operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_026", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector11 where CUST_ID NOT BETWEEN 1 AND 11000""").collect
+  }
+
+
+  //To apply the in operator with the order by clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_027", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector11 where CUST_ID in (1,10999) order by 'CUST_ID'""").collect
+  }
+
+
+  //To apply the in operator with the group by clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_028", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector11 where CUST_ID in (1,10999) group by CUST_NAME""").collect
+
+
+
+  }
+
+
+  //To apply the is null clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_029", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector11 where CUST_ID is null""").collect
+
+
+
+  }
+
+
+  //To apply the is not null clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_030", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector11 where CUST_ID is not null""").collect
+
+
+
+  }
+
+
+  //To apply > operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_031", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector11 where CUST_ID>1""").collect
+
+
+
+  }
+
+
+  //To apply < operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_032", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector11 where CUST_ID<1""").collect
+
+
+
+  }
+
+
+  //To apply != operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_033", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector11 where CUST_ID!=1""").collect
+
+
+
+  }
+
+
+  //To apply like clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_034", Include) {
+
+    sql(s"""select CUST_ID from uniqdatavector11 where CUST_ID like 10999""").collect
+
+
+
+  }
+
+
+  //To apply like% clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_035", Include) {
+
+    sql(s"""select CUST_ID from uniqdatavector11 where CUST_ID like '%10999%'""").collect
+
+
+
+  }
+
+
+  //To apply rlike clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_036", Include) {
+
+    sql(s"""select CUST_ID from uniqdatavector11 where CUST_ID rlike 10999""").collect
+
+
+
+  }
+
+
+  //To apply rlike% clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_037", Include) {
+
+    sql(s"""select CUST_ID from uniqdatavector11 where CUST_ID rlike '%10999'""").collect
+
+
+
+  }
+
+
+  //To apply alias clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_038", Include) {
+
+    sql(s"""select count(cust_id)+10.364 as a from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply an alias with the group by clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_039", Include) {
+
+    sql(s"""select count(cust_id)+10.364 as a from uniqdatavector11 group by CUST_ID""").collect
+
+
+
+  }
+
+
+  //To apply an alias with the order by clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_040", Include) {
+
+    sql(s"""select cust_id,count(cust_name) a from uniqdatavector11 group by cust_id order by cust_id""").collect
+
+
+
+  }
+
+
+  //To apply regexp_replace clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_041", Include) {
+
+    sql(s"""select regexp_replace(cust_id, 'i', 'ment')  from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply date_add method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_048", Include) {
+
+    sql(s"""SELECT date_add(DOB,1) FROM uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply date_sub method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_049", Include) {
+
+    sql(s"""SELECT date_sub(DOB,1) FROM uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply current_date method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_050", Include) {
+
+    sql(s"""SELECT current_date() FROM uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply the add_months method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_051", Include) {
+
+    sql(s"""SELECT add_months(dob,1) FROM uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply last_day method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_052", Include) {
+
+    sql(s"""SELECT last_day(dob) FROM uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply next_day method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_053", Include) {
+
+    sql(s"""SELECT next_day(dob,'monday') FROM uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply months_between method on carbon table
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_054", Include) {
+
+    sql(s"""select months_between('2016-12-28', '2017-01-30') from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply the datediff method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_055", Include) {
+
+    sql(s"""select datediff('2009-03-01', '2009-02-27') from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply concat method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_056", Include) {
+
+    sql(s"""SELECT concat('hi','hi') FROM uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply lower method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_057", Include) {
+
+    sql(s"""SELECT lower('H') FROM uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply substr method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_058", Include) {
+
+    sql(s"""select substr(cust_id,3) from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply trim method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_059", Include) {
+
+    sql(s"""select trim(cust_id) from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply split method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_060", Include) {
+
+    sql(s"""select split('knoldus','ol') from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply the split method with a limit clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_061", Include) {
+
+    sql(s"""select split('knoldus','ol') from uniqdatavector11 limit 1""").collect
+
+
+
+  }
+
+
+  //To apply reverse on carbon table with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_062", Include) {
+
+    sql(s"""select reverse('knoldus') from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply replace on carbon table with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_063", Include) {
+
+    sql(s"""select regexp_replace('Tester', 'T', 't') from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply replace with limit clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_064", Include) {
+
+    sql(s"""select regexp_replace('Tester', 'T', 't') from uniqdatavector11 limit 1""").collect
+
+
+
+  }
+
+
+  //To apply FORMAT_STRING on carbon table with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_065", Include) {
+
+    sql(s"""select format_string('data', cust_name) from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply sentences method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_066", Include) {
+
+    sql(s"""select sentences(cust_name) from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply space method on carbon table with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_067", Include) {
+
+    sql(s"""select space(10) from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply rtrim method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_068", Include) {
+
+    sql(s"""select rtrim("     testing           ") from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply ascii method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_069", Include) {
+
+    sql(s"""select ascii('A') from uniqdatavector11""").collect
+
+
+
+  }
+
+
+  //To apply the from_utc_timestamp method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_070", Include) {
+
+    sql(s"""select from_utc_timestamp('2016-12-12 08:00:00','PST') from uniqdatavector11""").collect
+
+
+     sql(s"""drop table uniqdatavector11""").collect
+
+  }
+
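+  // Capture the current vector reader setting so that afterAll() can restore it after the suite runs.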
+  val prop = CarbonProperties.getInstance()
+  val p1 = prop.getProperty("carbon.enable.vector.reader", CarbonCommonConstants.ENABLE_VECTOR_READER_DEFAULT)
+
+  override protected def beforeAll(): Unit = {
+    // Adding new properties
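+    // Enable the vectorized carbon reader for every query in this suite.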
+    prop.addProperty("carbon.enable.vector.reader", "true")
+  }
+
+  override def afterAll(): Unit = {
+    // Revert to the original property value
+    prop.addProperty("carbon.enable.vector.reader", p1)
+  }
+}
\ No newline at end of file