Posted to commits@carbondata.apache.org by ch...@apache.org on 2017/08/01 06:00:09 UTC

[02/20] carbondata git commit: [CARBONDATA-1346] SDV cluster tests

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/Vector2TestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/Vector2TestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/Vector2TestCase.scala
new file mode 100644
index 0000000..21423f1
--- /dev/null
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/Vector2TestCase.scala
@@ -0,0 +1,625 @@
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.cluster.sdv.generated
+
+import org.apache.spark.sql.common.util._
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
+/**
+ * Test Class for Vector2TestCase to verify all scenarios
+ */
+
+class Vector2TestCase extends QueryTest with BeforeAndAfterAll {
+         
+
+  //To check select all records with  vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_071", Include) {
+     sql(s"""CREATE TABLE uniqdatavector2 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
+   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdatavector2 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
+    sql(s"""select * from uniqdatavector2 """).collect
+
+
+  }
+
+
+  //To check  random measure select query with  vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_072", Include) {
+
+    sql(s"""select cust_name,DOB,DOJ from uniqdatavector2 where cust_id=10999""").collect
+
+     sql(s"""drop table uniqdatavector2""").collect
+  }
+
+
+  //To check select random columns  and order with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_073", Include) {
+     sql(s"""create table double1(id double, name string) STORED BY 'org.apache.carbondata.format' """).collect
+   sql(s"""load data  inpath '$resourcesPath/Data/InsertData/maxrange_double.csv' into table double1""").collect
+    sql(s"""select id from double1 order by id""").collect
+
+
+  }
+
+
+  //To check the executor logs with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_074", Include) {
+
+    sql(s"""select id from double1 order by id""").collect
+
+
+  }
+
+
+  //To check  for select random measures with group by and having clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_075", Include) {
+
+    sql(s"""select id,count(*) from double1 group by id having count(*)=1""").collect
+
+
+  }
+
+
+  //To check for select count query with group by and having clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_076", Include) {
+
+    sql(s"""select id,count(id) from double1 group by id having count(*)=1""").collect
+
+     sql(s"""drop table double1""").collect
+  }
+
+
+  //To apply cast method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_077", Include) {
+     sql(s"""CREATE TABLE uniqdatavector22 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
+   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdatavector22 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
+    sql(s"""select cast(Double_COLUMN1 as int) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply sum method on a column with select query with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_078", Include) {
+
+    sql(s"""select sum(CUST_ID) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply the average method on a column with select query with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_079", Include) {
+
+    sql(s"""select avg(CUST_ID) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply the percentile_approx method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_080", Include) {
+
+    sql(s"""select percentile_approx(1, 0.5 ,500)  from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply the var_samp method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_081", Include) {
+
+    sql(s"""select var_samp(cust_id) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply the stddev_pop method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_082", Include) {
+
+    sql(s"""select stddev_pop(cust_id) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply the stddev_samp method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_083", Include) {
+
+    sql(s"""select stddev_samp(cust_id) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply percentile method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_084", Include) {
+
+    sql(s"""select percentile(0,1) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply min method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_085", Include) {
+
+    sql(s"""select min(CUST_ID) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply max method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_086", Include) {
+
+    sql(s"""select max(CUST_ID) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply sum method with plus operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_087", Include) {
+
+    sql(s"""select sum(CUST_ID+1) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply sum method with minus operator with vectorized carbon reader enabled
+
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_088", Include) {
+
+    sql(s"""select sum(CUST_ID-1) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply count method  with distinct operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_089", Include) {
+
+    sql(s"""select count(DISTINCT CUST_ID) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To check random measure select query with  AND operator and vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_090", Include) {
+
+    sql(s"""select cust_name,DOB,DOJ from uniqdatavector22 where cust_id=10999 and INTEGER_COLUMN1=2000 """).collect
+
+
+  }
+
+
+  //To check random measure select query with  OR operator and vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_091", Include) {
+
+    sql(s"""select cust_name,DOB,DOJ from uniqdatavector22 where cust_id=10999 or INTEGER_COLUMN1=2000 """).collect
+
+
+  }
+
+
+  //To apply count method with if operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_092", Include) {
+
+    sql(s"""select count(if(CUST_ID<1999,NULL,CUST_NAME)) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply in operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_093", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID IN(1,22)""").collect
+
+
+  }
+
+
+  //To apply not in operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_094", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID NOT IN(1,22)""").collect
+
+
+  }
+
+
+  //To apply between operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_095", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID BETWEEN 1 AND 11000""").collect
+
+
+  }
+
+
+  //To apply not between operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_096", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID NOT BETWEEN 1 AND 11000""").collect
+
+
+  }
+
+
+  //To apply the in operator with order by clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_097", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID in (1,10999)order by 'CUST_ID'""").collect
+
+
+  }
+
+
+  //To apply the in operator with group by clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_098", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID in (1,10999) group by CUST_NAME""").collect
+
+
+  }
+
+
+  //To apply is null clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_099", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID is null""").collect
+
+
+  }
+
+
+  //To apply is not null clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_100", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID is not null""").collect
+
+
+  }
+
+
+  //To apply > operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_101", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID>1""").collect
+
+
+  }
+
+
+  //To apply < operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_102", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID<1""").collect
+
+
+  }
+
+
+  //To apply != operator with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_103", Include) {
+
+    sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID!=1""").collect
+
+
+  }
+
+
+  //To apply like clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_104", Include) {
+
+    sql(s"""select CUST_ID from uniqdatavector22 where CUST_ID like 10999""").collect
+
+
+  }
+
+
+  //To apply like% clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_105", Include) {
+
+    sql(s"""select CUST_ID from uniqdatavector22 where CUST_ID like '%10999%'""").collect
+
+
+  }
+
+
+  //To apply rlike clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_106", Include) {
+
+    sql(s"""select CUST_ID from uniqdatavector22 where CUST_ID rlike 10999""").collect
+
+
+  }
+
+
+  //To apply rlike% clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_107", Include) {
+
+    sql(s"""select CUST_ID from uniqdatavector22 where CUST_ID rlike '%10999'""").collect
+
+
+  }
+
+
+  //To apply alias clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_108", Include) {
+
+    sql(s"""select count(cust_id)+10.364 as a from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply alias clause with group by clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_109", Include) {
+
+    sql(s"""select count(cust_id)+10.364 as a from uniqdatavector22 group by CUST_ID""").collect
+
+
+  }
+
+
+  //To apply alias clause with order by clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_110", Include) {
+
+    sql(s"""select cust_id,count(cust_name) a from uniqdatavector22 group by cust_id order by cust_id""").collect
+
+
+  }
+
+
+  //To apply regexp_replace clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_111", Include) {
+
+    sql(s"""select regexp_replace(cust_id, 'i', 'ment')  from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply date_add method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_118", Include) {
+
+    sql(s"""SELECT date_add(DOB,1) FROM uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply date_sub method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_119", Include) {
+
+    sql(s"""SELECT date_sub(DOB,1) FROM uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply current_date method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_120", Include) {
+
+    sql(s"""SELECT current_date() FROM uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply add_months method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_121", Include) {
+
+    sql(s"""SELECT add_months(dob,1) FROM uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply last_day method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_122", Include) {
+
+    sql(s"""SELECT last_day(dob) FROM uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply next_day method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_123", Include) {
+
+    sql(s"""SELECT next_day(dob,'monday') FROM uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply months_between method on carbon table
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_124", Include) {
+
+    sql(s"""select months_between('2016-12-28', '2017-01-30') from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply datediff method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_125", Include) {
+
+    sql(s"""select datediff('2009-03-01', '2009-02-27') from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply concat method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_126", Include) {
+
+    sql(s"""SELECT concat('hi','hi') FROM uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply lower method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_127", Include) {
+
+    sql(s"""SELECT lower('H') FROM uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply substr method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_128", Include) {
+
+    sql(s"""select substr(cust_id,3) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply trim method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_129", Include) {
+
+    sql(s"""select trim(cust_id) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply split method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_130", Include) {
+
+    sql(s"""select split('knoldus','ol') from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply split method with limit clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_131", Include) {
+
+    sql(s"""select split('knoldus','ol') from uniqdatavector22 limit 1""").collect
+
+
+  }
+
+
+  //To apply reverse on carbon table with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_132", Include) {
+
+    sql(s"""select reverse('knoldus') from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply replace on carbon table with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_133", Include) {
+
+    sql(s"""select regexp_replace('Tester', 'T', 't') from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply replace with limit clause with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_134", Include) {
+
+    sql(s"""select regexp_replace('Tester', 'T', 't') from uniqdatavector22 limit 1""").collect
+
+
+  }
+
+
+  //To apply FORMAT_STRING on carbon table with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_135", Include) {
+
+    sql(s"""select format_string('data', cust_name) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply sentences method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_136", Include) {
+
+    sql(s"""select sentences(cust_name) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply space method on carbon table with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_137", Include) {
+
+    sql(s"""select space(10) from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply rtrim method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_138", Include) {
+
+    sql(s"""select rtrim("     testing           ") from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply ascii method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_139", Include) {
+
+    sql(s"""select ascii('A') from uniqdatavector22""").collect
+
+
+  }
+
+
+  //To apply from_utc_timestamp method with vectorized carbon reader enabled
+  test("PTS-AR-Productize-New-Features-Huawei-Spark2.1-007-01-01-01_001-TC_140", Include) {
+
+    sql(s"""select from_utc_timestamp('2016-12-12 08:00:00','PST') from uniqdatavector22""").collect
+
+     sql(s"""drop table uniqdatavector22""").collect
+  }
+
+  val prop = CarbonProperties.getInstance()
+  val p1 = prop.getProperty("carbon.enable.vector.reader", CarbonCommonConstants.ENABLE_VECTOR_READER_DEFAULT)
+
+  override protected def beforeAll() {
+    // Adding new properties
+    prop.addProperty("carbon.enable.vector.reader", "false")
+  }
+
+  override def afterAll: Unit = {
+    //Reverting to old
+    prop.addProperty("carbon.enable.vector.reader", p1)
+  }
+}
\ No newline at end of file
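[Editorial note, not part of the patch] The generated test cases above all follow the same skeleton: beforeAll flips carbon.enable.vector.reader off, each tagged test runs plain SQL through the QueryTest helper, and afterAll restores the saved value. A minimal hedged sketch of that skeleton, using a hypothetical table t1 and the same helpers this module provides:

    package org.apache.carbondata.cluster.sdv.generated

    import org.apache.spark.sql.common.util._
    import org.scalatest.BeforeAndAfterAll

    import org.apache.carbondata.core.constants.CarbonCommonConstants
    import org.apache.carbondata.core.util.CarbonProperties

    class VectorReaderSketchTestCase extends QueryTest with BeforeAndAfterAll {

      private val prop = CarbonProperties.getInstance()
      // Remember the current reader setting so it can be restored after the suite.
      private val saved = prop.getProperty("carbon.enable.vector.reader",
        CarbonCommonConstants.ENABLE_VECTOR_READER_DEFAULT)

      override protected def beforeAll(): Unit = {
        // Exercise the non-vectorized path, as Vector2TestCase does.
        prop.addProperty("carbon.enable.vector.reader", "false")
      }

      test("hypothetical-TC: select with reader toggled", Include) {
        sql("CREATE TABLE IF NOT EXISTS t1(id int, name string) STORED BY 'carbondata'").collect
        sql("SELECT count(*) FROM t1").collect
        sql("DROP TABLE IF EXISTS t1").collect
      }

      override def afterAll(): Unit = {
        // Restore the original property so later suites see the default behaviour.
        prop.addProperty("carbon.enable.vector.reader", saved)
      }
    }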

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
new file mode 100644
index 0000000..de946a0
--- /dev/null
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.carbondata.cluster.sdv.suite
+
+import org.apache.spark.sql.test.TestQueryExecutor
+import org.scalatest.{BeforeAndAfterAll, Suites}
+
+import org.apache.carbondata.cluster.sdv.generated._
+
+/**
+ * Suite class for all tests.
+ */
+class SDVSuites extends Suites with BeforeAndAfterAll {
+
+  val suites =                new AlterTableTestCase ::
+                              new BadRecordTestCase ::
+                              new BatchSortLoad1TestCase ::
+                              new BatchSortLoad2TestCase ::
+                              new BatchSortQueryTestCase ::
+                              new ColumndictTestCase ::
+                              new DataLoadingTestCase ::
+                              new DataLoadingV3TestCase ::
+                              new InvertedindexTestCase ::
+                              new OffheapQuery1TestCase ::
+                              new OffheapQuery2TestCase ::
+                              new OffheapSort1TestCase ::
+                              new OffheapSort2TestCase ::
+                              new PartitionTestCase ::
+                              new QueriesBasicTestCase ::
+                              new QueriesBVATestCase ::
+                              new QueriesCompactionTestCase ::
+                              new QueriesExcludeDictionaryTestCase ::
+                              new QueriesIncludeDictionaryTestCase ::
+                              new QueriesNormalTestCase ::
+                              new QueriesRangeFilterTestCase ::
+                              new QueriesSparkBlockDistTestCase ::
+                              new ShowLoadsTestCase ::
+                              new SinglepassTestCase ::
+                              new SortColumnTestCase ::
+                              new TimestamptypesTestCase ::
+                              new V3offheapvectorTestCase ::
+                              new Vector1TestCase ::
+                              new Vector2TestCase ::Nil
+
+  override val nestedSuites = suites.toIndexedSeq
+
+  override protected def afterAll() = {
+    println("---------------- Stopping spark -----------------")
+    TestQueryExecutor.INSTANCE.stop()
+    println("---------------- Stopped spark -----------------")
+  }
+}
+
+/**
+ * Suite class for all tests.
+ */
+class SDVSuites1 extends Suites with BeforeAndAfterAll {
+
+  val suites =                 new AlterTableTestCase ::
+                               new BadRecordTestCase ::
+                               new BatchSortLoad1TestCase ::
+                               new BatchSortLoad2TestCase ::
+                               new BatchSortQueryTestCase ::
+                               new ColumndictTestCase ::
+                               new DataLoadingTestCase ::
+                               new DataLoadingV3TestCase ::
+                               new InvertedindexTestCase ::
+                               new OffheapQuery1TestCase ::
+                               new OffheapQuery2TestCase ::
+                               new OffheapSort1TestCase ::
+                               new OffheapSort2TestCase ::
+                               new PartitionTestCase ::
+                               new QueriesBasicTestCase :: Nil
+
+  override val nestedSuites = suites.toIndexedSeq
+
+  override protected def afterAll() = {
+    println("---------------- Stopping spark -----------------")
+    TestQueryExecutor.INSTANCE.stop()
+    println("---------------- Stopped spark -----------------")
+  }
+}
+
+/**
+ * Suite class for all tests.
+ */
+class SDVSuites2 extends Suites with BeforeAndAfterAll {
+
+  val suites =    new QueriesBVATestCase ::
+                  new QueriesCompactionTestCase ::
+                  new QueriesExcludeDictionaryTestCase ::
+                  new QueriesIncludeDictionaryTestCase ::
+                  new QueriesNormalTestCase ::
+                  new QueriesRangeFilterTestCase ::
+                  new QueriesSparkBlockDistTestCase ::
+                  new ShowLoadsTestCase ::
+                  new SinglepassTestCase ::
+                  new SortColumnTestCase ::
+                  new TimestamptypesTestCase ::
+                  new V3offheapvectorTestCase ::
+                  new Vector1TestCase ::
+                  new Vector2TestCase :: Nil
+
+  override val nestedSuites = suites.toIndexedSeq
+
+  override protected def afterAll() = {
+    println("---------------- Stopping spark -----------------")
+    TestQueryExecutor.INSTANCE.stop()
+    println("---------------- Stopped spark -----------------")
+  }
+}
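[Editorial note, not part of the patch] The SDVSuites classes aggregate the generated test classes through ScalaTest's Suites: instances are collected in a list, exposed via nestedSuites, and Spark is stopped once in afterAll. Wiring in a new generated test case means adding one more element to that list; a small hypothetical grouping (VectorOnlySuites is illustrative only):

    package org.apache.carbondata.cluster.sdv.suite

    import org.apache.spark.sql.test.TestQueryExecutor
    import org.scalatest.{BeforeAndAfterAll, Suites}

    import org.apache.carbondata.cluster.sdv.generated._

    // Hypothetical grouping that runs only the vectorized-reader suites.
    class VectorOnlySuites extends Suites with BeforeAndAfterAll {

      val suites = new Vector1TestCase ::
                   new Vector2TestCase :: Nil

      override val nestedSuites = suites.toIndexedSeq

      override protected def afterAll() = {
        // Stop the shared Spark context once all nested suites have run.
        TestQueryExecutor.INSTANCE.stop()
      }
    }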

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/CarbonFunSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/CarbonFunSuite.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/CarbonFunSuite.scala
new file mode 100644
index 0000000..37803a8
--- /dev/null
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/CarbonFunSuite.scala
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.common.util
+
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.scalatest.{FunSuite, Outcome}
+
+
+private[spark] abstract class CarbonFunSuite extends FunSuite {
+
+  private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+
+  /**
+   * Log the suite name and the test name before and after each test.
+   *
+   * Subclasses should never override this method. If they wish to run
+   * custom code before and after each test, they should mix in
+   * the {{org.scalatest.BeforeAndAfter}} trait instead.
+   */
+  final protected override def withFixture(test: NoArgTest): Outcome = {
+    val testName = test.text
+    val suiteName = this.getClass.getName
+    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
+    try {
+      LOGGER.info(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
+      test()
+    } finally {
+      LOGGER.info(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
+    }
+  }
+
+}
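[Editorial note, not part of the patch] CarbonFunSuite only wraps each test in log banners via withFixture; per-test setup belongs in BeforeAndAfter, as its scaladoc says. A hedged sketch of the intended usage (ExampleCarbonSuite is hypothetical):

    package org.apache.spark.sql.common.util

    import org.scalatest.BeforeAndAfter

    // The banner logging from CarbonFunSuite wraps each test body,
    // while per-test setup and cleanup live in BeforeAndAfter.
    private[spark] class ExampleCarbonSuite extends CarbonFunSuite with BeforeAndAfter {

      before {
        // per-test setup goes here
      }

      test("banner is logged around this body") {
        assert(1 + 1 == 2)
      }

      after {
        // per-test cleanup goes here
      }
    }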

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/PlanTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/PlanTest.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/PlanTest.scala
new file mode 100644
index 0000000..cdd415f
--- /dev/null
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/PlanTest.scala
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.common.util
+
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical.{Filter, LogicalPlan, OneRowRelation}
+import org.apache.spark.sql.catalyst.util._
+
+/**
+ * Provides helper methods for comparing plans.
+ */
+class PlanTest extends CarbonFunSuite {
+
+  /** Fails the test if the two expressions do not match */
+  protected def compareExpressions(e1: Expression, e2: Expression): Unit = {
+    comparePlans(Filter(e1, OneRowRelation), Filter(e2, OneRowRelation))
+  }
+
+  /** Fails the test if the two plans do not match */
+  protected def comparePlans(plan1: LogicalPlan, plan2: LogicalPlan) {
+    val normalized1 = normalizeExprIds(plan1)
+    val normalized2 = normalizeExprIds(plan2)
+    if (normalized1 != normalized2) {
+      fail(
+        s"""
+           |== FAIL: Plans do not match ===
+           |${sideBySide(normalized1.treeString, normalized2.treeString).mkString("\n")}
+         """.stripMargin)
+    }
+  }
+
+  /**
+   * Since attribute references are given globally unique ids during analysis,
+   * we must normalize them to check if two different queries are identical.
+   */
+  protected def normalizeExprIds(plan: LogicalPlan) = {
+    plan transformAllExpressions {
+      case a: AttributeReference =>
+        AttributeReference(a.name, a.dataType, a.nullable)(exprId = ExprId(0))
+      case a: Alias =>
+        Alias(a.child, a.name)(exprId = ExprId(0))
+    }
+  }
+}
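[Editorial note, not part of the patch] PlanTest rewrites every AttributeReference and Alias to ExprId(0) before comparing, so plans built independently (and therefore carrying different expression ids) still compare equal. A hedged sketch of how a subclass would use the helper (ExamplePlanComparison is hypothetical):

    package org.apache.spark.sql.common.util

    import org.apache.spark.sql.catalyst.expressions.{Add, Literal}

    // Identical expressions wrapped in Filter(OneRowRelation) compare equal
    // once expression ids are normalized by PlanTest.
    class ExamplePlanComparison extends PlanTest {

      test("equivalent expressions produce matching normalized plans") {
        compareExpressions(Add(Literal(1), Literal(2)), Add(Literal(1), Literal(2)))
      }
    }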

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
new file mode 100644
index 0000000..292c160
--- /dev/null
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
@@ -0,0 +1,199 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.common.util
+
+import java.io.{FileInputStream, ObjectInputStream, ObjectOutputStream}
+import java.math
+import java.math.RoundingMode
+import java.util.{Locale, TimeZone}
+
+import org.apache.carbondata.common.logging.LogServiceFactory
+import scala.collection.JavaConversions._
+
+import org.apache.spark.sql.catalyst.plans._
+import org.apache.spark.sql.catalyst.util._
+import org.apache.spark.sql.execution.command.LoadDataCommand
+import org.apache.spark.sql.test.{ResourceRegisterAndCopier, TestQueryExecutor}
+import org.apache.spark.sql.{DataFrame, Row, SQLContext}
+import org.scalatest.Suite
+
+import org.apache.carbondata.core.datastore.impl.FileFactory
+
+class QueryTest extends PlanTest with Suite {
+
+  val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+
+  val DOLLAR = "$"
+
+  // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
+  TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
+  // Add Locale setting
+  Locale.setDefault(Locale.US)
+
+  /**
+   * Runs the plan and makes sure the answer either contains all of the keywords or
+   * contains none of them.
+   * @param df the [[DataFrame]] to be executed
+   * @param exists true to assert that every keyword appears in the output, false to
+   *               assert that none of the keywords appear in the output
+   * @param keywords keywords to look for, as string varargs
+   */
+  def checkExistence(df: DataFrame, exists: Boolean, keywords: String*) {
+    val outputs = df.collect().map(_.mkString).mkString
+    for (key <- keywords) {
+      if (exists) {
+        assert(outputs.contains(key), s"Failed for $df ($key doesn't exist in result)")
+      } else {
+        assert(!outputs.contains(key), s"Failed for $df ($key existed in the result)")
+      }
+    }
+  }
+
+  /**
+   * Runs the plan and makes sure the answer matches the expected result.
+   * @param df the [[DataFrame]] to be executed
+   * @param expectedAnswer the expected result in a [[Seq]] of [[Row]]s.
+   */
+  protected def checkAnswer(df: DataFrame, expectedAnswer: Seq[Row]): Unit = {
+    QueryTest.checkAnswer(df, expectedAnswer) match {
+      case Some(errorMessage) => fail(errorMessage)
+      case None =>
+    }
+  }
+
+  protected def checkAnswer(df: DataFrame, expectedAnswer: Row): Unit = {
+    checkAnswer(df, Seq(expectedAnswer))
+  }
+
+  protected def checkAnswer(df: DataFrame, expectedAnswer: DataFrame): Unit = {
+    checkAnswer(df, expectedAnswer.collect())
+  }
+
+  protected def checkAnswer(carbon: String, hive: String, uniqueIdentifier:String): Unit = {
+    val path = TestQueryExecutor.hiveresultpath + "/"+uniqueIdentifier
+    if (FileFactory.isFileExist(path, FileFactory.getFileType(path))) {
+      val objinp = new ObjectInputStream(FileFactory.getDataInputStream(path, FileFactory.getFileType(path)))
+      val rows = objinp.readObject().asInstanceOf[Array[Row]]
+      objinp.close()
+      checkAnswer(sql(carbon), rows)
+    } else {
+      val rows = sql(hive).collect()
+      val obj = new ObjectOutputStream(FileFactory.getDataOutputStream(path, FileFactory.getFileType(path)))
+      obj.writeObject(rows)
+      obj.close()
+      checkAnswer(sql(carbon), rows)
+    }
+  }
+
+  protected def checkAnswer(carbon: String, expectedAnswer: Seq[Row], uniqueIdentifier:String): Unit = {
+    checkAnswer(sql(carbon), expectedAnswer)
+  }
+
+  def sql(sqlText: String): DataFrame = {
+    val frame = TestQueryExecutor.INSTANCE.sql(sqlText)
+    val plan = frame.queryExecution.logical
+    if (TestQueryExecutor.hdfsUrl.startsWith("hdfs")) {
+      plan match {
+        case l: LoadDataCommand =>
+          val copyPath = TestQueryExecutor.warehouse + "/" + l.table.table.toLowerCase +
+                         l.path.substring(l.path.lastIndexOf("/"), l.path.length)
+          ResourceRegisterAndCopier.copyLocalFile(l.path, copyPath)
+        case _ =>
+      }
+    }
+    frame
+  }
+
+  protected def dropTable(tableName: String): Unit ={
+    sql(s"DROP TABLE IF EXISTS $tableName")
+  }
+
+  val sqlContext: SQLContext = TestQueryExecutor.INSTANCE.sqlContext
+
+  val storeLocation = TestQueryExecutor.storeLocation
+  val resourcesPath = TestQueryExecutor.resourcesPath
+  val integrationPath = TestQueryExecutor.integrationPath
+}
+
+object QueryTest {
+
+  def checkAnswer(df: DataFrame, expectedAnswer: java.util.List[Row]): String = {
+    checkAnswer(df, expectedAnswer.toSeq) match {
+      case Some(errorMessage) => errorMessage
+      case None => null
+    }
+  }
+
+  import java.text.DecimalFormat
+
+  /**
+   * Runs the plan and makes sure the answer matches the expected result.
+   * If an exception is thrown during execution, or the contents of the DataFrame do not
+   * match the expected result, an error message is returned. Otherwise, [[None]] is
+   * returned.
+   * @param df the [[DataFrame]] to be executed
+   * @param expectedAnswer the expected result in a [[Seq]] of [[Row]]s.
+   */
+  def checkAnswer(df: DataFrame, expectedAnswer: Seq[Row]): Option[String] = {
+    val isSorted = df.logicalPlan.collect { case s: logical.Sort => s }.nonEmpty
+    def prepareAnswer(answer: Seq[Row]): Seq[Row] = {
+      // Converts data to types that we can do equality comparison using Scala collections.
+      // For BigDecimal type, the Scala type has a better definition of equality test (similar to
+      // Java's java.math.BigDecimal.compareTo).
+      // For binary arrays, we convert them to Seq to avoid calling java.util.Arrays.equals for
+      // equality test.
+      val converted: Seq[Row] = answer.map { s =>
+        Row.fromSeq(s.toSeq.map {
+          case d: java.math.BigDecimal => BigDecimal(d)
+          case b: Array[Byte] => b.toSeq
+          case o => o
+        })
+      }
+      if (!isSorted) converted.sortBy(_.toString()) else converted
+    }
+    val sparkAnswer = try df.collect().toSeq catch {
+      case e: Exception =>
+        val errorMessage =
+          s"""
+             |Exception thrown while executing query:
+             |== Exception ==
+             |$e
+             |${org.apache.spark.sql.catalyst.util.stackTraceToString(e)}
+          """.stripMargin
+        return Some(errorMessage)
+    }
+
+    if (prepareAnswer(expectedAnswer) != prepareAnswer(sparkAnswer)) {
+      val errorMessage =
+        s"""
+           |Results do not match for query:
+           |== Results ==
+           |${
+          sideBySide(
+            s"== Correct Answer - ${expectedAnswer.size} ==" +:
+              prepareAnswer(expectedAnswer).map(_.toString()),
+            s"== Spark Answer - ${sparkAnswer.size} ==" +:
+              prepareAnswer(sparkAnswer).map(_.toString())).mkString("\n")
+        }
+      """.stripMargin
+      return Some(errorMessage)
+    }
+
+    return None
+  }
+}
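[Editorial note, not part of the patch] These helpers route every statement through TestQueryExecutor and, unless the query itself sorts, compare collected rows order-insensitively. A hedged sketch of a typical assertion (ExampleQueryCheck and its constant query are illustrative only):

    import org.apache.spark.sql.Row
    import org.apache.spark.sql.common.util.QueryTest

    // Hypothetical usage of the checkAnswer helper defined above.
    class ExampleQueryCheck extends QueryTest {

      test("checkAnswer compares collected rows") {
        // A constant query keeps the sketch independent of any table or data load.
        checkAnswer(sql("SELECT 1"), Seq(Row(1)))
      }
    }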

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/Tags.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/Tags.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/Tags.scala
new file mode 100644
index 0000000..7d6770b
--- /dev/null
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/Tags.scala
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.spark.sql.common.util
+
+import org.scalatest.Tag
+
+object Include extends Tag("Include")
+
+object Exclude extends Tag("Exclude")
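[Editorial note, not part of the patch] The Include and Exclude tags let the ScalaTest runner select or drop tests by tag name, which is how the generated SDV cases above are marked. A hedged sketch of tagging a test (ExampleTaggedTest is hypothetical):

    import org.apache.spark.sql.common.util.{Include, QueryTest}

    // Tagging a test with Include marks it for tag-based selection,
    // matching the pattern used in the generated SDV test cases.
    class ExampleTaggedTest extends QueryTest {

      test("hypothetical tagged query", Include) {
        sql("SELECT 1").collect
      }
    }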

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/pom.xml b/integration/spark-common-test/pom.xml
index 613d59f..fbb4f8d 100644
--- a/integration/spark-common-test/pom.xml
+++ b/integration/spark-common-test/pom.xml
@@ -137,6 +137,7 @@
         <version>1.0</version>
         <!-- Note config is repeated in surefire config -->
         <configuration>
+          <skipTests>false</skipTests>
           <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
           <junitxml>.</junitxml>
           <filereports>CarbonTestSuite.txt</filereports>
@@ -227,5 +228,11 @@
         </dependency>
       </dependencies>
     </profile>
+    <profile>
+      <id>sdvtest</id>
+      <properties>
+        <maven.test.skip>true</maven.test.skip>
+      </properties>
+    </profile>
   </profiles>
 </project>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
index adbc1b1..c09d285 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
@@ -128,7 +128,9 @@ class TestDataLoadWithColumnsMoreThanSchema extends QueryTest with BeforeAndAfte
       sql(s"LOAD DATA INPATH '$resourcesPath/seq_20Records.csv' into table smart_500_DE options('DELIMITER'=',','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='SID,PROBEID,INTERFACEID,GROUPID,GGSN_ID,SGSN_ID,dummy,SESSION_INDICATOR,BEGIN_TIME,BEGIN_TIME_MSEL,END_TIME,END_TIME_MSEL,PROT_CATEGORY,PROT_TYPE,L7_CARRIER_PROT,SUB_PROT_TYPE,MSISDN,IMSI,IMEI,ENCRYPT_VERSION,ROAMING_TYPE,ROAM_DIRECTION,MS_IP,SERVER_IP,MS_PORT,APN,SGSN_SIG_IP,GGSN_USER_IP,SGSN_USER_IP,MCC,MNC,RAT,LAC,RAC,SAC,CI,SERVER_DECIMAL,BROWSER_TIMESTAMP,TCP_CONN_STATES,GGSN_SIG_IP_BigInt_NEGATIVE,TCP_STATES_BIGINTPOSITIVE,dummy,TCP_WIN_SIZE,dummy,TCP_MSS,dummy,TCP_CONN_TIMES,dummy,TCP_CONN_2_FAILED_TIMES,dummy,TCP_CONN_3_FAILED_TIMES,HOST,STREAMING_URL,dummy,GET_STREAMING_FAILED_CODE,dummy,GET_STREAMING_FLAG,dummy,GET_NUM,dummy,GET_SUCCEED_NUM,dummy,GET_RETRANS_NUM,dummy,GET_TIMEOUT_NUM,INTBUFFER_FST_FLAG,INTBUFFER_FULL_FLAG,STALL_NUM,dummy,VIDEO_FRAME_RATE,dummy,VIDEO_CODEC_ID,dummy,VIDEO_WIDTH,dummy,VIDEO_HEIGHT,dummy,AUD
 IO_CODEC_ID,dummy,MEDIA_FILE_TYPE,dummy,PLAY_STATE,dummy,PLAY_STATE,dummy,STREAMING_FLAG,dummy,TCP_STATUS_INDICATOR,dummy,DISCONNECTION_FLAG,dummy,FAILURE_CODE,FLAG,TAC,ECI,dummy,TCP_SYN_TIME_MSEL,dummy,TCP_FST_SYN_DIRECTION,RAN_NE_USER_IP,HOMEMCC,HOMEMNC,dummy,CHARGE_FLAG,dummy,PREPAID_FLAG,dummy,USER_AGENT,dummy,MS_WIN_STAT_TOTAL_NUM,dummy,MS_WIN_STAT_SMALL_NUM,dummy,MS_ACK_TO_1STGET_DELAY,dummy,SERVER_ACK_TO_1STDATA_DELAY,dummy,STREAMING_TYPE,dummy,SOURCE_VIDEO_QUALITY,TETHERING_FLAG,CARRIER_ID,LAYER1ID,LAYER2ID,dummy,LAYER3ID,dummy,LAYER4ID,dummy,LAYER5ID,dummy,LAYER6ID,CHARGING_RULE_BASE_NAME,SP,dummy,EXTENDED_URL,SV,FIRST_SAI_CGI_ECGI,dummy,EXTENDED_URL_OTHER,SIGNALING_USE_FLAG,dummy,DNS_RETRANS_NUM,dummy,DNS_FAIL_CODE,FIRST_RAT,FIRST_RAT,MS_INDICATOR,LAST_SAI_CGI_ECGI,LAST_RAT,dummy,FIRST_LONGITUDE,dummy,FIRST_LATITUDE,dummy,FIRST_ALTITUDE,dummy,FIRST_RASTERLONGITUDE,dummy,FIRST_RASTERLATITUDE,dummy,FIRST_RASTERALTITUDE,dummy,FIRST_FREQUENCYSPOT,dummy,FIRST_CLUTTER,dummy,FIRS
 T_USERBEHAVIOR,dummy,FIRST_SPEED,dummy,FIRST_CREDIBILITY,dummy,LAST_LONGITUDE,dummy,LAST_LATITUDE,dummy,LAST_ALTITUDE,dummy,LAST_RASTERLONGITUDE,dummy,LAST_RASTERLATITUDE,dummy,LAST_RASTERALTITUDE,dummy,LAST_FREQUENCYSPOT,dummy,LAST_CLUTTER,dummy,LAST_USERBEHAVIOR,dummy,LAST_SPEED,dummy,LAST_CREDIBILITY,dummy,IMEI_CIPHERTEXT,APP_ID,dummy,DOMAIN_NAME,dummy,STREAMING_CACHE_IP,dummy,STOP_LONGER_THAN_MIN_THRESHOLD,dummy,STOP_LONGER_THAN_MAX_THRESHOLD,dummy,PLAY_END_STAT,dummy,STOP_START_TIME1,dummy,STOP_END_TIME1,dummy,STOP_START_TIME2,dummy,STOP_END_TIME2,dummy,STOP_START_TIME3,dummy,STOP_END_TIME3,dummy,STOP_START_TIME4,dummy,STOP_END_TIME4,dummy,STOP_START_TIME5,dummy,STOP_END_TIME5,dummy,STOP_START_TIME6,dummy,STOP_END_TIME6,dummy,STOP_START_TIME7,dummy,STOP_END_TIME7,dummy,STOP_START_TIME8,dummy,STOP_END_TIME8,dummy,STOP_START_TIME9,dummy,STOP_END_TIME9,dummy,STOP_START_TIME10,dummy,STOP_END_TIME10,dummy,FAIL_CLASS,RECORD_TYPE,dummy,NODATA_COUNT,dummy,VIDEO_NODATA_DURATION,dummy,VI
 DEO_SMOOTH_DURATION,dummy,VIDEO_SD_DURATION,dummy,VIDEO_HD_DURATION,dummy,VIDEO_UHD_DURATION,dummy,VIDEO_FHD_DURATION,dummy,FLUCTUATION,dummy,START_DOWNLOAD_THROUGHPUT,dummy,L7_UL_GOODPUT_FULL_MSS,dummy,SESSIONKEY,dummy,FIRST_UCELLID,dummy,LAST_UCELLID,dummy,UCELLID1,dummy,LONGITUDE1,dummy,LATITUDE1,dummy,UCELLID2,dummy,LONGITUDE2,dummy,LATITUDE2,dummy,UCELLID3,dummy,LONGITUDE3,dummy,LATITUDE3,dummy,UCELLID4,dummy,LONGITUDE4,dummy,LATITUDE4,dummy,UCELLID5,dummy,LONGITUDE5,dummy,LATITUDE5,dummy,UCELLID6,dummy,LONGITUDE6,dummy,LATITUDE6,dummy,UCELLID7,dummy,LONGITUDE7,dummy,LATITUDE7,dummy,UCELLID8,dummy,LONGITUDE8,dummy,LATITUDE8,dummy,UCELLID9,dummy,LONGITUDE9,dummy,LATITUDE9,dummy,UCELLID10,dummy,LONGITUDE10,dummy,LATITUDE10,dummy,INTBUFFER_FULL_DELAY,dummy,STALL_DURATION,dummy,STREAMING_DW_PACKETS,dummy,STREAMING_DOWNLOAD_DELAY,dummy,PLAY_DURATION,dummy,STREAMING_QUALITY,dummy,VIDEO_DATA_RATE,dummy,AUDIO_DATA_RATE,dummy,STREAMING_FILESIZE,dummy,STREAMING_DURATIOIN,dummy,TCP_SYN_TI
 ME,dummy,TCP_RTT_STEP1,CHARGE_ID,dummy,UL_REVERSE_TO_DL_DELAY,dummy,DL_REVERSE_TO_UL_DELAY,dummy,DATATRANS_DW_GOODPUT,dummy,DATATRANS_DW_TOTAL_DURATION,dummy,SUM_FRAGMENT_INTERVAL,dummy,TCP_FIN_TIMES,dummy,TCP_RESET_TIMES,dummy,URL_CLASSIFICATION,dummy,STREAMING_LQ_DURATIOIN,dummy,MAX_DNS_DELAY,dummy,MAX_DNS2SYN,dummy,MAX_LATANCY_OF_LINK_SETUP,dummy,MAX_SYNACK2FIRSTACK,dummy,MAX_SYNACK2LASTACK,dummy,MAX_ACK2GET_DELAY,dummy,MAX_FRAG_INTERVAL_PREDELAY,dummy,SUM_FRAG_INTERVAL_PREDELAY,dummy,SERVICE_DELAY_MSEC,dummy,HOMEPROVINCE,dummy,HOMECITY,dummy,SERVICE_ID,dummy,CHARGING_CLASS,dummy,DATATRANS_UL_DURATION,dummy,ASSOCIATED_ID,dummy,PACKET_LOSS_NUM,dummy,JITTER,dummy,MS_DNS_DELAY_MSEL,dummy,GET_STREAMING_DELAY,dummy,TCP_UL_RETRANS_WITHOUTPL,dummy,TCP_DW_RETRANS_WITHOUTPL,dummy,GET_MAX_UL_SIZE,dummy,GET_MIN_UL_SIZE,dummy,GET_MAX_DL_SIZE,dummy,GET_MIN_DL_SIZE,dummy,FLOW_SAMPLE_RATIO,dummy,UL_RTT_LONG_NUM,dummy,DW_RTT_LONG_NUM,dummy,UL_RTT_STAT_NUM,dummy,DW_RTT_STAT_NUM,dummy,USER_PROBE_U
 L_LOST_PKT,dummy,SERVER_PROBE_UL_LOST_PKT,dummy,SERVER_PROBE_DW_LOST_PKT,dummy,USER_PROBE_DW_LOST_PKT,dummy,AVG_DW_RTT_MICRO_SEC,dummy,AVG_UL_RTT_MICRO_SEC,dummy,RAN_NE_ID,dummy,AVG_UL_RTT,dummy,AVG_DW_RTT,dummy,CHARGING_CHARACTERISTICS,dummy,DL_SERIOUS_OUT_OF_ORDER_NUM,dummy,DL_SLIGHT_OUT_OF_ORDER_NUM,dummy,DL_FLIGHT_TOTAL_SIZE,dummy,DL_FLIGHT_TOTAL_NUM,dummy,DL_MAX_FLIGHT_SIZE,dummy,VALID_TRANS_DURATION,dummy,AIR_PORT_DURATION,dummy,RADIO_CONN_TIMES,dummy,UL_SERIOUS_OUT_OF_ORDER_NUM,dummy,UL_SLIGHT_OUT_OF_ORDER_NUM,dummy,UL_FLIGHT_TOTAL_SIZE,dummy,UL_FLIGHT_TOTAL_NUM,dummy,UL_MAX_FLIGHT_SIZE,dummy,USER_DL_SLIGHT_OUT_OF_ORDER_PACKETS,dummy,SERVER_UL_SLIGHT_OUT_OF_ORDER_PACKETS,dummy,DL_CONTINUOUS_RETRANSMISSION_DELAY,dummy,USER_HUNGRY_DELAY,dummy,SERVER_HUNGRY_DELAY,dummy,UPPERLAYER_IP_UL_FRAGMENTS,dummy,UPPERLAYER_IP_DL_FRAGMENTS,dummy,DOWNLAYER_IP_UL_FRAGMENTS,dummy,DOWNLAYER_IP_DL_FRAGMENTS,dummy,UPPERLAYER_IP_UL_PACKETS,dummy,UPPERLAYER_IP_DL_PACKETS,dummy,DOWNLAYER_IP_UL_PACKE
 TS,dummy,DOWNLAYER_IP_DL_PACKETS,dummy,TCP_UL_PACKAGES_WITHPL,dummy,TCP_DW_PACKAGES_WITHPL,dummy,TCP_UL_PACKAGES_WITHOUTPL,dummy,TCP_DW_PACKAGES_WITHOUTPL,dummy,TCP_UL_RETRANS_WITHPL,dummy,TCP_DW_RETRANS_WITHPL,L4_UL_THROUGHPUT,L4_DW_THROUGHPUT,L4_UL_GOODPUT,L4_DW_GOODPUT,NETWORK_UL_TRAFFIC,NETWORK_DL_TRAFFIC,L4_UL_PACKETS,L4_DW_PACKETS,TCP_RTT,TCP_UL_OUTOFSEQU,TCP_DW_OUTOFSEQU,TCP_UL_RETRANS,TCP_DW_RETRANS')")
       assert(true)
     } catch {
-      case _: Throwable => assert(false)
+      case e: Throwable =>
+        e.printStackTrace()
+        assert(false)
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryTest.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryTest.scala
index 59f7a98..9018ec0 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryTest.scala
@@ -53,7 +53,8 @@ class DateDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfterAll
       case x: Throwable =>
         x.printStackTrace()
         CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "dd-MM-yyyy")
+        .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
+          CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     }
   }
 
@@ -131,7 +132,8 @@ class DateDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfterAll
   override def afterAll {
     sql("drop table directDictionaryTable")
     CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "dd-MM-yyyy")
+      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     CarbonProperties.getInstance().addProperty("carbon.direct.dictionary", "false")
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryWithNoDictTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryWithNoDictTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryWithNoDictTestCase.scala
index 463489b..195b107 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryWithNoDictTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryWithNoDictTestCase.scala
@@ -56,7 +56,8 @@ class DateDataTypeDirectDictionaryWithNoDictTestCase extends QueryTest with Befo
       case x: Throwable =>
         x.printStackTrace()
         CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "dd-MM-yyyy")
+        .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
+          CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     }
   }
 
@@ -94,7 +95,8 @@ class DateDataTypeDirectDictionaryWithNoDictTestCase extends QueryTest with Befo
   override def afterAll {
     sql("drop table directDictionaryTable")
     CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "dd-MM-yyyy")
+      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     CarbonProperties.getInstance().addProperty("carbon.direct.dictionary", "false")
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeNullDataTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeNullDataTest.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeNullDataTest.scala
index d00a108..20e1d2d 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeNullDataTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeNullDataTest.scala
@@ -52,7 +52,8 @@ class DateDataTypeNullDataTest extends QueryTest with BeforeAndAfterAll {
       case x: Throwable =>
         x.printStackTrace()
         CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "dd-MM-yyyy")
+        .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
+          CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     }
   }
 
@@ -73,7 +74,8 @@ class DateDataTypeNullDataTest extends QueryTest with BeforeAndAfterAll {
   override def afterAll {
     sql("drop table timestampTyeNullData")
     CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "dd-MM-yyyy")
+      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     CarbonProperties.getInstance().addProperty("carbon.direct.dictionary", "false")
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
index ca337cc..b8d057c 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
@@ -64,7 +64,8 @@ class TimestampDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfte
       sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE directDictionaryTable_hive")
     } catch {
       case x: Throwable => CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+          CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     }
   }
 
@@ -144,7 +145,8 @@ class TimestampDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfte
     sql("drop table directDictionaryTable")
     sql("drop table directDictionaryTable_hive")
     CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     CarbonProperties.getInstance().addProperty("carbon.direct.dictionary", "false")
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryWithNoDictTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryWithNoDictTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryWithNoDictTestCase.scala
index 26f9ff7..4878c86 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryWithNoDictTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryWithNoDictTestCase.scala
@@ -56,7 +56,8 @@ class TimestampDataTypeDirectDictionaryWithNoDictTestCase extends QueryTest with
         + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')");
     } catch {
       case x: Throwable => CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+          CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     }
   }
 
@@ -91,7 +92,8 @@ class TimestampDataTypeDirectDictionaryWithNoDictTestCase extends QueryTest with
   override def afterAll {
     sql("drop table directDictionaryTable")
     CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     CarbonProperties.getInstance().addProperty("carbon.direct.dictionary", "false")
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeNullDataTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeNullDataTest.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeNullDataTest.scala
index 9b07b1e..bc3d6dd 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeNullDataTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeNullDataTest.scala
@@ -58,7 +58,8 @@ class TimestampDataTypeNullDataTest extends QueryTest with BeforeAndAfterAll {
 
     } catch {
       case x: Throwable => CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+          CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     }
   }
 
@@ -79,7 +80,8 @@ class TimestampDataTypeNullDataTest extends QueryTest with BeforeAndAfterAll {
   override def afterAll {
     sql("drop table timestampTyeNullData")
     CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     CarbonProperties.getInstance().addProperty("carbon.direct.dictionary", "false")
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAllDataTypeForPartitionTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAllDataTypeForPartitionTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAllDataTypeForPartitionTable.scala
index 4aa6954..54586c2 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAllDataTypeForPartitionTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAllDataTypeForPartitionTable.scala
@@ -39,8 +39,8 @@ class TestAllDataTypeForPartitionTable extends QueryTest with BeforeAndAfterAll
 
   override def afterAll = {
     CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, TestQueryExecutor.timestampFormat)
-      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT)
+      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
+        CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT)
 
     dropTable
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestCompactionForPartitionTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestCompactionForPartitionTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestCompactionForPartitionTable.scala
index a05d908..0217486 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestCompactionForPartitionTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestCompactionForPartitionTable.scala
@@ -71,8 +71,6 @@ class TestCompactionForPartitionTable extends QueryTest with BeforeAndAfterAll {
 
   override def afterAll = {
     dropTable
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, TestQueryExecutor.timestampFormat)
   }
 
   def dropTable = {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
index 41289b1..4ae5ece 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
@@ -356,8 +356,6 @@ class TestDDLForPartitionTable  extends QueryTest with BeforeAndAfterAll {
 
   override def afterAll = {
     dropTable
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, TestQueryExecutor.timestampFormat)
   }
 
   def dropTable = {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDataLoadingForPartitionTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDataLoadingForPartitionTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDataLoadingForPartitionTable.scala
index 4bfc4ef..f7ac87c 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDataLoadingForPartitionTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDataLoadingForPartitionTable.scala
@@ -371,8 +371,6 @@ class TestDataLoadingForPartitionTable extends QueryTest with BeforeAndAfterAll
 
   override def afterAll = {
     dropTable
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, TestQueryExecutor.timestampFormat)
   }
 
   def dropTable = {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestQueryForPartitionTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestQueryForPartitionTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestQueryForPartitionTable.scala
index e02c6bf..df0eea3 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestQueryForPartitionTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestQueryForPartitionTable.scala
@@ -272,8 +272,6 @@ class TestQueryForPartitionTable  extends QueryTest with BeforeAndAfterAll {
 
   override def afterAll = {
     dropTable
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, TestQueryExecutor.timestampFormat)
   }
 
   def dropTable = {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common-test/src/test/scala/org/apache/spark/sql/common/util/Tags.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/spark/sql/common/util/Tags.scala b/integration/spark-common-test/src/test/scala/org/apache/spark/sql/common/util/Tags.scala
new file mode 100644
index 0000000..7d6770b
--- /dev/null
+++ b/integration/spark-common-test/src/test/scala/org/apache/spark/sql/common/util/Tags.scala
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.spark.sql.common.util
+
+import org.scalatest.Tag
+
+object Include extends Tag("Include")
+
+object Exclude extends Tag("Exclude")
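
The Include and Exclude objects above are ordinary ScalaTest tags, so suites can opt individual tests in or out of a run through the standard -n/-l runner options. A minimal sketch of a tagged suite follows; the suite name, base class wiring and table name are assumptions for illustration only, not part of this patch:

  import org.apache.spark.sql.common.util.{Include, QueryTest}
  import org.scalatest.BeforeAndAfterAll

  class TaggedExampleTestCase extends QueryTest with BeforeAndAfterAll {
    // Selected with "-n Include" and skipped with "-l Include"; the query
    // and table name below are placeholders.
    test("example tagged query", Include) {
      sql("select count(*) from example_table").collect()
    }
  }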

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark-common/pom.xml b/integration/spark-common/pom.xml
index ac42bf3..4e75334 100644
--- a/integration/spark-common/pom.xml
+++ b/integration/spark-common/pom.xml
@@ -129,6 +129,41 @@
           <target>1.7</target>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>3.0.0</version>
+        <executions>
+          <execution>
+            <id>compile</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>copy</goal>
+            </goals>
+            <configuration>
+              <artifactItems>
+                <artifactItem>
+                  <groupId>org.apache.carbondata</groupId>
+                  <artifactId>carbondata-format</artifactId>
+                  <version>${project.version}</version>
+                </artifactItem>
+                <artifactItem>
+                  <groupId>com.google.code.gson</groupId>
+                  <artifactId>gson</artifactId>
+                  <version>2.3.1</version>
+                </artifactItem>
+                <artifactItem>
+                  <groupId>org.xerial.snappy</groupId>
+                  <artifactId>snappy-java</artifactId>
+                  <version>${snappy.version}</version>
+                </artifactItem>
+              </artifactItems>
+              <outputDirectory>${project.build.directory}/jars</outputDirectory>
+              <!-- other configurations here -->
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonRDD.scala
index c908c08..b137d6d 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonRDD.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonRDD.scala
@@ -19,13 +19,14 @@ package org.apache.carbondata.spark.rdd
 
 import java.io.{ByteArrayInputStream, DataInputStream}
 
+import scala.collection.JavaConverters._
 import scala.reflect.ClassTag
 
 import org.apache.spark.{Dependency, OneToOneDependency, Partition, SparkContext, TaskContext}
 import org.apache.spark.rdd.RDD
 
 import org.apache.carbondata.core.metadata.schema.table.TableInfo
-import org.apache.carbondata.core.util.{CarbonSessionInfo, CarbonTaskInfo, SessionParams, ThreadLocalSessionInfo, ThreadLocalTaskInfo}
+import org.apache.carbondata.core.util.{CarbonProperties, CarbonSessionInfo, CarbonTaskInfo, SessionParams, ThreadLocalSessionInfo, ThreadLocalTaskInfo}
 
 /**
  * This RDD maintains session level ThreadLocal
@@ -35,6 +36,8 @@ abstract class CarbonRDD[T: ClassTag](@transient sc: SparkContext,
 
   val carbonSessionInfo: CarbonSessionInfo = ThreadLocalSessionInfo.getCarbonSessionInfo
 
+//  val addedProperty = CarbonProperties.getInstance().getAddedProperty
+
   /** Construct an RDD with just a one-to-one dependency on one parent */
   def this(@transient oneParent: RDD[_]) =
     this (oneParent.context, List(new OneToOneDependency(oneParent)))
@@ -47,6 +50,7 @@ abstract class CarbonRDD[T: ClassTag](@transient sc: SparkContext,
     val carbonTaskInfo = new CarbonTaskInfo
     carbonTaskInfo.setTaskId(System.nanoTime)
     ThreadLocalTaskInfo.setCarbonTaskInfo(carbonTaskInfo)
+//    addedProperty.asScala.map(f => CarbonProperties.getInstance().addProperty(f._1, f._2))
     internalCompute(split, context)
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala
new file mode 100644
index 0000000..6364630
--- /dev/null
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.test
+
+import java.io.{BufferedReader, File, FileReader}
+import java.net.URL
+
+import scala.collection.mutable.ArrayBuffer
+
+import org.apache.hadoop.io.IOUtils
+
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.core.datastore.impl.FileFactory
+import org.apache.carbondata.core.util.CarbonUtil
+
+/**
+ * All the registered resources are checked in HDFS and copied to it if they do not exist.
+ */
+object ResourceRegisterAndCopier {
+
+  private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+
+  val link = "https://raw.githubusercontent" +
+             ".com/ravipesala/incubator-carbondata/sdv-test_data/integration/spark-common-test" +
+             "/src/test/resources"
+
+  def copyResourcesifNotExists(hdfsPath: String,
+      resourcePath: String,
+      dataFilesPath: String): Unit = {
+    val fileType = FileFactory.getFileType(hdfsPath)
+    val file = FileFactory.getCarbonFile(hdfsPath, fileType)
+    if (!file.exists()) {
+      sys.error(s"""Provided path $hdfsPath does not exist""")
+    }
+    val resources = readDataFiles(dataFilesPath)
+    resources.foreach { file =>
+      val hdfsDataPath = hdfsPath + "/" + file
+      val rsFile = FileFactory.getCarbonFile(hdfsDataPath, fileType)
+      if (!rsFile.exists()) {
+        val target = resourcePath + "/" + file
+        new File(resourcePath + "/" + file.substring(0, file.lastIndexOf("/"))).mkdirs()
+        downloadFile(link, file, target)
+        // copy it
+        copyLocalFile(hdfsDataPath, target)
+        new File(target).delete()
+      }
+    }
+  }
+
+  def readDataFiles(dataFilesPath: String): Seq[String] = {
+    val buffer = new ArrayBuffer[String]()
+    val reader = new BufferedReader(new FileReader(dataFilesPath))
+    var line = reader.readLine()
+    while (line != null) {
+      buffer += line
+      line = reader.readLine()
+    }
+    reader.close()
+    buffer
+  }
+
+  def copyLocalFile(dst: String,
+      src: String): Unit = {
+    LOGGER.info(s"Copying file : $src to  $dst")
+    if (FileFactory.isFileExist(src, FileFactory.getFileType(src))) {
+      val dataOutputStream = FileFactory.getDataOutputStream(dst,
+        FileFactory.getFileType(dst))
+      val dataInputStream = FileFactory.getDataInputStream(src,
+        FileFactory.getFileType(src))
+      IOUtils.copyBytes(dataInputStream, dataOutputStream, 8 * 1024)
+      CarbonUtil.closeStream(dataInputStream)
+      CarbonUtil.closeStream(dataOutputStream)
+    }
+  }
+
+  def downloadFile(relativeLink: String, fileToDownLoad: String, targetFile: String): Unit = {
+    import java.io.FileOutputStream
+    val link = relativeLink + "/" + fileToDownLoad
+    LOGGER.info(s"Downloading file $link")
+    val url = new URL(link)
+    val c = url.openConnection
+    c.setRequestProperty("User-Agent",
+      "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; .NET CLR 1.0.3705; .NET CLR 1.1.4322;" +
+      " .NET CLR 1.2.30703)")
+
+    var input = c.getInputStream
+    val buffer = new Array[Byte](4096)
+    var n = input.read(buffer)
+
+    val output = new FileOutputStream(new File(targetFile))
+    while (n != -1) {
+      output.write(buffer, 0, n)
+      n = input.read(buffer)
+    }
+    output.close()
+    input.close()
+  }
+
+}
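
ResourceRegisterAndCopier is driven by a plain text file that lists, one per line, the resource paths to mirror into HDFS relative to the local resources directory. A minimal usage sketch under assumed locations (the HDFS root is a placeholder; only the method signature comes from this patch):

  import org.apache.spark.sql.test.ResourceRegisterAndCopier

  object CopyResourcesExample {
    def main(args: Array[String]): Unit = {
      // Placeholder HDFS root plus the local resources directory and file list;
      // each line of the list names a file such as "data/sample.csv".
      val hdfsUrl = "hdfs://namenode-host:9000/sdv"
      val localResources = "integration/spark-common-test/src/test/resources"
      val fileList = "integration/spark-common-cluster-test/src/test/resources/testdatafileslist.txt"
      ResourceRegisterAndCopier.copyResourcesifNotExists(hdfsUrl, localResources, fileList)
    }
  }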

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
index 149e3b1..cf90912 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
@@ -17,14 +17,18 @@
 
 package org.apache.spark.sql.test
 
-import java.io.File
+import java.io.{File, FilenameFilter}
 import java.util.ServiceLoader
 
+import scala.collection.mutable.ArrayBuffer
+import scala.util.Random
+
 import org.apache.spark.sql.{DataFrame, SQLContext}
 import org.apache.spark.util.Utils
 
 import org.apache.carbondata.common.logging.LogServiceFactory
 import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.util.CarbonProperties
 
 /**
@@ -33,6 +37,8 @@ import org.apache.carbondata.core.util.CarbonProperties
 trait TestQueryExecutorRegister {
   def sql(sqlText: String): DataFrame
 
+  def stop()
+
   def sqlContext: SQLContext
 }
 
@@ -44,16 +50,99 @@ object TestQueryExecutor {
     .getCanonicalPath
   LOGGER.info(s"project path: $projectPath")
   val integrationPath = s"$projectPath/integration"
-  val resourcesPath = s"$integrationPath/spark-common-test/src/test/resources"
-  val storeLocation = s"$integrationPath/spark-common/target/store"
-  val warehouse = s"$integrationPath/spark-common/target/warehouse"
   val metastoredb = s"$integrationPath/spark-common/target"
-  val timestampFormat = "dd-MM-yyyy"
+  val masterUrl = {
+    val property = System.getProperty("spark.master.url")
+    if (property == null) {
+      "local[2]"
+    } else {
+      property
+    }
+  }
+
+  val hdfsUrl = {
+    val property = System.getProperty("hdfs.url")
+    if (property == null) {
+      "local"
+    } else {
+      LOGGER.info("HDFS PATH given : " + property)
+      property
+    }
+  }
+
+  val resourcesPath = if (hdfsUrl.startsWith("hdfs://")) {
+    ResourceRegisterAndCopier.
+      copyResourcesifNotExists(hdfsUrl, s"$integrationPath/spark-common-test/src/test/resources",
+        s"$integrationPath//spark-common-cluster-test/src/test/resources/testdatafileslist.txt")
+    hdfsUrl
+  } else {
+    s"$integrationPath/spark-common-test/src/test/resources"
+  }
+
+  val storeLocation = if (hdfsUrl.startsWith("hdfs://")) {
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOCK_TYPE,
+      CarbonCommonConstants.CARBON_LOCK_TYPE_HDFS)
+    val carbonFile = FileFactory.
+      getCarbonFile(s"$hdfsUrl/store", FileFactory.getFileType(s"$hdfsUrl/store"))
+    FileFactory.deleteAllCarbonFilesOfDir(carbonFile)
+    s"$hdfsUrl/store_" + System.nanoTime()
+  } else {
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOCK_TYPE,
+      CarbonCommonConstants.CARBON_LOCK_TYPE_LOCAL)
+    s"$integrationPath/spark-common/target/store"
+  }
+  val warehouse = if (hdfsUrl.startsWith("hdfs://")) {
+    val carbonFile = FileFactory.
+      getCarbonFile(s"$hdfsUrl/warehouse", FileFactory.getFileType(s"$hdfsUrl/warehouse"))
+    FileFactory.deleteAllCarbonFilesOfDir(carbonFile)
+    s"$hdfsUrl/warehouse_" + System.nanoTime()
+  } else {
+    s"$integrationPath/spark-common/target/warehouse"
+  }
+
+  val hiveresultpath = if (hdfsUrl.startsWith("hdfs://")) {
+    val p = s"$hdfsUrl/hiveresultpath"
+    FileFactory.mkdirs(p, FileFactory.getFileType(p))
+    p
+  } else {
+    val p = s"$integrationPath/spark-common/target/hiveresultpath"
+    new File(p).mkdirs()
+    p
+  }
+
+  LOGGER.info(s"""Store path taken $storeLocation""")
+  LOGGER.info(s"""Warehouse path taken $warehouse""")
+  LOGGER.info(s"""Resource path taken $resourcesPath""")
+
+  lazy val modules = Seq(TestQueryExecutor.projectPath + "/common/target",
+    TestQueryExecutor.projectPath + "/core/target",
+    TestQueryExecutor.projectPath + "/hadoop/target",
+    TestQueryExecutor.projectPath + "/processing/target",
+    TestQueryExecutor.projectPath + "/integration/spark-common/target",
+    TestQueryExecutor.projectPath + "/integration/spark2/target",
+    TestQueryExecutor.projectPath + "/integration/spark-common/target/jars")
+  lazy val jars = {
+    val jarsLocal = new ArrayBuffer[String]()
+    modules.foreach { path =>
+      val files = new File(path).listFiles(new FilenameFilter {
+        override def accept(dir: File, name: String) = {
+          name.endsWith(".jar")
+        }
+      })
+      files.foreach(jarsLocal += _.getAbsolutePath)
+    }
+    jarsLocal
+  }
 
   val INSTANCE = lookupQueryExecutor.newInstance().asInstanceOf[TestQueryExecutorRegister]
   CarbonProperties.getInstance()
     .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "FORCE")
     .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, "/tmp/carbon/badrecords")
+    .addProperty(CarbonCommonConstants.DICTIONARY_SERVER_PORT,
+      (CarbonCommonConstants.DICTIONARY_SERVER_PORT_DEFAULT.toInt + Random.nextInt(100)) + "")
+    .addProperty(CarbonCommonConstants.CARBON_MAX_DRIVER_LRU_CACHE_SIZE, "1024")
+    .addProperty(CarbonCommonConstants.CARBON_MAX_EXECUTOR_LRU_CACHE_SIZE, "1024")
+
   private def lookupQueryExecutor: Class[_] = {
     ServiceLoader.load(classOf[TestQueryExecutorRegister], Utils.getContextOrSparkClassLoader)
       .iterator().next().getClass
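
With these changes TestQueryExecutor resolves the Spark master and the store, warehouse and resource locations from two JVM system properties, falling back to local[2] and the local target directories when they are absent. A hedged sketch of how a cluster run might supply them, either as -Dspark.master.url=... -Dhdfs.url=... on the test JVM or programmatically before the executor initializes (the endpoints are placeholders):

  // Placeholder endpoints; when hdfs.url starts with "hdfs://", the test
  // resources are copied to HDFS and fresh store_<nanoTime>/warehouse_<nanoTime>
  // directories are created there, otherwise the local defaults are used.
  System.setProperty("spark.master.url", "spark://master-host:7077")
  System.setProperty("hdfs.url", "hdfs://namenode-host:9000/carbon-sdv")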

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark/pom.xml b/integration/spark/pom.xml
index 04b5aa7..fc13c7a 100644
--- a/integration/spark/pom.xml
+++ b/integration/spark/pom.xml
@@ -188,5 +188,13 @@
       </plugin>
     </plugins>
   </build>
+  <profiles>
+    <profile>
+      <id>sdvtest</id>
+      <properties>
+        <maven.test.skip>true</maven.test.skip>
+      </properties>
+    </profile>
+  </profiles>
 
 </project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark/src/main/scala/org/apache/spark/sql/test/SparkTestQueryExecutor.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/test/SparkTestQueryExecutor.scala b/integration/spark/src/main/scala/org/apache/spark/sql/test/SparkTestQueryExecutor.scala
index 27df623..bb00126 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/test/SparkTestQueryExecutor.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/test/SparkTestQueryExecutor.scala
@@ -32,13 +32,14 @@ class SparkTestQueryExecutor extends TestQueryExecutorRegister {
   override def sql(sqlText: String): DataFrame = SparkTestQueryExecutor.cc.sql(sqlText)
 
   override def sqlContext: SQLContext = SparkTestQueryExecutor.cc
+
+  override def stop(): Unit = SparkTestQueryExecutor.cc.sparkContext.stop()
 }
 
 object SparkTestQueryExecutor {
   private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
   LOGGER.info("use TestQueryExecutorImplV1")
   CarbonProperties.getInstance()
-    .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, TestQueryExecutor.timestampFormat)
     .addProperty(CarbonCommonConstants.STORE_LOCATION_TEMP_PATH,
       System.getProperty("java.io.tmpdir"))
     .addProperty(CarbonCommonConstants.LOCK_TYPE, CarbonCommonConstants.CARBON_LOCK_TYPE_LOCAL)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark2/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark2/pom.xml b/integration/spark2/pom.xml
index 8279c5d..2f34c34 100644
--- a/integration/spark2/pom.xml
+++ b/integration/spark2/pom.xml
@@ -143,6 +143,7 @@
         <version>1.0</version>
         <!-- Note config is repeated in surefire config -->
         <configuration>
+          <skipTests>false</skipTests>
           <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
           <junitxml>.</junitxml>
           <filereports>CarbonTestSuite.txt</filereports>
@@ -166,5 +167,13 @@
       </plugin>
     </plugins>
   </build>
+  <profiles>
+    <profile>
+      <id>sdvtest</id>
+      <properties>
+        <maven.test.skip>true</maven.test.skip>
+      </properties>
+    </profile>
+  </profiles>
 
 </project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
index c9eaf6d..a32146a 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
@@ -33,6 +33,7 @@ import org.apache.carbondata.core.cache.dictionary.ManageDictionaryAndBTree
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.fileoperations.FileWriteOperation
+import org.apache.carbondata.core.indexstore.DataMapStoreManager
 import org.apache.carbondata.core.metadata.{AbsoluteTableIdentifier, CarbonMetadata, CarbonTableIdentifier}
 import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl
 import org.apache.carbondata.core.metadata.schema
@@ -425,6 +426,7 @@ class CarbonFileMetastore(conf: RuntimeConfig) extends CarbonMetaStore {
       CarbonHiveMetadataUtil.invalidateAndDropTable(dbName, tableName, sparkSession)
       // discard cached table info in cachedDataSourceTables
       sparkSession.sessionState.catalog.refreshTable(tableIdentifier)
+      DataMapStoreManager.getInstance().clearDataMap(identifier, "blocklet")
     }
   }