Posted to commits@carbondata.apache.org by ra...@apache.org on 2016/09/22 05:36:12 UTC

[14/50] [abbrv] incubator-carbondata git commit: Fixed data mismatch issue when a numeric data type column is specified in DICTIONARY_EXCLUDE

Fixed data mismatch issue when a numeric data type column is specified in DICTIONARY_EXCLUDE

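With this change, DICTIONARY_EXCLUDE is accepted only for string and timestamp columns; naming a numeric column (int, bigint, double, decimal) now fails at DDL parse time with a MalformedCarbonCommandException, just as complex columns already did. This is why the test DDL below either switches such columns to string or moves them to DICTIONARY_INCLUDE. A minimal sketch of the new behaviour (the table names t1 and t2 are placeholders, not part of this commit):

    // Accepted: empname is a string column, so it may be excluded from the dictionary.
    sql("CREATE TABLE t1 (empno int, empname string) " +
      "STORED BY 'org.apache.carbondata.format' " +
      "TBLPROPERTIES('DICTIONARY_EXCLUDE'='empname')")

    // Rejected: empno is an int column; the parser now throws
    // MalformedCarbonCommandException("DICTIONARY_EXCLUDE is unsupported for int data type column: empno").
    // A high-cardinality numeric column can be declared via DICTIONARY_INCLUDE instead.
    sql("CREATE TABLE t2 (empno int, empname string) " +
      "STORED BY 'org.apache.carbondata.format' " +
      "TBLPROPERTIES('DICTIONARY_EXCLUDE'='empno')")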

Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/7cfc3ec2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/7cfc3ec2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/7cfc3ec2

Branch: refs/heads/branch-0.1
Commit: 7cfc3ec28df89d6c4de20e5d2804bf075c54e4d8
Parents: 6411fde
Author: kumarvishal <ku...@gmail.com>
Authored: Wed Sep 7 22:16:17 2016 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Thu Sep 22 09:39:49 2016 +0530

----------------------------------------------------------------------
 .../org/apache/spark/sql/CarbonSqlParser.scala  |  6 +--
 .../AllDataTypesTestCaseAggregate.scala         |  2 +-
 .../createtable/TestCreateTableSyntax.scala     | 14 ++++++
 .../dataload/TestLoadDataWithHiveSyntax.scala   | 36 ++++++-------
 .../deleteTable/TestDeleteTableNewDDL.scala     |  6 +--
 .../HighCardinalityDataTypesTestCase.scala      | 32 ++++++------
 .../NoDictionaryColumnTestCase.scala            |  2 +-
 .../apache/spark/sql/TestCarbonSqlParser.scala  | 53 ++++++++++----------
 8 files changed, 82 insertions(+), 69 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7cfc3ec2/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
index 3bc5f5c..69d921f 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
@@ -773,7 +773,7 @@ class CarbonSqlParser()
               val errormsg = "DICTIONARY_EXCLUDE is unsupported for complex datatype column: " +
                 dictExcludeCol
               throw new MalformedCarbonCommandException(errormsg)
-            } else if (isDoubleDecimalColDictionaryExclude(dataType)) {
+            } else if (!isStringAndTimestampColDictionaryExclude(dataType)) {
               val errorMsg = "DICTIONARY_EXCLUDE is unsupported for " + dataType.toLowerCase() +
                 " data type column: " + dictExcludeCol
               throw new MalformedCarbonCommandException(errorMsg)
@@ -862,8 +862,8 @@ class CarbonSqlParser()
    /**
-    * detects whether double or decimal column is part of dictionary_exclude
+    * detects whether string or timestamp column is part of dictionary_exclude
     */
-  def isDoubleDecimalColDictionaryExclude(columnDataType: String): Boolean = {
-    val dataTypes = Array("double", "decimal")
+  def isStringAndTimestampColDictionaryExclude(columnDataType: String): Boolean = {
+    val dataTypes = Array("string", "timestamp")
     dataTypes.exists(x => x.equalsIgnoreCase(columnDataType))
   }
 

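The renamed helper inverts the old approach: instead of blacklisting only double and decimal, it whitelists string and timestamp, and the caller negates the result so every other data type is rejected. A few illustrative calls (a sketch, not part of the commit):

    isStringAndTimestampColDictionaryExclude("string")     // true  -> exclusion allowed
    isStringAndTimestampColDictionaryExclude("TIMESTAMP")  // true  -> the comparison is case-insensitive
    isStringAndTimestampColDictionaryExclude("double")     // false -> rejected, as the old blacklist already did
    isStringAndTimestampColDictionaryExclude("int")        // false -> rejected; previously int passed the check,
                                                           //          allowing the data mismatch this commit fixes
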
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7cfc3ec2/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
index 7d7a4e4..d4def47 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
@@ -40,7 +40,7 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
     val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
       .getCanonicalPath
 
-    sql("create table if not exists Carbon_automation_test (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int, Latest_DAY int, Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription string, gamePointId int,contractNumber int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='Latest_MONTH,Latest_DAY,deviceInformationId')");
+    sql("create table if not exists Carbon_automation_test (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int, Latest_DAY int, Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription string, gamePointId int,contractNumber int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='Latest_MONTH,Latest_DAY,deviceInformationId')");
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     sql("LOAD DATA LOCAL INPATH '"+currentDirectory+"/src/test/resources/100_olap.csv' INTO table Carbon_automation_test options('DELIMITER'= ',' ,'QUOTECHAR'= '\"', 'FILEHEADER'= 'imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription')");

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7cfc3ec2/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
index 6fd5e1d..b99e73f 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
@@ -94,6 +94,20 @@ class TestCreateTableSyntax extends QueryTest with BeforeAndAfterAll {
     }
     sql("drop table if exists carbontable")
   }
+    test("test carbon table create with int datatype as dictionary exclude") {
+    try {
+      sql("create table carbontable(id int, name string, dept string, mobile array<string>, "+
+        "country string, salary double) STORED BY 'org.apache.carbondata.format' " +
+        "TBLPROPERTIES('DICTIONARY_EXCLUDE'='id')")
+      assert(false)
+    } catch {
+      case e : MalformedCarbonCommandException => {
+        assert(e.getMessage.equals("DICTIONARY_EXCLUDE is unsupported for int " +
+          "data type column: id"))
+      }
+    }
+    sql("drop table if exists carbontable")
+  }
 
   test("test carbon table create with decimal datatype as dictionary exclude") {
     try {

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7cfc3ec2/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
index da64b39..e017a62 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
@@ -55,17 +55,17 @@ class TestLoadDataWithHiveSyntax extends QueryTest with BeforeAndAfterAll {
   test("test data loading and validate query output") {
     //Create test cube and hive table
     sql(
-      "CREATE table testtable (empno int, empname String, designation String, doj String, " +
-        "workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, " +
-        "projectcode int, projectjoindate String, projectenddate String,attendance double," +
+      "CREATE table testtable (empno string, empname String, designation String, doj String, " +
+        "workgroupcategory string, workgroupcategoryname String, deptno string, deptname String, " +
+        "projectcode string, projectjoindate String, projectenddate String,attendance double," +
         "utilization double,salary double) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES" +
         "('DICTIONARY_EXCLUDE'='empno,empname,designation,doj,workgroupcategory," +
         "workgroupcategoryname,deptno,deptname,projectcode,projectjoindate,projectenddate')"
     )
     sql(
-      "create table testhivetable(empno int, empname String, designation string, doj String, " +
-        "workgroupcategory int, workgroupcategoryname String,deptno int, deptname String, " +
-        "projectcode int, projectjoindate String,projectenddate String, attendance double," +
+      "create table testhivetable(empno string, empname String, designation string, doj String, " +
+        "workgroupcategory string, workgroupcategoryname String,deptno string, deptname String, " +
+        "projectcode string, projectjoindate String,projectenddate String, attendance double," +
         "utilization double,salary double)row format delimited fields terminated by ','"
     )
     //load data into test cube and hive table and validate query result
@@ -96,17 +96,17 @@ class TestLoadDataWithHiveSyntax extends QueryTest with BeforeAndAfterAll {
   test("test data loading with different case file header and validate query output") {
     //Create test cube and hive table
     sql(
-      "CREATE table testtable1 (empno int, empname String, designation String, doj String, " +
-        "workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, " +
-        "projectcode int, projectjoindate String, projectenddate String,attendance double," +
+      "CREATE table testtable1 (empno string, empname String, designation String, doj String, " +
+        "workgroupcategory string, workgroupcategoryname String, deptno string, deptname String, " +
+        "projectcode string, projectjoindate String, projectenddate String,attendance double," +
         "utilization double,salary double) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES" +
         "('DICTIONARY_EXCLUDE'='empno,empname,designation,doj,workgroupcategory," +
         "workgroupcategoryname,deptno,deptname,projectcode,projectjoindate,projectenddate')"
     )
     sql(
-      "create table testhivetable1(empno int, empname String, designation string, doj String, " +
-        "workgroupcategory int, workgroupcategoryname String,deptno int, deptname String, " +
-        "projectcode int, projectjoindate String,projectenddate String, attendance double," +
+      "create table testhivetable1(empno string, empname String, designation string, doj String, " +
+        "workgroupcategory string, workgroupcategoryname String,deptno string, deptname String, " +
+        "projectcode string, projectjoindate String,projectenddate String, attendance double," +
         "utilization double,salary double)row format delimited fields terminated by ','"
     )
     //load data into test cube and hive table and validate query result
@@ -566,17 +566,17 @@ class TestLoadDataWithHiveSyntax extends QueryTest with BeforeAndAfterAll {
 
   test("test data loading when delimiter is '|' and data with header") {
     sql(
-      "CREATE table carbontable1 (empno int, empname String, designation String, doj String, " +
-        "workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, " +
-        "projectcode int, projectjoindate String, projectenddate String,attendance double," +
+      "CREATE table carbontable1 (empno string, empname String, designation String, doj String, " +
+        "workgroupcategory string, workgroupcategoryname String, deptno string, deptname String, " +
+        "projectcode string, projectjoindate String, projectenddate String,attendance double," +
         "utilization double,salary double) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES" +
         "('DICTIONARY_EXCLUDE'='empno,empname,designation,doj,workgroupcategory," +
         "workgroupcategoryname,deptno,deptname,projectcode,projectjoindate,projectenddate')"
     )
     sql(
-      "create table hivetable1 (empno int, empname String, designation string, doj String, " +
-        "workgroupcategory int, workgroupcategoryname String,deptno int, deptname String, " +
-        "projectcode int, projectjoindate String,projectenddate String, attendance double," +
+      "create table hivetable1 (empno string, empname String, designation string, doj String, " +
+        "workgroupcategory string, workgroupcategoryname String,deptno string, deptname String, " +
+        "projectcode string, projectjoindate String,projectenddate String, attendance double," +
         "utilization double,salary double)row format delimited fields terminated by ','"
     )
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7cfc3ec2/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
index bd822e4..67c2d4a 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
@@ -159,7 +159,7 @@ class TestDeleteTableNewDDL extends QueryTest with BeforeAndAfterAll {
       "CREATE table dropTableTest2 (ID int, date String, country String, name " +
       "String," +
       "phonetype String, serialname String, salary int) stored by 'org.apache.carbondata.format' " +
-      "TBLPROPERTIES('DICTIONARY_EXCLUDE'='salary')"
+      "TBLPROPERTIES('DICTIONARY_INCLUDE'='salary')"
     )
     sql(
       "LOAD DATA LOCAL INPATH '" + resource + "dataretention1.csv' INTO TABLE dropTableTest2 " +
@@ -170,7 +170,7 @@ class TestDeleteTableNewDDL extends QueryTest with BeforeAndAfterAll {
       "CREATE table dropTableTest2 (ID int, date String, country String, name " +
       "String," +
       "phonetype String, serialname String, salary decimal) stored by 'org.apache.carbondata.format' " +
-      "TBLPROPERTIES('DICTIONARY_EXCLUDE'='date')"
+      "TBLPROPERTIES('DICTIONARY_INCLUDE'='date')"
     )
     sql(
       "LOAD DATA LOCAL INPATH '" + resource + "dataretention1.csv' INTO TABLE dropTableTest2 " +
@@ -185,7 +185,7 @@ class TestDeleteTableNewDDL extends QueryTest with BeforeAndAfterAll {
       "CREATE table test.dropTableTest3 (ID int, date String, country String, name " +
       "String," +
       "phonetype String, serialname String, salary int) stored by 'org.apache.carbondata.format' " +
-      "TBLPROPERTIES('DICTIONARY_EXCLUDE'='salary')"
+      "TBLPROPERTIES('DICTIONARY_INCLUDE'='salary')"
     )
     sql(
       "LOAD DATA LOCAL INPATH '" + resource + "dataretention1.csv' INTO TABLE test.dropTableTest3 " +

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7cfc3ec2/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/HighCardinalityDataTypesTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/HighCardinalityDataTypesTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/HighCardinalityDataTypesTestCase.scala
index f349150..062c055 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/HighCardinalityDataTypesTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/HighCardinalityDataTypesTestCase.scala
@@ -42,7 +42,7 @@ class NO_DICTIONARY_COL_TestCase extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists NO_DICTIONARY_CARBON_7")
     
     sql(
-      "create table NO_DICTIONARY_HIVE_6(empno int,empname string,designation string,doj " +
+      "create table NO_DICTIONARY_HIVE_6(empno string,empname string,designation string,doj " +
         "Timestamp,workgroupcategory int, " +
         "workgroupcategoryname string,deptno int, deptname string, projectcode int, " +
         "projectjoindate Timestamp,projectenddate Timestamp,attendance int, "
@@ -55,12 +55,12 @@ class NO_DICTIONARY_COL_TestCase extends QueryTest with BeforeAndAfterAll {
         "NO_DICTIONARY_HIVE_6"
     );
     //For Carbon cube creation.
-    sql("CREATE TABLE NO_DICTIONARY_CARBON_6 (empno Int, " +
+    sql("CREATE TABLE NO_DICTIONARY_CARBON_6 (empno string, " +
       "doj Timestamp, workgroupcategory Int, empname String,workgroupcategoryname String, " +
       "deptno Int, deptname String, projectcode Int, projectjoindate Timestamp, " +
       "projectenddate Timestamp, designation String,attendance Int,utilization " +
       "Int,salary Int) STORED BY 'org.apache.carbondata.format' " +
-        "TBLPROPERTIES('DICTIONARY_EXCLUDE'='empno,empname,designation')"
+        "TBLPROPERTIES('DICTIONARY_EXCLUDE'='empname,designation')"
     )
     sql(
       "LOAD DATA LOCAL INPATH './src/test/resources/data.csv' INTO TABLE NO_DICTIONARY_CARBON_6 " +
@@ -78,7 +78,7 @@ class NO_DICTIONARY_COL_TestCase extends QueryTest with BeforeAndAfterAll {
       "LOAD DATA LOCAL INPATH './src/test/resources/data.csv' INTO TABLE NO_DICTIONARY_CARBON_7 " +
       "OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')"
     )
-    sql("CREATE TABLE filtertestTable (ID Int,date Timestamp, country String, " +
+    sql("CREATE TABLE filtertestTable (ID string,date Timestamp, country String, " +
       "name String, phonetype String, serialname String, salary Int) " +
         "STORED BY 'org.apache.carbondata.format' " +  "TBLPROPERTIES('DICTIONARY_EXCLUDE'='ID')"
     )
@@ -94,7 +94,7 @@ class NO_DICTIONARY_COL_TestCase extends QueryTest with BeforeAndAfterAll {
 
   test("Count (*) with filter") {
     checkAnswer(
-      sql("select count(*) from NO_DICTIONARY_CARBON_6 where empno=11"),
+      sql("select count(*) from NO_DICTIONARY_CARBON_6 where empno='11'"),
       Seq(Row(1))
     )
   }
@@ -104,7 +104,7 @@ class NO_DICTIONARY_COL_TestCase extends QueryTest with BeforeAndAfterAll {
 
     checkAnswer(
       sql("select empno from NO_DICTIONARY_CARBON_6"),
-      Seq(Row(11), Row(12), Row(13), Row(14), Row(15), Row(16), Row(17), Row(18), Row(19), Row(20))
+      Seq(Row("11"), Row("12"), Row("13"), Row("14"), Row("15"), Row("16"), Row("17"), Row("18"), Row("19"), Row("20"))
     )
 
 
@@ -132,16 +132,16 @@ class NO_DICTIONARY_COL_TestCase extends QueryTest with BeforeAndAfterAll {
 
 
     checkAnswer(
-      sql("select empno from NO_DICTIONARY_CARBON_6 where empno in(11,12,13)"),
-      Seq(Row(11), Row(12), Row(13))
+      sql("select empno from NO_DICTIONARY_CARBON_6 where empno in('11','12','13')"),
+      Seq(Row("11"), Row("12"), Row("13"))
     )
   }
   test("Detail Query with NO_DICTIONARY_COLUMN with not in filter Compare With HIVE RESULT") {
 
 
     checkAnswer(
-      sql("select empno from NO_DICTIONARY_CARBON_6 where empno not in(11,12,13,14,15,16,17)"),
-      Seq(Row(18), Row(19), Row(20))
+      sql("select empno from NO_DICTIONARY_CARBON_6 where empno not in('11','12','13','14','15','16','17')"),
+      Seq(Row("18"), Row("19"), Row("20"))
     )
   }
 
@@ -149,8 +149,8 @@ class NO_DICTIONARY_COL_TestCase extends QueryTest with BeforeAndAfterAll {
 
 
     checkAnswer(
-      sql("select empno from NO_DICTIONARY_CARBON_6 where empno=17"),
-      Seq(Row(17))
+      sql("select empno from NO_DICTIONARY_CARBON_6 where empno='17'"),
+      Seq(Row("17"))
     )
   }
   test("Detail Query with NO_DICTIONARY_COLUMN with IS NOT NULL filter") {
@@ -158,13 +158,13 @@ class NO_DICTIONARY_COL_TestCase extends QueryTest with BeforeAndAfterAll {
 
     checkAnswer(
       sql("select id  from filtertestTable where id is not null"),
-      Seq(Row(4), Row(6))
+      Seq(Row("4"),Row("6"),Row("abc"))
     )
   }
 test("filter with arithmetic expression") {
     checkAnswer(
       sql("select id from filtertestTable " + "where id+2 = 6"),
-      Seq(Row(4))
+      Seq(Row("4"))
     )
   }
   test("Detail Query with NO_DICTIONARY_COLUMN with equals multiple filter Compare With HIVE " +
@@ -173,8 +173,8 @@ test("filter with arithmetic expression") {
 
 
     checkAnswer(
-      sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_6 where empno=17"),
-      sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_6 where empno=17")
+      sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_6 where empno='17'"),
+      sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_6 where empno='17'")
     )
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7cfc3ec2/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/NoDictionaryColumnTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/NoDictionaryColumnTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/NoDictionaryColumnTestCase.scala
index b36da1f..9ed969a 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/NoDictionaryColumnTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/NoDictionaryColumnTestCase.scala
@@ -35,7 +35,7 @@ class NoDictionaryColumnTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS hiveTable")
     sql("DROP TABLE IF EXISTS carbonEmpty")
     sql("DROP TABLE IF EXISTS hiveEmpty")
-    sql("CREATE TABLE carbonTable (imei String, age Int, num BigInt) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='age,num')")
+    sql("CREATE TABLE carbonTable (imei String, age Int, num BigInt) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='age,num')")
     sql("LOAD DATA LOCAL INPATH './src/test/resources/datawithNegtiveNumber.csv' INTO TABLE carbonTable")
     sql("CREATE TABLE hiveTable (imei String, age Int, num BigInt) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','")
     sql("LOAD DATA LOCAL INPATH './src/test/resources/datawithNegeativewithoutHeader.csv' INTO TABLE hiveTable")

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7cfc3ec2/integration/spark/src/test/scala/org/apache/spark/sql/TestCarbonSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/spark/sql/TestCarbonSqlParser.scala b/integration/spark/src/test/scala/org/apache/spark/sql/TestCarbonSqlParser.scala
index e18e0ed..04e47bb 100644
--- a/integration/spark/src/test/scala/org/apache/spark/sql/TestCarbonSqlParser.scala
+++ b/integration/spark/src/test/scala/org/apache/spark/sql/TestCarbonSqlParser.scala
@@ -65,10 +65,10 @@ class TestCarbonSqlParser extends QueryTest {
   def loadAllFields: Seq[Field] = {
     var fields: Seq[Field] = Seq[Field]()
 
-    var col1 = Field("col1", Option("Int"), Option("col1"), None, null, Some("columnar"))
+    var col1 = Field("col1", Option("String"), Option("col1"), None, null, Some("columnar"))
     var col2 = Field("col2", Option("String"), Option("col2"), None, null, Some("columnar"))
     var col3 = Field("col3", Option("String"), Option("col3"), None, null, Some("columnar"))
-    var col4 = Field("col4", Option("Int"), Option("col4"), None, null, Some("columnar"))
+    var col4 = Field("col4", Option("int"), Option("col4"), None, null, Some("columnar"))
     var col5 = Field("col5", Option("String"), Option("col5"), None, null, Some("columnar"))
     var col6 = Field("col6", Option("String"), Option("col6"), None, null, Some("columnar"))
     var col7 = Field("col7", Option("String"), Option("col7"), None, null, Some("columnar"))
@@ -203,10 +203,11 @@ class TestCarbonSqlParser extends QueryTest {
     // testing col
 
     //All dimension fields should be available in dimensions list
-    assert(dimCols.size == 7)
-    assert(dimCols.lift(0).get.column.equalsIgnoreCase("col2"))
-    assert(dimCols.lift(1).get.column.equalsIgnoreCase("col3"))
-    assert(dimCols.lift(2).get.column.equalsIgnoreCase("col4"))
+    assert(dimCols.size == 8)
+    assert(dimCols.lift(0).get.column.equalsIgnoreCase("col1"))
+    assert(dimCols.lift(1).get.column.equalsIgnoreCase("col2"))
+    assert(dimCols.lift(2).get.column.equalsIgnoreCase("col3"))
+    assert(dimCols.lift(3).get.column.equalsIgnoreCase("col4"))
 
     //No dictionary column names will be available in noDictionary list
     assert(noDictionary.size == 1)
@@ -290,22 +291,22 @@ class TestCarbonSqlParser extends QueryTest {
     val msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
 
     //below dimension fields should be available in dimensions list
-    assert(dimCols.size == 6)
-    assert(dimCols.lift(0).get.column.equalsIgnoreCase("col2"))
-    assert(dimCols.lift(1).get.column.equalsIgnoreCase("col3"))
+    assert(dimCols.size == 7)
+    assert(dimCols.lift(0).get.column.equalsIgnoreCase("col1"))
+    assert(dimCols.lift(1).get.column.equalsIgnoreCase("col2"))
+    assert(dimCols.lift(2).get.column.equalsIgnoreCase("col3"))
 
     //below column names will be available in noDictionary list
     assert(noDictionary.size == 1)
     assert(noDictionary.lift(0).get.equalsIgnoreCase("col3"))
 
     //check msr
-    assert(msrCols.size == 2)
-    assert(msrCols.lift(0).get.column.equalsIgnoreCase("col1"))
-    assert(msrCols.lift(1).get.column.equalsIgnoreCase("col4"))
+    assert(msrCols.size == 1)
+    assert(msrCols.lift(0).get.column.equalsIgnoreCase("col4"))
   }
 
   test("Test-DimAndMsrColsWithNoDictionaryFields5") {
-    val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE -> "col4", CarbonCommonConstants.DICTIONARY_INCLUDE -> "col2")
+    val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE -> "col1", CarbonCommonConstants.DICTIONARY_INCLUDE -> "col2")
     val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
     val (dimCols, noDictionary) = stub
@@ -314,17 +315,17 @@ class TestCarbonSqlParser extends QueryTest {
 
     //below dimension fields should be available in dimensions list
     assert(dimCols.size == 7)
-    assert(dimCols.lift(0).get.column.equalsIgnoreCase("col2"))
-    assert(dimCols.lift(1).get.column.equalsIgnoreCase("col3"))
-    assert(dimCols.lift(2).get.column.equalsIgnoreCase("col4"))
+    assert(dimCols.lift(0).get.column.equalsIgnoreCase("col1"))
+    assert(dimCols.lift(1).get.column.equalsIgnoreCase("col2"))
+    assert(dimCols.lift(2).get.column.equalsIgnoreCase("col3"))
 
     //below column names will be available in noDictionary list
     assert(noDictionary.size == 1)
-    assert(noDictionary.lift(0).get.equalsIgnoreCase("col4"))
+    assert(noDictionary.lift(0).get.equalsIgnoreCase("col1"))
 
     //check msr
     assert(msrCols.size == 1)
-    assert(msrCols.lift(0).get.column.equalsIgnoreCase("col1"))
+    assert(msrCols.lift(0).get.column.equalsIgnoreCase("col4"))
   }
 
   test("Test-DimAndMsrColsWithNoDictionaryFields6") {
@@ -377,7 +378,7 @@ class TestCarbonSqlParser extends QueryTest {
   }
 
   test("Test-DimAndMsrColsWithNoDictionaryFields8") {
-    val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE-> "col2,col4", CarbonCommonConstants.DICTIONARY_INCLUDE -> "col3")
+    val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE-> "col2", CarbonCommonConstants.DICTIONARY_INCLUDE -> "col3")
     val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
     val (dimCols, noDictionary) = stub
@@ -386,29 +387,27 @@ class TestCarbonSqlParser extends QueryTest {
 
     //below dimension fields should be available in dimensions list
     assert(dimCols.size == 7)
-    assert(dimCols.lift(0).get.column.equalsIgnoreCase("col2"))
-    assert(dimCols.lift(1).get.column.equalsIgnoreCase("col3"))
-    assert(dimCols.lift(2).get.column.equalsIgnoreCase("col4"))
+    assert(dimCols.lift(0).get.column.equalsIgnoreCase("col1"))
+    assert(dimCols.lift(1).get.column.equalsIgnoreCase("col2"))
 
     //below column names will be available in noDictionary list
-    assert(noDictionary.size == 2)
+    assert(noDictionary.size == 1)
     assert(noDictionary.lift(0).get.equalsIgnoreCase("col2"))
-    assert(noDictionary.lift(1).get.equalsIgnoreCase("col4"))
 
     //check msr
     assert(msrCols.size == 1)
-    assert(msrCols.lift(0).get.column.equalsIgnoreCase("col1"))
+    assert(msrCols.lift(0).get.column.equalsIgnoreCase("col4"))
   }
 
   // Testing the extracting of measures
   test("Test-extractMsrColsFromFields") {
-    val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE -> "col2", CarbonCommonConstants.DICTIONARY_INCLUDE -> "col4")
+    val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE -> "col2", CarbonCommonConstants.DICTIONARY_INCLUDE -> "col1")
     val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
     val msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
 
     // testing col
-    assert(msrCols.lift(0).get.column.equalsIgnoreCase("col1"))
+    assert(msrCols.lift(0).get.column.equalsIgnoreCase("col4"))
 
   }