Posted to commits@carbondata.apache.org by ra...@apache.org on 2018/08/09 18:26:18 UTC

[34/47] carbondata git commit: [CARBONDATA-2585] Fix local dictionary for both table level and system level property based on priority

[CARBONDATA-2585] Fix local dictionary for both table level and system level property based on priority

Added a system-level property for local dictionary support.
The property 'carbon.local.dictionary.enable' can be set to true/false to enable/disable local dictionary generation at the system level.
If the table-level property LOCAL_DICTIONARY_ENABLE is configured, local dictionary generation is decided by that table-level value, irrespective of the system-level property.
If it is not configured, the system-level property 'carbon.local.dictionary.enable' is used for local dictionary generation.

By default, both 'carbon.local.dictionary.enable' and LOCAL_DICTIONARY_ENABLE are false, so local dictionary generation is disabled.
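
The resolution order can be summarised with a small Scala sketch. It is illustrative only: the helper name resolveLocalDictEnable is hypothetical, while the constants and the CarbonProperties.getProperty fallback mirror the parser change in this commit.

  import scala.collection.mutable

  import org.apache.carbondata.core.constants.CarbonCommonConstants
  import org.apache.carbondata.core.util.CarbonProperties

  // The table-level LOCAL_DICTIONARY_ENABLE wins when present; otherwise fall back to the
  // system-level carbon.local.dictionary.enable, and finally to the default ("false").
  def resolveLocalDictEnable(tableProperties: mutable.Map[String, String]): String = {
    tableProperties.getOrElse(
      CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE,
      CarbonProperties.getInstance()
        .getProperty(CarbonCommonConstants.LOCAL_DICTIONARY_SYSTEM_ENABLE,
          CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE_DEFAULT))
  }

At table level the same switch is exercised through tblproperties('local_dictionary_enable'='true'), as the new tests in this patch do.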

This closes #2605


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/e1167ab2
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/e1167ab2
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/e1167ab2

Branch: refs/heads/branch-1.4
Commit: e1167ab2692a922fc3d56b721f34205868c31c0a
Parents: a363995
Author: akashrn5 <ak...@gmail.com>
Authored: Thu Aug 2 20:20:48 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Thu Aug 9 23:51:17 2018 +0530

----------------------------------------------------------------------
 .../core/constants/CarbonCommonConstants.java   |  5 ++
 .../carbondata/core/util/CarbonProperties.java  |  9 ++
 .../LocalDictionarySupportAlterTableTest.scala  | 53 +++++++++++
 .../LocalDictionarySupportCreateTableTest.scala | 35 +++++++-
 .../carbondata/spark/util/CarbonScalaUtil.scala | 74 ++++++++++++++-
 .../spark/sql/catalyst/CarbonDDLSqlParser.scala | 95 ++++----------------
 .../command/carbonTableSchemaCommon.scala       | 55 ++++++++++--
 .../scala/org/apache/spark/sql/CarbonEnv.scala  |  3 +-
 .../sql/parser/CarbonSparkSqlParserUtil.scala   |  8 +-
 9 files changed, 243 insertions(+), 94 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/e1167ab2/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index e480007..f2b9308 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -932,6 +932,11 @@ public final class CarbonCommonConstants {
   public static final String LOCAL_DICTIONARY_ENABLE_DEFAULT = "false";
 
   /**
+   * System property to enable or disable local dictionary generation
+   */
+  public static final String LOCAL_DICTIONARY_SYSTEM_ENABLE = "carbon.local.dictionary.enable";
+
+  /**
    * Threshold value for local dictionary
    */
   public static final String LOCAL_DICTIONARY_THRESHOLD = "local_dictionary_threshold";

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e1167ab2/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
index 8a91a43..c3a4934 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
@@ -875,6 +875,15 @@ public final class CarbonProperties {
   }
 
   /**
+   * This method will be used to add a new property which need not be serialized
+   *
+   * @param key
+   */
+  public void addNonSerializableProperty(String key, String value) {
+    carbonProperties.setProperty(key, value);
+  }
+
+  /**
    * Remove the specified key in property
    */
   public CarbonProperties removeProperty(String key) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e1167ab2/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportAlterTableTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportAlterTableTest.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportAlterTableTest.scala
index 24af99e..38ecde8 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportAlterTableTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportAlterTableTest.scala
@@ -21,6 +21,8 @@ import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
 
 class LocalDictionarySupportAlterTableTest extends QueryTest with BeforeAndAfterAll{
 
@@ -1405,7 +1407,58 @@ class LocalDictionarySupportAlterTableTest extends QueryTest with BeforeAndAfter
     }
   }
 
+  test("test alter table add column system level property and table level property") {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.LOCAL_DICTIONARY_SYSTEM_ENABLE, "false")
+    sql("drop table if exists local1")
+    sql(
+      """
+        | CREATE TABLE local1(id int, name string, city string, age int)
+        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='true',
+        | 'local_dictionary_threshold'='20000','local_dictionary_include'='city','no_inverted_index'='name')
+      """.stripMargin)
+    sql("alter table local1 add columns (alt string) tblproperties('local_dictionary_include'='alt')")
+    val descLoc = sql("describe formatted local1").collect
+    descLoc.find(_.get(0).toString.contains("Local Dictionary Threshold")) match {
+      case Some(row) => assert(row.get(1).toString.contains("20000"))
+      case None => assert(false)
+    }
+    descLoc.find(_.get(0).toString.contains("Local Dictionary Enabled")) match {
+      case Some(row) => assert(row.get(1).toString.contains("true"))
+      case None => assert(false)
+    }
+    descLoc.find(_.get(0).toString.contains("Local Dictionary Include")) match {
+      case Some(row) => assert(row.get(1).toString.contains("city,alt"))
+      case None => assert(false)
+    }
+  }
+
+  test("test alter table add column system level property") {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.LOCAL_DICTIONARY_SYSTEM_ENABLE, "false")
+    sql("drop table if exists local1")
+    sql(
+      """
+        | CREATE TABLE local1(id int, name string, city string, age int)
+        | STORED BY 'org.apache.carbondata.format' tblproperties
+        | ('local_dictionary_threshold'='20000','local_dictionary_include'='city',
+        | 'no_inverted_index'='name')
+      """.stripMargin)
+    // no exception is thrown because validation is skipped: the table-level local dictionary
+    // property is not configured and the system-level property is set to false
+    sql(
+      "alter table local1 add columns (alt int)")
+    val descLoc = sql("describe formatted local1").collect
+    descLoc.find(_.get(0).toString.contains("Local Dictionary Enable")) match {
+      case Some(row) => assert(row.get(1).toString.contains("false"))
+      case None => assert(false)
+    }
+  }
+
   override protected def afterAll(): Unit = {
     sql("DROP TABLE IF EXISTS LOCAL1")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.LOCAL_DICTIONARY_SYSTEM_ENABLE,
+        CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE_DEFAULT)
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e1167ab2/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala
index a02d3ef..6162cd8 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala
@@ -21,6 +21,8 @@ import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
 
 class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfterAll {
 
@@ -67,7 +69,6 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
 
   test("test local dictionary custom configurations for local dict columns _002") {
     sql("drop table if exists local1")
-
     intercept[MalformedCarbonCommandException] {
       sql(
         """
@@ -2426,7 +2427,39 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
     }
   }
 
+
+  test("test local dictionary for system level configuration") {
+    sql("drop table if exists local1")
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOCAL_DICTIONARY_SYSTEM_ENABLE, "false")
+    // should not throw exception as system level it is false and table level is not configured
+      sql(
+        """
+          | CREATE TABLE local1(id int, name string, city string, age int)
+          | STORED BY 'org.apache.carbondata.format'
+          | tblproperties('local_dictionary_include'='name,name')
+        """.stripMargin)
+  }
+
+  test("test local dictionary for system level configuration and table level priority") {
+    sql("drop table if exists local1")
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOCAL_DICTIONARY_SYSTEM_ENABLE, "false")
+    // exception is thrown because the table-level property is set to true and takes priority over the system-level value
+    intercept[MalformedCarbonCommandException] {
+      sql(
+        """
+        | CREATE TABLE local1(id int, name string, city string, age int)
+        | STORED BY 'org.apache.carbondata.format'
+        | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='name,name')
+      """.
+          stripMargin)
+      }
+  }
+
+
   override protected def afterAll(): Unit = {
     sql("DROP TABLE IF EXISTS LOCAL1")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.LOCAL_DICTIONARY_SYSTEM_ENABLE,
+        CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE_DEFAULT)
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e1167ab2/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
index 28cd7ef..9e76021 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
@@ -31,7 +31,7 @@ import org.apache.spark.SparkException
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.catalog.CatalogTablePartition
 import org.apache.spark.sql.catalyst.util.DateTimeUtils
-import org.apache.spark.sql.execution.command.{DataTypeInfo, UpdateTableModel}
+import org.apache.spark.sql.execution.command.{Field, UpdateTableModel}
 import org.apache.spark.sql.types._
 import org.apache.spark.util.CarbonReflectionUtils
 
@@ -641,6 +641,78 @@ object CarbonScalaUtil {
     }
   }
 
+  /**
+   * This method validates all the child columns of complex column recursively to check whether
+   * any of the child column is of string dataType or not
+   *
+   * @param field
+   */
+  def validateChildColumnsRecursively(field: Field): Boolean = {
+    if (field.children.isDefined && null != field.children.get) {
+      field.children.get.exists { childColumn =>
+        if (childColumn.children.isDefined && null != childColumn.children.get) {
+          validateChildColumnsRecursively(childColumn)
+        } else {
+          childColumn.dataType.get.equalsIgnoreCase("string")
+        }
+      }
+    } else {
+      false
+    }
+  }
+
+  /**
+   * This method validates the local dictionary configured columns
+   *
+   * @param fields
+   * @param tableProperties
+   */
+  def validateLocalConfiguredDictionaryColumns(fields: Seq[Field],
+      tableProperties: mutable.Map[String, String], localDictColumns: Seq[String]): Unit = {
+    var dictIncludeColumns: Seq[String] = Seq[String]()
+
+    // validate the local dict columns
+    CarbonScalaUtil.validateLocalDictionaryColumns(tableProperties, localDictColumns)
+    // check if the column specified exists in table schema
+    localDictColumns.foreach { distCol =>
+      if (!fields.exists(x => x.column.equalsIgnoreCase(distCol.trim))) {
+        val errormsg = "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: " + distCol.trim +
+                       " does not exist in table. Please check the DDL."
+        throw new MalformedCarbonCommandException(errormsg)
+      }
+    }
+
+    // check if column is other than STRING or VARCHAR datatype
+    localDictColumns.foreach { dictColm =>
+      if (fields
+        .exists(x => x.column.equalsIgnoreCase(dictColm) &&
+                     !x.dataType.get.equalsIgnoreCase("STRING") &&
+                     !x.dataType.get.equalsIgnoreCase("VARCHAR") &&
+                     !x.dataType.get.equalsIgnoreCase("STRUCT") &&
+                     !x.dataType.get.equalsIgnoreCase("ARRAY"))) {
+        val errormsg = "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: " +
+                       dictColm.trim +
+                       " is not a string/complex/varchar datatype column. LOCAL_DICTIONARY_COLUMN" +
+                       " should be no dictionary string/complex/varchar datatype column." +
+                       "Please check the DDL."
+        throw new MalformedCarbonCommandException(errormsg)
+      }
+    }
+
+    // Validate whether any of the child columns of complex dataType column is a string column
+    localDictColumns.foreach { dictColm =>
+      if (fields
+        .exists(x => x.column.equalsIgnoreCase(dictColm) && x.children.isDefined &&
+                     null != x.children.get &&
+                     !validateChildColumnsRecursively(x))) {
+        val errMsg =
+          s"None of the child columns of complex dataType column $dictColm specified in " +
+          "local_dictionary_include are not of string dataType."
+        throw new MalformedCarbonCommandException(errMsg)
+      }
+    }
+  }
+
   def isStringDataType(dataType: DataType): Boolean = {
     dataType == StringType
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e1167ab2/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
index bb68ec5..12999d0 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
@@ -306,9 +306,12 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
           CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE_DEFAULT)
       }
     } else if (!isAlterFlow) {
-      // if LOCAL_DICTIONARY_ENABLE is not defined, consider the default value which is true
-      tableProperties.put(CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE,
-        CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE_DEFAULT)
+      // if LOCAL_DICTIONARY_ENABLE is not defined, try to get from system level property
+      tableProperties
+        .put(CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE,
+          CarbonProperties.getInstance()
+            .getProperty(CarbonCommonConstants.LOCAL_DICTIONARY_SYSTEM_ENABLE,
+              CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE_DEFAULT))
     }
 
     // validate the local dictionary threshold property if defined
@@ -328,9 +331,9 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
     // is enabled, else it is not validated
     // if it is preaggregate flow no need to validate anything, as all the properties will be
     // inherited from parent table
-    if (!(tableProperties.get(CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE).isDefined &&
+    if ((tableProperties.get(CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE).isDefined &&
           tableProperties(CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE).trim
-            .equalsIgnoreCase("false")) && !isPreAggFlow || isAlterFlow) {
+            .equalsIgnoreCase("true")) && !isPreAggFlow) {
       var localDictIncludeColumns: Seq[String] = Seq[String]()
       var localDictExcludeColumns: Seq[String] = Seq[String]()
       val isLocalDictIncludeDefined = tableProperties
@@ -343,13 +346,19 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
         localDictIncludeColumns =
           tableProperties(CarbonCommonConstants.LOCAL_DICTIONARY_INCLUDE).split(",").map(_.trim)
         // validate all the local dictionary include columns
-        validateLocalDictionaryColumns(fields, tableProperties, localDictIncludeColumns)
+        CarbonScalaUtil
+          .validateLocalConfiguredDictionaryColumns(fields,
+            tableProperties,
+            localDictIncludeColumns)
       }
       if (isLocalDictExcludeDefined) {
         localDictExcludeColumns =
           tableProperties(CarbonCommonConstants.LOCAL_DICTIONARY_EXCLUDE).split(",").map(_.trim)
         // validate all the local dictionary exclude columns
-        validateLocalDictionaryColumns(fields, tableProperties, localDictExcludeColumns)
+        CarbonScalaUtil
+          .validateLocalConfiguredDictionaryColumns(fields,
+            tableProperties,
+            localDictExcludeColumns)
       }
 
       // validate if both local dictionary include and exclude contains same column
@@ -435,78 +444,6 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
   }
 
   /**
-   * This method validates all the child columns of complex column recursively to check whether
-   * any of the child column is of string dataType or not
-   *
-   * @param field
-   */
-  def validateChildColumnsRecursively(field: Field): Boolean = {
-    if (field.children.isDefined && null != field.children.get) {
-      field.children.get.exists { childColumn =>
-        if (childColumn.children.isDefined && null != childColumn.children.get) {
-          validateChildColumnsRecursively(childColumn)
-        } else {
-          childColumn.dataType.get.equalsIgnoreCase("string")
-        }
-      }
-    } else {
-      false
-    }
-  }
-
-  /**
-   * This method validates the local dictionary configured columns
-   *
-   * @param fields
-   * @param tableProperties
-   */
-  private def validateLocalDictionaryColumns(fields: Seq[Field],
-      tableProperties: Map[String, String], localDictColumns: Seq[String]): Unit = {
-    var dictIncludeColumns: Seq[String] = Seq[String]()
-
-    // validate the local dict columns
-    CarbonScalaUtil.validateLocalDictionaryColumns(tableProperties, localDictColumns)
-    // check if the column specified exists in table schema
-    localDictColumns.foreach { distCol =>
-      if (!fields.exists(x => x.column.equalsIgnoreCase(distCol.trim))) {
-        val errormsg = "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: " + distCol.trim +
-                       " does not exist in table. Please check the DDL."
-        throw new MalformedCarbonCommandException(errormsg)
-      }
-    }
-
-    // check if column is other than STRING or VARCHAR datatype
-    localDictColumns.foreach { dictColm =>
-      if (fields
-        .exists(x => x.column.equalsIgnoreCase(dictColm) &&
-                     !x.dataType.get.equalsIgnoreCase("STRING") &&
-                     !x.dataType.get.equalsIgnoreCase("VARCHAR") &&
-                     !x.dataType.get.equalsIgnoreCase("STRUCT") &&
-                     !x.dataType.get.equalsIgnoreCase("ARRAY"))) {
-        val errormsg = "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: " +
-                       dictColm.trim +
-                       " is not a string/complex/varchar datatype column. LOCAL_DICTIONARY_COLUMN" +
-                       " should be no dictionary string/complex/varchar datatype column." +
-                       "Please check the DDL."
-        throw new MalformedCarbonCommandException(errormsg)
-      }
-    }
-
-    // Validate whether any of the child columns of complex dataType column is a string column
-    localDictColumns.foreach { dictColm =>
-      if (fields
-        .exists(x => x.column.equalsIgnoreCase(dictColm) && x.children.isDefined &&
-                     null != x.children.get &&
-                     !validateChildColumnsRecursively(x))) {
-        val errMsg =
-          s"None of the child columns of complex dataType column $dictColm specified in " +
-          "local_dictionary_include are not of string dataType."
-        throw new MalformedCarbonCommandException(errMsg)
-      }
-    }
-  }
-
-  /**
    * This method validates the long string columns, will check:
    * 1.the column in tblproperty long_string_columns must be in table fields.
    * 2.the column datatype in tblproperty long_string_columns should be string.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e1167ab2/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
index 4a99ac7..1b48c08 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
@@ -39,10 +39,8 @@ import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
 import org.apache.carbondata.core.metadata.datatype.{DataType, DataTypes, DecimalType}
 import org.apache.carbondata.core.metadata.encoder.Encoding
 import org.apache.carbondata.core.metadata.schema._
-import org.apache.carbondata.core.metadata.schema.table.{CarbonTable, RelationIdentifier,
-  TableInfo, TableSchema}
-import org.apache.carbondata.core.metadata.schema.table.column.{ColumnSchema,
-  ParentColumnTableRelation}
+import org.apache.carbondata.core.metadata.schema.table.{CarbonTable, RelationIdentifier, TableInfo, TableSchema}
+import org.apache.carbondata.core.metadata.schema.table.column.{ColumnSchema, ParentColumnTableRelation}
 import org.apache.carbondata.core.service.impl.ColumnUniqueIdGenerator
 import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentUpdateStatusManager}
 import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil, DataTypeUtil}
@@ -50,7 +48,7 @@ import org.apache.carbondata.processing.loading.FailureCauses
 import org.apache.carbondata.processing.loading.model.CarbonLoadModel
 import org.apache.carbondata.processing.merger.CompactionType
 import org.apache.carbondata.spark.CarbonSparkFactory
-import org.apache.carbondata.spark.util.DataTypeConverterUtil
+import org.apache.carbondata.spark.util.{CarbonScalaUtil, DataTypeConverterUtil}
 
 case class TableModel(
     ifNotExistsSet: Boolean,
@@ -378,19 +376,58 @@ class AlterTableColumnSchemaGenerator(
       }
     }
 
+    val isLocalDictEnabledForMainTable = tableSchema.getTableProperties
+      .get(CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE)
+
+    val alterMutableTblProperties: scala.collection.mutable.Map[String, String] = mutable
+      .Map(alterTableModel.tableProperties.toSeq: _*)
+
+    // if local dictionary is enabled, then validate include and exclude columns if defined
+    if (null != isLocalDictEnabledForMainTable && isLocalDictEnabledForMainTable.toBoolean) {
+      var localDictIncludeColumns: Seq[String] = Seq[String]()
+      var localDictExcludeColumns: Seq[String] = Seq[String]()
+      // validate local dictionary include columns if defined
+      if (alterTableModel.tableProperties.get(CarbonCommonConstants.LOCAL_DICTIONARY_INCLUDE)
+        .isDefined) {
+        localDictIncludeColumns =
+          alterTableModel.tableProperties(CarbonCommonConstants.LOCAL_DICTIONARY_INCLUDE).split(",")
+            .map(_.trim)
+        CarbonScalaUtil
+          .validateLocalDictionaryColumns(alterMutableTblProperties, localDictIncludeColumns)
+        CarbonScalaUtil
+          .validateLocalConfiguredDictionaryColumns(
+            alterTableModel.dimCols ++ alterTableModel.msrCols,
+            alterMutableTblProperties,
+            localDictIncludeColumns)
+      }
+
+      // validate local dictionary exclude columns if defined
+      if (alterTableModel.tableProperties.get(CarbonCommonConstants.LOCAL_DICTIONARY_EXCLUDE)
+        .isDefined) {
+        localDictExcludeColumns =
+          alterTableModel.tableProperties(CarbonCommonConstants.LOCAL_DICTIONARY_EXCLUDE).split(",")
+            .map(_.trim)
+        CarbonScalaUtil
+          .validateLocalDictionaryColumns(alterMutableTblProperties, localDictExcludeColumns)
+        CarbonScalaUtil
+          .validateLocalConfiguredDictionaryColumns(
+            alterTableModel.dimCols ++ alterTableModel.msrCols,
+            alterMutableTblProperties,
+            localDictExcludeColumns)
+      }
+
+      // validate if both local dictionary include and exclude contains same column
+      CarbonScalaUtil.validateDuplicateLocalDictIncludeExcludeColmns(alterMutableTblProperties)
 
-    if (alterTableModel.tableProperties != null) {
       CarbonUtil
         .setLocalDictColumnsToWrapperSchema(newCols.asJava,
           alterTableModel.tableProperties.asJava,
-          tableSchema.getTableProperties.get(CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE))
+          isLocalDictEnabledForMainTable)
     }
 
     val includeExcludeColOfMainTable = getLocalDictColumnList(tableSchema.getTableProperties
       .asScala,
       columnsWithoutNewCols)
-    val alterMutableTblProperties: scala.collection.mutable.Map[String, String] = mutable
-      .Map(alterTableModel.tableProperties.toSeq: _*)
     val includeExcludeColOfAlterTable = getLocalDictColumnList(alterMutableTblProperties,
       newCols.to[mutable.ListBuffer])
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e1167ab2/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
index 074568d..7f26888 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
@@ -116,7 +116,8 @@ class CarbonEnv {
 
           CarbonMetaStoreFactory.createCarbonMetaStore(sparkSession.conf)
         }
-        CarbonProperties.getInstance.addProperty(CarbonCommonConstants.IS_DRIVER_INSTANCE, "true")
+        CarbonProperties.getInstance
+          .addNonSerializableProperty(CarbonCommonConstants.IS_DRIVER_INSTANCE, "true")
         initialized = true
       }
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e1167ab2/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
index 9c0a099..9752535 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
@@ -27,8 +27,7 @@ import org.apache.spark.sql.catalyst.parser.ParserUtils.operationNotAllowed
 import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution.command.{PartitionerField, TableModel, TableNewProcessor}
-import org.apache.spark.sql.execution.command.table.{CarbonCreateTableAsSelectCommand,
-CarbonCreateTableCommand}
+import org.apache.spark.sql.execution.command.table.{CarbonCreateTableAsSelectCommand, CarbonCreateTableCommand}
 import org.apache.spark.sql.types.StructField
 
 import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
@@ -37,6 +36,7 @@ import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
 import org.apache.carbondata.core.metadata.datatype.DataTypes
 import org.apache.carbondata.core.metadata.schema.SchemaReader
+import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.core.util.path.CarbonTablePath
 import org.apache.carbondata.spark.CarbonOption
 import org.apache.carbondata.spark.util.{CarbonScalaUtil, CommonUtil}
@@ -164,7 +164,9 @@ object CarbonSparkSqlParserUtil {
       if (null == isLocalDic_enabled) {
         table.getFactTable.getTableProperties
           .put(CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE,
-            CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE_DEFAULT)
+            CarbonProperties.getInstance()
+              .getProperty(CarbonCommonConstants.LOCAL_DICTIONARY_SYSTEM_ENABLE,
+                CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE_DEFAULT))
       }
       isLocalDic_enabled = table.getFactTable.getTableProperties
         .get(CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE)