Posted to commits@carbondata.apache.org by gv...@apache.org on 2017/12/21 17:56:20 UTC

carbondata git commit: [CARBONDATA-1914][Dictionary Cache] Cache Access Rectification.

Repository: carbondata
Updated Branches:
  refs/heads/master 9659edccb -> f79b9ea32


[CARBONDATA-1914][Dictionary Cache] Cache Access Rectification.

This closes #1686


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/f79b9ea3
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/f79b9ea3
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/f79b9ea3

Branch: refs/heads/master
Commit: f79b9ea321ce0c9cb12214e0555f9425f5438c5a
Parents: 9659edc
Author: sounakr <so...@gmail.com>
Authored: Wed Dec 20 09:16:22 2017 +0530
Committer: Venkata Ramana G <ra...@huawei.com>
Committed: Thu Dec 21 23:25:55 2017 +0530

----------------------------------------------------------------------
 .../core/cache/dictionary/Dictionary.java       |   6 +
 .../cache/dictionary/ForwardDictionary.java     |  11 +
 .../cache/dictionary/ReverseDictionary.java     |  10 +
 .../IncrementalColumnDictionaryGenerator.java   |   4 +
 .../testsuite/datamap/DataMapWriterSuite.scala  | 132 +++----
 .../spark/util/DictionaryLRUCacheTestCase.scala | 376 +++++++++++++++++++
 .../loading/converter/FieldConverter.java       |   5 +
 .../impl/ComplexFieldConverterImpl.java         |   6 +
 .../impl/DictionaryFieldConverterImpl.java      |   7 +
 .../DirectDictionaryFieldConverterImpl.java     |   6 +
 .../impl/MeasureFieldConverterImpl.java         |   7 +
 .../impl/NonDictionaryFieldConverterImpl.java   |   3 +
 .../converter/impl/RowConverterImpl.java        |   4 +
 13 files changed, 514 insertions(+), 63 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/f79b9ea3/core/src/main/java/org/apache/carbondata/core/cache/dictionary/Dictionary.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/Dictionary.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/Dictionary.java
index 7302de2..232d5f5 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/Dictionary.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/Dictionary.java
@@ -108,4 +108,10 @@ public interface Dictionary {
    * This method will release the objects and set default value for primitive types
    */
   void clear();
+
+  /**
+   * This method returns the access count associated with the dictionary.
+   * @return the current access count
+   */
+  int getAccessCount();
 }

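The new accessor makes the cache's reference counting observable. A minimal usage sketch, not part
of this commit and composed only of calls that appear elsewhere in this diff ("identifier" stands
for a DictionaryColumnUniqueIdentifier built by the caller, and the surrounding method is assumed
to declare the cache's checked exceptions):

    // Fetch a dictionary through the LRU cache, verify the reference is held once, and release it.
    Cache<DictionaryColumnUniqueIdentifier, Dictionary> cache =
        CacheProvider.getInstance().createCache(CacheType.REVERSE_DICTIONARY);
    Dictionary dictionary = cache.get(identifier);   // get() increments the access count
    try {
      assert dictionary.getAccessCount() == 1;       // exactly one live reference
      // ... use the dictionary ...
    } finally {
      CarbonUtil.clearDictionaryCache(dictionary);   // release so the LRU cache can evict the entry
    }
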
http://git-wip-us.apache.org/repos/asf/carbondata/blob/f79b9ea3/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionary.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionary.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionary.java
index abc95e8..bb5bb57 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionary.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionary.java
@@ -161,4 +161,15 @@ public class ForwardDictionary implements Dictionary {
     columnDictionaryInfo
         .getIncrementalSurrogateKeyFromDictionary(byteValuesOfFilterMembers, surrogates);
   }
+
+  /**
+   * This method returns the access count associated with the dictionary.
+   * @return the current access count
+   */
+  @Override public int getAccessCount() {
+    if (null != columnDictionaryInfo) {
+      return columnDictionaryInfo.getAccessCount();
+    }
+    return 0;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f79b9ea3/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionary.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionary.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionary.java
index ff0e687..4672011 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionary.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionary.java
@@ -137,4 +137,14 @@ public class ReverseDictionary implements Dictionary {
     }
   }
 
+  /**
+   * This method returns the access count associated with the dictionary.
+   * @return the current access count
+   */
+  @Override public int getAccessCount() {
+    if (null != columnReverseDictionaryInfo) {
+      return columnReverseDictionaryInfo.getAccessCount();
+    }
+    return 0;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f79b9ea3/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
index 5eeeae4..e0feb04 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
@@ -151,6 +151,10 @@ public class IncrementalColumnDictionaryGenerator implements BiDictionary<Intege
             "\n create dictionary cache: " + dictCacheTime +
             "\n sort list, distinct and write: " + dictWriteTime +
             "\n write sort info: " + sortIndexWriteTime);
+
+    if (isDictExists) {
+      CarbonUtil.clearDictionaryCache(dictionary);
+    }
   }
 
   /**

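CarbonUtil.clearDictionaryCache is the release half of the pairing: when isDictExists is true the
generator holds a Dictionary instance obtained through the LRU cache, which incremented its access
count, and without this call the count never returns to zero, so the entry can never be evicted.
A sketch of the assumed helper behavior (not code from this commit) is a null-safe release of that
reference:

    public static void clearDictionaryCache(Dictionary dictionary) {
      // Release the reference taken when the dictionary was fetched from the LRU cache.
      if (null != dictionary) {
        dictionary.clear();
      }
    }
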
http://git-wip-us.apache.org/repos/asf/carbondata/blob/f79b9ea3/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/DataMapWriterSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/DataMapWriterSuite.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/DataMapWriterSuite.scala
index f73a202..04a5f9c 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/DataMapWriterSuite.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/DataMapWriterSuite.scala
@@ -68,9 +68,8 @@ class DataMapWriterSuite extends QueryTest with BeforeAndAfterAll {
   def buildTestData(numRows: Int): DataFrame = {
     import sqlContext.implicits._
     sqlContext.sparkContext.parallelize(1 to numRows)
-      .map(x => ("a", "b", x))
+      .map(x => ("a" + x, "b", x))
       .toDF("c1", "c2", "c3")
-      .sort("c3")
   }
 
   def dropTable(): Unit = {
@@ -84,74 +83,81 @@ class DataMapWriterSuite extends QueryTest with BeforeAndAfterAll {
 
   test("test write datamap 2 pages") {
     // register datamap writer
-      DataMapStoreManager.getInstance().createAndRegisterDataMap(
-        AbsoluteTableIdentifier.from(storeLocation + "/carbon1", "default", "carbon1"),
-        classOf[C2DataMapFactory].getName,
-        "test")
-
-      val df = buildTestData(33000)
-
-      // save dataframe to carbon file
-      df.write
-        .format("carbondata")
-        .option("tableName", "carbon1")
-        .mode(SaveMode.Overwrite)
-        .save()
-
-      assert(DataMapWriterSuite.callbackSeq.head.contains("block start"))
-      assert(DataMapWriterSuite.callbackSeq.last.contains("block end"))
-      assert(
-        DataMapWriterSuite.callbackSeq.slice(1, DataMapWriterSuite.callbackSeq.length - 1) == Seq(
-          "blocklet start 0",
-          "add page data: blocklet 0, page 0",
-          "add page data: blocklet 0, page 1",
-          "blocklet end: 0"
-        ))
-      DataMapWriterSuite.callbackSeq = Seq()
+    DataMapStoreManager.getInstance().createAndRegisterDataMap(
+      AbsoluteTableIdentifier.from(storeLocation + "/carbon1", "default", "carbon1"),
+      classOf[C2DataMapFactory].getName,
+      "test")
+
+    val df = buildTestData(33000)
+
+    // save dataframe to carbon file
+    df.write
+      .format("carbondata")
+      .option("tableName", "carbon1")
+      .option("tempCSV", "false")
+      .option("sort_columns","c1")
+      .mode(SaveMode.Overwrite)
+      .save()
+
+    assert(DataMapWriterSuite.callbackSeq.head.contains("block start"))
+    assert(DataMapWriterSuite.callbackSeq.last.contains("block end"))
+    assert(
+      DataMapWriterSuite.callbackSeq.slice(1, DataMapWriterSuite.callbackSeq.length - 1) == Seq(
+        "blocklet start 0",
+        "add page data: blocklet 0, page 0",
+        "add page data: blocklet 0, page 1",
+        "blocklet end: 0"
+      ))
+    DataMapWriterSuite.callbackSeq = Seq()
   }
 
   test("test write datamap 2 blocklet") {
     // register datamap writer
-      DataMapStoreManager.getInstance().createAndRegisterDataMap(
-        AbsoluteTableIdentifier.from(storeLocation + "/carbon2", "default", "carbon2"),
-        classOf[C2DataMapFactory].getName,
-        "test")
+    DataMapStoreManager.getInstance().createAndRegisterDataMap(
+      AbsoluteTableIdentifier.from(storeLocation + "/carbon2", "default", "carbon2"),
+      classOf[C2DataMapFactory].getName,
+      "test")
 
-      CarbonProperties.getInstance()
-        .addProperty("carbon.blockletgroup.size.in.mb", "1")
+    CarbonProperties.getInstance()
+      .addProperty("carbon.blockletgroup.size.in.mb", "1")
     CarbonProperties.getInstance()
       .addProperty("carbon.number.of.cores.while.loading",
-          CarbonCommonConstants.NUM_CORES_DEFAULT_VAL)
-
-      val df = buildTestData(300000)
-
-      // save dataframe to carbon file
-      df.write
-        .format("carbondata")
-        .option("tableName", "carbon2")
-        .mode(SaveMode.Overwrite)
-        .save()
-
-      assert(DataMapWriterSuite.callbackSeq.head.contains("block start"))
-      assert(DataMapWriterSuite.callbackSeq.last.contains("block end"))
-      assert(
-        DataMapWriterSuite.callbackSeq.slice(1, DataMapWriterSuite.callbackSeq.length - 1) == Seq(
-          "blocklet start 0",
-          "add page data: blocklet 0, page 0",
-          "add page data: blocklet 0, page 1",
-          "add page data: blocklet 0, page 2",
-          "add page data: blocklet 0, page 3",
-          "add page data: blocklet 0, page 4",
-          "add page data: blocklet 0, page 5",
-          "add page data: blocklet 0, page 6",
-          "add page data: blocklet 0, page 7",
-          "blocklet end: 0",
-          "blocklet start 1",
-          "add page data: blocklet 1, page 0",
-          "add page data: blocklet 1, page 1",
-          "blocklet end: 1"
-        ))
-      DataMapWriterSuite.callbackSeq = Seq()
+        CarbonCommonConstants.NUM_CORES_DEFAULT_VAL)
+
+    val df = buildTestData(300000)
+
+    // save dataframe to carbon file
+    df.write
+      .format("carbondata")
+      .option("tableName", "carbon2")
+      .option("tempCSV", "false")
+      .option("sort_columns","c1")
+      .option("SORT_SCOPE","GLOBAL_SORT")
+      .mode(SaveMode.Overwrite)
+      .save()
+
+    assert(DataMapWriterSuite.callbackSeq.head.contains("block start"))
+    assert(DataMapWriterSuite.callbackSeq.last.contains("block end"))
+    assert(
+      DataMapWriterSuite.callbackSeq.slice(1, DataMapWriterSuite.callbackSeq.length - 1) == Seq(
+        "blocklet start 0",
+        "add page data: blocklet 0, page 0",
+        "add page data: blocklet 0, page 1",
+        "add page data: blocklet 0, page 2",
+        "add page data: blocklet 0, page 3",
+        "blocklet end: 0",
+        "blocklet start 1",
+        "add page data: blocklet 1, page 0",
+        "add page data: blocklet 1, page 1",
+        "add page data: blocklet 1, page 2",
+        "add page data: blocklet 1, page 3",
+        "blocklet end: 1",
+        "blocklet start 2",
+        "add page data: blocklet 2, page 0",
+        "add page data: blocklet 2, page 1",
+        "blocklet end: 2"
+      ))
+    DataMapWriterSuite.callbackSeq = Seq()
   }
 
   override def afterAll {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f79b9ea3/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/DictionaryLRUCacheTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/DictionaryLRUCacheTestCase.scala b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/DictionaryLRUCacheTestCase.scala
new file mode 100644
index 0000000..245ee7c
--- /dev/null
+++ b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/DictionaryLRUCacheTestCase.scala
@@ -0,0 +1,376 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.carbondata.spark.util
+
+import java.io.File
+
+import scala.collection.JavaConverters._
+
+import org.apache.spark.sql.common.util.Spark2QueryTest
+import org.apache.spark.sql.hive.CarbonRelation
+import org.apache.spark.sql.test.Spark2TestQueryExecutor
+import org.apache.spark.sql.{CarbonEnv, SparkSession}
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.cache.{Cache, CacheProvider, CacheType}
+import org.apache.carbondata.core.cache.dictionary.{Dictionary, DictionaryColumnUniqueIdentifier}
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
+
+/**
+  * Test Case for Dictionary LRU Cache.
+  */
+class DictionaryLRUCacheTestCase extends Spark2QueryTest with BeforeAndAfterAll {
+  var spark : SparkSession = null
+  var path : String = null
+
+  def checkDictionaryAccessCount(databaseName: String, tableName: String): Unit = {
+    val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetastore
+      .lookupRelation(Option(databaseName), tableName)(Spark2TestQueryExecutor.spark)
+      .asInstanceOf[CarbonRelation].carbonTable
+    val absoluteTableIdentifier = carbonTable.getAbsoluteTableIdentifier
+
+    val dimensions = carbonTable.getAllDimensions.asScala.toList
+    dimensions.foreach { dim =>
+      val columnIdentifier = dim.getColumnIdentifier
+      // Check the dictionary cache access.
+      val identifier: DictionaryColumnUniqueIdentifier = new DictionaryColumnUniqueIdentifier(
+        absoluteTableIdentifier,
+        columnIdentifier,
+        columnIdentifier.getDataType)
+
+      val isDictExists: Boolean = CarbonUtil.isFileExistsForGivenColumn(identifier)
+      var dictionary: Dictionary = null
+      if (isDictExists) {
+        val dictCacheReverse: Cache[DictionaryColumnUniqueIdentifier, Dictionary]
+        = CacheProvider.getInstance().createCache(CacheType.REVERSE_DICTIONARY)
+        dictionary = dictCacheReverse.get(identifier)
+        assert(dictionary.getAccessCount == 1)
+        CarbonUtil.clearDictionaryCache(dictionary)
+
+        val dictCacheForward: Cache[DictionaryColumnUniqueIdentifier, Dictionary]
+        = CacheProvider.getInstance().createCache(CacheType.FORWARD_DICTIONARY)
+        dictionary = dictCacheForward.get(identifier)
+        assert(dictionary.getAccessCount == 1)
+        CarbonUtil.clearDictionaryCache(dictionary)
+      }
+    }
+  }
+
+
+  override def beforeAll {
+
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_MAX_DRIVER_LRU_CACHE_SIZE, "1")
+      .addProperty(CarbonCommonConstants.CARBON_MAX_EXECUTOR_LRU_CACHE_SIZE, "1")
+
+    path = s"$resourcesPath/restructure/data_2000.csv"
+
+    sql("drop table if exists carbon_new1")
+    sql("drop table if exists carbon_new2")
+    sql("drop table if exists carbon_new3")
+    sql("drop table if exists carbon_new4")
+    sql("drop table if exists carbon_new5")
+    sql("drop table if exists carbon_new6")
+    sql("drop table if exists carbon_new7")
+    sql("drop table if exists carbon_new8")
+    sql("drop table if exists carbon_new9")
+    sql("drop table if exists carbon_new10")
+  }
+
+  test("test for dictionary LRU Cache for Load Single Pass") {
+
+    sql(
+        "CREATE TABLE carbon_new1 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
+        "TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
+        "decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
+        "double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+        "('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
+        "Double_COLUMN2')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new1 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new1 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql(
+        "CREATE TABLE carbon_new2 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
+        "TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
+        "decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
+        "double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+        "('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
+        "Double_COLUMN2')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new2 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new2 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    checkDictionaryAccessCount("default", "carbon_new2")
+  }
+
+  test("test for dictionary LRU Cache for Load Non Single Pass") {
+
+    sql(
+        "CREATE TABLE carbon_new3 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
+        "TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
+        "decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
+        "double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+        "('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
+        "Double_COLUMN2')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new3 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new3 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql(
+        "CREATE TABLE carbon_new4 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
+        "TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
+        "decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
+        "double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+        "('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
+        "Double_COLUMN2')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new4 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new4 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    checkDictionaryAccessCount("default", "carbon_new4")
+  }
+
+  test("test for dictionary LRU Cache for Select On Table") {
+
+    sql(
+        "CREATE TABLE carbon_new5 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
+        "TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
+        "decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
+        "double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+        "('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
+        "Double_COLUMN2')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new5 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new5 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql("select * from carbon_new5")
+
+    checkDictionaryAccessCount("default", "carbon_new5")
+
+
+    sql(
+        "CREATE TABLE carbon_new6 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
+        "TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
+        "decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
+        "double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+        "('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
+        "Double_COLUMN2')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new6 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new6 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql("select * from carbon_new6")
+
+    checkDictionaryAccessCount("default", "carbon_new6")
+  }
+
+  test("test for dictionary LRU Cache for Select With Filter On Table") {
+
+    sql(
+        "CREATE TABLE carbon_new7 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
+        "TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
+        "decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
+        "double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+        "('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
+        "Double_COLUMN2')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new7 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new7 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql("select * from carbon_new7 where CUST_ID > 10")
+
+    checkDictionaryAccessCount("default", "carbon_new7")
+
+
+    sql(
+        "CREATE TABLE carbon_new8 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
+        "TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
+        "decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
+        "double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+        "('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
+        "Double_COLUMN2')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new8 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new8 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql("select * from carbon_new8 where CUST_ID > 100")
+
+    checkDictionaryAccessCount("default", "carbon_new8")
+  }
+
+  test("test for dictionary LRU Cache for Insert Into") {
+
+    sql(
+        "CREATE TABLE carbon_new9 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
+        "TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
+        "decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
+        "double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+        "('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
+        "Double_COLUMN2')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new9 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new9 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    sql("select * from carbon_new9 where CUST_ID > 10")
+
+    checkDictionaryAccessCount("default", "carbon_new9")
+
+
+    sql(
+        "CREATE TABLE carbon_new10 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
+        "TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
+        "decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
+        "double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
+        "('dictionary_include'='CUST_NAME')")
+
+    sql("insert into carbon_new10 select * from carbon_new9")
+
+    checkDictionaryAccessCount("default", "carbon_new10")
+
+    sql(
+        s"LOAD DATA INPATH '$path' INTO TABLE carbon_new10 OPTIONS" +
+        "('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
+        "'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
+        "BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
+        "INTEGER_COLUMN1')")
+
+    checkDictionaryAccessCount("default", "carbon_new10")
+
+  }
+
+
+
+  override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_MAX_DRIVER_LRU_CACHE_SIZE,
+        CarbonCommonConstants.CARBON_MAX_LRU_CACHE_SIZE_DEFAULT)
+      .addProperty(CarbonCommonConstants.CARBON_MAX_EXECUTOR_LRU_CACHE_SIZE,
+        CarbonCommonConstants.CARBON_MAX_LRU_CACHE_SIZE_DEFAULT)
+
+    sql("drop table if exists carbon_new1")
+    sql("drop table if exists carbon_new2")
+    sql("drop table if exists carbon_new3")
+    sql("drop table if exists carbon_new4")
+    sql("drop table if exists carbon_new5")
+    sql("drop table if exists carbon_new6")
+    sql("drop table if exists carbon_new7")
+    sql("drop table if exists carbon_new8")
+    sql("drop table if exists carbon_new9")
+    sql("drop table if exists carbon_new10")
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f79b9ea3/processing/src/main/java/org/apache/carbondata/processing/loading/converter/FieldConverter.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/FieldConverter.java b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/FieldConverter.java
index 8a3e2eb..1ce8f9a 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/FieldConverter.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/FieldConverter.java
@@ -33,4 +33,9 @@ public interface FieldConverter {
    * @throws CarbonDataLoadingException
    */
   void convert(CarbonRow row, BadRecordLogHolder logHolder) throws CarbonDataLoadingException;
+
+  /**
+   * This method clears all the dictionary caches acquired by this converter.
+   */
+  void clear();
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f79b9ea3/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/ComplexFieldConverterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/ComplexFieldConverterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/ComplexFieldConverterImpl.java
index 5ac832d..b26ef36 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/ComplexFieldConverterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/ComplexFieldConverterImpl.java
@@ -52,6 +52,12 @@ public class ComplexFieldConverterImpl extends AbstractDictionaryFieldConverterI
     }
   }
 
+  /**
+   * Method to clear out the dictionary caches. In this converter there is nothing to clear.
+   */
+  @Override public void clear() {
+  }
+
   @Override public void fillColumnCardinality(List<Integer> cardinality) {
     genericDataType.fillCardinality(cardinality);
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f79b9ea3/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/DictionaryFieldConverterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/DictionaryFieldConverterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/DictionaryFieldConverterImpl.java
index 4ac8850..1fb4086 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/DictionaryFieldConverterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/DictionaryFieldConverterImpl.java
@@ -123,6 +123,13 @@ public class DictionaryFieldConverterImpl extends AbstractDictionaryFieldConvert
     }
   }
 
+  /**
+   * Method to clear out the dictionary cache.
+   */
+  @Override public void clear() {
+    CarbonUtil.clearDictionaryCache(dictionary);
+  }
+
   @Override
   public void fillColumnCardinality(List<Integer> cardinality) {
     cardinality.add(dictionaryGenerator.size());

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f79b9ea3/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/DirectDictionaryFieldConverterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/DirectDictionaryFieldConverterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/DirectDictionaryFieldConverterImpl.java
index 7dcef81..b49cd90 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/DirectDictionaryFieldConverterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/DirectDictionaryFieldConverterImpl.java
@@ -89,6 +89,12 @@ public class DirectDictionaryFieldConverterImpl extends AbstractDictionaryFieldC
     }
   }
 
+  /**
+   * Method to clear the dictionary cache. In this converter there is nothing to clear.
+   */
+  @Override public void clear() {
+  }
+
   @Override
   public void fillColumnCardinality(List<Integer> cardinality) {
     cardinality.add(Integer.MAX_VALUE);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f79b9ea3/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/MeasureFieldConverterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/MeasureFieldConverterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/MeasureFieldConverterImpl.java
index 06f7589..2d70f03 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/MeasureFieldConverterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/MeasureFieldConverterImpl.java
@@ -98,4 +98,11 @@ public class MeasureFieldConverterImpl implements FieldConverter {
     }
 
   }
+
+  /**
+   * Method to clear the dictionary cache. MeasureFieldConverterImpl does not acquire any
+   * dictionary cache, so there is nothing to clear.
+   */
+  @Override public void clear() {
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f79b9ea3/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/NonDictionaryFieldConverterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/NonDictionaryFieldConverterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/NonDictionaryFieldConverterImpl.java
index e606cdb..8f15e2e 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/NonDictionaryFieldConverterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/NonDictionaryFieldConverterImpl.java
@@ -87,6 +87,9 @@ public class NonDictionaryFieldConverterImpl implements FieldConverter {
     }
   }
 
+  @Override public void clear() {
+  }
+
   private void updateWithNullValue(CarbonRow row) {
     if (dataType == DataTypes.STRING) {
       row.update(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, index);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f79b9ea3/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/RowConverterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/RowConverterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/RowConverterImpl.java
index 959e4f5..7fc8ed3 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/RowConverterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/RowConverterImpl.java
@@ -183,6 +183,10 @@ public class RowConverterImpl implements RowConverter {
 
   @Override
   public void finish() {
+    // Release the dictionary cache references acquired by the field converters.
+    for (int i = 0; i < fieldConverters.length; i ++) {
+      fieldConverters[i].clear();
+    }
     // close dictionary client when finish write
     if (configuration.getUseOnePass()) {
       for (DictionaryClient client : dictClients) {